<?xml version="1.0" encoding="UTF-8"?>
<!-- 
RSS generated by JIRA (9.7.1#970001-sha1:2222b88b221c4928ef0de3161136cc90c8356a66) at Thu Feb 08 08:55:19 UTC 2024

It is possible to restrict the fields that are returned in this document by specifying the 'field' parameter in your request.
For example, to request only the issue key and summary append 'field=key&field=summary' to the URL of your request.
-->
<rss version="0.92">
<channel>
    <title>MongoDB Jira</title>
    <link>https://jira.mongodb.org</link>
    <description>This file is an XML representation of an issue</description>
    <language>en-us</language>
    <build-info>
        <version>9.7.1</version>
        <build-number>970001</build-number>
        <build-date>13-04-2023</build-date>
    </build-info>


<item>
            <title>[JAVA-1716] java.lang.IllegalStateException: open</title>
                <link>https://jira.mongodb.org/browse/JAVA-1716</link>
                <project id="10006" key="JAVA">Java Driver</project>
                    <description>&lt;p&gt;I am trying to integrate MongoDB with Apache Spark to process data.&lt;/p&gt;

&lt;p&gt;When trying to execute my program with this command (../spark-1.3.0-bin-hadoop2.4/bin/spark-submit --master spark://luis-VirtualBox:7077 --jars $(echo /home/luis/mongo-spark/lib/*.jar | tr &apos; &apos; &apos;,&apos;) --class JavaWordCount target/scala-2.10/mongo-spark_2.10-1.0.jar mydb.testCollection mydb.outputTest7) I get the following exception:&lt;/p&gt;

&lt;p&gt;15/03/23 17:05:34 WARN TaskSetManager: Lost task 0.1 in stage 0.0 (TID 4, 10.0.2.15): java.lang.IllegalStateException: open&lt;br/&gt;
	at org.bson.util.Assertions.isTrue(Assertions.java:36)&lt;br/&gt;
	at com.mongodb.DBTCPConnector.getPrimaryPort(DBTCPConnector.java:406)&lt;br/&gt;
	at com.mongodb.DBCollectionImpl.insert(DBCollectionImpl.java:184)&lt;br/&gt;
	at com.mongodb.DBCollectionImpl.insert(DBCollectionImpl.java:167)&lt;br/&gt;
	at com.mongodb.DBCollection.insert(DBCollection.java:161)&lt;br/&gt;
	at com.mongodb.DBCollection.insert(DBCollection.java:107)&lt;br/&gt;
	at com.mongodb.DBCollection.save(DBCollection.java:1049)&lt;br/&gt;
	at com.mongodb.DBCollection.save(DBCollection.java:1014)&lt;br/&gt;
	at com.mongodb.hadoop.output.MongoRecordWriter.write(MongoRecordWriter.java:105)&lt;br/&gt;
	at org.apache.spark.rdd.PairRDDFunctions$$anonfun$12.apply(PairRDDFunctions.scala:1000)&lt;br/&gt;
	at org.apache.spark.rdd.PairRDDFunctions$$anonfun$12.apply(PairRDDFunctions.scala:979)&lt;br/&gt;
	at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:61)&lt;br/&gt;
	at org.apache.spark.scheduler.Task.run(Task.scala:64)&lt;br/&gt;
	at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:203)&lt;br/&gt;
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)&lt;br/&gt;
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)&lt;br/&gt;
	at java.lang.Thread.run(Thread.java:745)&lt;/p&gt;

&lt;p&gt;I have read in some places that it is caused by a close connection, but I don&apos;t close it in any part of the code.&lt;/p&gt;

&lt;p&gt;Thank you in advance.&lt;/p&gt;</description>
                <environment>Ubuntu Linux 14.04, JDK 7</environment>
        <key id="191359">JAVA-1716</key>
            <summary>java.lang.IllegalStateException: open</summary>
                <type id="3" iconUrl="https://jira.mongodb.org/secure/viewavatar?size=xsmall&amp;avatarId=14718&amp;avatarType=issuetype">Task</type>
                                            <priority id="3" iconUrl="https://jira.mongodb.org/images/icons/priorities/major.svg">Major - P3</priority>
                        <status id="6" iconUrl="https://jira.mongodb.org/images/icons/statuses/closed.png" description="The issue is considered finished, the resolution is correct. Issues which are closed can be reopened.">Closed</status>
                    <statusCategory id="3" key="done" colorName="success"/>
                                    <resolution id="9">Done</resolution>
                                        <assignee username="-1">Unassigned</assignee>
                                    <reporter username="ltrejo">Luis Rodr&#237;guez</reporter>
                        <labels>
                            <label>apache-Spark</label>
                            <label>java</label>
                    </labels>
                <created>Mon, 23 Mar 2015 17:45:38 +0000</created>
                <updated>Wed, 11 Sep 2019 19:10:08 +0000</updated>
                            <resolved>Tue, 14 Apr 2015 11:58:18 +0000</resolved>
                                    <version>3.0.1</version>
                                                    <component>Connection Management</component>
                    <component>Error Handling</component>
                                        <votes>0</votes>
                                    <watches>7</watches>
                                                                                                                <comments>
                            <comment id="988910" author="zqwang" created="Sat, 25 Jul 2015 04:32:12 +0000"  >&lt;p&gt;Thank you!&lt;/p&gt;</comment>
                            <comment id="987957" author="ross@10gen.com" created="Fri, 24 Jul 2015 09:28:58 +0000"  >&lt;p&gt;This ticket is closed so no further action will be taken here.  I see you have also opened HADOOP-217 and the followup will be on that ticket.&lt;/p&gt;</comment>
                            <comment id="987767" author="zqwang" created="Fri, 24 Jul 2015 01:52:14 +0000"  >&lt;p&gt;Hi, I use  mongo-hadoop-core-1.3.2/1.3.1/1.4.0 and when I run the app in Spark in standalone mode, it gets the same issue, but it runs well if I use local mode. I have been stuck on this issue for several days. Please help me.&lt;/p&gt;</comment>
                            <comment id="880581" author="jeff.yemin" created="Tue, 14 Apr 2015 11:58:18 +0000"  >&lt;p&gt;Thanks for investigating.  I&apos;ll close this as Works as Designed then.&lt;/p&gt;</comment>
                            <comment id="880551" author="eyalz" created="Tue, 14 Apr 2015 10:24:52 +0000"  >&lt;p&gt;glad i could help. i think that the problem relates to the mongo hadoop integration and not to the mongo java driver&lt;/p&gt;</comment>
                            <comment id="880543" author="ltrejo" created="Tue, 14 Apr 2015 09:59:59 +0000"  >&lt;p&gt;Eyal Zituni, that was the problem! I downgraded to version 1.3.1 of the MongoDB-Hadoop Driver and now it works like a charm. Thank you very much! Should I close the issue?&lt;/p&gt;</comment>
                            <comment id="879669" author="eyalz" created="Mon, 13 Apr 2015 15:10:56 +0000"  >&lt;p&gt;I had the same issue. it seems like a problem which is caused by a new feature that has been added to version 1.3.2 of the mongo-hadoop-core driver.&lt;br/&gt;
a new mongo client pool has been added to the MongoConfigUtil class, &lt;br/&gt;
see - &lt;a href=&quot;https://github.com/mongodb/mongo-hadoop/commit/f8f98b1bef05579fce8ef46742e75cdb4d294d2f&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://github.com/mongodb/mongo-hadoop/commit/f8f98b1bef05579fce8ef46742e75cdb4d294d2f&lt;/a&gt;&lt;br/&gt;
i suspect that a problem with this pool might be raised once 2 or more concurrent threads require the same mongo client (identified by the uri), they will both acquire the same client instance and if one of them finishes before the 2nd has finished and the close method is called by the 1st... the 2nd thread will lose the connection. &lt;br/&gt;
see the saveAsNewAPIHadoopDataset method in PairRDDFunctions.scala (963) which first obtain the mongo client (writer) and afterward calls the close.&lt;br/&gt;
&lt;a href=&quot;https://github.com/apache/spark/blob/master/core/src/main/scala/org/apache/spark/rdd/PairRDDFunctions.scala&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://github.com/apache/spark/blob/master/core/src/main/scala/org/apache/spark/rdd/PairRDDFunctions.scala&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;*i believe that this will probably only occur in frameworks such as spark which will run the driver in a multithreads environment (the MongoConfigUtil and the pool are statics)&lt;/p&gt;

&lt;p&gt;thanks&lt;/p&gt;

&lt;p&gt;Eyal&lt;/p&gt;

</comment>
                            <comment id="861226" author="jeff.yemin" created="Mon, 23 Mar 2015 18:14:35 +0000"  >&lt;p&gt;Yes, every time we&apos;ve seen this it&apos;s because a MongoClient instance was closed and then subsequently used.&lt;/p&gt;</comment>
                    </comments>
                <issuelinks>
                            <issuelinktype id="10012">
                    <name>Related</name>
                                            <outwardlinks description="related to">
                                                        </outwardlinks>
                                                        </issuelinktype>
                    </issuelinks>
                <attachments>
                    </attachments>
                <subtasks>
                    </subtasks>
                <customfields>
                                                                                                                                                                                                                                                                                                                                                                    <customfield id="customfield_15850" key="com.atlassian.jira.plugins.jira-development-integration-plugin:devsummary">
                        <customfieldname>Development</customfieldname>
                        <customfieldvalues>
                            
                        </customfieldvalues>
                    </customfield>
                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                    <customfield id="customfield_12550" key="com.pyxis.greenhopper.jira:gh-lexo-rank">
                        <customfieldname>Rank</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>2|hs7dyn:</customfieldvalue>

                        </customfieldvalues>
                    </customfield>
                                                                <customfield id="customfield_10558" key="com.pyxis.greenhopper.jira:gh-global-rank">
                        <customfieldname>Rank (Obsolete)</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>9223372036854775807</customfieldvalue>
                        </customfieldvalues>
                    </customfield>
                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                            </customfields>
    </item>
</channel>
</rss>