-
Type: Bug
-
Resolution: Cannot Reproduce
-
Priority: Major - P3
-
Fix Version/s: None
-
Affects Version/s: None
-
Component/s: None
-
Labels: None
This works:
// Working case: unauthenticated MongoDB on localhost, default port.
val sparkSession: SparkSession = SparkSession.builder()
  .master("local")
  .appName("MongoSparkConnectorIntro")
  .config("spark.mongodb.input.uri", "mongodb://127.0.0.1/nasa.eva2")
  .config("spark.mongodb.output.uri", "mongodb://127.0.0.1/nasa.astronautTotals")
  .getOrCreate()
// Read the "eva2" collection via the MongoDB Spark Connector and dump its schema.
val sqlContext: SQLContext = sparkSession.sqlContext
val evadf: DataFrame = sqlContext.read.option("collection", "eva2").mongo()
evadf.printSchema()
But this fails:
// Failing case: authenticated MongoDB on a remote host (auth against the admin db).
// BUG: the original output URI literal ended with a trailing space
// ("...?authSource=admin "), which becomes part of the authSource query
// parameter value. The connector then tries to authenticate against the
// database "admin " (with a space), which does not exist, so authentication
// fails. The stray space is removed below; this is the likely cause of the
// reported failure rather than a connector bug.
val sparkSession: SparkSession = SparkSession.builder()
  .master("local")
  .appName("MongoSparkConnectorIntro")
  .config("spark.mongodb.input.uri", "mongodb://m103-admin:m103-pass@192.168.103.100:27000/nasa.eva2?authSource=admin")
  .config("spark.mongodb.output.uri", "mongodb://m103-admin:m103-pass@192.168.103.100:27000/nasa.astronautTotals?authSource=admin")
  .getOrCreate()
// Read the "eva2" collection via the MongoDB Spark Connector and dump its schema.
val sqlContext: SQLContext = sparkSession.sqlContext
val evadf: DataFrame = sqlContext.read.option("collection", "eva2").mongo()
evadf.printSchema()
Why?