java.lang.ExceptionInInitializerError

GitHub | a4712020502 | 10 months ago
Tip
Did you know that we can give you better matches? Get more relevant results from Samebug's stack trace search.

Root Cause Analysis

  1. java.lang.IllegalArgumentException

    state should be: w >= 0

    at com.mongodb.assertions.Assertions.isTrueArgument()
  2. MongoDB Java Driver
    WriteConcern.<init>
    1. com.mongodb.assertions.Assertions.isTrueArgument(Assertions.java:99)
    2. com.mongodb.WriteConcern.<init>(WriteConcern.java:316)
    3. com.mongodb.WriteConcern.<init>(WriteConcern.java:227)
    3 frames
  3. casbah-core
    MongoClient.apply
    1. com.mongodb.casbah.WriteConcern$.<init>(WriteConcern.scala:41)
    2. com.mongodb.casbah.WriteConcern$.<clinit>(WriteConcern.scala)
    3. com.mongodb.casbah.BaseImports$class.$init$(Implicits.scala:162)
    4. com.mongodb.casbah.Imports$.<init>(Implicits.scala:142)
    5. com.mongodb.casbah.Imports$.<clinit>(Implicits.scala)
    6. com.mongodb.casbah.MongoClient.apply(MongoClient.scala:219)
    6 frames
  4. com.stratio.datasource
    MongodbRDD.getPartitions
    1. com.stratio.datasource.mongodb.partitioner.MongodbPartitioner.isShardedCollection(MongodbPartitioner.scala:78)
    2. com.stratio.datasource.mongodb.partitioner.MongodbPartitioner$$anonfun$computePartitions$1.apply(MongodbPartitioner.scala:67)
    3. com.stratio.datasource.mongodb.partitioner.MongodbPartitioner$$anonfun$computePartitions$1.apply(MongodbPartitioner.scala:66)
    4. com.stratio.datasource.util.using$.apply(using.scala:38)
    5. com.stratio.datasource.mongodb.partitioner.MongodbPartitioner.computePartitions(MongodbPartitioner.scala:66)
    6. com.stratio.datasource.mongodb.rdd.MongodbRDD.getPartitions(MongodbRDD.scala:42)
    6 frames
  5. Spark
    RDD$$anonfun$partitions$2.apply
    1. org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:239)
    2. org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:237)
    2 frames
  6. Scala
    Option.getOrElse
    1. scala.Option.getOrElse(Option.scala:120)
    1 frame
  7. Spark
    RDD$$anonfun$partitions$2.apply
    1. org.apache.spark.rdd.RDD.partitions(RDD.scala:237)
    2. org.apache.spark.rdd.MapPartitionsRDD.getPartitions(MapPartitionsRDD.scala:35)
    3. org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:239)
    4. org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:237)
    4 frames
  8. Scala
    Option.getOrElse
    1. scala.Option.getOrElse(Option.scala:120)
    1 frame
  9. Spark
    PairRDDFunctions.reduceByKey
    1. org.apache.spark.rdd.RDD.partitions(RDD.scala:237)
    2. org.apache.spark.Partitioner$.defaultPartitioner(Partitioner.scala:65)
    3. org.apache.spark.rdd.PairRDDFunctions$$anonfun$reduceByKey$3.apply(PairRDDFunctions.scala:331)
    4. org.apache.spark.rdd.PairRDDFunctions$$anonfun$reduceByKey$3.apply(PairRDDFunctions.scala:331)
    5. org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:150)
    6. org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:111)
    7. org.apache.spark.rdd.RDD.withScope(RDD.scala:316)
    8. org.apache.spark.rdd.PairRDDFunctions.reduceByKey(PairRDDFunctions.scala:330)
    8 frames
  10. com.stratio.datasource
    MongodbRelation$$anonfun$1.apply
    1. com.stratio.datasource.mongodb.schema.MongodbSchema.schema(MongodbSchema.scala:47)
    2. com.stratio.datasource.mongodb.MongodbRelation.com$stratio$datasource$mongodb$MongodbRelation$$lazySchema$lzycompute(MongodbRelation.scala:63)
    3. com.stratio.datasource.mongodb.MongodbRelation.com$stratio$datasource$mongodb$MongodbRelation$$lazySchema(MongodbRelation.scala:60)
    4. com.stratio.datasource.mongodb.MongodbRelation$$anonfun$1.apply(MongodbRelation.scala:65)
    5. com.stratio.datasource.mongodb.MongodbRelation$$anonfun$1.apply(MongodbRelation.scala:65)
    5 frames
  11. Scala
    Option.getOrElse
    1. scala.Option.getOrElse(Option.scala:120)
    1 frame
  12. com.stratio.datasource
    DefaultSource.createRelation
    1. com.stratio.datasource.mongodb.MongodbRelation.<init>(MongodbRelation.scala:65)
    2. com.stratio.datasource.mongodb.DefaultSource.createRelation(DefaultSource.scala:36)
    2 frames
  13. org.apache.spark
    ResolvedDataSource$.apply
    1. org.apache.spark.sql.execution.datasources.ResolvedDataSource$.apply(ResolvedDataSource.scala:158)
    1 frame
  14. Spark Project SQL
    DataFrameReader.load
    1. org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:119)
    1 frame
  15. com.askingdata.test
    TestSpark.main
    1. com.askingdata.test.TestSpark.main(TestSpark.java:23)
    1 frame