org.apache.spark.SparkException: Job aborted due to stage failure: Task 0 in stage 0.0 failed 1 times, most recent failure: Lost task 0.0 in stage 0.0 (TID 0, localhost): java.lang.NumberFormatException: For input string: "id"

Data Science | João_testeSW | 9 months ago
Your exception is missing from the Samebug knowledge base.
Here are the best solutions we found on the Internet.
  1. Function take() error - Link Analysis Research using Spark Mllib

    Data Science | 9 months ago | João_testeSW
    org.apache.spark.SparkException: Job aborted due to stage failure: Task 0 in stage 0.0 failed 1 times, most recent failure: Lost task 0.0 in stage 0.0 (TID 0, localhost): java.lang.NumberFormatException: For input string: "id"
  2. Function take() error - Link Analysis Research using Spark Mllib

    Stack Overflow | 9 months ago | João_testeSW
    org.apache.spark.SparkException: Job aborted due to stage failure: Task 0 in stage 0.0 failed 1 times, most recent failure: Lost task 0.0 in stage 0.0 (TID 0, localhost): java.lang.NumberFormatException: For input string: "id"
  3. DataFrame save to Cassandra fails with NumberFormatException

    Start spark-shell with the connector on the classpath:

    {code}
    SPARK_CLASSPATH=jsr166e-1.1.0.jar:spark-cassandra-connector_2.10-1.3.0-M1.jar:guava-jdk5-14.0.1.jar:cassandra-driver-core-2.1.5.jar:cassandra-thrift-2.1.3.jar:joda-time-2.3.jar bin/spark-shell --conf spark.cassandra.connection.host=127.0.0.1
    {code}

    Create a Cassandra table like this:

    {code}
    CREATE TABLE test1 (
        customer_id int,
        uri text,
        browser text,
        epoch bigint,
        PRIMARY KEY (customer_id, epoch, uri)
    )
    {code}

    In spark-shell:

    {code}
    case class Test(val epoch: Long, val uri: String, val browser: String, val customer_id: Int)
    val test1 = Test(1400820884, "http://foobar", "Firefox", 123242)
    val df = sc.parallelize(Seq(test1)).toDF
    import org.apache.spark.sql._
    df.save("org.apache.spark.sql.cassandra", SaveMode.Overwrite, options = Map("c_table" -> "test1", "keyspace" -> "yana_test"))
    {code}

    The last call (df.save) produces the following error:

    {code}
    org.apache.spark.SparkException: Job aborted due to stage failure: Task 3 in stage 0.0 failed 1 times, most recent failure: Lost task 3.0 in stage 0.0 (TID 3, localhost): java.lang.NumberFormatException: For input string: "http://foobar"
        at java.lang.NumberFormatException.forInputString(NumberFormatException.java:65)
        at java.lang.Long.parseLong(Long.java:441)
        at java.lang.Long.parseLong(Long.java:483)
        at scala.collection.immutable.StringLike$class.toLong(StringLike.scala:230)
        at scala.collection.immutable.StringOps.toLong(StringOps.scala:31)
        at com.datastax.spark.connector.types.TypeConverter$LongConverter$$anonfun$convertPF$3.applyOrElse(TypeConverter.scala:188)
        at scala.PartialFunction$AndThen.applyOrElse(PartialFunction.scala:184)
        at com.datastax.spark.connector.types.TypeConverter$class.convert(TypeConverter.scala:38)
        at com.datastax.spark.connector.types.TypeConverter$JavaLongConverter$.com$datastax$spark$connector$types$NullableTypeConverter$$super$convert(TypeConverter.scala:196)
        at com.datastax.spark.connector.types.NullableTypeConverter$class.convert(TypeConverter.scala:51)
        at com.datastax.spark.connector.types.TypeConverter$JavaLongConverter$.convert(TypeConverter.scala:196)
        at com.datastax.spark.connector.types.TypeConverter$OptionToNullConverter$$anonfun$convertPF$23.applyOrElse(TypeConverter.scala:632)
        at com.datastax.spark.connector.types.TypeConverter$class.convert(TypeConverter.scala:38)
        at com.datastax.spark.connector.types.TypeConverter$OptionToNullConverter.com$datastax$spark$connector$types$NullableTypeConverter$$super$convert(TypeConverter.scala:625)
        at com.datastax.spark.connector.types.NullableTypeConverter$class.convert(TypeConverter.scala:51)
        at com.datastax.spark.connector.types.TypeConverter$OptionToNullConverter.convert(TypeConverter.scala:625)
        at com.datastax.spark.connector.writer.SqlRowWriter$$anonfun$readColumnValues$1.apply$mcVI$sp(SqlRowWriter.scala:21)
        at scala.collection.immutable.Range.foreach$mVc$sp(Range.scala:141)
        at com.datastax.spark.connector.writer.SqlRowWriter.readColumnValues(SqlRowWriter.scala:20)
        at com.datastax.spark.connector.writer.SqlRowWriter.readColumnValues(SqlRowWriter.scala:8)
        at com.datastax.spark.connector.writer.BoundStatementBuilder.bind(BoundStatementBuilder.scala:35)
        at com.datastax.spark.connector.writer.GroupingBatchBuilder.next(GroupingBatchBuilder.scala:106)
        at com.datastax.spark.connector.writer.GroupingBatchBuilder.next(GroupingBatchBuilder.scala:31)
        at scala.collection.Iterator$class.foreach(Iterator.scala:727)
        at com.datastax.spark.connector.writer.GroupingBatchBuilder.foreach(GroupingBatchBuilder.scala:31)
        at com.datastax.spark.connector.writer.TableWriter$$anonfun$write$1.apply(TableWriter.scala:135)
        at com.datastax.spark.connector.writer.TableWriter$$anonfun$write$1.apply(TableWriter.scala:119)
        at com.datastax.spark.connector.cql.CassandraConnector$$anonfun$withSessionDo$1.apply(CassandraConnector.scala:105)
        at com.datastax.spark.connector.cql.CassandraConnector$$anonfun$withSessionDo$1.apply(CassandraConnector.scala:104)
        at com.datastax.spark.connector.cql.CassandraConnector.closeResourceAfterUse(CassandraConnector.scala:156)
        at com.datastax.spark.connector.cql.CassandraConnector.withSessionDo(CassandraConnector.scala:104)
        at com.datastax.spark.connector.writer.TableWriter.write(TableWriter.scala:119)
        at com.datastax.spark.connector.RDDFunctions$$anonfun$saveToCassandra$1.apply(RDDFunctions.scala:36)
        at com.datastax.spark.connector.RDDFunctions$$anonfun$saveToCassandra$1.apply(RDDFunctions.scala:36)
        at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:61)
        at org.apache.spark.scheduler.Task.run(Task.scala:64)
        at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:203)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
        at java.lang.Thread.run(Thread.java:745)
    {code}

    DataStax JIRA | 2 years ago | Yana Kadiyska
    org.apache.spark.SparkException: Job aborted due to stage failure: Task 3 in stage 0.0 failed 1 times, most recent failure: Lost task 3.0 in stage 0.0 (TID 3, localhost): java.lang.NumberFormatException: For input string: "http://foobar"
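
    In this report the connector's LongConverter is handed the uri value "http://foobar", which suggests the row values are being bound to the table's columns by position (likely customer_id, epoch, uri, browser in Cassandra's key-then-regular order) rather than by name, so the DataFrame's second column (uri) lands on the bigint epoch column. Under that assumption, a minimal workaround sketch is to select the DataFrame columns in the table's order before saving; this is illustrative, not a confirmed fix for this connector version:

    {code}
    // Hypothetical workaround, assuming positional binding: reorder the
    // DataFrame to match Cassandra's column order (partition key,
    // clustering columns, then regular columns).
    val ordered = df.select("customer_id", "epoch", "uri", "browser")
    ordered.save("org.apache.spark.sql.cassandra", SaveMode.Overwrite,
      options = Map("c_table" -> "test1", "keyspace" -> "yana_test"))
    {code}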
  4. GitHub comment 184#166739266

    GitHub | 1 year ago | nealmcb
    org.apache.spark.SparkException: Job aborted due to stage failure: Task 0 in stage 76.0 failed 1 times, most recent failure: Lost task 0.0 in stage 76.0 (TID 231, localhost): java.lang.NumberFormatException: For input string: "89959) 2002 NT7"
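
All of the hits above share one pattern: a non-numeric token (the CSV header field "id", a URL, the asteroid designation "89959) 2002 NT7") reaches String.toLong inside a Spark task, and the whole stage aborts. A defensive alternative is to parse with scala.util.Try so malformed records are dropped instead of killing the job; a minimal sketch, with the file path, delimiter, and field position as assumptions:

{code}
import scala.util.Try

// Keep only lines whose first field parses as a Long; the header
// row ("id,...") and any malformed records are silently skipped.
val ids = sc.textFile("transactions.csv")        // illustrative path
  .map(_.split(",")(0))
  .flatMap(tok => Try(tok.toLong).toOption)

ids.take(5).foreach(println)
{code}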

Root Cause Analysis

  1. org.apache.spark.SparkException

    Job aborted due to stage failure: Task 0 in stage 0.0 failed 1 times, most recent failure: Lost task 0.0 in stage 0.0 (TID 0, localhost): java.lang.NumberFormatException: For input string: "id"

    at java.lang.NumberFormatException.forInputString()
  2. Java RT
    Long.parseLong
    1. java.lang.NumberFormatException.forInputString(NumberFormatException.java:65)
    2. java.lang.Long.parseLong(Long.java:441)
    3. java.lang.Long.parseLong(Long.java:483)
    3 frames
  3. Scala
    StringOps.toLong
    1. scala.collection.immutable.StringLike$class.toLong(StringLike.scala:230)
    2. scala.collection.immutable.StringOps.toLong(StringOps.scala:31)
    2 frames
  4. Unknown
    $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$anonfun$1.apply
    1. $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.parseTransactions(<console>:38)
    2. $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$anonfun$1.apply(<console>:42)
    3. $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$anonfun$1.apply(<console>:42)
    3 frames
  5. Scala
    Iterator$$anon$11.next
    1. scala.collection.Iterator$$anon$11.next(Iterator.scala:328)
    1 frame
  6. Spark
    Executor$TaskRunner.run
    1. org.apache.spark.storage.MemoryStore.unrollSafely(MemoryStore.scala:285)
    2. org.apache.spark.CacheManager.putInBlockManager(CacheManager.scala:171)
    3. org.apache.spark.CacheManager.getOrCompute(CacheManager.scala:78)
    4. org.apache.spark.rdd.RDD.iterator(RDD.scala:268)
    5. org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38)
    6. org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:306)
    7. org.apache.spark.rdd.RDD.iterator(RDD.scala:270)
    8. org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38)
    9. org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:306)
    10. org.apache.spark.rdd.RDD.iterator(RDD.scala:270)
    11. org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:73)
    12. org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:41)
    13. org.apache.spark.scheduler.Task.run(Task.scala:89)
    14. org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:214)
    14 frames
  7. Java RT
    Thread.run
    1. java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
    2. java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
    3. java.lang.Thread.run(Thread.java:745)
    3 frames
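
The "Unknown" frames above point at a parseTransactions function defined in the spark-shell REPL (<console>:38) that calls String.toLong, and the failing input is the literal string "id", which is almost certainly the header row of the input file being parsed as data. The usual fix is to drop the header before parsing. A minimal sketch, where the file name, column layout, and Transaction fields are assumptions for illustration:

{code}
// Minimal sketch, assuming a CSV whose header row starts with "id".
// File name, column layout, and the Transaction fields are assumptions.
case class Transaction(id: Long, src: String, dst: String, amount: Double)

def parseTransactions(line: String): Transaction = {
  val f = line.split(",")
  Transaction(f(0).toLong, f(1), f(2), f(3).toDouble)
}

val raw = sc.textFile("transactions.csv")
val header = raw.first()                 // e.g. "id,src,dst,amount"
val transactions = raw
  .filter(_ != header)                   // strip the header so "id" never reaches toLong
  .map(parseTransactions)

transactions.take(5).foreach(println)    // take() no longer aborts the stage
{code}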