java.io.IOException: java.lang.IllegalArgumentException: can't serialize class org.apache.pig.data.BinSedesTuple

JIRA | Russell Jurney | 5 years ago
Tip
Your exception is missing from the Samebug knowledge base. Here are the best solutions we found on the Internet.
  1. 0

    Script:

    /* AvroStorage */
    register /me/pig/build/ivy/lib/Pig/avro-1.5.3.jar
    register /me/pig/build/ivy/lib/Pig/json-simple-1.1.jar
    register /me/pig/contrib/piggybank/java/piggybank.jar
    define AvroStorage org.apache.pig.piggybank.storage.avro.AvroStorage();

    /* MongoStorage */
    register /me/mongo-hadoop/mongo-2.7.3.jar
    /*register /me/mongo-hadoop/core/target/mongo-hadoop-core-1.0.0.jar
    register /me/mongo-hadoop/pig/target/mongo-hadoop-pig-1.0.0.jar*/
    register /me/tmp/mongo-hadoop/core/target/mongo-hadoop-core-1.1.0-SNAPSHOT.jar
    register /me/tmp/mongo-hadoop/pig/target/mongo-hadoop-pig-1.1.0-SNAPSHOT.jar

    set mapred.map.tasks.speculative.execution false
    set mapred.reduce.tasks.speculative.execution false
    define MongoStorage com.mongodb.hadoop.pig.MongoStorage();

    /* Get rid of emails with reply_to, as they confuse everything in mailing lists. */
    avro_emails = load '/me/tmp/thu_emails' using AvroStorage();
    emails = filter avro_emails by (froms is not null);

    email_threads = foreach (group emails by thread_id) {
        thread = order emails by date;
        generate group as thread_id, thread;
    };

    email_threads = foreach email_threads generate
        thread_id,
        thread as thread:bag{email:tuple(
            message_id:chararray, thread_id:chararray, in_reply_to:chararray,
            subject:chararray, body:chararray, date:chararray,
            froms:bag{from:tuple(real_name:chararray, address:chararray)},
            tos:bag{to:tuple(real_name:chararray, address:chararray)},
            ccs:bag{cc:tuple(real_name:chararray, address:chararray)},
            bccs:bag{bcc:tuple(real_name:chararray, address:chararray)},
            reply_tos:bag{reply_to:tuple(real_name:chararray, address:chararray)})};

    store email_threads into 'mongodb://localhost/agile_data.threads' using MongoStorage();

    Result:

    java.io.IOException: java.lang.IllegalArgumentException: can't serialize class org.apache.pig.data.BinSedesTuple
        at org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PigGenericMapReduce$Reduce.runPipeline(PigGenericMapReduce.java:465)
        at org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PigGenericMapReduce$Reduce.processOnePackageOutput(PigGenericMapReduce.java:428)
        at org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PigGenericMapReduce$Reduce.reduce(PigGenericMapReduce.java:408)
        at org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PigGenericMapReduce$Reduce.reduce(PigGenericMapReduce.java:262)
        at org.apache.hadoop.mapreduce.Reducer.run(Reducer.java:176)
        at org.apache.hadoop.mapred.ReduceTask.runNewReducer(ReduceTask.java:649)
        at org.apache.hadoop.mapred.ReduceTask.run(ReduceTask.java:417)
        at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:260)
    Caused by: java.lang.IllegalArgumentException: can't serialize class org.apache.pig.data.BinSedesTuple
        at org.bson.BasicBSONEncoder._putObjectField(BasicBSONEncoder.java:234)
        at org.bson.BasicBSONEncoder.putIterable(BasicBSONEncoder.java:259)
        at org.bson.BasicBSONEncoder._putObjectField(BasicBSONEncoder.java:198)
        at org.bson.BasicBSONEncoder.putMap(BasicBSONEncoder.java:274)
        at org.bson.BasicBSONEncoder._putObjectField(BasicBSONEncoder.java:196)
        at org.bson.BasicBSONEncoder.putIterable(BasicBSONEncoder.java:259)
        at org.bson.BasicBSONEncoder._putObjectField(BasicBSONEncoder.java:198)
        at org.bson.BasicBSONEncoder.putObject(BasicBSONEncoder.java:140)
        at org.bson.BasicBSONEncoder.putObject(BasicBSONEncoder.java:86)
        at com.mongodb.DefaultDBEncoder.writeObject(DefaultDBEncoder.java:27)
        at com.mongodb.OutMessage.putObject(OutMessage.java:142)
        at com.mongodb.DBApiLayer$MyCollection.insert(DBApiLayer.java:252)
        at com.mongodb.DBApiLayer$MyCollection.insert(DBApiLayer.java:211)
        at com.mongodb.DBCollection.insert(DBCollection.java:57)
        at com.mongodb.DBCollection.insert(DBCollection.java:87)
        at com.mongodb.DBCollection.save(DBCollection.java:728)
        at com.mongodb.DBCollection.save(DBCollection.java:703)
        at com.mongodb.hadoop.output.MongoRecordWriter.write(MongoRecordWriter.java:77)
        at com.mongodb.hadoop.pig.MongoStorage.putNext(MongoStorage.java:94)
        at org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PigOutputFormat$PigRecordWriter.write(PigOutputFormat.java:139)
        at org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PigOutputFormat$PigRecordWriter.write(PigOutputFormat.java:98)
        at org.apache.hadoop.mapred.ReduceTask$NewTrackingRecordWriter.write(ReduceTask.java:587)
        at org.apache.hadoop.mapreduce.TaskInputOutputContext.write(TaskInputOutputContext.java:80)
        at org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PigGenericMapReduce$Reduce.runPipeline(PigGenericMapReduce.java:463)
        ... 7 more

    JIRA | 5 years ago | Russell Jurney
    java.io.IOException: java.lang.IllegalArgumentException: can't serialize class org.apache.pig.data.BinSedesTuple
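
    Reading the trace bottom-up: MongoStorage.putNext hands the record to the driver, the top level of the document encodes as a map, but the nested tuples inside the thread bag reach org.bson.BasicBSONEncoder unconverted. The encoder only has cases for standard Java types (maps, iterables, arrays, numbers, strings and the like), so a raw org.apache.pig.data.BinSedesTuple falls through to the "can't serialize class" error. The following is a minimal, hypothetical Java sketch, not the actual mongo-hadoop code, of how a storage function could recursively unwrap Pig tuples and bags into BasicDBList/BasicDBObject values before the driver sees them; the class name PigToBson and its method are invented for illustration.

    import java.util.Map;

    import org.apache.pig.data.DataBag;
    import org.apache.pig.data.DataByteArray;
    import org.apache.pig.data.Tuple;

    import com.mongodb.BasicDBList;
    import com.mongodb.BasicDBObject;

    /**
     * Hypothetical helper (illustrative names, not part of mongo-hadoop):
     * recursively unwraps Pig values into types the BSON encoder understands.
     */
    public final class PigToBson {

        private PigToBson() {
        }

        public static Object convert(Object pigValue) {
            if (pigValue instanceof Tuple) {
                // A tuple becomes a BSON array of its converted fields
                // (positional, since field names are not available here).
                BasicDBList out = new BasicDBList();
                for (Object field : ((Tuple) pigValue).getAll()) {
                    out.add(convert(field));
                }
                return out;
            }
            if (pigValue instanceof DataBag) {
                // A bag becomes a BSON array of converted tuples.
                BasicDBList out = new BasicDBList();
                for (Tuple t : (DataBag) pigValue) {
                    out.add(convert(t));
                }
                return out;
            }
            if (pigValue instanceof Map<?, ?>) {
                // A Pig map becomes an embedded document.
                BasicDBObject doc = new BasicDBObject();
                for (Map.Entry<?, ?> e : ((Map<?, ?>) pigValue).entrySet()) {
                    doc.put(e.getKey().toString(), convert(e.getValue()));
                }
                return doc;
            }
            if (pigValue instanceof DataByteArray) {
                // Raw bytearrays are passed through as byte[], which BSON can encode.
                return ((DataByteArray) pigValue).get();
            }
            // chararray, int, long, float, double, boolean and null pass through unchanged.
            return pigValue;
        }
    }

    With a conversion like this applied inside putNext, the froms/tos/ccs/bccs bags in the script above would arrive at the driver as arrays of sub-documents rather than as raw BinSedesTuple instances.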
  2. 0

    MongoDb: fields stored in the db can't start with '$'

    GitHub | 5 years ago | tiagoboldt
    java.lang.IllegalArgumentException: fields stored in the db can't start with '$' (Bad Key: '$outer')
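
    This is a different failure mode of the same encoder: MongoDB rejects documents whose field names start with '$' (here the stray key '$outer'). Below is a small, hypothetical sketch of renaming such keys before the insert, using only the legacy driver's DBObject API; the class name KeySanitizer and the '_' replacement prefix are illustrative choices, not a documented fix.

    import com.mongodb.BasicDBObject;
    import com.mongodb.DBObject;

    /** Hypothetical sketch: rename '$'-prefixed keys so the insert is accepted. */
    public final class KeySanitizer {

        private KeySanitizer() {
        }

        public static DBObject sanitize(DBObject doc) {
            BasicDBObject clean = new BasicDBObject();
            for (String key : doc.keySet()) {
                Object value = doc.get(key);
                if (value instanceof DBObject) {
                    value = sanitize((DBObject) value); // recurse into embedded documents
                }
                // '$outer' becomes '_outer'; all other keys are kept as-is.
                clean.put(key.startsWith("$") ? "_" + key.substring(1) : key, value);
            }
            return clean;
        }
    }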
  3. 0

    Problem Saving Pojo

    GitHub | 4 years ago | harrychan
    java.lang.IllegalArgumentException: 'ok' should never be null...
  4. 0

    Support for @Property for list of embedded objects [moved]

    GitHub | 4 years ago | jyemin
    java.lang.IllegalArgumentException: can't serialize class com.package.Foo
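
    The same "can't serialize class" message appears whenever a plain POJO reaches the legacy driver's default encoder, which has no mapping for arbitrary user classes. Below is a minimal, hypothetical sketch of flattening such a POJO into a DBObject by hand before inserting it; Foo's fields and the DAO wiring are invented for illustration (com.package.Foo itself is only named in the report).

    import com.mongodb.BasicDBObject;
    import com.mongodb.DBCollection;
    import com.mongodb.DBObject;

    /** Stand-in for com.package.Foo; the fields are invented for the example. */
    class Foo {
        String name;
        int count;
    }

    /** Hypothetical DAO sketch: flatten the POJO into a DBObject the encoder accepts. */
    class FooDao {

        private final DBCollection collection;

        FooDao(DBCollection collection) {
            this.collection = collection;
        }

        void save(Foo foo) {
            DBObject doc = new BasicDBObject()
                    .append("name", foo.name)     // map each field explicitly
                    .append("count", foo.count);
            collection.insert(doc);               // a DBObject encodes without the IllegalArgumentException
        }
    }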

    Root Cause Analysis

    1. java.lang.IllegalArgumentException

      can't serialize class org.apache.pig.data.BinSedesTuple

      at org.bson.BasicBSONEncoder._putObjectField()
    2. MongoDB Java Driver
      DBCollection.save
      1. org.bson.BasicBSONEncoder._putObjectField(BasicBSONEncoder.java:234)
      2. org.bson.BasicBSONEncoder.putIterable(BasicBSONEncoder.java:259)
      3. org.bson.BasicBSONEncoder._putObjectField(BasicBSONEncoder.java:198)
      4. org.bson.BasicBSONEncoder.putMap(BasicBSONEncoder.java:274)
      5. org.bson.BasicBSONEncoder._putObjectField(BasicBSONEncoder.java:196)
      6. org.bson.BasicBSONEncoder.putIterable(BasicBSONEncoder.java:259)
      7. org.bson.BasicBSONEncoder._putObjectField(BasicBSONEncoder.java:198)
      8. org.bson.BasicBSONEncoder.putObject(BasicBSONEncoder.java:140)
      9. org.bson.BasicBSONEncoder.putObject(BasicBSONEncoder.java:86)
      10. com.mongodb.DefaultDBEncoder.writeObject(DefaultDBEncoder.java:27)
      11. com.mongodb.OutMessage.putObject(OutMessage.java:142)
      12. com.mongodb.DBApiLayer$MyCollection.insert(DBApiLayer.java:252)
      13. com.mongodb.DBApiLayer$MyCollection.insert(DBApiLayer.java:211)
      14. com.mongodb.DBCollection.insert(DBCollection.java:57)
      15. com.mongodb.DBCollection.insert(DBCollection.java:87)
      16. com.mongodb.DBCollection.save(DBCollection.java:728)
      17. com.mongodb.DBCollection.save(DBCollection.java:703)
      17 frames
    3. com.mongodb.hadoop
      MongoStorage.putNext
      1. com.mongodb.hadoop.output.MongoRecordWriter.write(MongoRecordWriter.java:77)
      2. com.mongodb.hadoop.pig.MongoStorage.putNext(MongoStorage.java:94)
      2 frames
    4. org.apache.pig
      PigOutputFormat$PigRecordWriter.write
      1. org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PigOutputFormat$PigRecordWriter.write(PigOutputFormat.java:139)
      2. org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PigOutputFormat$PigRecordWriter.write(PigOutputFormat.java:98)
      2 frames
    5. Hadoop
      TaskInputOutputContext.write
      1. org.apache.hadoop.mapred.ReduceTask$NewTrackingRecordWriter.write(ReduceTask.java:587)
      2. org.apache.hadoop.mapreduce.TaskInputOutputContext.write(TaskInputOutputContext.java:80)
      2 frames
    6. org.apache.pig
      PigGenericMapReduce$Reduce.reduce
      1. org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PigGenericMapReduce$Reduce.runPipeline(PigGenericMapReduce.java:463)
      2. org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PigGenericMapReduce$Reduce.processOnePackageOutput(PigGenericMapReduce.java:428)
      3. org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PigGenericMapReduce$Reduce.reduce(PigGenericMapReduce.java:408)
      4. org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PigGenericMapReduce$Reduce.reduce(PigGenericMapReduce.java:262)
      4 frames
    7. Hadoop
      LocalJobRunner$Job.run
      1. org.apache.hadoop.mapreduce.Reducer.run(Reducer.java:176)
      2. org.apache.hadoop.mapred.ReduceTask.runNewReducer(ReduceTask.java:649)
      3. org.apache.hadoop.mapred.ReduceTask.run(ReduceTask.java:417)
      4. org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:260)
      4 frames