org.apache.hadoop.hbase.regionserver.NoSuchColumnFamilyException: Column family 0 does not exist in region SYSTEM.TABLE,,1421139725748.a46320eb144712e231b1dd8ab3da30aa. in table {NAME => 'SYSTEM.TABLE', SPLIT_POLICY => 'org.apache.phoenix.schema.MetaDataSplitPolicy', coprocessor$7 => '|org.apache.phoenix.coprocessor.MetaDataRegionObserver|2|', coprocessor$5 => '|org.apache.phoenix.coprocessor.ServerCachingEndpointImpl|1|', coprocessor$6 => '|org.apache.phoenix.coprocessor.MetaDataEndpointImpl|1|', coprocessor$3 => '|org.apache.phoenix.coprocessor.GroupedAggregateRegionObserver|1|', coprocessor$4 => '|org.apache.phoenix.join.HashJoiningRegionObserver|1|', coprocessor$1 => '|org.apache.phoenix.coprocessor.ScanRegionObserver|1|', UpgradeTo21 => 'true', coprocessor$2 => '|org.apache.phoenix.coprocessor.UngroupedAggregateRegionObserver|1|', UpgradeTo20 => 'true', UpgradeTo22 => 'true', FAMILIES => [{NAME => '_0', ENCODE_ON_DISK => 'true', BLOOMFILTER => 'NONE', VERSIONS => '1000', IN_MEMORY => 'false', KEEP_DELETED_CELLS => 'true', DATA_BLOCK_ENCODING => 'FAST_DIFF', TTL => '2147483647', COMPRESSION => 'NONE', MIN_VERSIONS => '0', BLOCKCACHE => 'true', BLOCKSIZE => '65536', REPLICATION_SCOPE => '0'}]}

Re: Problems upgrading 2.2.3-incubating to 3.2.1
phoenix-user | James Taylor | 2 years ago

  1. Not able to put Twitter4j.Place object attributes from scala application

    Google Groups | 2 years ago | reddibabu
    org.apache.hadoop.hbase.regionserver.NoSuchColumnFamilyException: Column family 'cf' does not exist in region twitter_mobile_data,,1404930369033.aa8c4d9370c3e8882beb81659734e2c0. in table 'twitter_mobile_data', {NAME => 'cf', DATA_BLOCK_ENCODING => 'NONE', BLOOMFILTER => 'NONE', REPLICATION_SCOPE => '0', COMPRESSION => 'NONE', VERSIONS => '3', TTL => '2147483647', MIN_VERSIONS => '0', KEEP_DELETED_CELLS => 'false', BLOCKSIZE => '65536', ENCODE_ON_DISK => 'true', IN_MEMORY => 'false', BLOCKCACHE => 'true'}
  2. Re: A demo setup on a single linux server

    hadoop-chukwa-user | 6 years ago | Eric Yang
    org.apache.hadoop.hbase.regionserver.NoSuchColumnFamilyException: Column family chukwaAgent_httpSender does not exist in region Hadoop,,1307351718977.567db4857e94f3729df7cc1d3ba9686c. in table {NAME => 'Hadoop', FAMILIES => [{NAME => 'ClientTrace', BLOOMFILTER => 'NONE', REPLICATION_SCOPE => '0', VERSIONS => '65535', COMPRESSION => 'NONE', TTL => '2147483647', BLOCKSIZE => '65536', IN_MEMORY => 'false', BLOCKCACHE => 'true'}, {NAME => 'dfs_FSDirectory', BLOOMFILTER => 'NONE', REPLICATION_SCOPE => '0', VERSIONS => '65535', COMPRESSION => 'NONE', TTL => '2147483647', BLOCKSIZE => '65536', IN_MEMORY => 'false', BLOCKCACHE => 'true'}, {NAME => 'dfs_FSNamesystem', BLOOMFILTER => 'NONE', REPLICATION_SCOPE => '0', VERSIONS => '65535', COMPRESSION => 'NONE', TTL => '2147483647', BLOCKSIZE => '65536', IN_MEMORY => 'false', BLOCKCACHE => 'true'}, {NAME => 'dfs_datanode', BLOOMFILTER => 'NONE', REPLICATION_SCOPE => '0', VERSIONS => '65535', COMPRESSION => 'NONE', TTL => '2147483647', BLOCKSIZE => '65536', IN_MEMORY => 'false', BLOCKCACHE => 'true'}, {NAME => 'dfs_namenode', BLOOMFILTER => 'NONE', REPLICATION_SCOPE => '0', VERSIONS => '65535', COMPRESSION => 'NONE', TTL => '2147483647', BLOCKSIZE => '65536', IN_MEMORY => 'false', BLOCKCACHE => 'true'}, {NAME => 'jvm_metrics', BLOOMFILTER => 'NONE', REPLICATION_SCOPE => '0', VERSIONS => '65535', COMPRESSION => 'NONE', TTL => '2147483647', BLOCKSIZE => '65536', IN_MEMORY => 'false', BLOCKCACHE => 'true'}, {NAME => 'mapred_job', BLOOMFILTER => 'NONE', REPLICATION_SCOPE => '0', VERSIONS => '65535', COMPRESSION => 'NONE', TTL => '2147483647', BLOCKSIZE => '65536', IN_MEMORY => 'false', BLOCKCACHE => 'true'}, {NAME => 'mapred_jobtracker', BLOOMFILTER => 'NONE', REPLICATION_SCOPE => '0', VERSIONS => '65535', COMPRESSION => 'NONE', TTL => '2147483647', BLOCKSIZE => '65536', IN_MEMORY => 'false', BLOCKCACHE => 'true'}, {NAME => 'mapred_shuffleOutput', BLOOMFILTER => 'NONE', REPLICATION_SCOPE => '0', VERSIONS => '65535', COMPRESSION => 'NONE', TTL => '2147483647', BLOCKSIZE => '65536', IN_MEMORY => 'false', BLOCKCACHE => 'true'}, {NAME => 'mapred_tasktracker', BLOOMFILTER => 'NONE', REPLICATION_SCOPE => '0', VERSIONS => '65535', COMPRESSION => 'NONE', TTL => '2147483647', BLOCKSIZE => '65536', IN_MEMORY => 'false', BLOCKCACHE => 'true'}, {NAME => 'rpc_metrics', BLOOMFILTER => 'NONE', REPLICATION_SCOPE => '0', VERSIONS => '65535', COMPRESSION => 'NONE', TTL => '2147483647', BLOCKSIZE => '65536', IN_MEMORY => 'false', BLOCKCACHE => 'true'}]}
  3. Re: A demo setup on a single linux server

    hadoop-chukwa-user | 6 years ago | Eric Yang
    org.apache.hadoop.hbase.regionserver.NoSuchColumnFamilyException: Column family chukwaAgent_chunkQueue does not exist in region Hadoop,,1307351718977.567db4857e94f3729df7cc1d3ba9686c. in table {NAME => 'Hadoop', FAMILIES => [{NAME => 'ClientTrace', BLOOMFILTER => 'NONE', REPLICATION_SCOPE => '0', VERSIONS => '65535', COMPRESSION => 'NONE', TTL => '2147483647', BLOCKSIZE => '65536', IN_MEMORY => 'false', BLOCKCACHE => 'true'}, {NAME => 'dfs_FSDirectory', BLOOMFILTER => 'NONE', REPLICATION_SCOPE => '0', VERSIONS => '65535', COMPRESSION => 'NONE', TTL => '2147483647', BLOCKSIZE => '65536', IN_MEMORY => 'false', BLOCKCACHE => 'true'}, {NAME => 'dfs_FSNamesystem', BLOOMFILTER => 'NONE', REPLICATION_SCOPE => '0', VERSIONS => '65535', COMPRESSION => 'NONE', TTL => '2147483647', BLOCKSIZE => '65536', IN_MEMORY => 'false', BLOCKCACHE => 'true'}, {NAME => 'dfs_datanode', BLOOMFILTER => 'NONE', REPLICATION_SCOPE => '0', VERSIONS => '65535', COMPRESSION => 'NONE', TTL => '2147483647', BLOCKSIZE => '65536', IN_MEMORY => 'false', BLOCKCACHE => 'true'}, {NAME => 'dfs_namenode', BLOOMFILTER => 'NONE', REPLICATION_SCOPE => '0', VERSIONS => '65535', COMPRESSION => 'NONE', TTL => '2147483647', BLOCKSIZE => '65536', IN_MEMORY => 'false', BLOCKCACHE => 'true'}, {NAME => 'jvm_metrics', BLOOMFILTER => 'NONE', REPLICATION_SCOPE => '0', VERSIONS => '65535', COMPRESSION => 'NONE', TTL => '2147483647', BLOCKSIZE => '65536', IN_MEMORY => 'false', BLOCKCACHE => 'true'}, {NAME => 'mapred_job', BLOOMFILTER => 'NONE', REPLICATION_SCOPE => '0', VERSIONS => '65535', COMPRESSION => 'NONE', TTL => '2147483647', BLOCKSIZE => '65536', IN_MEMORY => 'false', BLOCKCACHE => 'true'}, {NAME => 'mapred_jobtracker', BLOOMFILTER => 'NONE', REPLICATION_SCOPE => '0', VERSIONS => '65535', COMPRESSION => 'NONE', TTL => '2147483647', BLOCKSIZE => '65536', IN_MEMORY => 'false', BLOCKCACHE => 'true'}, {NAME => 'mapred_shuffleOutput', BLOOMFILTER => 'NONE', REPLICATION_SCOPE => '0', VERSIONS => '65535', COMPRESSION => 'NONE', TTL => '2147483647', BLOCKSIZE => '65536', IN_MEMORY => 'false', BLOCKCACHE => 'true'}, {NAME => 'mapred_tasktracker', BLOOMFILTER => 'NONE', REPLICATION_SCOPE => '0', VERSIONS => '65535', COMPRESSION => 'NONE', TTL => '2147483647', BLOCKSIZE => '65536', IN_MEMORY => 'false', BLOCKCACHE => 'true'}, {NAME => 'rpc_metrics', BLOOMFILTER => 'NONE', REPLICATION_SCOPE => '0', VERSIONS => '65535', COMPRESSION => 'NONE', TTL => '2147483647', BLOCKSIZE => '65536', IN_MEMORY => 'false', BLOCKCACHE => 'true'}]}
  4. Re: A demo setup on a single linux server

    hadoop-chukwa-user | 6 years ago | Preetam Patil
    org.apache.hadoop.hbase.regionserver.NoSuchColumnFamilyException: Column family chukwaAgent_metrics does not exist in region Hadoop,,1307351718977.567db4857e94f3729df7cc1d3ba9686c. in table {NAME => 'Hadoop', FAMILIES => [{NAME => 'ClientTrace', BLOOMFILTER => 'NONE', REPLICATION_SCOPE => '0', VERSIONS => '65535', COMPRESSION => 'NONE', TTL => '2147483647', BLOCKSIZE => '65536', IN_MEMORY => 'false', BLOCKCACHE => 'true'}, {NAME => 'dfs_FSDirectory', BLOOMFILTER => 'NONE', REPLICATION_SCOPE => '0', VERSIONS => '65535', COMPRESSION => 'NONE', TTL => '2147483647', BLOCKSIZE => '65536', IN_MEMORY => 'false', BLOCKCACHE => 'true'}, {NAME => 'dfs_FSNamesystem', BLOOMFILTER => 'NONE', REPLICATION_SCOPE => '0', VERSIONS => '65535', COMPRESSION => 'NONE', TTL => '2147483647', BLOCKSIZE => '65536', IN_MEMORY => 'false', BLOCKCACHE => 'true'}, {NAME => 'dfs_datanode', BLOOMFILTER => 'NONE', REPLICATION_SCOPE => '0', VERSIONS => '65535', COMPRESSION => 'NONE', TTL => '2147483647', BLOCKSIZE => '65536', IN_MEMORY => 'false', BLOCKCACHE => 'true'}, {NAME => 'dfs_namenode', BLOOMFILTER => 'NONE', REPLICATION_SCOPE => '0', VERSIONS => '65535', COMPRESSION => 'NONE', TTL => '2147483647', BLOCKSIZE => '65536', IN_MEMORY => 'false', BLOCKCACHE => 'true'}, {NAME => 'jvm_metrics', BLOOMFILTER => 'NONE', REPLICATION_SCOPE => '0', VERSIONS => '65535', COMPRESSION => 'NONE', TTL => '2147483647', BLOCKSIZE => '65536', IN_MEMORY => 'false', BLOCKCACHE => 'true'}, {NAME => 'mapred_job', BLOOMFILTER => 'NONE', REPLICATION_SCOPE => '0', VERSIONS => '65535', COMPRESSION => 'NONE', TTL => '2147483647', BLOCKSIZE => '65536', IN_MEMORY => 'false', BLOCKCACHE => 'true'}, {NAME => 'mapred_jobtracker', BLOOMFILTER => 'NONE', REPLICATION_SCOPE => '0', VERSIONS => '65535', COMPRESSION => 'NONE', TTL => '2147483647', BLOCKSIZE => '65536', IN_MEMORY => 'false', BLOCKCACHE => 'true'}, {NAME => 'mapred_shuffleOutput', BLOOMFILTER => 'NONE', REPLICATION_SCOPE => '0', VERSIONS => '65535', COMPRESSION => 'NONE', TTL => '2147483647', BLOCKSIZE => '65536', IN_MEMORY => 'false', BLOCKCACHE => 'true'}, {NAME => 'mapred_tasktracker', BLOOMFILTER => 'NONE', REPLICATION_SCOPE => '0', VERSIONS => '65535', COMPRESSION => 'NONE', TTL => '2147483647', BLOCKSIZE => '65536', IN_MEMORY => 'false', BLOCKCACHE => 'true'}, {NAME => 'rpc_metrics', BLOOMFILTER => 'NONE', REPLICATION_SCOPE => '0', VERSIONS => '65535', COMPRESSION => 'NONE', TTL => '2147483647', BLOCKSIZE => '65536', IN_MEMORY => 'false', BLOCKCACHE => 'true'}]}
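
    All of the entries above share one trigger: a request names a column family that is not declared in the table's descriptor, so the region server rejects it in HRegion.checkFamily. Below is a minimal, hypothetical Java sketch of the client-side version of this failure, assuming the HBase 1.x client API; the table 'twitter_mobile_data' and its family 'cf' come from entry 1, while the misspelled family 'cf1' and the row/qualifier values are made up for illustration.

        import org.apache.hadoop.conf.Configuration;
        import org.apache.hadoop.hbase.HBaseConfiguration;
        import org.apache.hadoop.hbase.TableName;
        import org.apache.hadoop.hbase.client.Connection;
        import org.apache.hadoop.hbase.client.ConnectionFactory;
        import org.apache.hadoop.hbase.client.Put;
        import org.apache.hadoop.hbase.client.Table;
        import org.apache.hadoop.hbase.util.Bytes;

        public class MissingFamilyPut {
            public static void main(String[] args) throws Exception {
                Configuration conf = HBaseConfiguration.create();
                try (Connection conn = ConnectionFactory.createConnection(conf);
                     Table table = conn.getTable(TableName.valueOf("twitter_mobile_data"))) {

                    Put put = new Put(Bytes.toBytes("row-1"));
                    // 'cf1' is a hypothetical typo: the table only declares the family 'cf'.
                    // The region server validates every family in the mutation against the
                    // table descriptor (HRegion.checkFamily) and rejects unknown ones with
                    // NoSuchColumnFamilyException, which the client typically surfaces
                    // wrapped in a RetriesExhaustedWithDetailsException.
                    put.addColumn(Bytes.toBytes("cf1"),
                                  Bytes.toBytes("place"),
                                  Bytes.toBytes("some value"));
                    table.put(put);
                }
            }
        }

    The Chukwa entries fail at the same check: the 'Hadoop' table declares families such as 'jvm_metrics' and 'dfs_namenode', but not 'chukwaAgent_httpSender', 'chukwaAgent_chunkQueue', or 'chukwaAgent_metrics'.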

    Root Cause Analysis

    1. org.apache.hadoop.hbase.regionserver.NoSuchColumnFamilyException

      Column family 0 does not exist in region SYSTEM.TABLE,,1421139725748.a46320eb144712e231b1dd8ab3da30aa. in table {NAME => 'SYSTEM.TABLE', SPLIT_POLICY => 'org.apache.phoenix.schema.MetaDataSplitPolicy', coprocessor$7 => '|org.apache.phoenix.coprocessor.MetaDataRegionObserver|2|', coprocessor$5 => '|org.apache.phoenix.coprocessor.ServerCachingEndpointImpl|1|', coprocessor$6 => '|org.apache.phoenix.coprocessor.MetaDataEndpointImpl|1|', coprocessor$3 => '|org.apache.phoenix.coprocessor.GroupedAggregateRegionObserver|1|', coprocessor$4 => '|org.apache.phoenix.join.HashJoiningRegionObserver|1|', coprocessor$1 => '|org.apache.phoenix.coprocessor.ScanRegionObserver|1|', UpgradeTo21 => 'true', coprocessor$2 => '|org.apache.phoenix.coprocessor.UngroupedAggregateRegionObserver|1|', UpgradeTo20 => 'true', UpgradeTo22 => 'true', FAMILIES => [{NAME => '_0', ENCODE_ON_DISK => 'true', BLOOMFILTER => 'NONE', VERSIONS => '1000', IN_MEMORY => 'false', KEEP_DELETED_CELLS => 'true', DATA_BLOCK_ENCODING => 'FAST_DIFF', TTL => '2147483647', COMPRESSION => 'NONE', MIN_VERSIONS => '0', BLOCKCACHE => 'true', BLOCKSIZE => '65536', REPLICATION_SCOPE => '0'}]}

      at org.apache.hadoop.hbase.regionserver.HRegion.checkFamily()
    2. HBase - Client
      HRegion.getScanner
      1. org.apache.hadoop.hbase.regionserver.HRegion.checkFamily(HRegion.java:5341)
      2. org.apache.hadoop.hbase.regionserver.HRegion.getScanner(HRegion.java:1744)
      3. org.apache.hadoop.hbase.regionserver.HRegion.getScanner(HRegion.java:1722)
      3 frames
    3. Phoenix Core
      MetaDataRegionObserver$BuildIndexScheduleTask.run
      1. org.apache.phoenix.coprocessor.MetaDataRegionObserver$BuildIndexScheduleTask.run(MetaDataRegionObserver.java:174)
      1 frame
    4. Java RT
      Thread.run
      1. java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
      2. java.util.concurrent.FutureTask.runAndReset(FutureTask.java:308)
      3. java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.access$301(ScheduledThreadPoolExecutor.java:180)
      4. java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:294)
      5. java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
      6. java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
      7. java.lang.Thread.run(Thread.java:745)
      7 frames
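
    The trace above shows Phoenix's MetaDataRegionObserver$BuildIndexScheduleTask opening a scanner on SYSTEM.TABLE with a column family named '0', while the descriptor only declares '_0', so HRegion.checkFamily throws. A useful first diagnostic is to list the families a table actually declares and compare them with the family the failing request uses. The sketch below is a minimal example of that check, assuming the HBase 1.x Admin API; it is a diagnostic aid, not the fix for the Phoenix upgrade itself.

        import org.apache.hadoop.conf.Configuration;
        import org.apache.hadoop.hbase.HBaseConfiguration;
        import org.apache.hadoop.hbase.HColumnDescriptor;
        import org.apache.hadoop.hbase.HTableDescriptor;
        import org.apache.hadoop.hbase.TableName;
        import org.apache.hadoop.hbase.client.Admin;
        import org.apache.hadoop.hbase.client.Connection;
        import org.apache.hadoop.hbase.client.ConnectionFactory;
        import org.apache.hadoop.hbase.util.Bytes;

        public class ListDeclaredFamilies {
            public static void main(String[] args) throws Exception {
                Configuration conf = HBaseConfiguration.create();
                try (Connection conn = ConnectionFactory.createConnection(conf);
                     Admin admin = conn.getAdmin()) {

                    TableName table = TableName.valueOf("SYSTEM.TABLE");
                    HTableDescriptor desc = admin.getTableDescriptor(table);

                    // Print every family the table declares; for the descriptor above
                    // this is just '_0', which is why a scan against '0' fails.
                    for (HColumnDescriptor family : desc.getColumnFamilies()) {
                        System.out.println("declared family: " + family.getNameAsString());
                    }

                    // Check the specific family the failing request used.
                    System.out.println("has family '0': " + desc.hasFamily(Bytes.toBytes("0")));

                    // For the Chukwa reports above, the missing family can simply be added
                    // (for the Phoenix SYSTEM.TABLE case the right fix is to complete the
                    // Phoenix upgrade rather than to add a family named '0'):
                    // admin.addColumn(TableName.valueOf("Hadoop"),
                    //                 new HColumnDescriptor("chukwaAgent_httpSender"));
                }
            }
        }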