import org.apache.hadoop.hbase.client.{HBaseAdmin, Result}
import org.apache.hadoop.hbase.{HBaseConfiguration, HTableDescriptor, TableName}
import org.apache.hadoop.hbase.mapreduce.TableInputFormat
import org.apache.hadoop.hbase.io.ImmutableBytesWritable

import org.apache.spark._

object HBaseRead {
  def main(args: Array[String]) {
    val sparkConf = new SparkConf().setAppName("HBaseRead").setMaster("local[2]")
    val sc = new SparkContext(sparkConf)
    val conf = HBaseConfiguration.create()
    val tableName = "table1"

    // Run as the hdfs user against a local, unsecured HBase instance.
    System.setProperty("user.name", "hdfs")
    System.setProperty("HADOOP_USER_NAME", "hdfs")
    conf.set("hbase.master", "localhost:60000")
    conf.setInt("timeout", 120000)
    conf.set("hbase.zookeeper.quorum", "localhost")
    conf.set("zookeeper.znode.parent", "/hbase-unsecure")
    conf.set(TableInputFormat.INPUT_TABLE, tableName)

    // Create the table if it is not available yet.
    val admin = new HBaseAdmin(conf)
    if (!admin.isTableAvailable(tableName)) {
      val tableDesc = new HTableDescriptor(TableName.valueOf(tableName))
      admin.createTable(tableDesc)
    }

    // Load the table as an RDD of (row key, Result) pairs via TableInputFormat.
    val hBaseRDD = sc.newAPIHadoopRDD(conf, classOf[TableInputFormat],
      classOf[ImmutableBytesWritable], classOf[Result])
    println("Number of Records found : " + hBaseRDD.count())
    sc.stop()
  }
}

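The count above only proves connectivity; to read actual cell values you still have to decode the raw bytes in each Result. A minimal sketch that could sit before sc.stop(), assuming a hypothetical column family "cf" with a qualifier "col1" (substitute your table's real names):

import org.apache.hadoop.hbase.util.Bytes

// Decode each row into (rowKey, value). "cf" and "col1" are placeholder
// names for an assumed column family and qualifier; getValue returns null
// (and Bytes.toString passes null through) if the column is absent.
val rows = hBaseRDD.map { case (key, result) =>
  val rowKey = Bytes.toString(key.get())
  val value  = Bytes.toString(result.getValue(Bytes.toBytes("cf"), Bytes.toBytes("col1")))
  (rowKey, value)
}
rows.take(10).foreach(println)
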
<dependency>
    <groupId>org.apache.hbase</groupId>
    <artifactId>hbase-client</artifactId>
    <version>1.3.0</version>
</dependency>
<dependency>
    <groupId>org.apache.hbase</groupId>
    <artifactId>hbase-server</artifactId>
    <version>1.3.0</version>
</dependency>

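If the project builds with sbt rather than Maven, the same two artifacts would look like this in build.sbt (a sketch; the coordinates and version are copied from the POM snippet above):

libraryDependencies ++= Seq(
  "org.apache.hbase" % "hbase-client" % "1.3.0",
  "org.apache.hbase" % "hbase-server" % "1.3.0"
)
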
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableInputFormat;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaSparkContext;

public class HBaseRead {
    public static void main(String[] args) throws Exception {
        SparkConf sparkConf = new SparkConf().setAppName("HBaseRead").setMaster("local[*]");
        JavaSparkContext jsc = new JavaSparkContext(sparkConf);
        Configuration hbaseConf = HBaseConfiguration.create();
        hbaseConf.set(TableInputFormat.INPUT_TABLE, "my_table");

        // Load the table as a pair RDD of (row key, Result) via TableInputFormat.
        // An action (count) is needed to actually materialize the lazy RDD.
        JavaPairRDD<ImmutableBytesWritable, Result> javaPairRdd = jsc.newAPIHadoopRDD(
                hbaseConf, TableInputFormat.class, ImmutableBytesWritable.class, Result.class);
        System.out.println("Number of Records found : " + javaPairRdd.count());
        jsc.stop();
    }
}
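
A full-table scan returns every column family by default; TableInputFormat also reads scan-narrowing properties from the configuration. A sketch for the Scala example's conf, again assuming the hypothetical family "cf":

// Either restrict the scan to one column family...
conf.set(TableInputFormat.SCAN_COLUMN_FAMILY, "cf")
// ...or to specific columns, as a space-separated list of family:qualifier pairs.
// conf.set(TableInputFormat.SCAN_COLUMNS, "cf:col1 cf:col2")

These must be set before newAPIHadoopRDD is called, since the input format builds its Scan from the configuration at job setup.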