Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
- package get;
- import batch.batchOp;
- import org.apache.hadoop.conf.Configuration;
- import org.apache.hadoop.hbase.HBaseConfiguration;
- import org.apache.hadoop.hbase.TableName;
- import org.apache.hadoop.hbase.client.*;
- import org.apache.hadoop.hbase.util.Bytes;
- import java.io.IOException;
- import java.util.ArrayList;
- import java.util.List;
- import java.util.NavigableMap;
- import helper.printValues;
- /**
- * Created by swethakolalapudi on 6/24/16.
- */
- public class listGets {
- public static void main(String[] args) throws IOException {
- Configuration conf = HBaseConfiguration.create();
- Connection connection = ConnectionFactory.createConnection(conf);
- Table table = connection.getTable(TableName.valueOf("notifications"));
- List<Get> gets = new ArrayList<Get>();
- Get get1 = new Get(Bytes.toBytes("2"));
- get1.addColumn(Bytes.toBytes("attributes"), Bytes.toBytes("type"));
- get1.addColumn(Bytes.toBytes("attributes"), Bytes.toBytes("for_user"));
- Get get2 = new Get(Bytes.toBytes("3"));
- get2.addColumn(Bytes.toBytes("attributes"), Bytes.toBytes("type"));
- get2.addColumn(Bytes.toBytes("attributes"), Bytes.toBytes("from_user"));
- gets.add(get1);
- gets.add(get2);
- Result[] results = table.get(gets);
- for (Result result : results) {
- printValues.printAllValues(result);
- }
- }
- }
- ===
- Single Get (singleGet.java — fetch one row and print selected cells):
- package get;
- import batch.batchOp;
- import org.apache.hadoop.conf.Configuration;
- import org.apache.hadoop.hbase.HBaseConfiguration;
- import org.apache.hadoop.hbase.TableName;
- import org.apache.hadoop.hbase.client.*;
- import org.apache.hadoop.hbase.util.Bytes;
- import javax.xml.crypto.dsig.keyinfo.KeyValue;
- import java.io.IOException;
- import java.util.Map;
- import java.util.NavigableMap;
- import helper.printValues;
- public class singleGet {
- public static void main(String[] args) throws IOException {
- Configuration conf = HBaseConfiguration.create();
- Connection connection = ConnectionFactory.createConnection(conf);
- Table table = connection.getTable(TableName.valueOf("notifications"));
- Get get =new Get(Bytes.toBytes("2"));
- get.addColumn(Bytes.toBytes("attributes"), Bytes.toBytes("type"));
- get.addColumn(Bytes.toBytes("metrics"), Bytes.toBytes("open"));
- Result result = table.get(get);
- byte[] val= result.getValue(Bytes.toBytes("attributes"), Bytes.toBytes("type"));
- System.out.println("Value:" + Bytes.toString(val));
- printValues.printAllValues(result);
- }
- }
- =====
- Put (listPuts.java — batched puts, then singlePut.java — one put):
- package put;
- import org.apache.hadoop.conf.Configuration;
- import org.apache.hadoop.hbase.HBaseConfiguration;
- import org.apache.hadoop.hbase.TableName;
- import org.apache.hadoop.hbase.client.Connection;
- import org.apache.hadoop.hbase.client.ConnectionFactory;
- import org.apache.hadoop.hbase.client.Put;
- import org.apache.hadoop.hbase.client.Table;
- import org.apache.hadoop.hbase.util.Bytes;
- import java.io.IOException;
- import java.util.ArrayList;
- import java.util.List;
- public class listPuts {
- public static void main(String[] args) throws IOException {
- Configuration conf = HBaseConfiguration.create();
- Connection connection = ConnectionFactory.createConnection(conf);
- Table table = connection.getTable(TableName.valueOf("notifications"));
- Put put1 =new Put(Bytes.toBytes("4"));
- put1.addColumn(Bytes.toBytes("attributes"), Bytes.toBytes("type"), Bytes.toBytes("Friend Request"));
- put1.addColumn(Bytes.toBytes("attributes"),Bytes.toBytes("for_user"),Bytes.toBytes("Daniel"));
- put1.addColumn(Bytes.toBytes("attributes"), Bytes.toBytes("from_user"), Bytes.toBytes("Ryan"));
- Put put2 =new Put(Bytes.toBytes("5"));
- put2.addColumn(Bytes.toBytes("attributes"), Bytes.toBytes("type"), Bytes.toBytes("Comment"));
- put2.addColumn(Bytes.toBytes("attributes"),Bytes.toBytes("for_user"),Bytes.toBytes("Brendan"));
- put2.addColumn(Bytes.toBytes("attributes"),Bytes.toBytes("from_user"),Bytes.toBytes("Rick"));
- put2.addColumn(Bytes.toBytes("attributes"),Bytes.toBytes("for_thing"),Bytes.toBytes("link"));
- put2.addColumn(Bytes.toBytes("attributes"), Bytes.toBytes("link"), Bytes.toBytes("link"));
- List<Put> puts = new ArrayList<Put>();
- puts.add(put1);
- puts.add(put2);
- table.put(puts);
- }
- }
- =====
- package put;
- import org.apache.hadoop.conf.Configuration;
- import org.apache.hadoop.hbase.HBaseConfiguration;
- import org.apache.hadoop.hbase.TableName;
- import org.apache.hadoop.hbase.client.*;
- import org.apache.hadoop.hbase.util.Bytes;
- import java.io.IOException;
- public class singlePut{
- public static void main(String[] args) throws IOException{
- Configuration conf = HBaseConfiguration.create();
- Connection connection = ConnectionFactory.createConnection(conf);
- Table table = connection.getTable(TableName.valueOf("notifications"));
- //HTable table = new HTable(conf, "notifications");
- Put put =new Put(Bytes.toBytes("1"));
- put.addColumn(Bytes.toBytes("attributes"), Bytes.toBytes("type"), Bytes.toBytes("Comment"));
- put.addColumn(Bytes.toBytes("attributes"),Bytes.toBytes("for_user"),Bytes.toBytes("Chaz"));
- put.addColumn(Bytes.toBytes("metrics"),Bytes.toBytes("open"),Bytes.toBytes("0"));
- table.put(put);
- }
- }
- ====
- package scan;
- import batch.batchOp;
- import org.apache.hadoop.conf.Configuration;
- import org.apache.hadoop.hbase.HBaseConfiguration;
- import org.apache.hadoop.hbase.TableName;
- import org.apache.hadoop.hbase.client.Connection;
- import org.apache.hadoop.hbase.client.ConnectionFactory;
- import org.apache.hadoop.hbase.client.Table;
- import org.apache.hadoop.hbase.client.*;
- import org.apache.hadoop.hbase.util.Bytes;
- import java.io.IOException;
- import java.util.NavigableMap;
- import helper.printValues;
- public class scanRows {
- public static void main(String[] args) throws IOException {
- Configuration conf = HBaseConfiguration.create();
- Connection connection = ConnectionFactory.createConnection(conf);
- Table table = connection.getTable(TableName.valueOf("notifications"));
- Scan fullScan = new Scan();
- ResultScanner fullScanResult = table.getScanner(fullScan);
- for (Result res : fullScanResult) {
- printValues.printAllValues(res);
- }
- fullScanResult.close();
- Scan colScan = new Scan();
- colScan.addFamily(Bytes.toBytes("metrics"));
- ResultScanner colScanResult = table.getScanner(colScan);
- for (Result res : colScanResult) {
- printValues.printAllValues(res);
- }
- colScanResult.close();
- Scan rangeScan = new Scan();
- rangeScan.addColumn(Bytes.toBytes("attributes"), Bytes.toBytes("type"))
- .setStartRow(Bytes.toBytes("2"))
- .setStopRow(Bytes.toBytes("2"));
- ResultScanner rangeScanResult = table.getScanner(rangeScan);
- for (Result res : rangeScanResult) {
- printValues.printAllValues(res);
- }
- rangeScanResult.close();
- }
- }
- =====
- Batch (batchOp.java — mixed Put/Get/Delete in one batch call):
- package batch;
- import org.apache.hadoop.conf.Configuration;
- import org.apache.hadoop.hbase.HBaseConfiguration;
- import org.apache.hadoop.hbase.TableName;
- import org.apache.hadoop.hbase.client.*;
- import org.apache.hadoop.hbase.util.Bytes;
- import java.io.IOException;
- import java.util.ArrayList;
- import java.util.List;
- import java.util.NavigableMap;
- import helper.printValues;
- public class batchOp {
- public static void main(String[] args) throws IOException {
- Configuration conf = HBaseConfiguration.create();
- Connection connection = ConnectionFactory.createConnection(conf);
- Table table = connection.getTable(TableName.valueOf("notifications"));
- Put put =new Put(Bytes.toBytes("2"));
- put.addColumn(Bytes.toBytes("attributes"), Bytes.toBytes("type"), Bytes.toBytes("Comment"));
- put.addColumn(Bytes.toBytes("attributes"), Bytes.toBytes("for_user"), Bytes.toBytes("Swetha"));
- put.addColumn(Bytes.toBytes("metrics"), Bytes.toBytes("open"), Bytes.toBytes("0"));
- Get get =new Get(Bytes.toBytes("2"));
- get.addColumn(Bytes.toBytes("attributes"), Bytes.toBytes("type"));
- get.addColumn(Bytes.toBytes("metrics"), Bytes.toBytes("open"));
- Delete delete =new Delete(Bytes.toBytes("2"));
- delete.addColumn(Bytes.toBytes("attributes"), Bytes.toBytes("type"));
- delete.addColumn(Bytes.toBytes("attributes"), Bytes.toBytes("for_user"));
- List<Row> batch = new ArrayList<Row>();
- batch.add(put);
- batch.add(get);
- batch.add(delete);
- Object[] results = new Object[batch.size()];
- try{
- table.batch(batch,results);
- }catch (Exception e){
- System.err.println("Error: "+e);}
- for (int i=0; i<results.length; i++){
- Result res = (Result) results[i];
- if(!res.isEmpty()){
- printValues.printAllValues((Result) results[i]);
- }
- }
- }
- }
- Create Table:
- package admin;
- import org.apache.hadoop.conf.Configuration;
- import org.apache.hadoop.hbase.HBaseConfiguration;
- import org.apache.hadoop.hbase.HColumnDescriptor;
- import org.apache.hadoop.hbase.HTableDescriptor;
- import org.apache.hadoop.hbase.TableName;
- import org.apache.hadoop.hbase.client.*;
- import java.io.IOException;
- public class createTable {
- public static void main(String[] args) throws IOException {
- Configuration conf = HBaseConfiguration.create();
- Connection connection = ConnectionFactory.createConnection(conf);
- Admin admin = connection.getAdmin();
- HTableDescriptor tableName = new HTableDescriptor(TableName.valueOf("notifications"));
- tableName.addFamily(new HColumnDescriptor("attributes"));
- tableName.addFamily(new HColumnDescriptor("metrics"));
- if (!admin.tableExists(tableName.getTableName())) {
- System.out.print("Creating table. ");
- admin.createTable(tableName);
- System.out.println(" Done.");
- }
- }
- }
- ==
- MapReduce (Main.java driver, MyMapper.java, MyTableReducer.java):
- package mapReduce;
- import org.apache.hadoop.conf.Configuration;
- import org.apache.hadoop.hbase.HBaseConfiguration;
- import org.apache.hadoop.hbase.TableName;
- import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
- import org.apache.hadoop.io.IntWritable;
- import org.apache.hadoop.io.Text;
- import org.apache.hadoop.mapreduce.Job;
- import org.apache.hadoop.hbase.client.*;
- import java.io.IOException;
- /**
- * Created by swethakolalapudi on 6/29/16.
- */
- public class Main {
- public static void main(String[] args) throws Exception {
- Configuration conf = HBaseConfiguration.create();
- Job job = new Job(conf, "ExampleSummary");
- job.setJarByClass(Main.class); // class that contains mapper and reducer
- String sourceTable = "notifications";
- String targetTable = "summary";
- Scan scan = new Scan();
- scan.setCaching(500); // 1 is the default in Scan, which will be bad for MapReduce jobs
- scan.setCacheBlocks(false); // don't set to true for MR jobs
- // set other scan attrs
- TableMapReduceUtil.initTableMapperJob(
- sourceTable, // input table
- scan, // Scan instance to control CF and attribute selection
- MyMapper.class, // mapper class
- Text.class, // mapper output key
- IntWritable.class, // mapper output value
- job);
- TableMapReduceUtil.initTableReducerJob(
- targetTable, // output table
- MyTableReducer.class, // reducer class
- job);
- job.setNumReduceTasks(1); // at least one, adjust as required
- boolean b = job.waitForCompletion(true);
- if (!b) {
- throw new IOException("error with job!");
- }
- }
- }
- ===
- package mapReduce;
- import org.apache.hadoop.hbase.client.Result;
- import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
- import org.apache.hadoop.hbase.mapreduce.TableMapper;
- import org.apache.hadoop.io.IntWritable;
- import org.apache.hadoop.io.Text;
- import java.io.IOException;
- public class MyMapper extends TableMapper<Text, IntWritable> {
- public static final byte[] CF = "attributes".getBytes();
- public static final byte[] ATTR1 = "type".getBytes();
- private final IntWritable ONE = new IntWritable(1);
- private Text text = new Text();
- public void map(ImmutableBytesWritable row, Result value, Context context) throws IOException, InterruptedException {
- String val = new String(value.getValue(CF, ATTR1));
- text.set(val); // we can only emit Writables...
- context.write(text, ONE);
- }
- }
- ==
- package mapReduce;
- import org.apache.hadoop.hbase.client.Put;
- import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
- import org.apache.hadoop.hbase.mapreduce.TableReducer;
- import org.apache.hadoop.hbase.util.Bytes;
- import org.apache.hadoop.io.IntWritable;
- import org.apache.hadoop.io.Text;
- import java.io.IOException;
- public class MyTableReducer extends TableReducer<Text, IntWritable, ImmutableBytesWritable> {
- public static final byte[] CF = "metrics".getBytes();
- public static final byte[] COUNT = "count".getBytes();
- public void reduce(Text key, Iterable<IntWritable> values, Context context) throws IOException, InterruptedException {
- int i = 0;
- for (IntWritable val : values) {
- i += val.get();
- }
- Put put = new Put(Bytes.toBytes(key.toString()));
- put.addColumn(CF, COUNT, Bytes.toBytes(i));
- context.write(null, put);
- }
- }
- ==
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement