List Gets:
package get;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import helper.printValues;

/**
 * Created by swethakolalapudi on 6/24/16.
 */
public class listGets {

    public static void main(String[] args) throws IOException {

        Configuration conf = HBaseConfiguration.create();

        Connection connection = ConnectionFactory.createConnection(conf);
        Table table = connection.getTable(TableName.valueOf("notifications"));

        List<Get> gets = new ArrayList<Get>();

        // Row "2": fetch two columns from the "attributes" family
        Get get1 = new Get(Bytes.toBytes("2"));
        get1.addColumn(Bytes.toBytes("attributes"), Bytes.toBytes("type"));
        get1.addColumn(Bytes.toBytes("attributes"), Bytes.toBytes("for_user"));

        // Row "3": fetch "type" and "from_user"
        Get get2 = new Get(Bytes.toBytes("3"));
        get2.addColumn(Bytes.toBytes("attributes"), Bytes.toBytes("type"));
        get2.addColumn(Bytes.toBytes("attributes"), Bytes.toBytes("from_user"));

        gets.add(get1);
        gets.add(get2);

        // One batched call; results come back in the same order as the Gets
        Result[] results = table.get(gets);

        for (Result result : results) {
            printValues.printAllValues(result);
        }

        table.close();
        connection.close();
    }
}
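
Most of these examples print results through helper.printValues, which is not part of this paste. A minimal sketch of what such a helper might look like, assuming printAllValues simply dumps every Cell in a Result (the class name and behavior are inferred from the calls above, not taken from the original source):

package helper;

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.util.Bytes;

public class printValues {

    // Print row key, column family, qualifier and value for every Cell in the Result
    public static void printAllValues(Result result) {
        for (Cell cell : result.rawCells()) {
            System.out.println(
                    Bytes.toString(CellUtil.cloneRow(cell)) + " : "
                    + Bytes.toString(CellUtil.cloneFamily(cell)) + ":"
                    + Bytes.toString(CellUtil.cloneQualifier(cell)) + " = "
                    + Bytes.toString(CellUtil.cloneValue(cell)));
        }
    }
}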
===

Single Get:
package get;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;

import java.io.IOException;

import helper.printValues;

public class singleGet {

    public static void main(String[] args) throws IOException {

        Configuration conf = HBaseConfiguration.create();
        Connection connection = ConnectionFactory.createConnection(conf);
        Table table = connection.getTable(TableName.valueOf("notifications"));

        // Fetch row "2", restricted to one column from each family
        Get get = new Get(Bytes.toBytes("2"));
        get.addColumn(Bytes.toBytes("attributes"), Bytes.toBytes("type"));
        get.addColumn(Bytes.toBytes("metrics"), Bytes.toBytes("open"));

        Result result = table.get(get);

        // getValue returns null if the column is missing from the row
        byte[] val = result.getValue(Bytes.toBytes("attributes"), Bytes.toBytes("type"));
        System.out.println("Value: " + Bytes.toString(val));

        printValues.printAllValues(result);

        table.close();
        connection.close();
    }
}

=====
Put
package put;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

public class listPuts {

    public static void main(String[] args) throws IOException {

        Configuration conf = HBaseConfiguration.create();
        Connection connection = ConnectionFactory.createConnection(conf);
        Table table = connection.getTable(TableName.valueOf("notifications"));

        // Row "4": a friend-request notification
        Put put1 = new Put(Bytes.toBytes("4"));
        put1.addColumn(Bytes.toBytes("attributes"), Bytes.toBytes("type"), Bytes.toBytes("Friend Request"));
        put1.addColumn(Bytes.toBytes("attributes"), Bytes.toBytes("for_user"), Bytes.toBytes("Daniel"));
        put1.addColumn(Bytes.toBytes("attributes"), Bytes.toBytes("from_user"), Bytes.toBytes("Ryan"));

        // Row "5": a comment notification
        Put put2 = new Put(Bytes.toBytes("5"));
        put2.addColumn(Bytes.toBytes("attributes"), Bytes.toBytes("type"), Bytes.toBytes("Comment"));
        put2.addColumn(Bytes.toBytes("attributes"), Bytes.toBytes("for_user"), Bytes.toBytes("Brendan"));
        put2.addColumn(Bytes.toBytes("attributes"), Bytes.toBytes("from_user"), Bytes.toBytes("Rick"));
        put2.addColumn(Bytes.toBytes("attributes"), Bytes.toBytes("for_thing"), Bytes.toBytes("link"));
        put2.addColumn(Bytes.toBytes("attributes"), Bytes.toBytes("link"), Bytes.toBytes("link"));

        // Send both Puts in one call
        List<Put> puts = new ArrayList<Put>();
        puts.add(put1);
        puts.add(put2);

        table.put(puts);

        table.close();
        connection.close();
    }
}

=====
package put;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;

import java.io.IOException;

public class singlePut {

    public static void main(String[] args) throws IOException {

        Configuration conf = HBaseConfiguration.create();

        Connection connection = ConnectionFactory.createConnection(conf);

        Table table = connection.getTable(TableName.valueOf("notifications"));
        //HTable table = new HTable(conf, "notifications");   // old (pre-1.0) client API

        // Row "1": one column in each of the two families
        Put put = new Put(Bytes.toBytes("1"));
        put.addColumn(Bytes.toBytes("attributes"), Bytes.toBytes("type"), Bytes.toBytes("Comment"));
        put.addColumn(Bytes.toBytes("attributes"), Bytes.toBytes("for_user"), Bytes.toBytes("Chaz"));
        put.addColumn(Bytes.toBytes("metrics"), Bytes.toBytes("open"), Bytes.toBytes("0"));

        table.put(put);

        table.close();
        connection.close();
    }
}
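
The examples above only close the Table and Connection on the happy path. Since both are Closeable in the HBase 1.x client, try-with-resources is a tidier pattern; this is a sketch of that idea applied to the same single Put (the class name singlePutTryWithResources is mine, not from the original paste):

package put;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

import java.io.IOException;

public class singlePutTryWithResources {

    public static void main(String[] args) throws IOException {
        Configuration conf = HBaseConfiguration.create();

        // Connection and Table are both closed automatically, even if put() throws
        try (Connection connection = ConnectionFactory.createConnection(conf);
             Table table = connection.getTable(TableName.valueOf("notifications"))) {

            Put put = new Put(Bytes.toBytes("1"));
            put.addColumn(Bytes.toBytes("attributes"), Bytes.toBytes("type"), Bytes.toBytes("Comment"));
            put.addColumn(Bytes.toBytes("attributes"), Bytes.toBytes("for_user"), Bytes.toBytes("Chaz"));
            put.addColumn(Bytes.toBytes("metrics"), Bytes.toBytes("open"), Bytes.toBytes("0"));

            table.put(put);
        }
    }
}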
====

package scan;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;

import java.io.IOException;

import helper.printValues;

public class scanRows {

    public static void main(String[] args) throws IOException {

        Configuration conf = HBaseConfiguration.create();

        Connection connection = ConnectionFactory.createConnection(conf);
        Table table = connection.getTable(TableName.valueOf("notifications"));

        // 1. Full table scan: every row, every column family
        Scan fullScan = new Scan();
        ResultScanner fullScanResult = table.getScanner(fullScan);
        for (Result res : fullScanResult) {
            printValues.printAllValues(res);
        }
        fullScanResult.close();

        // 2. Scan restricted to the "metrics" column family
        Scan colScan = new Scan();
        colScan.addFamily(Bytes.toBytes("metrics"));
        ResultScanner colScanResult = table.getScanner(colScan);
        for (Result res : colScanResult) {
            printValues.printAllValues(res);
        }
        colScanResult.close();

        // 3. Row-range scan over a single column. The stop row is exclusive,
        //    so start "2" / stop "2" (as in the original) matches nothing;
        //    stop "3" is used here so that row "2" is actually returned.
        Scan rangeScan = new Scan();
        rangeScan.addColumn(Bytes.toBytes("attributes"), Bytes.toBytes("type"))
                .setStartRow(Bytes.toBytes("2"))
                .setStopRow(Bytes.toBytes("3"));

        ResultScanner rangeScanResult = table.getScanner(rangeScan);
        for (Result res : rangeScanResult) {
            printValues.printAllValues(res);
        }
        rangeScanResult.close();

        table.close();
        connection.close();
    }
}
=====

Batch

package batch;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import helper.printValues;

public class batchOp {

    public static void main(String[] args) throws IOException {

        Configuration conf = HBaseConfiguration.create();
        Connection connection = ConnectionFactory.createConnection(conf);
        Table table = connection.getTable(TableName.valueOf("notifications"));

        Put put = new Put(Bytes.toBytes("2"));
        put.addColumn(Bytes.toBytes("attributes"), Bytes.toBytes("type"), Bytes.toBytes("Comment"));
        put.addColumn(Bytes.toBytes("attributes"), Bytes.toBytes("for_user"), Bytes.toBytes("Swetha"));
        put.addColumn(Bytes.toBytes("metrics"), Bytes.toBytes("open"), Bytes.toBytes("0"));

        Get get = new Get(Bytes.toBytes("2"));
        get.addColumn(Bytes.toBytes("attributes"), Bytes.toBytes("type"));
        get.addColumn(Bytes.toBytes("metrics"), Bytes.toBytes("open"));

        Delete delete = new Delete(Bytes.toBytes("2"));
        delete.addColumn(Bytes.toBytes("attributes"), Bytes.toBytes("type"));
        delete.addColumn(Bytes.toBytes("attributes"), Bytes.toBytes("for_user"));

        // Put, Get and Delete all extend Row, so they can share one batch call.
        // HBase makes no guarantee about the order in which operations on the
        // same row are applied within a batch.
        List<Row> batch = new ArrayList<Row>();
        batch.add(put);
        batch.add(get);
        batch.add(delete);

        Object[] results = new Object[batch.size()];

        try {
            table.batch(batch, results);
        } catch (Exception e) {
            System.err.println("Error: " + e);
        }

        // Successful Puts and Deletes come back as empty Results, so only the
        // Get's Result survives the isEmpty() check below.
        for (int i = 0; i < results.length; i++) {
            Result res = (Result) results[i];
            if (!res.isEmpty()) {
                printValues.printAllValues(res);
            }
        }

        table.close();
        connection.close();
    }
}

Create Table:
package admin;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;

import java.io.IOException;

public class createTable {

    public static void main(String[] args) throws IOException {

        Configuration conf = HBaseConfiguration.create();

        Connection connection = ConnectionFactory.createConnection(conf);

        Admin admin = connection.getAdmin();

        // Table descriptor: "notifications" with two column families
        HTableDescriptor tableDescriptor = new HTableDescriptor(TableName.valueOf("notifications"));
        tableDescriptor.addFamily(new HColumnDescriptor("attributes"));
        tableDescriptor.addFamily(new HColumnDescriptor("metrics"));

        if (!admin.tableExists(tableDescriptor.getTableName())) {
            System.out.print("Creating table. ");
            admin.createTable(tableDescriptor);
            System.out.println(" Done.");
        }

        admin.close();
        connection.close();
    }
}
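
For quick experiments, the same table can also be created from the HBase shell with: create 'notifications', 'attributes', 'metrics'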

==

MR:
package mapReduce;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;

import java.io.IOException;

/**
 * Created by swethakolalapudi on 6/29/16.
 */
public class Main {

    public static void main(String[] args) throws Exception {

        Configuration conf = HBaseConfiguration.create();
        Job job = Job.getInstance(conf, "ExampleSummary");
        job.setJarByClass(Main.class);   // class that contains mapper and reducer

        String sourceTable = "notifications";
        String targetTable = "summary";

        Scan scan = new Scan();
        scan.setCaching(500);         // 1 is the default in Scan, which will be bad for MapReduce jobs
        scan.setCacheBlocks(false);   // don't set to true for MR jobs
        // set other scan attrs

        TableMapReduceUtil.initTableMapperJob(
                sourceTable,          // input table
                scan,                 // Scan instance to control CF and attribute selection
                MyMapper.class,       // mapper class
                Text.class,           // mapper output key
                IntWritable.class,    // mapper output value
                job);
        TableMapReduceUtil.initTableReducerJob(
                targetTable,          // output table
                MyTableReducer.class, // reducer class
                job);
        job.setNumReduceTasks(1);     // at least one, adjust as required

        boolean b = job.waitForCompletion(true);
        if (!b) {
            throw new IOException("error with job!");
        }
    }
}
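
One assumption worth calling out: initTableReducerJob writes into the target table but does not create it, so the "summary" table, with a "metrics" column family for the reducer below to write to, has to exist before the job is launched, either via an Admin call like the createTable example above or from the HBase shell with: create 'summary', 'metrics'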

===
package mapReduce;

import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;

import java.io.IOException;

public class MyMapper extends TableMapper<Text, IntWritable> {

    public static final byte[] CF = "attributes".getBytes();
    public static final byte[] ATTR1 = "type".getBytes();

    private final IntWritable ONE = new IntWritable(1);
    private Text text = new Text();

    // Called once per row of the source table; emits (notification type, 1)
    public void map(ImmutableBytesWritable row, Result value, Context context) throws IOException, InterruptedException {
        String val = new String(value.getValue(CF, ATTR1));
        text.set(val);   // we can only emit Writables...

        context.write(text, ONE);
    }
}

==
package mapReduce;

import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableReducer;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;

import java.io.IOException;

public class MyTableReducer extends TableReducer<Text, IntWritable, ImmutableBytesWritable> {

    public static final byte[] CF = "metrics".getBytes();
    public static final byte[] COUNT = "count".getBytes();

    // Sums the counts for one notification type and writes a summary row keyed by that type
    public void reduce(Text key, Iterable<IntWritable> values, Context context) throws IOException, InterruptedException {
        int i = 0;
        for (IntWritable val : values) {
            i += val.get();
        }
        Put put = new Put(Bytes.toBytes(key.toString()));
        put.addColumn(CF, COUNT, Bytes.toBytes(i));

        context.write(null, put);
    }
}
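
A small detail: Bytes.toBytes(i) stores the count as a 4-byte big-endian int rather than a printable string, so inspecting the summary table (for example with scan 'summary' in the HBase shell) shows raw bytes for the count column; decode it with Bytes.toInt on the client side, or write Bytes.toBytes(String.valueOf(i)) in the reducer if a human-readable value is preferred.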

==