package com.cisco.installbase.hiveconnector;

import java.util.Date;

public class MainApp {

    private static final String hiveDB = ReadProperties.getInstance().getProperty("hive_db");
    private static final String logTable = ReadProperties.getInstance().getProperty("IB_log_table");
    private static final String dataGovernanceLogTable = ReadProperties.getInstance().getProperty("SR_DG_table");
    private static final String dataGovernanceMasterTable = ReadProperties.getInstance()
            .getProperty("SR_DG_master_table");

    // count(*) queries for each imported IB table
    private static final String count_xxccs_ds_sahdr_core = "select count(*) from " + hiveDB
            + ".xxccs_ds_sahdr_core";
    private static final String count_mtl_system_items_b = "select count(*) from " + hiveDB
            + ".mtl_system_items_b";
    private static final String count_xxccs_scdc_product_profile = "select count(*) from " + hiveDB
            + ".xxccs_scdc_product_profile";
    private static final String count_xxccs_ds_cvdprdline_detail = "select count(*) from " + hiveDB
            + ".xxccs_ds_cvdprdline_detail";
    private static final String count_xxccs_ds_instance_detail = "select count(*) from " + hiveDB
            + ".xxccs_ds_instance_detail";

    private static int currentJobID = 0;
    private static Date startTime = null;
    private static Date stopTime = null;
    private static int runTime = 0;

    static CommonDBUtilities commonDB = new CommonDBUtilities();
    static ShellUtilities shellUtilities = new ShellUtilities();
    static SqoopUtility sqoop = new SqoopUtility();

    public static void main(String[] args) {
        // announce the job before the chain runs; the original printed this after
        // startTimeLogger() returned, i.e. after the whole pipeline had finished
        System.out.println("Started the Job");
        MainApp.startTimeLogger();
    }

    public static void startTimeLogger() {
        // Record the job ID and start time for the log table, then start the chain
        if (Constants.isFlag()) {
            currentJobID = commonDB.getMaximumJobID();
            startTime = commonDB.getTime();
            System.out.println("executing startTimeLogger");
            MainApp.importTables();
        } else {
            MainApp.onFailure();
            JobMailer.PostMail("IB Load Failed", "Load failed while logging start time; method: startTimeLogger()");
            System.out.println("executing startTimeLogger failed");
        }
    }

    public static void importTables() {
        // Delete the target directories before running the sqoop imports
        if (Constants.isFlag()) {
            shellUtilities.DeleteDirectory(Constants.getMtlSystems());
            shellUtilities.DeleteDirectory(Constants.getProductLine());
            shellUtilities.DeleteDirectory(Constants.getInstanceDetail());
            // the original deleted getProductLine() twice and never cleaned the
            // product-profile directory; getProductProfile() was almost certainly meant
            shellUtilities.DeleteDirectory(Constants.getProductProfile());
            shellUtilities.DeleteDirectory(Constants.getHeaderCore());

            // Run the sqoop imports to load the data from Oracle into Hive
            sqoop.runSqoop();
            System.out.println("executing importTables");
            MainApp.getCounts();
        } else {
            MainApp.onFailure();
            JobMailer.PostMail("IB Load Failed", "Load failed while running sqoop import; method: importTables()");
            System.out.println("executing importTables failed");
        }
    }

    public static void getCounts() {
        // Get the record counts for all the IB tables pulled
        if (Constants.isFlag()) {
            commonDB.getCounts(count_xxccs_ds_instance_detail);
            commonDB.getCounts(count_xxccs_ds_cvdprdline_detail);
            commonDB.getCounts(count_xxccs_scdc_product_profile);
            commonDB.getCounts(count_mtl_system_items_b);
            commonDB.getCounts(count_xxccs_ds_sahdr_core);
            System.out.println("executing getCounts");
            MainApp.stopTimeLogger();
        } else {
            MainApp.onFailure();
            JobMailer.PostMail("IB Load Failed", "Load failed while getting counts; method: getCounts()");
            System.out.println("executing getCounts failed");
        }
    }

    public static void stopTimeLogger() {
        // Record the stop (end) time
        if (Constants.isFlag()) {
            stopTime = commonDB.getTime();
            System.out.println("executing stopTimeLogger");
            MainApp.runTimeLogger();
        } else {
            MainApp.onFailure();
            JobMailer.PostMail("IB Load Failed", "Load failed while logging end time; method: stopTimeLogger()");
            System.out.println("executing stopTimeLogger failed");
        }
    }

    public static void runTimeLogger() {
        // Compute the total run time
        if (Constants.isFlag()) {
            // record elapsed seconds (assumed intent); the original expression
            // "(int) (stop - start) / 1000 * 60 * 60 * 24" divided by 1000 and then
            // multiplied by 86400 because of operator precedence
            runTime = (int) ((stopTime.getTime() - startTime.getTime()) / 1000);
            System.out.println("executing runTimeLogger");
            MainApp.onSuccess();
            MainApp.logGovernance();
        } else {
            MainApp.onFailure();
            JobMailer.PostMail("IB Load Failed", "Load failed while logging run time; method: runTimeLogger()");
            System.out.println("executing runTimeLogger failed");
        }
    }

    public static void logGovernance() {
        // IB data governance: copy the master rows into the governance log
        if (Constants.isFlag()) {
            // the start-time literal is quoted here; the original interpolated the
            // Date unquoted, which is not valid HiveQL
            String dataGovernance = "insert into table " + hiveDB + "." + dataGovernanceLogTable
                    + " select Data_Asset_Reference,File_Name,Origin_System,Transfer_System,'" + startTime
                    + "',Column_Reference,Element_Reference,Rule_Priority,Delete_By_Date,Classification,Geographic_Inclusion,Geographic_Restriction,Group_Inclusion,Group_Restriction,Reserved from "
                    + hiveDB + "." + dataGovernanceMasterTable;
            commonDB.InsertToTable(dataGovernance);
            System.out.println("executing logGovernance");
        } else {
            MainApp.onFailure();
            JobMailer.PostMail("IB Load Failed",
                    "Load failed while inserting into data governance; method: logGovernance()");
            System.out.println("executing logGovernance failed");
        }
    }

    public static void onFailure() {
        // Write to the log table on failure; the timestamp and status literals are
        // quoted here (the original interpolated them bare), and the trailing
        // semicolon is dropped because the Hive JDBC driver may reject it
        String insertOnFailure = "insert into table " + hiveDB + "." + logTable + " select " + currentJobID + ",'"
                + stopTime + "'," + runTime + ",'FAILED' from " + hiveDB + ".dual limit 1";
        commonDB.InsertToTable(insertOnFailure);
        JobMailer.PostMail("IB Load Failed", "Load failed");
        System.out.println("executing onFailure");
    }

    public static void onSuccess() {
        // Write to the log table on success
        String insertOnSuccess = "insert into table " + hiveDB + "." + logTable + " select " + currentJobID + ",'"
                + stopTime + "'," + runTime + ",'SUCCESS' from " + hiveDB + ".dual limit 1";
        commonDB.InsertToTable(insertOnSuccess);
        JobMailer.PostMail("IB Load Successfully completed", "Load completed");
        System.out.println("executing onSuccess");
    }
}
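
// JobMailer is called throughout MainApp but its source is not part of this
// paste; this minimal stand-in (assumed) only logs, so the classes here compile.
// The real class presumably sends the notification mail.
package com.cisco.installbase.hiveconnector;

public class JobMailer {

    public static void PostMail(String subject, String body) {
        // placeholder: print instead of mailing
        System.out.println("MAIL [" + subject + "]: " + body);
    }
}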

package com.cisco.installbase.hiveconnector;

import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;
import java.util.Set;

public class ReadProperties {

    private final Properties props = new Properties();

    private ReadProperties() {
        // load config.properties from the classpath; try-with-resources closes the
        // stream, and a missing resource is flagged instead of throwing an NPE
        try (InputStream in = this.getClass().getClassLoader().getResourceAsStream("config.properties")) {
            if (in == null) {
                throw new IOException("config.properties not found on the classpath");
            }
            props.load(in);
            Constants.setFlag(true);
        } catch (IOException e) {
            e.printStackTrace();
            Constants.setFlag(false);
        }
    }

    // initialization-on-demand holder: INSTANCE is created lazily and thread-safely
    private static class PropHolder {
        private static final ReadProperties INSTANCE = new ReadProperties();
    }

    public static ReadProperties getInstance() {
        return PropHolder.INSTANCE;
    }

    public String getProperty(String key) {
        return props.getProperty(key);
    }

    public Set<String> getAllPropertyNames() {
        return props.stringPropertyNames();
    }

    public boolean containsKey(String key) {
        return props.containsKey(key);
    }
}
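
# A config.properties sketch for reference: the keys are exactly the ones the
# classes in this paste read; every value is an illustrative placeholder
# (assumed), not taken from the original source.
hive_db=ib_hive
IB_log_table=ib_job_log
SR_DG_table=sr_dg_log
SR_DG_master_table=sr_dg_master
target_dir=/apps/installbase/sqoop/
dburl=jdbc:oracle:thin:@//oracle-host:1521/IBSRV
user_name=oracle_user
password=change_me
EDGE_HIVE_CONN=edge-node-hive
SQOOP_EDGE_CONN=edge-node-sqoop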

package com.cisco.installbase.hiveconnector;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.Date;

public class CommonDBUtilities {

    static final String hiveDB = ReadProperties.getInstance().getProperty("hive_db");
    static final String logTable = ReadProperties.getInstance().getProperty("IB_log_table");

    Connection con = CreateConnection.getInstance();
    Statement stm = null;
    ResultSet rs = null;
    PreparedStatement pstat = null;

    String sql1 = "select max(job_id)+1 from " + hiveDB + "." + logTable;

    public Date getTime() {
        return new Date();
    }

    public int getMaximumJobID() {
        int maximumID = -1;
        try {
            stm = con.createStatement();
            rs = stm.executeQuery(sql1);
            // JDBC result sets start before the first row and columns are 1-based;
            // the original called rs.getInt(0) without rs.next(), which always throws
            if (rs.next()) {
                maximumID = rs.getInt(1);
            }
            Constants.setFlag(true);
        } catch (SQLException e) {
            e.printStackTrace();
            Constants.setFlag(false);
        }
        return maximumID;
    }

    public int getCounts(String query) {
        int count = 0;
        try {
            stm = con.createStatement();
            rs = stm.executeQuery(query);
            // same fix as getMaximumJobID: advance the cursor, read column 1
            if (rs.next()) {
                count = rs.getInt(1);
            }
            Constants.setFlag(true);
        } catch (SQLException e) {
            e.printStackTrace();
            Constants.setFlag(false);
        }
        return count;
    }

    public void InsertToTable(String insertquery) {
        try {
            pstat = con.prepareStatement(insertquery);
            pstat.executeUpdate(); // the original prepared the statement but never executed it
            Constants.setFlag(true);
        } catch (SQLException e) {
            e.printStackTrace();
            Constants.setFlag(false);
        }
    }
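
    // A sketch (not in the original paste) of the same count lookup using
    // try-with-resources, so the Statement and ResultSet are closed even when
    // the query fails; the instance fields above are never closed.
    public int getCountsClosing(String query) {
        int count = 0;
        try (Statement s = con.createStatement();
                ResultSet r = s.executeQuery(query)) {
            if (r.next()) {
                count = r.getInt(1);
            }
        } catch (SQLException e) {
            e.printStackTrace();
        }
        return count;
    }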
}

package com.cisco.installbase.hiveconnector;

import java.sql.DriverManager;
import java.sql.SQLException;
import java.sql.Connection;

public class CreateConnection {

    private static Connection instance = null;
    static final String drivername = "org.apache.hive.jdbc.HiveDriver";

    private CreateConnection() {
        try {
            Class.forName(drivername);
            // instance = DriverManager.getConnection("jdbc:hive2://hddev-c01-edge-01:20000/",
            //         "phodisvc", "B1GD4T4dev");
            // for hive 1 use this ------> instance =
            //         DriverManager.getConnection("thrift://hddev-c01-edge-02:9083");
            // the hive2 JDBC driver expects a jdbc:hive2:// URL; the original used
            // jdbc:hive://, which this driver does not accept (the port must still
            // be the cluster's HiveServer2 port for the connection to succeed)
            instance = DriverManager.getConnection("jdbc:hive2://hddev-c01-edge-01:9083/");
            System.out.println("get instance " + instance);
            Constants.setFlag(true);
        } catch (ClassNotFoundException e) {
            e.printStackTrace();
            Constants.setFlag(false);
        } catch (SQLException e) {
            e.printStackTrace();
            Constants.setFlag(false);
        }
    }

    public static Connection getInstance() {
        if (instance == null) {
            // the constructor assigns the static field; the original cast the
            // CreateConnection object itself to Connection, which can only throw
            // a ClassCastException. It also forced the flag to true on every call,
            // which masked connection failures; the constructor now owns the flag.
            new CreateConnection();
        }
        return instance;
    }
}
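
// An alternative sketch (assumed, not in the paste): the same singleton via the
// initialization-on-demand holder idiom that ReadProperties already uses, which
// removes the null check and the constructor's side effect on a static field.
// The URL is the hive2 endpoint from the commented-out line above.
class CreateConnectionHolderSketch {

    private static class Holder {
        static final Connection INSTANCE = open();

        private static Connection open() {
            try {
                return DriverManager.getConnection("jdbc:hive2://hddev-c01-edge-01:20000/");
            } catch (SQLException e) {
                // surface the failure at first use instead of leaking a null connection
                throw new ExceptionInInitializerError(e);
            }
        }
    }

    static Connection get() {
        return Holder.INSTANCE;
    }
}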

package com.cisco.installbase.hiveconnector;

import java.io.IOException;
//import java.net.URI;
//import java.net.URISyntaxException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class ShellUtilities {

    String target_dir = ReadProperties.getInstance().getProperty("target_dir");
    String tablename = "";
    // String maprfsURI = "maprfs://hdnprd-c01-r01-01:7222";

    Configuration conf = new Configuration();
    FileSystem fs = null;
    String dir = " ";

    public void DeleteDirectory(String tablename) {
        // remove the sqoop target directory for one table, if it exists
        String fullpath = target_dir + tablename;
        try {
            // fs = FileSystem.get(new URI(maprfsURI), conf);
            fs = FileSystem.get(conf);
            Constants.setFlag(true);
        } catch (IOException e) {
            e.printStackTrace();
            Constants.setFlag(false);
            return; // the original fell through with fs == null and NPE'd on fs.exists()
        }

        Path directory = new Path(fullpath);
        try {
            if (fs.exists(directory)) {
                fs.delete(directory, true);
            }
            Constants.setFlag(true);
        } catch (IOException e) {
            e.printStackTrace();
            Constants.setFlag(false);
        }
    }
}
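
// Usage sketch (assumed): clearing every sqoop target directory in one pass
// instead of the five hand-written calls in MainApp.importTables():
//
//     ShellUtilities shell = new ShellUtilities();
//     for (String table : new String[] { Constants.getMtlSystems(),
//             Constants.getProductLine(), Constants.getInstanceDetail(),
//             Constants.getProductProfile(), Constants.getHeaderCore() }) {
//         shell.DeleteDirectory(table);
//     }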

package com.cisco.installbase.hiveconnector;

import java.io.IOException;

public class SqoopUtility {

    private static final String connectionString = "'" + ReadProperties.getInstance().getProperty("dburl") + "'";
    @SuppressWarnings("unused")
    private static final String edgeNode = ReadProperties.getInstance().getProperty("EDGE_HIVE_CONN");
    private static final String targetDir = ReadProperties.getInstance().getProperty("target_dir") + "/";
    private static final String userName = ReadProperties.getInstance().getProperty("user_name");
    private static final String password = ReadProperties.getInstance().getProperty("password");
    private static final String sqoopEdgeNode = ReadProperties.getInstance().getProperty("SQOOP_EDGE_CONN");
    private static final String hiveDB = ReadProperties.getInstance().getProperty("hive_db");

    String[] command = { "sh", "/apps/pentaho_nfs/installbase/input/poc/parallel.sh", sqoopEdgeNode, connectionString,
            userName, password, targetDir, hiveDB };
    ProcessBuilder processBuilder = null;
    private Process spawnProcess = null;

    public void runSqoop() {
        // launch the shell script that runs the sqoop imports
        processBuilder = new ProcessBuilder(command);
        processBuilder.inheritIO(); // surface the script's output in the job's stdout/stderr
        try {
            spawnProcess = processBuilder.start();
            // the original never waited, so getCounts() could run while the import
            // was still in flight; block until the script finishes and treat a
            // non-zero exit code as failure
            int exitCode = spawnProcess.waitFor();
            Constants.setFlag(exitCode == 0);
        } catch (IOException e) {
            e.printStackTrace();
            Constants.setFlag(false);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            Constants.setFlag(false);
        }
    }
}
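
// parallel.sh itself is not part of the paste. Given the arguments passed above
// ($1 edge node, $2 connection string, $3 user, $4 password, $5 target dir,
// $6 hive db), each import inside it presumably looks something like this
// (assumed, for orientation only):
//
//     sqoop import --connect $2 --username $3 --password $4 \
//         --table XXCCS_DS_SAHDR_CORE --target-dir ${5}XXCCS_DS_SAHDR_CORE \
//         --hive-import --hive-database $6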

package com.cisco.installbase.hiveconnector;

public class Constants {

    private static boolean flag = false;

    public static boolean isFlag() {
        return flag;
    }

    public static void setFlag(boolean flag) {
        Constants.flag = flag;
    }

    private static String mtlSystems = "MTL_SYSTEM_ITEMS_B";
    private static String productLine = "XXCCS_DS_CVDPRDLINE_DETAIL";
    private static String instanceDetail = "XXCCS_DS_INSTANCE_DETAIL";
    private static String productProfile = "XXCCS_SCDC_PRODUCT_PROFILE";
    private static String headerCore = "XXCCS_DS_SAHDR_CORE";

    // IB Tables getters
    public static String getMtlSystems() {
        return mtlSystems;
    }

    public static String getProductLine() {
        return productLine;
    }

    public static String getInstanceDetail() {
        return instanceDetail;
    }

    public static String getProductProfile() {
        return productProfile;
    }

    public static String getHeaderCore() {
        return headerCore;
    }
}

// Leftover flag fields and accessors from an earlier revision, kept commented
// out in the original paste:

// private static boolean startLogTableFlag = false;
// private static boolean ingestionFlag = false;
// private static boolean recordCountFlag = false;
// private static boolean stopLogTableFlag = false;
// private static boolean runtimeLogTableFlag = false;
// private static boolean writeToLogTableFlag = false;
// public static boolean isStartLogTableFlag() {
// return startLogTableFlag;
// }
//
// public static boolean isIngestionFlag() {
// return ingestionFlag;
// }
//
// public static boolean isRecordCountFlag() {
// return recordCountFlag;
// }
//
// public static boolean isStopLogTableFlag() {
// return stopLogTableFlag;
// }
//
// public static boolean isRuntimeLogTableFlag() {
// return runtimeLogTableFlag;
// }
//
// public static boolean isWriteToLogTableFlag() {
// return writeToLogTableFlag;
// }
//
// public static void setStartLogTableFlag(boolean startLogTableFlag) {
// Constants.startLogTableFlag = startLogTableFlag;
// }
//
// public static void setIngestionFlag(boolean ingestionFlag) {
// Constants.ingestionFlag = ingestionFlag;
// }
//
// public static void setRecordCountFlag(boolean recordCountFlag) {
// Constants.recordCountFlag = recordCountFlag;
// }
//
// public static void setStopLogTableFlag(boolean stopLogTableFlag) {
// Constants.stopLogTableFlag = stopLogTableFlag;
// }
//
// public static void setRuntimeLogTableFlag(boolean runtimeLogTableFlag) {
// Constants.runtimeLogTableFlag = runtimeLogTableFlag;
// }
//
// public static void setWriteToLogTableFlag(boolean writeToLogTableFlag) {
// Constants.writeToLogTableFlag = writeToLogTableFlag;
// }