#!/bin/bash

# Load the SerDe JAR on the Hive server with the following commands.

# Put the JAR onto HDFS from the Hive node:

#docker run --rm -v $PWD:/workdir -w /workdir -e HADOOP_USER_NAME=homeaway sequenceiq/hadoop-docker /usr/local/hadoop-2.7.0/bin/hdfs dfs -put /workdir/json-serde-1.3.8-jar-with-dependencies.jar hdfs://asthad011.wvrgroup.internal/apps/hive/warehouse/content_catalog_data

# Then, from the Hive command line, run:
#ADD JAR hdfs://asthad011.wvrgroup.internal/apps/hive/warehouse/content_catalog_data/json-serde-1.3.8-jar-with-dependencies.jar;
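
# A minimal sketch (an assumption, not part of the original flow) of issuing the
# ADD JAR statement through the same akanto/beeline image used below. ADD JAR only
# affects the session it runs in, so it has to be combined with the statements that
# need the SerDe, or the JAR registered for all sessions (e.g. via hive.aux.jars.path).
#docker run --rm akanto/beeline -u "jdbc:hive2://asthad010.wvrgroup.internal:10000/content_catalog" -n homeaway -e 'ADD JAR hdfs://asthad011.wvrgroup.internal/apps/hive/warehouse/content_catalog_data/json-serde-1.3.8-jar-with-dependencies.jar;'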

######################## IMAGE CONTENT TABLE CREATION ########################

echo "Creating test image content table for developer testing"
docker run --rm \
  -it akanto/beeline \
  -u "jdbc:hive2://asthad010.wvrgroup.internal:10000/content_catalog?hive.tez.java.opts=-Xmx7650m;hive.tez.container.size=7650;tez.queue.name=product" \
  --debug \
  -n homeaway -e \
  'CREATE TABLE content_catalog.image_contents_prod_json_vivek (
    id string,
    url string,
    etag string,
    inserted struct<dateString:string>,
    updated struct<dateString:string>,
    contentlocation string,
    contentsource string,
    contenttype string,
    flaggedforreview boolean,
    geoshape struct<coordinates:array<double>, type:string>,
    lbsid string,
    tags array<string>,
    classifiers array<string>,
    image struct<resource:string>
  )
  ROW FORMAT SERDE "org.openx.data.jsonserde.JsonSerDe"
  WITH SERDEPROPERTIES ("mapping.id" = "_id");'
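
# Optional sanity check (an addition, not in the original script): describe the new
# table through the same beeline image to confirm the schema and SerDe took effect.
echo -e "\nDescribing the newly created table"
docker run --rm akanto/beeline -u "jdbc:hive2://asthad010.wvrgroup.internal:10000/content_catalog" -n homeaway -e "DESCRIBE FORMATTED content_catalog.image_contents_prod_json_vivek;"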


###################################################################################################

echo -e "\n\nGoing to upload imageContent data into HDFS"
docker run --rm -v $PWD:/workdir -w /workdir -e HADOOP_USER_NAME=homeaway sequenceiq/hadoop-docker /usr/local/hadoop-2.7.0/bin/hdfs dfs -put /workdir/vivek_data.json hdfs://asthad011.wvrgroup.internal/apps/hive/warehouse/content_catalog_data
echo -e "Done uploading imageContent data into HDFS\n\n"
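
# Optional check (an addition to the original script): list the warehouse directory
# to confirm vivek_data.json landed on HDFS before it is loaded into Hive.
docker run --rm -e HADOOP_USER_NAME=homeaway sequenceiq/hadoop-docker /usr/local/hadoop-2.7.0/bin/hdfs dfs -ls hdfs://asthad011.wvrgroup.internal/apps/hive/warehouse/content_catalog_data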

# Load the HDFS data into the JSON-backed Hive table

echo -e "Going to load imageContent data from HDFS into Hive"
docker run --rm akanto/beeline -u "jdbc:hive2://asthad010.wvrgroup.internal:10000/content_catalog?hive.tez.java.opts=-Xmx7650m;hive.tez.container.size=7650;tez.queue.name=product" -n homeaway -e "LOAD DATA INPATH \"/apps/hive/warehouse/content_catalog_data/vivek_data.json\" OVERWRITE INTO TABLE content_catalog.image_contents_prod_json_vivek;"
echo -e "Done loading imageContent data from HDFS into Hive\n\n"
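
# Optional sanity check (an addition, not in the original flow): count the loaded rows
# so an empty or partially loaded table is caught before querying.
docker run --rm akanto/beeline -u "jdbc:hive2://asthad010.wvrgroup.internal:10000/content_catalog?hive.tez.java.opts=-Xmx7650m;hive.tez.container.size=7650;tez.queue.name=product" -n homeaway -e "SELECT COUNT(*) FROM content_catalog.image_contents_prod_json_vivek;"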


echo -e "Going to query data in Hive"
docker run --rm -it akanto/beeline -u "jdbc:hive2://asthad010.wvrgroup.internal:10000/content_catalog?hive.tez.java.opts=-Xmx7650m;hive.tez.container.size=7650;tez.queue.name=product" -n homeaway -e "select * from image_contents_prod_json_vivek;"
echo -e "Done querying data in Hive\n\n"
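
# Example of querying the nested columns (an illustrative addition, assuming the schema
# defined above): struct fields are addressed with dot notation, and arrays come back as lists.
echo -e "Sampling a few nested fields"
docker run --rm akanto/beeline -u "jdbc:hive2://asthad010.wvrgroup.internal:10000/content_catalog?hive.tez.java.opts=-Xmx7650m;hive.tez.container.size=7650;tez.queue.name=product" -n homeaway -e "SELECT id, inserted.dateString, geoshape.coordinates, tags FROM content_catalog.image_contents_prod_json_vivek LIMIT 10;"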