Advertisement
Guest User

scpt

a guest
Dec 23rd, 2016
119
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
text 15.94 KB | None | 0 0
  1. #!/bin/bash
  2.  
  3. #########################################################
  4. # Created by Dillon Torgersen #
  5. # Tool designed primarily for support. #
  6. # Anyone is welcome to use, change, and redistribute. #
  7. # Last Update 12/01/16 #
  8. #########################################################
  9.  
  10. # RUN WITH ELEVATED PRIVILEGES
  11.  
  12. #### Variables to alter ####
  13. # Blob size sent through BSB.
  14. blobsize=262144
  15. # Amount of time BSB is to run.
  16. time=60
  17. # Set number of pings sent between both machines.
  18. pings=4
  19. # Set amount of time for sar to track packet transfers
  20. seconds=10
  21. # Raptor stats are significantly scaled back. Increase to 1700 for all lines. Default is 33.
  22. raptor=33
  23.  
  24. # Required to view API parameters.
  25. read -s -p "Provide Admin Password: " AdminPass
  26. host=$(hostname -f)
  27. echo
  28. echo
  29. # Function to gather Cloud Store provider.
  30. echo "Select your Cloud Provider:"
  31. PS3='Choose provider, then select option 10 to Continue: '
  32. options=("AWS" "AT&T" "Atmos" "Azure" "ECS" "Virtustream" "Google" "ViPR" "Swift" "Continue" "Quit")
  33. select opt in "${options[@]}"
  34. do
  35. case $opt in
  36. "AWS")
  37. provider=aws-s3
  38. ;;
  39. "AT&T")
  40. provider=atmos
  41. ;;
  42. "Atmos")
  43. provider=atmos
  44. ;;
  45. "Azure")
  46. provider=azure
  47. ;;
  48. "ECS")
  49. provider=s3
  50. ;;
  51. "Virtustream")
  52. provider=s3
  53. ;;
  54. "Google")
  55. provider=s3
  56. ;;
  57. "ViPR")
  58. provider=s3
  59. ;;
  60. "Swift")
  61. provider=swift-keystone
  62. ;;
  63. "Continue")
  64. break
  65. ;;
  66. "Quit")
  67. exit 1
  68. ;;
  69. *) echo invalid option;;
  70. esac
  71. done
  72. # Gathering Cloud Store information.
  73. endpoint=$(curl -s --Header "MAGFS_USERNAME: local/admin" --header "MAGFS_PASSWORD: $AdminPass" -H "Content-Type:application/json" -H "Accept: application/json" --insecure https://$host:4444/api/v1/cloud_profiles.json | python -m json.tool | grep endpoint | awk '{ print $2 }')
  74. accesskey=$(curl -s --Header "MAGFS_USERNAME: local/admin" --header "MAGFS_PASSWORD: $AdminPass" -H "Content-Type:application/json" -H "Accept: application/json" --insecure https://$host:4444/api/v1/cloud_profiles.json | python -m json.tool | grep access_key | awk '{ print $2 }')
  75. secretkey=$(curl -s --Header "MAGFS_USERNAME: local/admin" --header "MAGFS_PASSWORD: $AdminPass" -H "Content-Type:application/json" -H "Accept: application/json" --insecure https://$host:4444/api/v1/cloud_profiles.json | python -m json.tool | grep credential | awk '{ print $2 }')
  76. endpoint=$(echo "${endpoint:1:${#endpoint}-2}")
  77. accesskey=$(echo "${accesskey:1:${#accesskey}-3}")
  78. secretkey=$(echo "${secretkey:1:${#secretkey}-3}")
  79. epoch=$(date +%s)
  80. metrics=$(cat /opt/maginatics/raptor/raptor.conf | grep -C 15 share | grep password | awk '{ print $2 }')
  81. metrics=$(echo "${metrics:1:${#metrics}-3}")
  82. manage=$(cat /opt/maginatics/raptor/raptor.conf | grep -C 15 server | grep password | awk '{ print $2 }')
  83. manage=$(echo "${manage:1:${#manage}-2}")
  84. # Saving file to /tmp so root is not required.
  85. log_file="/var/log/diagResults_$(date +%s).txt"
  86. # Creates log and redirect requests start.
  87. echo "Gathering system information"
  88. touch log_file
  89. echo "Maginatics Diagnostics Tool" > $log_file
  90. date >> $log_file
  91. echo "Hostname: $host" >> $log_file
  92. echo >> $log_file
  93. echo "Version/History: " >> $log_file
  94. curl -s --Header "MAGFS_USERNAME: local/admin" --header "MAGFS_PASSWORD: $AdminPass" -H "Content-Type:application/json" -H "Accept: application/json" --insecure https://$host:4444/api/v1/system_info.json | python -m json.tool >> $log_file
  95. echo >> $log_file
  96.  
  97. # Gathers CPU stats.
  98. echo "Checking system resources"
  99. echo -e "\033[32m"
  100. echo "------------------- CPU -----------------" >> $log_file
  101. echo -e "\033[0m"
  102. mpstat >> $log_file
  103. echo >> $log_file
  104. sar -u 1 5 >> $log_file
  105. echo -e "\033[32m"
  106. echo >> $log_file
  107. echo >> $log_file
  108. echo -e "\033[32m"
  109.  
  110. # Determining RAM utilization.
  111. echo "------------------- RAM -----------------" >> $log_file
  112. echo -e "\033[0m"
  113. free -h >> $log_file
  114. echo >> $log_file
  115. echo "Current swap usage" >> $log_file
  116. sar -W 1 5 >> $log_file
  117. echo >> $log_file
  118. echo >> $log_file
  119. echo -e "\033[32m"
  120.  
  121. # Gathering information on disk stats.
  122. echo "------------------- DISK -----------------" >> $log_file
  123. echo -e "\033[0m"
  124. df -h >> $log_file
  125. echo >> $log_file
  126. echo "Current Disk stats (SIZE)" >> $log_file
  127. iostat -m >> $log_file
  128. echo >> $log_file
  129. echo "Average Disk stats (TIME) over 5 seconds" >> $log_file
  130. sar -d 1 5 | grep Average >> $log_file
  131. echo >> $log_file
  132. echo >> $log_file
  133.  
  134. # Performing BSV and BSB actions against cloud store specified.
  135. echo "Gathering Cloud Store information (may take longer)"
  136. echo -e "\033[32m"
  137. echo "------------------- CLOUD STORE -----------------" >> $log_file
  138. echo -e "\033[0m"
  139. echo "BSV Report: " >> $log_file
  140. endpoint=$(echo "${endpoint:0:${#endpoint}-1}")
  141. if [[ "$provider" == "aws-s3" ]]
  142. then
  143. echo "java -jar /opt/maginatics/blobstore-cli/blobstore-cli.jar --provider $provider --identity $accesskey --credential $secretkey validate" >> $log_file
  144. java -jar /opt/maginatics/blobstore-cli/blobstore-cli.jar --provider $provider --identity $accesskey --credential $secretkey validate >> $log_file
  145. else
  146. echo "java -jar /opt/maginatics/blobstore-cli/blobstore-cli.jar --provider $provider --endpoint $endpoint --identity $accesskey --credential $secretkey validate" >> $log_file
  147. java -jar /opt/maginatics/blobstore-cli/blobstore-cli.jar --provider $provider --endpoint $endpoint --identity $accesskey --credential $secretkey validate >> $log_file
  148. fi
  149. echo >> $log_file
  150. echo >> $log_file
  151. echo "BSB Report: " >> $log_file
  152. if [[ "$provider" == "aws-s3" ]]
  153. then
  154. echo "java -jar /opt/maginatics/blobstore-cli/blobstore-cli.jar --provider $provider --identity $accesskey --credential $secretkey benchmark --num-parallel-requests 16 --blob-size $blobsize --max-runtime $time --container cbbenchmark-$epoch WRITE" >> $log_file
  155. echo >> $log_file
  156. echo " -------- 16 Parallel requests -------- " >> $log_file
  157. echo "Performing 16 Parallel requests"
  158. java -jar /opt/maginatics/blobstore-cli/blobstore-cli.jar --provider $provider --identity $accesskey --credential $secretkey benchmark --num-parallel-requests 16 --blob-size $blobsize --max-runtime $time --container cbbenchmark16-$epoch WRITE >> $log_file
  159. echo >> $log_file
  160. echo " -------- 64 Parallel requests -------- " >> $log_file
  161. echo "Performing 64 Parallel requests"
  162. java -jar /opt/maginatics/blobstore-cli/blobstore-cli.jar --provider $provider --identity $accesskey --credential $secretkey benchmark --num-parallel-requests 64 --blob-size $blobsize --max-runtime $time --container cbbenchmark64-$epoch WRITE >> $log_file
  163. echo >> $log_file
  164. echo " -------- 128 Parallel requests -------- " >> $log_file
  165. echo "Performing 128 Parallel requests"
  166. java -jar /opt/maginatics/blobstore-cli/blobstore-cli.jar --provider $provider --identity $accesskey --credential $secretkey benchmark --num-parallel-requests 128 --blob-size $blobsize --max-runtime $time --container cbbenchmark128-$epoch WRITE >> $log_file
  167.  
  168. else
  169. echo "java -jar /opt/maginatics/blobstore-cli/blobstore-cli.jar --provider $provider --endpoint $endpoint --identity $accesskey --credential $secretkey benchmark --num-parallel-requests 16 --blob-size $blobsize --max-runtime $time --container cbbenchmark-$epoch WRITE" >> $log_file
  170. echo >> $log_file
  171. echo " -------- 16 Parallel requests --------" >> $log_file
  172. echo "Performing 16 Parallel requests"
  173. java -jar /opt/maginatics/blobstore-cli/blobstore-cli.jar --provider $provider --endpoint $endpoint --identity $accesskey --credential $secretkey benchmark --num-parallel-requests 16 --blob-size $blobsize --max-runtime $time --container cbbenchmark16-$epoch WRITE >> $log_file
  174. echo >> $log_file
  175. echo " -------- 64 Parallel requests -------- " >> $log_file
  176. echo "Performing 64 Parallel requests"
  177. java -jar /opt/maginatics/blobstore-cli/blobstore-cli.jar --provider $provider --endpoint $endpoint --identity $accesskey --credential $secretkey benchmark --num-parallel-requests 64 --blob-size $blobsize --max-runtime $time --container cbbenchmark64-$epoch WRITE >> $log_file
  178. echo >> $log_file
  179. echo " -------- 128 Parallel requests -------- " >> $log_file
  180. echo "Performing 128 Parallel requests"
  181. java -jar /opt/maginatics/blobstore-cli/blobstore-cli.jar --provider $provider --endpoint $endpoint --identity $accesskey --credential $secretkey benchmark --num-parallel-requests 128 --blob-size $blobsize --max-runtime $time --container cbbenchmark128-$epoch WRITE >> $log_file
  182. fi
  183. echo >> $log_file
  184. echo >> $log_file
  185.  
  186. # Performing network analysis, specifically a DIG report and MTR to CPS and Cloud Store.
  187. echo "Checking network connections"
  188. echo -e "\033[32m"
  189. echo "------------------- NETWORK -----------------" >> $log_file
  190. echo -e "\033[0m"
  191. echo "Route Configuration" >> $log_file
  192. echo "NOTE: VLAN will show under Iface column with INTERFACE.VLAN" >> $log_file
  193. route >> $log_file
  194. echo >> $log_file
  195. routel | grep 'target\|eth' >> $log_file
  196. echo "DNS" >> $log_file
  197. dig $host >> $log_file
  198. echo >> $log_file
  199. echo "My Trace Route" >> $log_file
  200. echo >> $log_file
  201. echo "EMC Cloud Portal: " >> $log_file
  202. echo >> $log_file
  203. echo "mtr -br -c $pings api.dpccloud.com -P 443 -T" >> $log_file
  204. mtr -br -c $pings console.dpccloud.com -P 443 -T >> $log_file
  205. echo >> $log_file
  206. echo >> $log_file
  207. echo "Cloud Store: " >> $log_file
  208. echo >> $log_file
  209. endpoint=$(echo "${endpoint:8:${#endpoint}-1}")
  210. echo "mtr -br -c $pings $endpoint -P 443 -T" >> $log_file
  211. mtr -br -c $pings $endpoint -P 443 -T >> $log_file
  212. echo >> $log_file
  213. echo "Average Packet Transfers by Interface" >> $log_file
  214. echo >> $log_file
  215. sar -n DEV $seconds 1 | grep Average >> $log_file
  216. echo >> $log_file
  217. echo >> $log_file
  218. echo >> $log_file
  219.  
  220. # Verifying share availability.
  221. echo "Checking share availability"
  222. echo -e "\033[32m"
  223. echo "------------------- SHARE -----------------" >> $log_file
  224. echo -e "\033[0m"
  225. echo "Mount points:" >> $log_file
  226. mount >> $log_file
  227. echo >> $log_file
  228. echo >> $log_file
  229. echo "magfsadmin:" >> $log_file
  230. echo "magfsadmin --snapshotRoots" >> $log_file
  231. magfsadmin --snapshotRoots >> $log_file
  232. echo >> $log_file
  233. echo >> $log_file
  234. echo -e "\033[32m"
  235.  
  236. # Getting the appliance health and state.
  237. echo "------------------- SERVICES HEALTH -----------------" >> $log_file
  238. echo -e "\033[0m"
  239. curl -s --Header "MAGFS_USERNAME: local/admin" --header "MAGFS_PASSWORD: $AdminPass" -H "Content-Type:application/json" -H "Accept: application/json" --insecure https://$host:4444/api/v1/virtual_machines.json | python -m json.tool >> $log_file
  240. echo >> $log_file
  241. curl -s --Header "MAGFS_USERNAME: local/admin" --header "MAGFS_PASSWORD: $AdminPass" -H "Content-Type:application/json" -H "Accept: application/json" --insecure https://$host:4444/api/v1/site_caches.json | python -m json.tool >> $log_file
  242. echo >> $log_file
  243. echo >> $log_file
  244.  
  245. # Gathers magfs client stats
  246. echo "Getting MagFS client stats"
  247. echo -e "\033[32m"
  248. echo "------------------- MAGFS -----------------" >> $log_file
  249. echo -e "\033[0m"
  250. echo " -------- Transports -------- " >> $log_file
  251. magfsadmin --getStatistics | grep 'Operation\|transport' >> $log_file
  252. echo >> $log_file
  253. echo " -------- MagFS -------- " >> $log_file
  254. magfsadmin --getStatistics | grep 'Operation\|magfs' >> $log_file
  255. echo >> $log_file
  256. echo " -------- Raptor -------- " >> $log_file
  257. magfsadmin --getStatistics | grep 'Operation\|raptor' >> $log_file
  258. echo >> $log_file
  259. echo " -------- Glue -------- " >> $log_file
  260. magfsadmin --getStatistics | grep 'Operation\|glue' >> $log_file
  261. echo >> $log_file
  262. echo >> $log_file
  263.  
  264. # Gathers details from the site-cache and only displaying the top half.
  265. echo "Reviewing cache results"
  266. echo -e "\033[32m"
  267. echo "------------------- CACHE -----------------" >> $log_file
  268. echo -e "\033[0m"
  269. echo "Checking if Site-cache is present."
  270. if [ -f /var/log/hippocampus/hippo.log ]
  271. then
  272. echo "Gathering site-cache details."
  273. curl -s -k -u admin:$metrics https://$host:8443/metrics | python -m json.tool | head -n 98 >> $log_file
  274. else
  275. echo "Site-cache is not enabled or initialized."
  276. fi
  277. echo >> $log_file
  278. echo >> $log_file
  279. echo -e "\033[32m"
  280.  
  281. # Gathers details from the Raptor stats.
  282. echo "Reviewing cache results"
  283. echo -e "\033[32m"
  284. echo "------------------- RAPTOR -----------------" >> $log_file
  285. echo -e "\033[0m"
  286. echo "Gathering management server details."
  287. echo "Raptor Metrics" >> $log_file
  288. curl -k -v https://admin:$manage@127.0.0.1:9000/raptor/metrics | python -m json.tool | head -n $raptor >> $log_file
  289. echo >> $log_file
  290. echo >> $log_file
  291. echo "Raptor stats" >> $log_file
  292. curl -k -v https://admin:$manage@127.0.0.1:9000/raptor/stats >> $log_file
  293. echo >> $log_file
  294. echo >> $log_file
  295. echo -e "\033[32m"
  296.  
  297. # Database queries to gather details on appliance DB, including status, deduplication, and distribution.
  298. echo "------------------- DATABASE -----------------" >> $log_file
  299. echo -e "\033[0m"
  300. # Database queries created by Thomas Sandholm
  301. # Use in association with .xls sheet to assist in determining metadata disk space requirements.
  302. echo "Querying database details."
  303. echo "[`date`] Start of DB Analysis..." >> $log_file 2>&1
  304. echo >> $log_file
  305. echo "[`date`] Checking chunk-level deduplication" >> $log_file 2>&1
  306. mysql -e "select sum(count) as total, count(*) uniq, (sum(count)/count(*)) as deduprate from raptor.chunk_meta where state != 'phantom'" >> $log_file 2>&1
  307. echo >> $log_file
  308. echo >> $log_file
  309. echo "[`date`] Checking inode file distribution" >> $log_file 2>&1
  310. cat > dist.sql.tmp <<ANY
  311. SET group_concat_max_len = 10485760; #10MB max length
  312. SELECT
  313. CAST(SUBSTRING_INDEX(SUBSTRING_INDEX(
  314. GROUP_CONCAT(file_size ORDER BY file_size SEPARATOR ','),
  315. ',', 5/100 * COUNT(*) + 1), ',', -1) AS DECIMAL) AS 5th,
  316. CAST(SUBSTRING_INDEX(SUBSTRING_INDEX(
  317. GROUP_CONCAT(file_size ORDER BY file_size SEPARATOR ','),
  318. ',', 25/100 * COUNT(*) + 1), ',', -1) AS DECIMAL) AS 25th,
  319. CAST(SUBSTRING_INDEX(SUBSTRING_INDEX(
  320. GROUP_CONCAT(file_size ORDER BY file_size SEPARATOR ','),
  321. ',', 50/100 * COUNT(*) + 1), ',', -1) AS DECIMAL) AS 50th,
  322. CAST(SUBSTRING_INDEX(SUBSTRING_INDEX(
  323. GROUP_CONCAT(file_size ORDER BY file_size SEPARATOR ','),
  324. ',', 50/100 * COUNT(*) + 1), ',', -1) AS DECIMAL) AS 75th,
  325. CAST(SUBSTRING_INDEX(SUBSTRING_INDEX(
  326. GROUP_CONCAT(file_size ORDER BY file_size SEPARATOR ','),
  327. ',', 95/100 * COUNT(*) + 1), ',', -1) AS DECIMAL) AS 95th,
  328. COUNT(*) AS Total
  329. FROM inodes where not(attributes & 16)
  330. ANY
  331. mysql < dist.sql.tmp raptor >> $log_file 2>&1
  332. rm dist.sql.tmp
  333. echo >> $log_file
  334. echo >> $log_file
  335. echo "[`date`] Checking chunk states" >> $log_file 2>&1
  336. mysql -e "select state,count(*) from raptor.chunk_meta group by state" >> $log_file 2>&1
  337. echo "[`date`] Number of small files with potential chunk alignment loss" >> $log_file 2>&1
  338. mysql -e "select count(*) from raptor.inodes where not(attributes & 16) and file_size < 262144" >> $log_file 2>&1
  339. echo >> $log_file
  340. echo >> $log_file
  341. echo "[`date`] Checking Chunk Map Rows" >> $log_file 2>&1
  342. mysql -e "select count(*) from raptor.chunk_map" >> $log_file 2>&1
  343. echo >> $log_file
  344. echo >> $log_file
  345. echo "[`date`] Checking Table status" >> $log_file 2>&1
  346. mysql -e "show table status from raptor" >> $log_file 2>&1
  347. echo >> $log_file
  348. echo >> $log_file
  349. echo -e "\033[32m"
  350.  
  351. # Gathers the latest events and presents in a list.
  352. echo "------------------- LATEST EVENTS -----------------" >> $log_file
  353. echo -e "\033[0m"
  354. curl -s --Header "MAGFS_USERNAME: local/admin" --header "MAGFS_PASSWORD: $AdminPass" -H "Content-Type:application/json" -H "Accept: application/json" --insecure https://$host:4444/api/v1/events.json | python -m json.tool | head -n 80 >> $log_file
  355. echo >> $log_file
  356. echo >> $log_file
  357. echo -e "\033[32m"
  358. echo "Your file is located in $log_file"
  359. echo -e "\033[0m"
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement