Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
#!/bin/bash
#########################################################
# Created by Dillon Torgersen                           #
# Tool designed primarily for support.                  #
# Anyone is welcome to use, change, and redistribute.   #
# Last Update 12/01/16                                  #
#########################################################
# RUN WITH ELEVATED PRIVILEGES

#### Tunable settings ####
blobsize=262144   # Blob size (bytes) pushed through BSB.
time=60           # How long (seconds) each BSB run lasts.
pings=4           # Ping count sent between both machines.
seconds=10        # Window for sar to sample packet transfers.
raptor=33         # Raptor stat lines kept; raise to 1700 for all lines.

# The management API calls below authenticate with the admin password.
read -s -p "Provide Admin Password: " AdminPass
host=$(hostname -f)
printf '\n\n'
# Ask the operator which Cloud Store provider backs this appliance;
# the chosen provider drives the blobstore-cli flags later on.
echo "Select your Cloud Provider:"
PS3='Choose provider, then select option 10 to Continue: '
options=("AWS" "AT&T" "Atmos" "Azure" "ECS" "Virtustream" "Google" "ViPR" "Swift" "Continue" "Quit")
select opt in "${options[@]}"; do
  case "$opt" in
    "AWS")
      provider=aws-s3 ;;
    "AT&T" | "Atmos")
      provider=atmos ;;
    "Azure")
      provider=azure ;;
    "ECS" | "Virtustream" | "Google" | "ViPR")
      provider=s3 ;;
    "Swift")
      provider=swift-keystone ;;
    "Continue")
      break ;;
    "Quit")
      exit 1 ;;
    *)
      echo invalid option ;;
  esac
done
# Gather the Cloud Store profile (endpoint + credentials) from the management API.
# BUGFIX: curl long options are case-sensitive, so `--Header` is rejected and the
# calls never ran; it must be `--header`. Also fetch the profile ONCE instead of
# issuing the same request three times.
profile=$(curl -s --header "MAGFS_USERNAME: local/admin" --header "MAGFS_PASSWORD: $AdminPass" -H "Content-Type:application/json" -H "Accept: application/json" --insecure "https://$host:4444/api/v1/cloud_profiles.json" | python -m json.tool)
endpoint=$(printf '%s\n' "$profile" | grep endpoint | awk '{ print $2 }')
accesskey=$(printf '%s\n' "$profile" | grep access_key | awk '{ print $2 }')
secretkey=$(printf '%s\n' "$profile" | grep credential | awk '{ print $2 }')
# Strip the JSON punctuation around each value (e.g. `"value",` -> `value`).
endpoint=$(echo "${endpoint:1:${#endpoint}-2}")
accesskey=$(echo "${accesskey:1:${#accesskey}-3}")
secretkey=$(echo "${secretkey:1:${#secretkey}-3}")
epoch=$(date +%s)
# Pull the metrics/management passwords out of raptor.conf (useless `cat |` removed).
metrics=$(grep -C 15 share /opt/maginatics/raptor/raptor.conf | grep password | awk '{ print $2 }')
metrics=$(echo "${metrics:1:${#metrics}-3}")
manage=$(grep -C 15 server /opt/maginatics/raptor/raptor.conf | grep password | awk '{ print $2 }')
manage=$(echo "${manage:1:${#manage}-2}")
# The diagnostics log lives under /var/log (NOT /tmp as the old comment claimed),
# which is one reason the script must run with elevated privileges.
log_file="/var/log/diagResults_$(date +%s).txt"
# Create the log and start redirecting section output into it.
echo "Gathering system information"
touch "$log_file"   # BUGFIX: was `touch log_file` (missing $), creating a stray file named "log_file"
echo "Maginatics Diagnostics Tool" > "$log_file"
date >> "$log_file"
echo "Hostname: $host" >> "$log_file"
echo >> "$log_file"
echo "Version/History: " >> "$log_file"
# BUGFIX: --Header -> --header (curl long options are case-sensitive).
curl -s --header "MAGFS_USERNAME: local/admin" --header "MAGFS_PASSWORD: $AdminPass" -H "Content-Type:application/json" -H "Accept: application/json" --insecure "https://$host:4444/api/v1/system_info.json" | python -m json.tool >> "$log_file"
echo >> "$log_file"
# Sample CPU, RAM, and disk activity into the log.
# The \033[32m / \033[0m escapes only colour the section banners on the console.
echo "Checking system resources"
printf '\033[32m\n'
echo "------------------- CPU -----------------" >> "$log_file"
printf '\033[0m\n'
mpstat >> "$log_file"
printf '\n' >> "$log_file"
sar -u 1 5 >> "$log_file"
printf '\033[32m\n'
printf '\n\n' >> "$log_file"
printf '\033[32m\n'
# Memory and swap utilization.
echo "------------------- RAM -----------------" >> "$log_file"
printf '\033[0m\n'
free -h >> "$log_file"
printf '\n' >> "$log_file"
echo "Current swap usage" >> "$log_file"
sar -W 1 5 >> "$log_file"
printf '\n\n' >> "$log_file"
printf '\033[32m\n'
# Disk capacity and throughput.
echo "------------------- DISK -----------------" >> "$log_file"
printf '\033[0m\n'
df -h >> "$log_file"
printf '\n' >> "$log_file"
echo "Current Disk stats (SIZE)" >> "$log_file"
iostat -m >> "$log_file"
printf '\n' >> "$log_file"
echo "Average Disk stats (TIME) over 5 seconds" >> "$log_file"
sar -d 1 5 | grep Average >> "$log_file"
printf '\n\n' >> "$log_file"
# Validate (BSV) then benchmark (BSB) the configured cloud store.
echo "Gathering Cloud Store information (may take longer)"
echo -e "\033[32m"
echo "------------------- CLOUD STORE -----------------" >> "$log_file"
echo -e "\033[0m"
echo "BSV Report: " >> "$log_file"
# Drop the trailing character left over from the JSON parsing above.
endpoint=$(echo "${endpoint:0:${#endpoint}-1}")
# AWS needs no explicit endpoint; every other provider does. Build the shared
# CLI arguments once instead of duplicating the whole aws/non-aws command set.
cli_args=(--provider "$provider")
if [[ "$provider" != "aws-s3" ]]; then
  cli_args+=(--endpoint "$endpoint")
fi
cli_args+=(--identity "$accesskey" --credential "$secretkey")
echo "java -jar /opt/maginatics/blobstore-cli/blobstore-cli.jar ${cli_args[*]} validate" >> "$log_file"
java -jar /opt/maginatics/blobstore-cli/blobstore-cli.jar "${cli_args[@]}" validate >> "$log_file"
echo >> "$log_file"
echo >> "$log_file"
echo "BSB Report: " >> "$log_file"
# One WRITE benchmark per parallelism level, each into its own container.
# BUGFIX: the logged command used to show container cbbenchmark-$epoch while
# the real runs used cbbenchmark<N>-$epoch; we now log the exact command run.
for parallel in 16 64 128; do
  bench_args=("${cli_args[@]}" benchmark --num-parallel-requests "$parallel" --blob-size "$blobsize" --max-runtime "$time" --container "cbbenchmark$parallel-$epoch" WRITE)
  echo "java -jar /opt/maginatics/blobstore-cli/blobstore-cli.jar ${bench_args[*]}" >> "$log_file"
  echo >> "$log_file"
  echo " -------- $parallel Parallel requests -------- " >> "$log_file"
  echo "Performing $parallel Parallel requests"
  java -jar /opt/maginatics/blobstore-cli/blobstore-cli.jar "${bench_args[@]}" >> "$log_file"
  echo >> "$log_file"
done
echo >> "$log_file"
echo >> "$log_file"
# Network analysis: routing table, DNS resolution, and MTR to the EMC cloud
# portal and to the cloud store endpoint.
echo "Checking network connections"
echo -e "\033[32m"
echo "------------------- NETWORK -----------------" >> "$log_file"
echo -e "\033[0m"
echo "Route Configuration" >> "$log_file"
echo "NOTE: VLAN will show under Iface column with INTERFACE.VLAN" >> "$log_file"
route >> "$log_file"
echo >> "$log_file"
routel | grep 'target\|eth' >> "$log_file"
echo "DNS" >> "$log_file"
dig "$host" >> "$log_file"
echo >> "$log_file"
echo "My Trace Route" >> "$log_file"
echo >> "$log_file"
echo "EMC Cloud Portal: " >> "$log_file"
echo >> "$log_file"
# BUGFIX: the logged command said api.dpccloud.com although the trace actually
# targets console.dpccloud.com; log the command that really runs.
echo "mtr -br -c $pings console.dpccloud.com -P 443 -T" >> "$log_file"
mtr -br -c "$pings" console.dpccloud.com -P 443 -T >> "$log_file"
echo >> "$log_file"
echo >> "$log_file"
echo "Cloud Store: " >> "$log_file"
echo >> "$log_file"
# Strip the leading "https://" so mtr receives a bare hostname.
endpoint=$(echo "${endpoint:8:${#endpoint}-1}")
echo "mtr -br -c $pings $endpoint -P 443 -T" >> "$log_file"
mtr -br -c "$pings" "$endpoint" -P 443 -T >> "$log_file"
echo >> "$log_file"
echo "Average Packet Transfers by Interface" >> "$log_file"
echo >> "$log_file"
sar -n DEV "$seconds" 1 | grep Average >> "$log_file"
echo >> "$log_file"
echo >> "$log_file"
echo >> "$log_file"
# Verify share availability (mounts + magfsadmin) and appliance service health.
echo "Checking share availability"
echo -e "\033[32m"
echo "------------------- SHARE -----------------" >> "$log_file"
echo -e "\033[0m"
echo "Mount points:" >> "$log_file"
mount >> "$log_file"
echo >> "$log_file"
echo >> "$log_file"
echo "magfsadmin:" >> "$log_file"
echo "magfsadmin --snapshotRoots" >> "$log_file"
magfsadmin --snapshotRoots >> "$log_file"
echo >> "$log_file"
echo >> "$log_file"
echo -e "\033[32m"
# Appliance health and state via the management API.
echo "------------------- SERVICES HEALTH -----------------" >> "$log_file"
echo -e "\033[0m"
# BUGFIX: --Header -> --header; curl long options are case-sensitive and the
# misspelled flag made these API calls fail outright.
curl -s --header "MAGFS_USERNAME: local/admin" --header "MAGFS_PASSWORD: $AdminPass" -H "Content-Type:application/json" -H "Accept: application/json" --insecure "https://$host:4444/api/v1/virtual_machines.json" | python -m json.tool >> "$log_file"
echo >> "$log_file"
curl -s --header "MAGFS_USERNAME: local/admin" --header "MAGFS_PASSWORD: $AdminPass" -H "Content-Type:application/json" -H "Accept: application/json" --insecure "https://$host:4444/api/v1/site_caches.json" | python -m json.tool >> "$log_file"
echo >> "$log_file"
echo >> "$log_file"
# Dump MagFS client statistics, one log section per subsystem.
echo "Getting MagFS client stats"
echo -e "\033[32m"
echo "------------------- MAGFS -----------------" >> "$log_file"
echo -e "\033[0m"
# Each entry is "<section title>:<grep keyword>"; the grep keeps the
# "Operation" header lines plus the subsystem's own counters.
for section in "Transports:transport" "MagFS:magfs" "Raptor:raptor" "Glue:glue"; do
  title=${section%%:*}
  keyword=${section#*:}
  echo " -------- $title -------- " >> "$log_file"
  magfsadmin --getStatistics | grep "Operation\|$keyword" >> "$log_file"
  echo >> "$log_file"
done
echo >> "$log_file"
# Pull site-cache metrics, keeping only the top of the report.
# The hippo.log file is used as the marker that a site cache exists —
# presumably it is created when the cache is initialized (verify on appliance).
echo "Reviewing cache results"
echo -e "\033[32m"
echo "------------------- CACHE -----------------" >> "$log_file"
echo -e "\033[0m"
echo "Checking if Site-cache is present."
if [[ ! -f /var/log/hippocampus/hippo.log ]]; then
  echo "Site-cache is not enabled or initialized."
else
  echo "Gathering site-cache details."
  curl -s -k -u "admin:$metrics" "https://$host:8443/metrics" | python -m json.tool | head -n 98 >> "$log_file"
fi
echo >> "$log_file"
echo >> "$log_file"
echo -e "\033[32m"
- echo -e "\033[32m"
- # Gathers details from the Raptor stats.
- echo "Reviewing cache results"
- echo -e "\033[32m"
- echo "------------------- RAPTOR -----------------" >> $log_file
- echo -e "\033[0m"
- echo "Gathering management server details."
- echo "Raptor Metrics" >> $log_file
- curl -k -v https://admin:$manage@127.0.0.1:9000/raptor/metrics | python -m json.tool | head -n $raptor >> $log_file
- echo >> $log_file
- echo >> $log_file
- echo "Raptor stats" >> $log_file
- curl -k -v https://admin:$manage@127.0.0.1:9000/raptor/stats >> $log_file
- echo >> $log_file
- echo >> $log_file
- echo -e "\033[32m"
# Database queries on the appliance DB: status, deduplication, and file-size
# distribution. Queries created by Thomas Sandholm; use with the .xls sheet
# to estimate metadata disk space requirements.
echo "------------------- DATABASE -----------------" >> "$log_file"
echo -e "\033[0m"
echo "Querying database details."
echo "[$(date)] Start of DB Analysis..." >> "$log_file"
echo >> "$log_file"
echo "[$(date)] Checking chunk-level deduplication" >> "$log_file"
mysql -e "select sum(count) as total, count(*) uniq, (sum(count)/count(*)) as deduprate from raptor.chunk_meta where state != 'phantom'" >> "$log_file" 2>&1
echo >> "$log_file"
echo >> "$log_file"
echo "[$(date)] Checking inode file distribution" >> "$log_file"
# File-size percentiles across inodes (attribute bit 16 excludes some inode
# class — presumably directories; confirm against the schema).
# The SQL is fed straight to mysql over a here-doc instead of a predictably
# named temp file in the current directory.
# BUGFIX: the 75th percentile used 50/100 and therefore duplicated the median;
# it now uses 75/100.
mysql raptor >> "$log_file" 2>&1 <<'ANY'
SET group_concat_max_len = 10485760; #10MB max length
SELECT
  CAST(SUBSTRING_INDEX(SUBSTRING_INDEX(
    GROUP_CONCAT(file_size ORDER BY file_size SEPARATOR ','),
    ',', 5/100 * COUNT(*) + 1), ',', -1) AS DECIMAL) AS 5th,
  CAST(SUBSTRING_INDEX(SUBSTRING_INDEX(
    GROUP_CONCAT(file_size ORDER BY file_size SEPARATOR ','),
    ',', 25/100 * COUNT(*) + 1), ',', -1) AS DECIMAL) AS 25th,
  CAST(SUBSTRING_INDEX(SUBSTRING_INDEX(
    GROUP_CONCAT(file_size ORDER BY file_size SEPARATOR ','),
    ',', 50/100 * COUNT(*) + 1), ',', -1) AS DECIMAL) AS 50th,
  CAST(SUBSTRING_INDEX(SUBSTRING_INDEX(
    GROUP_CONCAT(file_size ORDER BY file_size SEPARATOR ','),
    ',', 75/100 * COUNT(*) + 1), ',', -1) AS DECIMAL) AS 75th,
  CAST(SUBSTRING_INDEX(SUBSTRING_INDEX(
    GROUP_CONCAT(file_size ORDER BY file_size SEPARATOR ','),
    ',', 95/100 * COUNT(*) + 1), ',', -1) AS DECIMAL) AS 95th,
  COUNT(*) AS Total
FROM inodes where not(attributes & 16)
ANY
echo >> "$log_file"
echo >> "$log_file"
echo "[$(date)] Checking chunk states" >> "$log_file"
mysql -e "select state,count(*) from raptor.chunk_meta group by state" >> "$log_file" 2>&1
echo "[$(date)] Number of small files with potential chunk alignment loss" >> "$log_file"
mysql -e "select count(*) from raptor.inodes where not(attributes & 16) and file_size < 262144" >> "$log_file" 2>&1
echo >> "$log_file"
echo >> "$log_file"
echo "[$(date)] Checking Chunk Map Rows" >> "$log_file"
mysql -e "select count(*) from raptor.chunk_map" >> "$log_file" 2>&1
echo >> "$log_file"
echo >> "$log_file"
echo "[$(date)] Checking Table status" >> "$log_file"
mysql -e "show table status from raptor" >> "$log_file" 2>&1
echo >> "$log_file"
echo >> "$log_file"
echo -e "\033[32m"
# Dump the most recent appliance events, then tell the operator where the log is.
echo -e "\033[32m"
echo "------------------- LATEST EVENTS -----------------" >> "$log_file"
echo -e "\033[0m"
# BUGFIX: --Header -> --header (curl long options are case-sensitive; the
# misspelled flag made this API call fail).
curl -s --header "MAGFS_USERNAME: local/admin" --header "MAGFS_PASSWORD: $AdminPass" -H "Content-Type:application/json" -H "Accept: application/json" --insecure "https://$host:4444/api/v1/events.json" | python -m json.tool | head -n 80 >> "$log_file"
echo >> "$log_file"
echo >> "$log_file"
echo -e "\033[32m"
echo "Your file is located in $log_file"
echo -e "\033[0m"
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement