Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
- from influxdb import InfluxDBClient
- import json
- from datetime import datetime, timedelta
- import time
- import sys
- from scrut_api import *
# --- runtime configuration and API clients --------------------------------
# Load settings (Influx host/db, Scrutinizer hostname/token, the exporter
# interface list) from the JSON config file.  Use a context manager so the
# file handle is closed deterministically (the original leaked it).
with open('/home/brian/influx/config.json') as config_file:
    config = json.load(config_file)

# InfluxDB connection used to store the per-interface bit rates.
influx_client = InfluxDBClient(host=config['influxHost'], port=8086)
influx_client.switch_database(config['influxDB'])

# Scrutinizer API client used to pull interface traffic reports.
client = scrut_api_client(
    hostname=config['hostname'],
    authToken=config['authToken'])
def _interval_bounds(start_offset_minutes, end_offset_minutes):
    """Return (start, end) epoch seconds for a window ending near now.

    Both offsets are minutes *before* the current time; seconds and
    microseconds are zeroed so windows align on whole minutes.  A naive
    local datetime's ``.timestamp()`` is equivalent to the original
    ``time.mktime(time.strptime(str(dt), pattern))`` round-trip, which
    also interpreted the value as local time.
    """
    now = datetime.now().replace(microsecond=0, second=0)
    start = int((now - timedelta(minutes=start_offset_minutes)).timestamp())
    end = int((now - timedelta(minutes=end_offset_minutes)).timestamp())
    return start, end


def _collect_and_write(exporter, interface_id, start_time, end_time):
    """Fetch one interface's inbound traffic for [start_time, end_time]
    from Scrutinizer and write the points into InfluxDB.

    Shared by both the "backfill" and "forward" modes (the original
    duplicated this whole body in each branch).
    """
    report_object = scrut_json(
        filters={'sdfDips_0': 'in_{}_{}-{}'.format(
            exporter['exporter'], exporter['exporter'], interface_id)},
        reportTypeLang="interfaces",
        times={
            "dateRange": "Custom",
            "start": "{}".format(start_time),
            "end": "{}".format(end_time),
            "clientTimezone": "America/New_York"
        })
    report_format = scrut_data_requested()
    params = scrut_params(
        client=client,
        json_data=report_object.report_json,
        data_requested=report_format.format)
    response = scrut_request(params)
    graph_data = response.data["report"]["graph"]["timeseries"]["inbound"]
    label_data = response.data["report"]["graph"]["pie"]["inbound"]

    # Build the list of InfluxDB points for this window, then write once.
    json_body = []
    for series, label_entry in zip(graph_data, label_data):
        label = label_entry['label_dns']
        for point in series:
            # point[0] is an epoch timestamp; the original shifts it
            # forward 4 hours before storing.
            # NOTE(review): hard-coded +4h offset ignores DST — confirm
            # against the Scrutinizer server's timezone handling.
            shifted = datetime.fromtimestamp(point[0]) + timedelta(hours=4)
            time_data = shifted.strftime('%Y-%m-%dT%H:%M:%S')
            json_body.append({
                "measurement": "interfaces",
                "time": time_data,
                "fields": {
                    # point[1] * 8 / 60: convert the per-minute byte
                    # count to bits per second.
                    "bits": int((point[1] * 8 / 60))
                },
                "tags": {
                    "application_label": label
                }
            })
            print("backfilled data for {}".format(time_data))
    influx_client.write_points(json_body)


def backfill_by_hours(number_of_hours):
    """Pull inbound interface traffic from Scrutinizer into InfluxDB.

    The mode comes from ``sys.argv[1]``:

    * ``"backfill"`` — for every configured exporter/interface, walk back
      ``number_of_hours`` one-hour windows and load each one.
    * ``"forward"``  — load only the most recent 5-minute window.
      (``"foward"`` is also accepted, for backward compatibility with the
      original misspelled CLI argument.)

    number_of_hours: int-convertible count of one-hour windows to walk
    back in "backfill" mode; ignored in "forward" mode.
    """
    print("starting to backfill data for {} hours".format(number_of_hours))
    if sys.argv[1] == "backfill":
        for exporter in config['interfaceList']:
            for interface_id in exporter['interfaces']:
                # Move back one hour per iteration:
                # window 0 = [now-60m, now], window 1 = [now-120m, now-60m], ...
                for hour in range(int(number_of_hours)):
                    start_time, end_time = _interval_bounds(
                        (hour + 1) * 60, hour * 60)
                    _collect_and_write(
                        exporter, interface_id, start_time, end_time)
    elif sys.argv[1] in ("forward", "foward"):
        # Single 5-minute window ending now, computed once so every
        # interface shares the same bounds (matches original behavior).
        start_time, end_time = _interval_bounds(5, 0)
        for exporter in config['interfaceList']:
            for interface_id in exporter['interfaces']:
                _collect_and_write(
                    exporter, interface_id, start_time, end_time)
- backfill_by_hours(sys.argv[2])
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement