Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
- 2a3
- >
- 10c11
- < # | Copyright Mathias Kettner 2014 mk@mathias-kettner.de |
- ---
- > # | Copyright Mathias Kettner 2013 mk@mathias-kettner.de |
- 112a114,122
def get_influx_data(hostname, service_description, varname, cf, fromtime, untiltime, influx_client, step=120):
    """Fetch consolidated measurement data for one service from InfluxDB.

    The service description and hostname are normalized to the naming
    scheme used when the data was written (spaces stripped from the
    service, dots in the host replaced by underscores).

    Returns a pair (step, values): the consolidation interval in seconds
    and the list of per-interval maxima between fromtime and untiltime.
    NOTE: cf is accepted for interface symmetry with the RRD fetcher,
    but the query always consolidates with max().
    """
    series = service_description.replace(" ", "")
    host = hostname.replace(".", "_")
    # WARNING: identifiers are interpolated directly into the query string.
    # Host/service names come from the monitoring configuration, not from
    # end users, but proper quoting should still be verified against the
    # InfluxQL syntax rules.
    query = ("select max(value) from %s where time > %is and time < %is "
             "and host='%s' and check='%s' group by time(%is);"
             % (series, fromtime, untiltime, host, varname, step))
    rows = list(influx_client.query(query)[series])
    return step, [row["max"] for row in rows]
- >
- 116,126d125
- < # Check whether a certain time stamp lies within daylight saving time (DST)
- < def is_dst(timestamp):
- < return time.localtime(timestamp).tm_isdst
- <
- < # Returns the timezone *including* DST shift at a certain point of time
- < def timezone_at(timestamp):
- < if is_dst(timestamp):
- < return time.altzone
- < else:
- < return time.timezone
- <
- 129c128
- < day_of_epoch, rel_time = divmod(t - timezone_at(t), 86400)
- ---
- > day_of_epoch, rel_time = divmod(t - time.timezone, 86400)
- 133c132
- < return "everyday", (t - timezone_at(t)) % 86400
- ---
- > return "everyday", (t - time.timezone) % 86400
- 138c137
- < return str(mday), (t - timezone_at(t)) % 86400
- ---
- > return str(mday), (t - time.timezone) % 86400
- 141,142c140
- < return "everyhour", (t - timezone_at(t)) % 3600
- <
- ---
- > return "everyhour", (t - time.timezone) % 3600
- 146,148c144,146
- < "slice" : 86400, # 7 slices
- < "groupby" : group_by_wday,
- < "valid" : 7,
- ---
- > "slice" : 86400, # 7 slices
- > "groupby" : group_by_wday,
- > "valid" : 7,
- 151,153c149,151
- < "slice" : 86400, # 31 slices
- < "groupby" : group_by_day_of_month,
- < "valid" : 28,
- ---
- > "slice" : 86400, # 31 slices
- > "groupby" : group_by_day_of_month,
- > "valid" : 28,
- 156,158c154,156
- < "slice" : 86400, # 1 slice
- < "groupby" : group_by_day,
- < "valid" : 1,
- ---
- > "slice" : 86400, # 1 slice
- > "groupby" : group_by_day,
- > "valid" : 1,
- 161,163c159,161
- < "slice" : 3600, # 1 slice
- < "groupby" : group_by_everyhour,
- < "valid" : 24,
- ---
- > "slice" : 3600, # 1 slice
- > "groupby" : group_by_everyhour,
- > "valid" : 24,
- 174a173,219
def compute_prediction_influxdb(pred_file, timegroup, params, period_info, from_time, dsname, cf, ic):
    """Compute the prediction table for one timegroup from InfluxDB data.

    Walks backwards from from_time in steps of one period slice over the
    configured horizon (params["horizon"] in days), fetches the data for
    every slice that falls into the requested timegroup and consolidates
    the slices point-wise into average/min/max/stdev.

    Returns a dict with num_points, step, columns and points in the same
    format as compute_prediction().  pred_file is accepted for interface
    compatibility but is not used here.
    """
    smallest_step = 120  # get_influx_data consolidates into 2-minute buckets
    slices = []
    horizon_start = from_time - params["horizon"] * 86400

    begin = from_time
    while begin >= horizon_start:
        tg, fr, un, rel = get_prediction_timegroup(begin, period_info)
        if tg == timegroup:
            step, data = get_influx_data(g_hostname, g_service_description,
                                         dsname, cf, fr, un - 1, ic)
            slices.append((fr, data))
        begin -= period_info["slice"]

    # Guard against an empty result (no slice matched the timegroup);
    # indexing slices[0] unconditionally would raise an IndexError.
    num_points = len(slices[0][1]) if slices else 0

    consolidated = []
    for i in range(num_points):
        # Collect the i-th data point of every slice, skipping gaps (None)
        # and slices that are shorter than the first one.
        point_line = [data[i] for _, data in slices
                      if i < len(data) and data[i] is not None]
        if point_line:
            average = sum(point_line) / len(point_line)
            consolidated.append([
                average,
                min(point_line),
                max(point_line),
                stdev(point_line, average),
            ])
        else:
            consolidated.append([None, None, None, None])

    return {
        "num_points": num_points,
        "step": smallest_step,
        "columns": ["average", "min", "max", "stdev"],
        "points": consolidated,
    }
- >
- >
- >
- 183,184c228
- <
- < # The resolutions of the different time ranges differ. We interpolate
- ---
- > # The resolution of the different time ranges differs. We interpolate
- 186c230
- < # finest resolution. We also assume, that each step is always dividable
- ---
- > # finest resolution. We also assume that each step is always divisible
- 188,192d231
- <
- < # Note: due to the f**king DST, we can have several shifts between
- < # DST and non-DST during are computation. We need to compensate for
- < # those. DST swaps within slices are being ignored. The DST flag
- < # is checked against the beginning of the slice.
- 206c245
- < num_points = len(slices[0][2])
- ---
- > num_points = len(slices[0][2]) # The number of data points from the latest day
- 248a288,289
- > from influxdb import InfluxDBClient
- > influx_settings = read_influx_config('/etc/check_mk/influxdb.config')
- 291c332
- < last_info = None
- ---
- > last_info = None
- 315c356,360
- < prediction = compute_prediction(pred_file, timegroup, params, period_info, from_time, dsname, cf)
- ---
- > if influx_settings['use_influx']:
- > client = InfluxDBClient(influx_settings['host'], influx_settings['port'], influx_settings['user'], influx_settings['password'], influx_settings['database'])
- > prediction = compute_prediction_influxdb(pred_file, timegroup, params, period_info, from_time, dsname, cf, client)
- > else:
- > prediction = compute_prediction(pred_file, timegroup, params, period_info, from_time, dsname, cf)
- 361a407,422
- >
def read_influx_config(config_path):
    """Read the InfluxDB connection settings from an ini-style file.

    Expects a section [InfluxSettings]; every missing option falls back
    to a sensible default.  Returns a dict with the keys host, port,
    user, password, database and use_influx (port as int, use_influx as
    bool).  A missing or unreadable file yields pure defaults instead of
    raising NoSectionError.
    """
    defaults = {
        'host': 'localhost',
        'port': '8086',
        'user': 'root',
        'password': 'root',
        'database': 'checkmk',
        'use_influx': 'False',
    }
    # Python 2 ships the module as ConfigParser, Python 3 as configparser.
    try:
        import ConfigParser as cp
        parser = cp.SafeConfigParser(defaults)
    except ImportError:
        import configparser as cp
        parser = cp.ConfigParser(defaults)
    parser.read(config_path)

    section = 'InfluxSettings'
    if not parser.has_section(section):
        # File absent or section missing: return the typed defaults.
        return {'host': 'localhost', 'port': 8086, 'user': 'root',
                'password': 'root', 'database': 'checkmk',
                'use_influx': False}

    settings = {}
    for option in parser.options(section):
        if option == 'use_influx':
            settings[option] = parser.getboolean(section, option)
        elif option == 'port':
            settings[option] = parser.getint(section, option)
        else:
            settings[option] = parser.get(section, option)
    return settings
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement