#!/usr/bin/python
from prometheus_client import start_http_server, Metric, REGISTRY
from threading import Lock
from cachetools import cached, TTLCache
from requests import Request, Session
import requests
import re
import os
import sys
import argparse
import json
import logging
import time
from bs4 import BeautifulSoup
# Connection settings. Overridable via environment variables so deployments
# can be configured without editing the source; defaults are unchanged.
baseurl = os.environ.get('LEOX_BASEURL', 'http://192.168.100.1')  # Default ONT IP
username = os.environ.get('LEOX_USERNAME', 'leox')  # Default login
password = os.environ.get('LEOX_PASSWORD', 'leolabs_7')  # Default password

# Serializes collect() calls so concurrent Prometheus scrapes do not interleave.
lock = Lock()

# Logging setup: INFO-level messages to stdout.
log = logging.getLogger('leox-exporter')
log.setLevel(logging.INFO)
ch = logging.StreamHandler(sys.stdout)
ch.setLevel(logging.INFO)
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
ch.setFormatter(formatter)
log.addHandler(ch)

# Cache scraped values for CACHE_TTL seconds (default 15) so rapid scrapes
# do not hammer the ONT's web interface.
cache_ttl = int(os.environ.get('CACHE_TTL', 15))
cache_max_size = int(os.environ.get('CACHE_MAX_SIZE', 10000))
cache = TTLCache(maxsize=cache_max_size, ttl=cache_ttl)
class LeoxClient():
    """Scrapes the LEOX ONT web interface and parses metric name/value pairs."""

    def __init__(self):
        # Pages to scrape: [short name, URL]. The trailing '?' mirrors the
        # URLs requested by the ONT's own web UI.
        self.urls = [
            ['pon_stats' , baseurl + '/admin/pon-stats.asp?'],
            ['pon_status', baseurl + '/status_pon.asp?'],
            ['ont_status', baseurl + '/status.asp?'],
        ]

    # NOTE(review): caching an instance method keys on `self` and keeps the
    # instance alive for the cache's lifetime; acceptable here because a
    # single long-lived client is created by LeoxCollector.
    @cached(cache)
    def getinfos(self):
        """Fetch every configured page and return a list of
        {'name': 'ont_leox_...', 'value': <raw text>} dicts."""
        log.info("Mis a jours des information")
        infos = []
        for url in self.urls:
            print(url)
            # Timeout added so a hung ONT cannot block the exporter forever.
            req = requests.get(url[1], auth=(username, password), timeout=10)
            soup = BeautifulSoup(req.content, 'html.parser')
            divs = soup.find_all("div", {"class": "column"})
            for div in divs:
                div_title = div.find("div", {"class": "column_title"})
                if div_title:
                    titre = div_title.find("p").get_text()
                    data = div.find("div", {"class": "data_common"})
                    datav = div.find("div", {"class": "data_common data_vertical"})
                    if datav:
                        # BUG FIX: the variable was quoted ("datav"), so the
                        # literal string was printed instead of the element.
                        print("DATA VERTICAL", datav)
                    else:
                        # Horizontal tables: <th> label, <td> value per row.
                        for row in data.findAll("tr"):
                            dtitre = titre + '_' + row.findAll("th")[0].get_text().replace(" ", "_")
                            dtitre = re.sub(r"\s+", '_', dtitre).lower()
                            dvalue = row.findAll("td")[0].get_text()
                            print('ont_leox_' + dtitre + ': ' + dvalue)
                            infos.append({'name': 'ont_leox_' + dtitre, 'value': dvalue})
            # The pon-stats page has no "column" divs; parse its bare table.
            if not divs:
                titre = 'PON Statistics'
                for row in soup.findAll("tr"):
                    dtitre = titre + '_' + row.findAll("th")[0].get_text().replace(" ", "_")
                    dtitre = re.sub(r"\s+", '_', dtitre).lower()
                    dtitre = re.sub(r":", '', dtitre)
                    dvalue = row.findAll("td")[0].get_text()
                    print('ont_leox_' + dtitre + ': ' + dvalue)
                    infos.append({'name': 'ont_leox_' + dtitre, 'value': dvalue})
        return infos
class LeoxCollector():
    """Prometheus collector exposing the ONT's scraped values as one gauge
    metric family named 'leox'."""

    def __init__(self):
        self.client = LeoxClient()

    def collect(self):
        # Serialize collections: the client and its TTL cache are shared state.
        with lock:
            log.info('collecting...')
            infos = self.client.getinfos()
            metric = Metric('leox', 'Leox metric values', 'gauge')
            for info in infos:
                name = info['name']
                value = info['value']
                # Holds the numeric text to export, or False to skip the row.
                numeric = False
                # Keep the leading numeric part of the value, if any
                # (e.g. "-21.3 dBm" -> "-21.3").
                match = re.search(r'^[\-0-9.]+', value)
                if match:
                    numeric = match.group(0)
                # Skip dotted-quad values: IP addresses are not gauges.
                # BUG FIX: the third dot was unescaped and the third octet
                # matched only one digit, so many IPs slipped through.
                match = re.search(r'^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+', value)
                if match:
                    numeric = False
                # Operational states such as "O5" export just the digit.
                match = re.search(r'^O([0-9])$', value)
                if match:
                    numeric = match.group(1)
                print('Collected:' + name + ': ' + value + '>' + str(numeric))
                if numeric:
                    try:
                        metric.add_sample(name, value=float(numeric), labels={'name': 'LEOX'})
                    except ValueError:
                        # A bare "." or "-" survives the first regex but is
                        # not a valid float; skip rather than crash the scrape.
                        log.warning('skipping non-numeric value %r for %s', numeric, name)
            yield metric
if __name__ == '__main__':
    try:
        parser = argparse.ArgumentParser(description=__doc__,
                                         formatter_class=argparse.ArgumentDefaultsHelpFormatter)
        # BUG FIX: without type=int, a user-supplied --port arrives as a
        # string and the '%d' in the log line below raised TypeError.
        parser.add_argument('--port', nargs='?', const=9103, type=int,
                            help='The TCP port to listen on', default=9103)
        parser.add_argument('--addr', nargs='?', const='127.0.0.1',
                            help='The interface to bind to', default='127.0.0.1')
        args = parser.parse_args()
        log.info('listening on http://%s:%d/metrics' % (args.addr, args.port))
        REGISTRY.register(LeoxCollector())
        start_http_server(args.port, addr=args.addr)
        # Keep the main thread alive; the HTTP server runs in a daemon thread.
        while True:
            time.sleep(60)
    except KeyboardInterrupt:
        print(" Interrupted")
        sys.exit(0)