Mirror of https://github.com/kemko/keenetic-grafana-monitoring.git (synced 2026-01-01 07:35:42 +03:00)
Using admin API instead of open port (#1)

* InfluxDB2 integration
* Use python alpine base image
* Add authorization, refactor config
* Use logging instead of print, handle Keenetic API exceptions when collecting metrics, a little PEP 8 cleanup
Committed by GitHub. Parent: 7661b5819d. Commit: 16a37bf0d9
.gitignore (vendored): 2 changes

@@ -1,4 +1,4 @@
 __pycache__
 .idea
 *.iml
-config/influx.json
+config/config.ini
Dockerfile: 15 changes

@@ -1,10 +1,11 @@
-FROM python:3-slim
+FROM python:3.8-alpine AS dependencies
+COPY requirements.txt .

-ADD keentic_influxdb_exporter.py /home
-ADD requirements.txt /home
-ADD value_normalizer.py /home
-ADD influxdb_writter.py /home
-ADD config/metrics.json /home/config/metrics.json
+RUN pip install --no-cache-dir --user --no-warn-script-location -r requirements.txt

+FROM python:3.8-alpine AS build-image
+COPY --from=dependencies /root/.local /root/.local

+COPY value_normalizer.py keentic_influxdb_exporter.py influxdb_writter.py keenetic_api.py /home/

-RUN pip install -r /home/requirements.txt
 CMD [ "python", "-u", "/home/keentic_influxdb_exporter.py" ]
README.md: 47 changes

@@ -12,29 +12,37 @@

 # Supporter router

-Tested with: Keenetic Ultra (KN-1810) KeeneticOS 3.5.6
+Tested with:
+- Keenetic Ultra (KN-1810) KeeneticOS 3.5.6
+- Keenetic Giga (KN-1010) KeeneticOS 3.5.6

-May works on other Keenetic routers
+May work on other Keenetic routers

 # Preparation

-* Create InfluxDB configuration file `influx.json`
+* Create configuration file `config.ini`

-```json
-{
-  "influxdb": {
-    "host": "<HOST>",
-    "port": 80,
-    "username": "admin",
-    "password": "<PASS>",
-    "db": "keenetic"
-  }
-}
+```ini
+[influxdb]
+host=<HOST>
+port=80
+username=admin
+password=<INFLUX_PASS>
+db=keenetic
+[keenetic]
+skip_auth=false
+admin_endpoint=http://192.168.1.1:80
+login=admin
+password=<KEENETIC_PASS>
+[collector]
+interval_sec=30
 ```

 * Copy [metrics.json](https://github.com/vitaliy-sk/keenetic-grafana-monitoring/blob/master/config/metrics.json) and edit (Optional)

-* Expose Keenetic API on your router
+* Create admin user (Users and access -> Create user, allow 'Web interface' and 'Prohibit saving system settings')
+
+* (Alternative to create user) Expose Keenetic API on your router

 For doing this add port forwarding (Network rules -> Forwarding):
 ```
@@ -45,6 +53,11 @@ Output: This Keenetic
 Open the port: 79
 Destination port: 79
 ```
+Update `conifg.ini`
+```
+[keenetic]
+skip_auth=true
+```

 * Import Grafana dashboard from [grafana.com](https://grafana.com/grafana/dashboards/12723)

@@ -81,11 +94,11 @@ services:
 #    environment:
 #      - TZ=Europe/Kiev
     volumes:
-      - ./config/influx.json:/home/config/influx.json
-      - ./config/metrics.json:/home/config/metrics.json
+      - ./config/config.ini:/home/config/config.ini:ro
+      - ./config/metrics.json:/home/config/metrics.json:ro
     restart: always
 ```

 # Build Docker image

 `docker build -t keenetic-grafana-monitoring .`
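Note: with the open-port alternative (`skip_auth=true`), the exporter talks to the forwarded RCI endpoint over plain HTTP, exactly as the pre-authorization code did. A minimal smoke-test sketch, assuming the forwarding rule above (router reachable on port 79) and `processes` as an example command; adjust the address and command to your setup:

```python
# Minimal sketch of the skip_auth / open-port request path.
# The endpoint and command are examples, not mandated by the project.
import requests

ENDPOINT = "http://192.168.1.1:79/rci"   # address style used by the old metrics.json
command = "processes"

# The exporter builds URLs as "<endpoint>/show/<command>"; a space in the
# command becomes a path separator (command.replace(' ', '/')).
url = f"{ENDPOINT}/show/{command.replace(' ', '/')}"
response = requests.get(url, timeout=5)
response.raise_for_status()
print(response.json())   # raw JSON that the jsonpath queries are applied to
```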
config/config.ini.sample (new file): 13 changes

@@ -0,0 +1,13 @@
+[influxdb]
+host=<HOST>
+port=80
+username=admin
+password=<INFLUX_PASS>
+db=keenetic
+[keenetic]
+admin_endpoint=http://192.168.1.1:80
+skip_auth=false
+login=admin
+password=<KEENETIC_PASS>
+[collector]
+interval_sec=30
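Note: the sample maps directly onto the sections the exporter reads with `configparser`. A small sketch of that lookup (same key names as above; the path assumes the sample was copied to `config/config.ini`):

```python
# Sketch of how keentic_influxdb_exporter.py consumes config.ini.
import configparser

config = configparser.ConfigParser(interpolation=None)  # interpolation off, so '%' in passwords stays literal
config.read("config/config.ini")

influx = config["influxdb"]                                 # host, port, username, password, db
keenetic = config["keenetic"]                               # admin_endpoint, skip_auth, login, password
skip_auth = keenetic.getboolean("skip_auth")                # "false" -> False
interval_sec = config["collector"].getint("interval_sec")  # "30" -> 30

print(influx["host"], keenetic["admin_endpoint"], skip_auth, interval_sec)
```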
config/influx.json (deleted)

@@ -1,9 +0,0 @@
-{
-  "influxdb": {
-    "host": "<HOST>",
-    "port": 80,
-    "username": "admin",
-    "password": "<PASS>",
-    "db": "keenetic"
-  }
-}
config/metrics.json

@@ -1,6 +1,4 @@
 {
-  "endpoint" : "http://192.168.1.1:79/rci",
-  "interval_sec" : 30,
   "metrics" : [
     {
       "command": "processes",
docker-compose.yml

@@ -7,6 +7,6 @@ services:
 #    environment:
 #      - TZ=Europe/Kiev
     volumes:
-      - ./config/influx.json:/home/config/influx.json
-      - ./config/metrics.json:/home/config/metrics.json
+      - ./config/config.ini:/home/config/config.ini:ro
+      - ./config/metrics.json:/home/config/metrics.json:ro
     restart: always
influxdb_writter.py

@@ -1,22 +1,27 @@
+import logging
+
 import requests
 from influxdb import InfluxDBClient


-class InfuxWritter(object):
+class InfuxWriter(object):

     def __init__(self, configuration):
         requests.packages.urllib3.disable_warnings()
-        self._configuration = configuration['influxdb']
-        self._client = InfluxDBClient(self._configuration['host'], self._configuration['port'], self._configuration['username'], self._configuration['password'], self._configuration['db'])
+        self._configuration = configuration
+        self._client = InfluxDBClient(self._configuration['host'], self._configuration['port'],
+                                      self._configuration['username'], self._configuration['password'],
+                                      self._configuration['db'])
         self.init_database()

     def init_database(self):
-        print("Connecting to InfluxDB: " + self._configuration['host'])
+        logging.info("Connecting to InfluxDB: " + self._configuration['host'])
        db_name = self._configuration['db']
         # self._client.drop_database(db_name)

         if db_name not in self._client.get_list_database():
-            print("Creating InfluxDB database: " + db_name)
+            logging.info("Creating InfluxDB database: " + db_name)
             self._client.create_database(db_name)

     def write_metrics(self, metrics):
-        self._client.write_points( metrics )
+        self._client.write_points(metrics)
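Note: `write_metrics` forwards whatever the collector built straight to `InfluxDBClient.write_points`, which in the influxdb 5.x client pinned in requirements.txt takes a list of point dictionaries. A hedged sketch of the expected shape; the tag and field names are illustrative, not taken from metrics.json:

```python
# Shape of the points handed to InfuxWriter.write_metrics() / write_points().
# Tag and field names below are placeholders for whatever jsonpath extracts.
points = [
    {
        "measurement": "processes",            # the collector uses the command name here
        "tags": {"name": "example-process"},   # tag values extracted via the "tags" jsonpaths
        "fields": {"cpu": 1.5, "rss": 1024},   # values extracted via "values", after normalize_value()
    }
]
# InfuxWriter(config["influxdb"]).write_metrics(points)
```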
keenetic_api.py (new file): 60 changes

@@ -0,0 +1,60 @@
+import hashlib
+from hashlib import sha256
+from typing import Dict
+from urllib import parse
+
+from requests import Session
+
+
+class KeeneticClient:
+
+    def __init__(self, admin_endpoint: str, skip_auth: bool, login: str, password: str):
+        self._admin_endpoint = admin_endpoint
+        self._skip_auth = skip_auth
+        self._login = login
+        self._password = password
+
+    def __enter__(self):
+        self._session = Session()
+        return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        if self._session:
+            self._session.close()
+
+    def _auth(self) -> bool:
+        if self._skip_auth:
+            return True
+        auth_endpoint = f"{self._admin_endpoint}/auth"
+        check_auth_response = self._session.get(auth_endpoint)
+        if check_auth_response.status_code == 401:
+            ndm_challenge = check_auth_response.headers.get('X-NDM-Challenge')
+            ndm_realm = check_auth_response.headers.get('X-NDM-Realm')
+            md5 = hashlib.md5((self._login + ':' + ndm_realm + ':' + self._password).encode('utf-8')).hexdigest()
+            sha = sha256((ndm_challenge + md5).encode('utf-8')).hexdigest()
+            auth_response = self._session.post(auth_endpoint, json={'login': self._login, 'password': sha})
+            if auth_response.status_code == 200:
+                return True
+            else:
+                raise ConnectionError(f"Keenetic authorisation failed. Status {auth_response.status_code}")
+        elif check_auth_response.status_code == 200:
+            return True
+        raise ConnectionError(f"Failed to check authorisation, status unknown ({check_auth_response.status_code})")
+
+    def metric(self, command: str, params: Dict) -> Dict:
+        if self._auth():
+            url = f"{self._admin_endpoint}/rci/show/{command.replace(' ', '/')}" + "?" + parse.urlencode(
+                params)
+            r = self._session.get(url)
+            if r.status_code == 200:
+                return r.json()
+            raise KeeneticApiException(r.status_code, r.text)
+        else:
+            raise ConnectionError(f"No keenetic connection.")
+
+
+class KeeneticApiException(Exception):
+
+    def __init__(self, status_code: int, response_text: str):
+        self.status_code = status_code
+        self.response_text = response_text
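Note: the authorisation above is a two-step digest against the admin API: an MD5 over `login:realm:password`, then a SHA-256 over the challenge concatenated with that MD5 hex digest, posted back as the password. The digest step in isolation; the realm and challenge values here are made up for illustration, in practice they come from the `X-NDM-Realm` and `X-NDM-Challenge` headers of the 401 response:

```python
# Digest computation used by KeeneticClient._auth(), shown standalone.
import hashlib

login, password = "admin", "<KEENETIC_PASS>"
ndm_realm = "Keenetic Ultra"        # example X-NDM-Realm value
ndm_challenge = "abcdef0123456789"  # example X-NDM-Challenge value

md5 = hashlib.md5(f"{login}:{ndm_realm}:{password}".encode("utf-8")).hexdigest()
sha = hashlib.sha256((ndm_challenge + md5).encode("utf-8")).hexdigest()

payload = {"login": login, "password": sha}  # body POSTed back to <admin_endpoint>/auth
print(payload)
```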
keentic_influxdb_exporter.py

@@ -1,16 +1,20 @@
+import configparser
 import json
+import logging
 import os
 import time
-import urllib
+from typing import Dict, List

-import requests
 from jsonpath_rw import parse

-from influxdb_writter import InfuxWritter
+from influxdb_writter import InfuxWriter
+from keenetic_api import KeeneticClient, KeeneticApiException
 from value_normalizer import normalize_value

+logging.basicConfig(level='INFO', format='%(asctime)s - %(filename)s - %(levelname)s - %(message)s')

-def json_path_init(paths):
+
+def json_path_init(paths: Dict[str, str]):
     queries = {}

     for pathName, path in paths.items():
@@ -24,20 +28,21 @@ def json_path_init(paths)
 class KeeneticCollector(object):

-    def __init__(self, infuxdb_writter, endpoint, metric_configration):
-        self._influx = infuxdb_writter
-        self._endpoint = endpoint
-        self._command = metric_configration['command']
-        self._params = metric_configration.get('param', {})
-        self._root = parse(metric_configration['root'])
-        self._tags = json_path_init(metric_configration['tags'])
-        self._values = json_path_init(metric_configration['values'])
+    def __init__(self, keenetic_client: KeeneticClient, metric_configuration: Dict[str, object]):
+        self._keenetic_client = keenetic_client
+        self._command: str = metric_configuration['command']
+        self._params = metric_configuration.get('param', {})
+        self._root = parse(metric_configuration['root'])
+        self._tags = json_path_init(metric_configuration['tags'])
+        self._values = json_path_init(metric_configuration['values'])

-    def collect(self):
-        url = '{}/show/{}'.format(self._endpoint, self._command.replace(' ', '/')) + "?" + urllib.parse.urlencode(
-            self._params)
-        response = json.loads(requests.get(url).content.decode('UTF-8'))
+    def collect(self) -> List[dict]:
+        try:
+            response = self._keenetic_client.metric(self._command, self._params)
+        except KeeneticApiException as e:
+            logging.warning(f"Skipping metric '{self._command}' collection. Reason keenetic api exception, "
+                            f"status: {e.status_code}, response: {e.response_text}")
+            return []

         roots = self._root.find(response)
         metrics = []
@@ -55,9 +60,11 @@ class KeeneticCollector(object):
             for valueName, valuePath in self._values.items():
                 value = self.get_first_value(valuePath.find(root.value))
-                if value is not None: values[valueName] = normalize_value(value)
+                if value is not None:
+                    values[valueName] = normalize_value(value)

-            if values.__len__() == 0: continue
+            if values.__len__() == 0:
+                continue

             metric = self.create_metric(self._command, tags, values)
             # print(json.dumps(metric))
@@ -66,7 +73,7 @@ class KeeneticCollector(object):
         metrics.append(
             self.create_metric("collector", {"command": self._command}, {"duration": (time.time_ns() - start_time)}))

-        infuxdb_writter.write_metrics(metrics)
+        return metrics

     @staticmethod
     def create_metric(measurement, tags, values):
@@ -86,28 +93,31 @@
 if __name__ == '__main__':
-    print(
-        " _ __ _ _ _____ _ _ _ \n | |/ / | | (_) / ____| | | | | | \n | ' / ___ ___ _ __ ___| |_ _ ___ | | ___ | | | ___ ___| |_ ___ _ __ \n | < / _ \/ _ \ '_ \ / _ \ __| |/ __| | | / _ \| | |/ _ \/ __| __/ _ \| '__|\n | . \ __/ __/ | | | __/ |_| | (__ | |___| (_) | | | __/ (__| || (_) | | \n |_|\_\___|\___|_| |_|\___|\__|_|\___| \_____\___/|_|_|\___|\___|\__\___/|_| \n \n ")
-
-    metrics_configuration = json.load(open(os.path.dirname(os.path.realpath(__file__)) + "/config/metrics.json", "r"))
-    influx_configuration = json.load(open(os.path.dirname(os.path.realpath(__file__)) + "/config/influx.json", "r"))
-
-    endpoint = metrics_configuration['endpoint']
+    logging.info("\n\n" +
+                 " _ __ _ _ _____ _ _ _ \n | |/ / | | (_) / ____| | | | | | \n | ' / ___ ___ _ __ ___| |_ _ ___ | | ___ | | | ___ ___| |_ ___ _ __ \n | < / _ \/ _ \ '_ \ / _ \ __| |/ __| | | / _ \| | |/ _ \/ __| __/ _ \| '__|\n | . \ __/ __/ | | | __/ |_| | (__ | |___| (_) | | | __/ (__| || (_) | | \n |_|\_\___|\___|_| |_|\___|\__|_|\___| \_____\___/|_|_|\___|\___|\__\___/|_| \n\n")
+    pwd = os.path.dirname(os.path.realpath(__file__))
+    metrics_configuration = json.load(open(pwd + "/config/metrics.json", "r"))
     metrics = metrics_configuration['metrics']

-    infuxdb_writter = InfuxWritter(influx_configuration)
-
-    print("Connecting to router: " + endpoint)
+    config = configparser.ConfigParser(interpolation=None)
+    config.read(pwd + "/config/config.ini")
+    infuxdb_writer = InfuxWriter(config['influxdb'])
+
+    keenetic_config = config['keenetic']
+    logging.info("Connecting to router: " + keenetic_config['admin_endpoint'])

     collectors = []
-    for metric_configuration in metrics:
-        print("Configuring metric: " + metric_configuration['command'])
-        collectors.append(KeeneticCollector(infuxdb_writter, endpoint, metric_configuration))
-
-    print("Configuration done. Start collecting with interval: " + str(metrics_configuration['interval_sec']) + " sec")
-
-    while True:
-        for collector in collectors: collector.collect()
-        time.sleep(metrics_configuration['interval_sec'])
+    with KeeneticClient(keenetic_config['admin_endpoint'], keenetic_config.getboolean('skip_auth'),
+                        keenetic_config['login'], keenetic_config['password']) as kc:
+        for metric_configuration in metrics:
+            logging.info("Configuring metric: " + metric_configuration['command'])
+            collectors.append(KeeneticCollector(kc, metric_configuration))
+
+        wait_interval = config['collector'].getint('interval_sec')
+        logging.info(f"Configuration done. Start collecting with interval: {wait_interval} sec")
+        while True:
+            for collector in collectors:
+                metrics = collector.collect()
+                infuxdb_writer.write_metrics(metrics)
+            time.sleep(wait_interval)
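Note: each entry in metrics.json drives one `KeeneticCollector`: `command` picks the `show` endpoint, `param` adds query-string arguments, `root` is a jsonpath to the objects to iterate, and `tags`/`values` are jsonpath maps applied to each matched object. A hedged example in the shape `__init__` expects; the jsonpath expressions and names are illustrative, not copied from the repository's metrics.json:

```python
# Illustrative metric definition (keys as read by KeeneticCollector.__init__ above).
from jsonpath_rw import parse

metric_configuration = {
    "command": "processes",       # GET <admin_endpoint>/rci/show/processes
    "param": {},                  # optional query-string parameters
    "root": "$",                  # jsonpath selecting the objects to iterate
    "tags": {"name": "name"},     # tag name -> jsonpath inside each matched object (illustrative)
    "values": {"cpu": "cpu"},     # field name -> jsonpath inside each matched object (illustrative)
}

# The collector pre-compiles every path once, like json_path_init() does:
compiled_values = {name: parse(path) for name, path in metric_configuration["values"].items()}
```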
requirements.txt

@@ -1,2 +1,3 @@
-influxdb==5.3.0
+influxdb==5.3.1
 jsonpath-rw==1.4.0
+requests==2.25.1
value_normalizer.py

@@ -1,24 +1,32 @@
+import logging
 import re


 def isstring(value): return isinstance(value, str)
-def isfloat(value: str): return (re.match(r'^-?\d+(?:\.\d+)?$', value) is not None)
-def isinteger(value: str): return (re.match('^\d+$', value) is not None)
-def isvalidmetric(value) : return isinstance(value, int) or isinstance(value, float) or isinstance(value, bool)
+
+
+def isfloat(value: str): return re.match(r'^-?\d+(?:\.\d+)?$', value) is not None
+
+
+def isinteger(value: str): return re.match(r'^\d+$', value) is not None
+
+
+def isvalidmetric(value): return isinstance(value, int) or isinstance(value, float) or isinstance(value, bool)


 type_mapping = {
-    "yes" : 1,
-    "no" : 0,
-    "up" : 1,
-    "down" : 0,
+    "yes": 1,
+    "no": 0,
+    "up": 1,
+    "down": 0,
     True: 1,
     False: 0,
-    "MOUNTED" : 1,
-    "UNMOUNTED" : 0
+    "MOUNTED": 1,
+    "UNMOUNTED": 0
 }

+
 def normalize_value(value):
     if value is None:
         return None

@@ -30,12 +38,11 @@ def normalize_value(value):
     if isvalidmetric(value):
         return value
     else:
-        print("WARN Value: " + str(value) + " is not valid metric type")
+        logging.warning("Value: " + str(value) + " is not valid metric type")
         return None


 def parse_string(value):
     value = remove_data_unit(value)

     if isinteger(value):