Use admin API instead of an open port (#1)

* influxdb2 integration
* use Python Alpine base image
* add authorization, refactor config
* use logging instead of print, handle Keenetic API exceptions when collecting metrics, a little PEP 8 cleanup
Sergei Samokhvalov authored on 2021-03-24 13:44:34 +03:00 (committed by GitHub)
parent 7661b5819d, commit 16a37bf0d9
12 changed files with 196 additions and 97 deletions

.gitignore

@@ -1,4 +1,4 @@
 __pycache__
 .idea
 *.iml
-config/influx.json
+config/config.ini

Dockerfile

@@ -1,10 +1,11 @@
-FROM python:3-slim
-ADD keentic_influxdb_exporter.py /home
-ADD requirements.txt /home
-ADD value_normalizer.py /home
-ADD influxdb_writter.py /home
-ADD config/metrics.json /home/config/metrics.json
-RUN pip install -r /home/requirements.txt
+FROM python:3.8-alpine AS dependencies
+COPY requirements.txt .
+RUN pip install --no-cache-dir --user --no-warn-script-location -r requirements.txt
+
+FROM python:3.8-alpine AS build-image
+COPY --from=dependencies /root/.local /root/.local
+COPY value_normalizer.py keentic_influxdb_exporter.py influxdb_writter.py keenetic_api.py /home/
 CMD [ "python", "-u", "/home/keentic_influxdb_exporter.py" ]

README.md

@@ -12,29 +12,37 @@
 # Supporter router
-Tested with: Keenetic Ultra (KN-1810) KeeneticOS 3.5.6
+Tested with:
+- Keenetic Ultra (KN-1810) KeeneticOS 3.5.6
+- Keenetic Giga (KN-1010) KeeneticOS 3.5.6
-May works on other Keenetic routers
+May work on other Keenetic routers
 # Preparation
-* Create InfluxDB configuration file `influx.json`
-```json
-{
-  "influxdb": {
-    "host": "<HOST>",
-    "port": 80,
-    "username": "admin",
-    "password": "<PASS>",
-    "db": "keenetic"
-  }
-}
-```
+* Create configuration file `config.ini`
+```ini
+[influxdb]
+host=<HOST>
+port=80
+username=admin
+password=<INFLUX_PASS>
+db=keenetic
+[keenetic]
+skip_auth=false
+admin_endpoint=http://192.168.1.1:80
+login=admin
+password=<KEENETIC_PASS>
+[collector]
+interval_sec=30
+```
 * Copy [metrics.json](https://github.com/vitaliy-sk/keenetic-grafana-monitoring/blob/master/config/metrics.json) and edit (Optional)
-* Expose Keenetic API on your router
+* Create admin user (Users and access -> Create user, allow 'Web interface' and 'Prohibit saving system settings')
+* (Alternative to create user) Expose Keenetic API on your router
 For doing this add port forwarding (Network rules -> Forwarding):
 ```
@@ -45,6 +53,11 @@ Output: This Keenetic
 Open the port: 79
 Destination port: 79
 ```
+Update `config.ini`
+```
+[keenetic]
+skip_auth=true
+```
 * Import Grafana dashboard from [grafana.com](https://grafana.com/grafana/dashboards/12723)
@@ -81,8 +94,8 @@ services:
 #    environment:
 #      - TZ=Europe/Kiev
     volumes:
-      - ./config/influx.json:/home/config/influx.json
-      - ./config/metrics.json:/home/config/metrics.json
+      - ./config/config.ini:/home/config/config.ini:ro
+      - ./config/metrics.json:/home/config/metrics.json:ro
     restart: always
 ```
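The open-port alternative described above can be sanity-checked before setting `skip_auth=true`. A minimal sketch, assuming the forwarding rule from the README; `processes` is simply the command used in the sample `metrics.json`:

```python
# Quick check that the forwarded RCI endpoint answers (open-port setup only).
# The address and port follow the README example; adjust them to your router.
import requests

response = requests.get("http://192.168.1.1:79/rci/show/processes", timeout=5)
response.raise_for_status()
print(list(response.json())[:5])  # a JSON document is expected on success
```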

config/config.ini.sample (new file)

@@ -0,0 +1,13 @@
+[influxdb]
+host=<HOST>
+port=80
+username=admin
+password=<INFLUX_PASS>
+db=keenetic
+[keenetic]
+admin_endpoint=http://192.168.1.1:80
+skip_auth=false
+login=admin
+password=<KEENETIC_PASS>
+[collector]
+interval_sec=30

config/influx.json (deleted)

@@ -1,9 +0,0 @@
-{
-  "influxdb": {
-    "host": "<HOST>",
-    "port": 80,
-    "username": "admin",
-    "password": "<PASS>",
-    "db": "keenetic"
-  }
-}

config/metrics.json

@@ -1,6 +1,4 @@
 {
-  "endpoint" : "http://192.168.1.1:79/rci",
-  "interval_sec" : 30,
   "metrics" : [
     {
       "command": "processes",

docker-compose.yml

@@ -7,6 +7,6 @@ services:
 #    environment:
 #      - TZ=Europe/Kiev
     volumes:
-      - ./config/influx.json:/home/config/influx.json
-      - ./config/metrics.json:/home/config/metrics.json
+      - ./config/config.ini:/home/config/config.ini:ro
+      - ./config/metrics.json:/home/config/metrics.json:ro
     restart: always

influxdb_writter.py

@@ -1,22 +1,27 @@
+import logging
+
 import requests
 from influxdb import InfluxDBClient
 
 
-class InfuxWritter(object):
+class InfuxWriter(object):
 
     def __init__(self, configuration):
         requests.packages.urllib3.disable_warnings()
-        self._configuration = configuration['influxdb']
-        self._client = InfluxDBClient(self._configuration['host'], self._configuration['port'], self._configuration['username'], self._configuration['password'], self._configuration['db'])
+        self._configuration = configuration
+        self._client = InfluxDBClient(self._configuration['host'], self._configuration['port'],
+                                      self._configuration['username'], self._configuration['password'],
+                                      self._configuration['db'])
         self.init_database()
 
     def init_database(self):
-        print("Connecting to InfluxDB: " + self._configuration['host'])
+        logging.info("Connecting to InfluxDB: " + self._configuration['host'])
         db_name = self._configuration['db']
         # self._client.drop_database(db_name)
         if db_name not in self._client.get_list_database():
-            print("Creating InfluxDB database: " + db_name)
+            logging.info("Creating InfluxDB database: " + db_name)
             self._client.create_database(db_name)
 
     def write_metrics(self, metrics):
-        self._client.write_points( metrics )
+        self._client.write_points(metrics)
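Since the renamed `InfuxWriter` now receives the `[influxdb]` section directly instead of the whole parsed JSON, a caller only needs the configparser section. A minimal usage sketch, assuming a `config/config.ini` like the sample above and a reachable InfluxDB instance:

```python
# Minimal sketch: feed InfuxWriter a configparser section and write one point.
# Assumes config/config.ini exists (see config.ini.sample) and InfluxDB is up.
import configparser

from influxdb_writter import InfuxWriter

config = configparser.ConfigParser(interpolation=None)
config.read("config/config.ini")

writer = InfuxWriter(config['influxdb'])  # creates the database if it is missing
writer.write_metrics([
    # write_points() accepts points as dicts with measurement/tags/fields
    {"measurement": "collector", "tags": {"command": "processes"}, "fields": {"duration": 123}}
])
```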

keenetic_api.py (new file)

@@ -0,0 +1,60 @@
+import hashlib
+from hashlib import sha256
+from typing import Dict
+from urllib import parse
+
+from requests import Session
+
+
+class KeeneticClient:
+
+    def __init__(self, admin_endpoint: str, skip_auth: bool, login: str, password: str):
+        self._admin_endpoint = admin_endpoint
+        self._skip_auth = skip_auth
+        self._login = login
+        self._password = password
+
+    def __enter__(self):
+        self._session = Session()
+        return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        if self._session:
+            self._session.close()
+
+    def _auth(self) -> bool:
+        if self._skip_auth:
+            return True
+
+        auth_endpoint = f"{self._admin_endpoint}/auth"
+        check_auth_response = self._session.get(auth_endpoint)
+
+        if check_auth_response.status_code == 401:
+            ndm_challenge = check_auth_response.headers.get('X-NDM-Challenge')
+            ndm_realm = check_auth_response.headers.get('X-NDM-Realm')
+            md5 = hashlib.md5((self._login + ':' + ndm_realm + ':' + self._password).encode('utf-8')).hexdigest()
+            sha = sha256((ndm_challenge + md5).encode('utf-8')).hexdigest()
+            auth_response = self._session.post(auth_endpoint, json={'login': self._login, 'password': sha})
+            if auth_response.status_code == 200:
+                return True
+            else:
+                raise ConnectionError(f"Keenetic authorisation failed. Status {auth_response.status_code}")
+        elif check_auth_response.status_code == 200:
+            return True
+
+        raise ConnectionError(f"Failed to check authorisation, status unknown ({check_auth_response.status_code})")
+
+    def metric(self, command: str, params: Dict) -> Dict:
+        if self._auth():
+            url = f"{self._admin_endpoint}/rci/show/{command.replace(' ', '/')}" + "?" + parse.urlencode(
+                params)
+            r = self._session.get(url)
+            if r.status_code == 200:
+                return r.json()
+            raise KeeneticApiException(r.status_code, r.text)
+        else:
+            raise ConnectionError(f"No keenetic connection.")
+
+
+class KeeneticApiException(Exception):
+    def __init__(self, status_code: int, response_text: str):
+        self.status_code = status_code
+        self.response_text = response_text
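The new `KeeneticClient` answers the NDM challenge-response (MD5 of `login:realm:password`, then SHA-256 of `challenge + md5`) against `/auth` and then queries `/rci/show/<command>`. A minimal usage sketch; the endpoint and credentials are placeholders and `interface` is only an illustrative command:

```python
# Sketch of querying the admin API through the new client. The endpoint and
# credentials are placeholders; "interface" is just an example command.
from keenetic_api import KeeneticApiException, KeeneticClient

with KeeneticClient("http://192.168.1.1:80", skip_auth=False,
                    login="admin", password="<KEENETIC_PASS>") as kc:
    try:
        data = kc.metric("interface", {})  # GET <admin_endpoint>/rci/show/interface
        print(list(data)[:5])
    except KeeneticApiException as e:      # non-200 response from the RCI endpoint
        print(f"API error {e.status_code}: {e.response_text}")
    except ConnectionError as e:           # authorisation failed or status unknown
        print(e)
```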

keentic_influxdb_exporter.py

@@ -1,16 +1,20 @@
+import configparser
 import json
+import logging
 import os
 import time
-import urllib
-
-import requests
+from typing import Dict, List
 
 from jsonpath_rw import parse
 
-from influxdb_writter import InfuxWritter
+from influxdb_writter import InfuxWriter
+from keenetic_api import KeeneticClient, KeeneticApiException
 from value_normalizer import normalize_value
 
+logging.basicConfig(level='INFO', format='%(asctime)s - %(filename)s - %(levelname)s - %(message)s')
 
-def json_path_init(paths):
+
+def json_path_init(paths: Dict[str, str]):
     queries = {}
     for pathName, path in paths.items():
@@ -24,20 +28,21 @@ def json_path_init(paths)
 class KeeneticCollector(object):
 
-    def __init__(self, infuxdb_writter, endpoint, metric_configration):
-        self._influx = infuxdb_writter
-        self._endpoint = endpoint
-        self._command = metric_configration['command']
-        self._params = metric_configration.get('param', {})
-        self._root = parse(metric_configration['root'])
-        self._tags = json_path_init(metric_configration['tags'])
-        self._values = json_path_init(metric_configration['values'])
+    def __init__(self, keenetic_client: KeeneticClient, metric_configuration: Dict[str, object]):
+        self._keenetic_client = keenetic_client
+        self._command: str = metric_configuration['command']
+        self._params = metric_configuration.get('param', {})
+        self._root = parse(metric_configuration['root'])
+        self._tags = json_path_init(metric_configuration['tags'])
+        self._values = json_path_init(metric_configuration['values'])
 
-    def collect(self):
-        url = '{}/show/{}'.format(self._endpoint, self._command.replace(' ', '/')) + "?" + urllib.parse.urlencode(
-            self._params)
-        response = json.loads(requests.get(url).content.decode('UTF-8'))
+    def collect(self) -> List[dict]:
+        try:
+            response = self._keenetic_client.metric(self._command, self._params)
+        except KeeneticApiException as e:
+            logging.warning(f"Skipping metric '{self._command}' collection. Reason keenetic api exception, "
+                            f"status: {e.status_code}, response: {e.response_text}")
+            return []
 
         roots = self._root.find(response)
         metrics = []
@@ -55,9 +60,11 @@ class KeeneticCollector(object):
             for valueName, valuePath in self._values.items():
                 value = self.get_first_value(valuePath.find(root.value))
-                if value is not None: values[valueName] = normalize_value(value)
+                if value is not None:
+                    values[valueName] = normalize_value(value)
 
-            if values.__len__() == 0: continue
+            if values.__len__() == 0:
+                continue
 
             metric = self.create_metric(self._command, tags, values)
             # print(json.dumps(metric))
@@ -66,7 +73,7 @@ class KeeneticCollector(object):
         metrics.append(
             self.create_metric("collector", {"command": self._command}, {"duration": (time.time_ns() - start_time)}))
-        infuxdb_writter.write_metrics(metrics)
+        return metrics
 
     @staticmethod
     def create_metric(measurement, tags, values):
@@ -86,28 +93,31 @@ class KeeneticCollector(object):
 if __name__ == '__main__':
-    print(
-        " _ __ _ _ _____ _ _ _ \n | |/ / | | (_) / ____| | | | | | \n | ' / ___ ___ _ __ ___| |_ _ ___ | | ___ | | | ___ ___| |_ ___ _ __ \n | < / _ \/ _ \ '_ \ / _ \ __| |/ __| | | / _ \| | |/ _ \/ __| __/ _ \| '__|\n | . \ __/ __/ | | | __/ |_| | (__ | |___| (_) | | | __/ (__| || (_) | | \n |_|\_\___|\___|_| |_|\___|\__|_|\___| \_____\___/|_|_|\___|\___|\__\___/|_| \n \n ")
-    metrics_configuration = json.load(open(os.path.dirname(os.path.realpath(__file__)) + "/config/metrics.json", "r"))
-    influx_configuration = json.load(open(os.path.dirname(os.path.realpath(__file__)) + "/config/influx.json", "r"))
-    endpoint = metrics_configuration['endpoint']
+    logging.info("\n\n" +
+                 " _ __ _ _ _____ _ _ _ \n | |/ / | | (_) / ____| | | | | | \n | ' / ___ ___ _ __ ___| |_ _ ___ | | ___ | | | ___ ___| |_ ___ _ __ \n | < / _ \/ _ \ '_ \ / _ \ __| |/ __| | | / _ \| | |/ _ \/ __| __/ _ \| '__|\n | . \ __/ __/ | | | __/ |_| | (__ | |___| (_) | | | __/ (__| || (_) | | \n |_|\_\___|\___|_| |_|\___|\__|_|\___| \_____\___/|_|_|\___|\___|\__\___/|_| \n\n")
+    pwd = os.path.dirname(os.path.realpath(__file__))
+    metrics_configuration = json.load(open(pwd + "/config/metrics.json", "r"))
     metrics = metrics_configuration['metrics']
+    config = configparser.ConfigParser(interpolation=None)
+    config.read(pwd + "/config/config.ini")
+    infuxdb_writer = InfuxWriter(config['influxdb'])
+    keenetic_config = config['keenetic']
+    logging.info("Connecting to router: " + keenetic_config['admin_endpoint'])
     collectors = []
-    infuxdb_writter = InfuxWritter(influx_configuration)
-    print("Connecting to router: " + endpoint)
-    for metric_configuration in metrics:
-        print("Configuring metric: " + metric_configuration['command'])
-        collectors.append(KeeneticCollector(infuxdb_writter, endpoint, metric_configuration))
-    print("Configuration done. Start collecting with interval: " + str(metrics_configuration['interval_sec']) + " sec")
-    while True:
-        for collector in collectors: collector.collect()
-        time.sleep(metrics_configuration['interval_sec'])
+    with KeeneticClient(keenetic_config['admin_endpoint'], keenetic_config.getboolean('skip_auth'),
+                        keenetic_config['login'], keenetic_config['password']) as kc:
+        for metric_configuration in metrics:
+            logging.info("Configuring metric: " + metric_configuration['command'])
+            collectors.append(KeeneticCollector(kc, metric_configuration))
+
+        wait_interval = config['collector'].getint('interval_sec')
+        logging.info(f"Configuration done. Start collecting with interval: {wait_interval} sec")
+        while True:
+            for collector in collectors:
+                metrics = collector.collect()
+                infuxdb_writer.write_metrics(metrics)
+            time.sleep(wait_interval)
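`collect()` now returns the batch instead of writing it, and the main loop passes it to `InfuxWriter.write_metrics()`. `create_metric()` itself is outside the shown hunks; assuming it builds standard influxdb-client points, a returned batch would look roughly like this (tags and field values are purely illustrative):

```python
# Rough shape of a batch returned by collect(), assuming create_metric() emits
# standard influxdb-python points; the concrete tags and fields are made up.
example_batch = [
    {"measurement": "processes",
     "tags": {"name": "ndm"},
     "fields": {"cpu": 3}},
    {"measurement": "collector",        # timing point appended at the end of collect()
     "tags": {"command": "processes"},
     "fields": {"duration": 1520000}},  # time.time_ns() delta in nanoseconds
]
```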

requirements.txt

@@ -1,2 +1,3 @@
-influxdb==5.3.0
+influxdb==5.3.1
 jsonpath-rw==1.4.0
+requests==2.25.1

value_normalizer.py

@@ -1,24 +1,32 @@
+import logging
 import re
 
+
 def isstring(value): return isinstance(value, str)
-def isfloat(value: str): return (re.match(r'^-?\d+(?:\.\d+)?$', value) is not None)
-def isinteger(value: str): return (re.match('^\d+$', value) is not None)
-def isvalidmetric(value) : return isinstance(value, int) or isinstance(value, float) or isinstance(value, bool)
+
+
+def isfloat(value: str): return re.match(r'^-?\d+(?:\.\d+)?$', value) is not None
+
+
+def isinteger(value: str): return re.match(r'^\d+$', value) is not None
+
+
+def isvalidmetric(value): return isinstance(value, int) or isinstance(value, float) or isinstance(value, bool)
+
 
 type_mapping = {
-    "yes" : 1,
-    "no" : 0,
-    "up" : 1,
-    "down" : 0,
+    "yes": 1,
+    "no": 0,
+    "up": 1,
+    "down": 0,
     True: 1,
     False: 0,
-    "MOUNTED" : 1,
-    "UNMOUNTED" : 0
+    "MOUNTED": 1,
+    "UNMOUNTED": 0
 }
 
+
 def normalize_value(value):
     if value is None:
         return None
@@ -30,12 +38,11 @@ def normalize_value(value):
     if isvalidmetric(value):
         return value
     else:
-        print("WARN Value: " + str(value) + " is not valid metric type")
+        logging.warning("Value: " + str(value) + " is not valid metric type")
         return None
 
+
 def parse_string(value):
     value = remove_data_unit(value)
     if isinteger(value):
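`normalize_value()` turns textual and boolean states into numbers via `type_mapping` and rejects anything that is not an int, float or bool, now logging a warning instead of printing. A few illustrative calls, assuming string states are looked up in `type_mapping` before the final validity check (that part of the function lies outside the shown hunks):

```python
# Illustrative behaviour; the string lookup in type_mapping is assumed, since
# that part of normalize_value() is not visible in this diff.
from value_normalizer import normalize_value

assert normalize_value(True) == 1          # booleans map to 1/0
assert normalize_value("UNMOUNTED") == 0   # assumed: state strings go through type_mapping
assert normalize_value(42) == 42           # ints/floats pass isvalidmetric() unchanged
assert normalize_value(None) is None       # missing values are dropped
```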