From 506f044bfacdbf2e57448e4dd6f796b16fe764d0 Mon Sep 17 00:00:00 2001 From: Aryamanz29 Date: Mon, 6 Jun 2022 14:52:40 +0530 Subject: [PATCH 01/64] [feature] Implement Iperf check #385 - Added initial code for Iperf check class. - Added tests. Closes #385 --- openwisp_monitoring/check/apps.py | 8 +++ openwisp_monitoring/check/base/models.py | 24 ++++++++- openwisp_monitoring/check/classes/__init__.py | 1 + openwisp_monitoring/check/classes/iperf.py | 49 ++++++++++++++++++ openwisp_monitoring/check/settings.py | 11 ++++ openwisp_monitoring/check/tasks.py | 27 ++++++++++ .../check/tests/test_models.py | 50 +++++++++++++++---- openwisp_monitoring/check/tests/test_ping.py | 2 +- .../device/tests/test_transactions.py | 2 + openwisp_monitoring/tests/test_selenium.py | 2 + tests/openwisp2/settings.py | 2 + 11 files changed, 165 insertions(+), 13 deletions(-) create mode 100644 openwisp_monitoring/check/classes/iperf.py diff --git a/openwisp_monitoring/check/apps.py b/openwisp_monitoring/check/apps.py index 15c9832f0..8f030cf4f 100644 --- a/openwisp_monitoring/check/apps.py +++ b/openwisp_monitoring/check/apps.py @@ -32,3 +32,11 @@ def _connect_signals(self): sender=load_model('config', 'Device'), dispatch_uid='auto_config_check', ) + if app_settings.AUTO_IPERF: + from .base.models import auto_iperf_check_receiver + + post_save.connect( + auto_iperf_check_receiver, + sender=load_model('config', 'Device'), + dispatch_uid='auto_iperf_check', + ) diff --git a/openwisp_monitoring/check/base/models.py b/openwisp_monitoring/check/base/models.py index a0bab9e66..e4abad47a 100644 --- a/openwisp_monitoring/check/base/models.py +++ b/openwisp_monitoring/check/base/models.py @@ -9,7 +9,11 @@ from jsonfield import JSONField from openwisp_monitoring.check import settings as app_settings -from openwisp_monitoring.check.tasks import auto_create_config_check, auto_create_ping +from openwisp_monitoring.check.tasks import ( + auto_create_config_check, + auto_create_iperf_check, + auto_create_ping, +) from openwisp_utils.base import TimeStampedEditableModel from ...utils import transaction_on_commit @@ -116,3 +120,21 @@ def auto_config_check_receiver(sender, instance, created, **kwargs): object_id=str(instance.pk), ) ) + + +def auto_iperf_check_receiver(sender, instance, created, **kwargs): + """ + Implements OPENWISP_MONITORING_AUTO_IPERF + The creation step is executed in the background + """ + # we need to skip this otherwise this task will be executed + # every time the configuration is requested via checksum + if not created: + return + transaction_on_commit( + lambda: auto_create_iperf_check.delay( + model=sender.__name__.lower(), + app_label=sender._meta.app_label, + object_id=str(instance.pk), + ) + ) diff --git a/openwisp_monitoring/check/classes/__init__.py b/openwisp_monitoring/check/classes/__init__.py index 33bf8293c..4a85b5243 100644 --- a/openwisp_monitoring/check/classes/__init__.py +++ b/openwisp_monitoring/check/classes/__init__.py @@ -1,2 +1,3 @@ from .config_applied import ConfigApplied # noqa +from .iperf import Iperf # noqa from .ping import Ping # noqa diff --git a/openwisp_monitoring/check/classes/iperf.py b/openwisp_monitoring/check/classes/iperf.py new file mode 100644 index 000000000..80698be23 --- /dev/null +++ b/openwisp_monitoring/check/classes/iperf.py @@ -0,0 +1,49 @@ +from swapper import load_model + +from .base import BaseCheck + +Chart = load_model('monitoring', 'Chart') +Metric = load_model('monitoring', 'Metric') +Device = load_model('config', 'Device') +DeviceData = 
load_model('device_monitoring', 'DeviceData') +Credentials = load_model('connection', 'Credentials') +AlertSettings = load_model('monitoring', 'AlertSettings') +DeviceConnection = load_model('connection', 'DeviceConnection') + + +class Iperf(BaseCheck): + def check(self, store=True): + pass + + def store_result(self, result): + """ + store result in the DB + """ + pass + + def _get_iperf_servers(self): + """ + Get iperf test servers + """ + pass + + def _get_iperf_result(self, mode=None): + """ + Get iperf test result + """ + pass + + def _get_metric(self): + """ + Gets or creates metric + """ + pass + + def _create_charts(self, metric): + """ + Creates iperf related charts (Bandwith/Jitter) + """ + pass + + def _create_alert_settings(self, metric): + pass diff --git a/openwisp_monitoring/check/settings.py b/openwisp_monitoring/check/settings.py index 4575c8eca..9ebab74cc 100644 --- a/openwisp_monitoring/check/settings.py +++ b/openwisp_monitoring/check/settings.py @@ -5,9 +5,20 @@ ( ('openwisp_monitoring.check.classes.Ping', 'Ping'), ('openwisp_monitoring.check.classes.ConfigApplied', 'Configuration Applied'), + ('openwisp_monitoring.check.classes.Iperf', 'Iperf'), ), ) AUTO_PING = get_settings_value('AUTO_PING', True) AUTO_CONFIG_CHECK = get_settings_value('AUTO_DEVICE_CONFIG_CHECK', True) MANAGEMENT_IP_ONLY = get_settings_value('MANAGEMENT_IP_ONLY', True) PING_CHECK_CONFIG = get_settings_value('PING_CHECK_CONFIG', {}) +# By default it should be disabled. +AUTO_IPERF = get_settings_value('AUTO_IPERF', False) +# IPERF_SERVERS = get_settings_value( +# 'IPERF_SERVERS', +# { +# # Running on my local +# 'be63c4e5-a68a-4650-bfe8-733837edb8be': ['172.19.0.1'], +# # '': [''] +# }, +# ) diff --git a/openwisp_monitoring/check/tasks.py b/openwisp_monitoring/check/tasks.py index 2ae62bc0d..9a509ea2b 100644 --- a/openwisp_monitoring/check/tasks.py +++ b/openwisp_monitoring/check/tasks.py @@ -100,3 +100,30 @@ def auto_create_config_check( ) check.full_clean() check.save() + + +@shared_task +def auto_create_iperf_check( + model, app_label, object_id, check_model=None, content_type_model=None +): + """ + Called by openwisp_monitoring.check.models.auto_iperf_check_receiver + """ + Check = check_model or get_check_model() + iperf_check_path = 'openwisp_monitoring.check.classes.Iperf' + has_check = Check.objects.filter( + object_id=object_id, content_type__model='device', check_type=iperf_check_path + ).exists() + # create new check only if necessary + if has_check: + return + content_type_model = content_type_model or ContentType + ct = content_type_model.objects.get(app_label=app_label, model=model) + check = Check( + name='Iperf', + check_type=iperf_check_path, + content_type=ct, + object_id=object_id, + ) + check.full_clean() + check.save() diff --git a/openwisp_monitoring/check/tests/test_models.py b/openwisp_monitoring/check/tests/test_models.py index abbf8ed13..3bb8e13f6 100644 --- a/openwisp_monitoring/check/tests/test_models.py +++ b/openwisp_monitoring/check/tests/test_models.py @@ -9,8 +9,8 @@ from ...device.tests import TestDeviceMonitoringMixin from .. 
import settings as app_settings -from ..classes import ConfigApplied, Ping -from ..tasks import auto_create_config_check, auto_create_ping +from ..classes import ConfigApplied, Iperf, Ping +from ..tasks import auto_create_config_check, auto_create_iperf_check, auto_create_ping Check = load_model('check', 'Check') Metric = load_model('monitoring', 'Metric') @@ -22,6 +22,7 @@ class TestModels(TestDeviceMonitoringMixin, TransactionTestCase): _PING = app_settings.CHECK_CLASSES[0][0] _CONFIG_APPLIED = app_settings.CHECK_CLASSES[1][0] + _IPERF = app_settings.CHECK_CLASSES[2][0] def test_check_str(self): c = Check(name='Test check') @@ -48,6 +49,12 @@ def test_check_class(self): check_type=self._CONFIG_APPLIED, ) self.assertEqual(c.check_class, ConfigApplied) + with self.subTest('Test Iperf check Class'): + c = Check( + name='Iperf class check', + check_type=self._IPERF, + ) + self.assertEqual(c.check_class, Iperf) def test_base_check_class(self): path = 'openwisp_monitoring.check.classes.base.BaseCheck' @@ -82,6 +89,18 @@ def test_check_instance(self): self.assertEqual(i.related_object, obj) self.assertEqual(i.params, c.params) + with self.subTest('Test Iperf check instance'): + c = Check( + name='Iperf class check', + check_type=self._IPERF, + content_object=obj, + params={}, + ) + i = c.check_instance + self.assertIsInstance(i, Iperf) + self.assertEqual(i.related_object, obj) + self.assertEqual(i.params, c.params) + def test_validation(self): with self.subTest('Test Ping check validation'): check = Check(name='Ping check', check_type=self._PING, params={}) @@ -105,7 +124,7 @@ def test_validation(self): def test_auto_check_creation(self): self.assertEqual(Check.objects.count(), 0) d = self._create_device(organization=self._create_org()) - self.assertEqual(Check.objects.count(), 2) + self.assertEqual(Check.objects.count(), 3) with self.subTest('Test AUTO_PING'): c1 = Check.objects.filter(check_type=self._PING).first() self.assertEqual(c1.content_object, d) @@ -114,11 +133,15 @@ def test_auto_check_creation(self): c2 = Check.objects.filter(check_type=self._CONFIG_APPLIED).first() self.assertEqual(c2.content_object, d) self.assertEqual(self._CONFIG_APPLIED, c2.check_type) + with self.subTest('Test AUTO_IPERF'): + c3 = Check.objects.filter(check_type=self._IPERF).first() + self.assertEqual(c3.content_object, d) + self.assertEqual(self._IPERF, c3.check_type) def test_device_deleted(self): self.assertEqual(Check.objects.count(), 0) d = self._create_device(organization=self._create_org()) - self.assertEqual(Check.objects.count(), 2) + self.assertEqual(Check.objects.count(), 3) d.delete() self.assertEqual(Check.objects.count(), 0) @@ -129,7 +152,7 @@ def test_config_modified_device_problem(self): self._create_config(status='modified', organization=self._create_org()) d = Device.objects.first() d.monitoring.update_status('ok') - self.assertEqual(Check.objects.count(), 2) + self.assertEqual(Check.objects.count(), 3) self.assertEqual(Metric.objects.count(), 0) self.assertEqual(AlertSettings.objects.count(), 0) check = Check.objects.filter(check_type=self._CONFIG_APPLIED).first() @@ -159,7 +182,7 @@ def test_config_error(self): self._create_config(status='error', organization=self._create_org()) dm = Device.objects.first().monitoring dm.update_status('ok') - self.assertEqual(Check.objects.count(), 2) + self.assertEqual(Check.objects.count(), 3) self.assertEqual(Metric.objects.count(), 0) self.assertEqual(AlertSettings.objects.count(), 0) check = Check.objects.filter(check_type=self._CONFIG_APPLIED).first() 
@@ -192,7 +215,7 @@ def test_config_error(self): @patch('openwisp_monitoring.check.settings.AUTO_PING', False) def test_config_check_critical_metric(self): self._create_config(status='modified', organization=self._create_org()) - self.assertEqual(Check.objects.count(), 2) + self.assertEqual(Check.objects.count(), 3) d = Device.objects.first() dm = d.monitoring dm.update_status('ok') @@ -211,7 +234,7 @@ def test_config_check_critical_metric(self): def test_no_duplicate_check_created(self): self._create_config(organization=self._create_org()) - self.assertEqual(Check.objects.count(), 2) + self.assertEqual(Check.objects.count(), 3) d = Device.objects.first() auto_create_config_check.delay( model=Device.__name__.lower(), @@ -223,13 +246,18 @@ def test_no_duplicate_check_created(self): app_label=Device._meta.app_label, object_id=str(d.pk), ) - self.assertEqual(Check.objects.count(), 2) + auto_create_iperf_check.delay( + model=Device.__name__.lower(), + app_label=Device._meta.app_label, + object_id=str(d.pk), + ) + self.assertEqual(Check.objects.count(), 3) def test_device_unreachable_no_config_check(self): self._create_config(status='modified', organization=self._create_org()) d = self.device_model.objects.first() d.monitoring.update_status('critical') - self.assertEqual(Check.objects.count(), 2) + self.assertEqual(Check.objects.count(), 3) c2 = Check.objects.filter(check_type=self._CONFIG_APPLIED).first() c2.perform_check() self.assertEqual(Metric.objects.count(), 0) @@ -240,7 +268,7 @@ def test_device_unknown_no_config_check(self): self._create_config(status='modified', organization=self._create_org()) d = self.device_model.objects.first() d.monitoring.update_status('unknown') - self.assertEqual(Check.objects.count(), 2) + self.assertEqual(Check.objects.count(), 3) c2 = Check.objects.filter(check_type=self._CONFIG_APPLIED).first() c2.perform_check() self.assertEqual(Metric.objects.count(), 0) diff --git a/openwisp_monitoring/check/tests/test_ping.py b/openwisp_monitoring/check/tests/test_ping.py index 11b9ee47b..23b94652d 100644 --- a/openwisp_monitoring/check/tests/test_ping.py +++ b/openwisp_monitoring/check/tests/test_ping.py @@ -239,7 +239,7 @@ def test_store_result(self, mocked_method): device.management_ip = '10.40.0.1' device.save() # check created automatically by autoping - self.assertEqual(Check.objects.count(), 2) + self.assertEqual(Check.objects.count(), 3) self.assertEqual(Metric.objects.count(), 0) self.assertEqual(Chart.objects.count(), 0) self.assertEqual(AlertSettings.objects.count(), 0) diff --git a/openwisp_monitoring/device/tests/test_transactions.py b/openwisp_monitoring/device/tests/test_transactions.py index 5fd3d9698..1cbdb5165 100644 --- a/openwisp_monitoring/device/tests/test_transactions.py +++ b/openwisp_monitoring/device/tests/test_transactions.py @@ -62,6 +62,8 @@ def test_trigger_device_recovery_task_regression( dm = self._create_device_monitoring() dm.device.management_ip = None dm.device.save() + # Delete iperf check to prevent unnecessary response timeout + Check.objects.filter(check_type__endswith='Iperf').delete() trigger_device_checks.delay(dm.device.pk) self.assertTrue(Check.objects.exists()) # we expect update_status() to be called once (by the check) diff --git a/openwisp_monitoring/tests/test_selenium.py b/openwisp_monitoring/tests/test_selenium.py index 78e02ca96..8de1c8b2b 100644 --- a/openwisp_monitoring/tests/test_selenium.py +++ b/openwisp_monitoring/tests/test_selenium.py @@ -89,6 +89,8 @@ def test_restoring_deleted_device(self): org = 
self._get_org() self._create_credentials(auto_add=True, organization=org) device = self._create_config(organization=org).device + # Delete iperf check to prevent unnecessary response timeout + Check.objects.filter(check_type__endswith='Iperf').delete() device_data = DeviceData.objects.get(id=device.id) device_checks = device_data.checks.all() for check in device_checks: diff --git a/tests/openwisp2/settings.py b/tests/openwisp2/settings.py index e3559e13e..bedb5e401 100644 --- a/tests/openwisp2/settings.py +++ b/tests/openwisp2/settings.py @@ -196,6 +196,8 @@ OPENWISP_MONITORING_MAC_VENDOR_DETECTION = False OPENWISP_MONITORING_API_URLCONF = 'openwisp_monitoring.urls' OPENWISP_MONITORING_API_BASEURL = 'http://testserver' + # for testing AUTO_IPERF + OPENWISP_MONITORING_AUTO_IPERF = True # Temporarily added to identify slow tests TEST_RUNNER = 'openwisp_utils.tests.TimeLoggingTestRunner' From 6bd112b85b34fb0ff7ada391849cdc8be9d36743 Mon Sep 17 00:00:00 2001 From: Aryamanz29 Date: Sun, 12 Jun 2022 00:39:34 +0530 Subject: [PATCH 02/64] [feature] Added device connection logic --- openwisp_monitoring/check/classes/iperf.py | 96 ++++++++++++++++++++-- openwisp_monitoring/check/settings.py | 18 ++-- 2 files changed, 98 insertions(+), 16 deletions(-) diff --git a/openwisp_monitoring/check/classes/iperf.py b/openwisp_monitoring/check/classes/iperf.py index 80698be23..637ba89f1 100644 --- a/openwisp_monitoring/check/classes/iperf.py +++ b/openwisp_monitoring/check/classes/iperf.py @@ -1,5 +1,11 @@ +import json + +from django.core.exceptions import ObjectDoesNotExist from swapper import load_model +from openwisp_controller.connection.settings import UPDATE_STRATEGIES + +from .. import settings as app_settings from .base import BaseCheck Chart = load_model('monitoring', 'Chart') @@ -13,24 +19,100 @@ class Iperf(BaseCheck): def check(self, store=True): - pass + try: + device = self.related_object + device_connection = self._check_device_connection(device) + if device_connection: + device_connection.connect() + print(f'--- [{self.related_object}] is connected! ---') + servers = self._get_iperf_servers(device.organization.id) + command = f'iperf3 -c {servers[0]} -J' + res, exit_code = device_connection.connector_instance.exec_command( + command, raise_unexpected_exit=False + ) + if exit_code != 0: + print('---- Command Failed ----') + if store: + self.store_result( + { + 'iperf_result': 0, + 'sum_sent_bps': 0.0, + 'sum_rec_bps': 0.0, + 'sum_sent_bytes': 0.0, + 'sum_rec_bytes': 0.0, + 'sum_sent_retransmits': 0, + } + ) + return + else: + result_dict = self._get_iperf_result(res) + print('---- Command Output ----') + print(result_dict) + if store: + self.store_result(result_dict) + return result_dict + else: + print( + f'{self.related_object}: connection not properly set, Iperf skipped!' 
+ ) + return + # If device have not active connection warning logged (return) + except ObjectDoesNotExist: + print(f'{self.related_object}: has no active connection, Iperf skipped!') + return - def store_result(self, result): + def _check_device_connection(self, device): """ - store result in the DB + Check device has an active connection with right update_strategy(ssh) """ - pass + openwrt_ssh = UPDATE_STRATEGIES[0][0] + device_connection = DeviceConnection.objects.get(device_id=device.id) + if device_connection.update_strategy == openwrt_ssh: + if device_connection.enabled and device_connection.is_working: + return device_connection + else: + return False + else: + return False - def _get_iperf_servers(self): + def _get_iperf_servers(self, organization): """ Get iperf test servers """ - pass + org_servers = app_settings.IPERF_SERVERS.get(str(organization)) + return org_servers - def _get_iperf_result(self, mode=None): + def _get_iperf_result(self, res, mode=None): """ Get iperf test result """ + res_dict = json.loads(res) + if mode is None: + result = { + 'iperf_result': 1, + 'sum_sent_bps': round( + res_dict['end']['sum_sent']['bits_per_second'] / 1000000000, 2 + ), + 'sum_rec_bps': round( + res_dict['end']['sum_received']['bits_per_second'] / 1000000000, 2 + ), + 'sum_sent_bytes': round( + res_dict['end']['sum_sent']['bytes'] / 1000000000, 2 + ), + 'sum_rec_bytes': round( + res_dict['end']['sum_received']['bytes'] / 1000000000, 2 + ), + 'sum_sent_retransmits': res_dict['end']['sum_sent']['retransmits'], + } + return result + # For UDP + else: + pass + + def store_result(self, result): + """ + store result in the DB + """ pass def _get_metric(self): diff --git a/openwisp_monitoring/check/settings.py b/openwisp_monitoring/check/settings.py index 9ebab74cc..3bda6e0fb 100644 --- a/openwisp_monitoring/check/settings.py +++ b/openwisp_monitoring/check/settings.py @@ -13,12 +13,12 @@ MANAGEMENT_IP_ONLY = get_settings_value('MANAGEMENT_IP_ONLY', True) PING_CHECK_CONFIG = get_settings_value('PING_CHECK_CONFIG', {}) # By default it should be disabled. 
-AUTO_IPERF = get_settings_value('AUTO_IPERF', False) -# IPERF_SERVERS = get_settings_value( -# 'IPERF_SERVERS', -# { -# # Running on my local -# 'be63c4e5-a68a-4650-bfe8-733837edb8be': ['172.19.0.1'], -# # '': [''] -# }, -# ) +AUTO_IPERF = get_settings_value('AUTO_IPERF', True) +IPERF_SERVERS = get_settings_value( + 'IPERF_SERVERS', + { + # Running on my local + 'be63c4e5-a68a-4650-bfe8-733837edb8be': ['192.168.5.109'], + # '': [''] + }, +) From c47467ef97bc91783babd7f8ffcaa9a06fd23f45 Mon Sep 17 00:00:00 2001 From: Aryamanz29 Date: Mon, 13 Jun 2022 17:41:03 +0530 Subject: [PATCH 03/64] [feature] Added charts and metric (TCP) --- openwisp_monitoring/check/classes/iperf.py | 60 ++++++++++++------- openwisp_monitoring/check/settings.py | 2 + .../db/backends/influxdb/queries.py | 23 +++++++ .../monitoring/configuration.py | 50 ++++++++++++++++ 4 files changed, 114 insertions(+), 21 deletions(-) diff --git a/openwisp_monitoring/check/classes/iperf.py b/openwisp_monitoring/check/classes/iperf.py index 637ba89f1..d56280174 100644 --- a/openwisp_monitoring/check/classes/iperf.py +++ b/openwisp_monitoring/check/classes/iperf.py @@ -36,13 +36,14 @@ def check(self, store=True): self.store_result( { 'iperf_result': 0, - 'sum_sent_bps': 0.0, - 'sum_rec_bps': 0.0, - 'sum_sent_bytes': 0.0, - 'sum_rec_bytes': 0.0, - 'sum_sent_retransmits': 0, + 'sent_bps': 0.0, + 'received_bps': 0.0, + 'sent_bytes': 0.0, + 'received_bytes': 0.0, + 'retransmits': 0, } ) + device_connection.disconnect() return else: result_dict = self._get_iperf_result(res) @@ -50,6 +51,7 @@ def check(self, store=True): print(result_dict) if store: self.store_result(result_dict) + device_connection.disconnect() return result_dict else: print( @@ -88,21 +90,27 @@ def _get_iperf_result(self, res, mode=None): """ res_dict = json.loads(res) if mode is None: + # Gbps = Gigabits per second + # GB = GigaBytes + sent_json = res_dict['end']['sum_sent'] + recv_json = res_dict['end']['sum_received'] + sent_bytes = sent_json['bytes'] + sent_bytes_GB = sent_bytes / 1000000000 + sent_bps = sent_json['bits_per_second'] + sent_Gbps = sent_bps / 1000000000 + received_bytes = recv_json['bytes'] + received_bytes_GB = received_bytes / 1000000000 + received_bps = recv_json['bits_per_second'] + received_Gbps = received_bps / 1000000000 + retransmits = sent_json['retransmits'] + result = { 'iperf_result': 1, - 'sum_sent_bps': round( - res_dict['end']['sum_sent']['bits_per_second'] / 1000000000, 2 - ), - 'sum_rec_bps': round( - res_dict['end']['sum_received']['bits_per_second'] / 1000000000, 2 - ), - 'sum_sent_bytes': round( - res_dict['end']['sum_sent']['bytes'] / 1000000000, 2 - ), - 'sum_rec_bytes': round( - res_dict['end']['sum_received']['bytes'] / 1000000000, 2 - ), - 'sum_sent_retransmits': res_dict['end']['sum_sent']['retransmits'], + 'sent_bps': round(sent_Gbps, 2), + 'received_bps': round(received_Gbps, 2), + 'sent_bytes': round(sent_bytes_GB, 2), + 'received_bytes': round(received_bytes_GB, 2), + 'retransmits': retransmits, } return result # For UDP @@ -113,19 +121,29 @@ def store_result(self, result): """ store result in the DB """ - pass + metric = self._get_metric() + copied = result.copy() + iperf_result = copied.pop('iperf_result') + metric.write(iperf_result, extra_values=copied) def _get_metric(self): """ Gets or creates metric """ - pass + metric, created = self._get_or_create_metric() + if created: + self._create_charts(metric) + return metric def _create_charts(self, metric): """ Creates iperf related charts (Bandwith/Jitter) """ - pass + 
charts = ['bitrate', 'transfer', 'retransmits'] + for chart in charts: + chart = Chart(metric=metric, configuration=chart) + chart.full_clean() + chart.save() def _create_alert_settings(self, metric): pass diff --git a/openwisp_monitoring/check/settings.py b/openwisp_monitoring/check/settings.py index 3bda6e0fb..46f913acc 100644 --- a/openwisp_monitoring/check/settings.py +++ b/openwisp_monitoring/check/settings.py @@ -18,6 +18,8 @@ 'IPERF_SERVERS', { # Running on my local + # Some Public Iperf Servers : https://iperf.fr/iperf-servers.php#public-servers + # 'be63c4e5-a68a-4650-bfe8-733837edb8be': ['iperf.biznetnetworks.com'], 'be63c4e5-a68a-4650-bfe8-733837edb8be': ['192.168.5.109'], # '': [''] }, diff --git a/openwisp_monitoring/db/backends/influxdb/queries.py b/openwisp_monitoring/db/backends/influxdb/queries.py index 11f048096..a540a45d5 100644 --- a/openwisp_monitoring/db/backends/influxdb/queries.py +++ b/openwisp_monitoring/db/backends/influxdb/queries.py @@ -100,6 +100,29 @@ "object_id = '{object_id}' GROUP BY time(1d)" ) }, + 'bitrate': { + 'influxdb': ( + "SELECT MEAN(sent_bps) AS sent, " + "MEAN(received_bps) AS received FROM {key} WHERE " + "time >= '{time}' AND content_type = '{content_type}' AND " + "object_id = '{object_id}' GROUP BY time(1d)" + ) + }, + 'transfer': { + 'influxdb': ( + "SELECT MEAN(sent_bytes) AS sent, " + "MEAN(received_bytes) AS received FROM {key} WHERE " + "time >= '{time}' AND content_type = '{content_type}' AND " + "object_id = '{object_id}' GROUP BY time(1d)" + ) + }, + 'retransmits': { + 'influxdb': ( + "SELECT MEAN(retransmits) AS retransmits FROM {key} " + "WHERE time >= '{time}' AND content_type = '{content_type}' " + "AND object_id = '{object_id}' GROUP BY time(1d)" + ) + }, } default_chart_query = [ diff --git a/openwisp_monitoring/monitoring/configuration.py b/openwisp_monitoring/monitoring/configuration.py index 9c097e01c..77134fa17 100644 --- a/openwisp_monitoring/monitoring/configuration.py +++ b/openwisp_monitoring/monitoring/configuration.py @@ -542,6 +542,56 @@ def _get_access_tech(): } }, }, + 'iperf': { + 'label': _('Iperf'), + 'name': 'Iperf', + 'key': 'iperf', + 'field_name': 'iperf_result', + 'related_fields': [ + 'sent_bps', + 'received_bps', + 'sent_bytes', + 'received_bytes', + 'retransmits', + ], + 'charts': { + 'bitrate': { + 'type': 'stackedbar', + 'title': _('Bandwidth'), + 'description': _('Iperf3 bitrate in TCP mode.'), + 'summary_labels': [ + _('Sent bitrate'), + _('Received bitrate'), + ], + 'unit': _(' Gbps'), + 'order': 280, + 'query': chart_query['bitrate'], + 'colors': (DEFAULT_COLORS[5], DEFAULT_COLORS[9]), + }, + 'transfer': { + 'type': 'stackedbar', + 'title': _('Transfer'), + 'description': _('Iperf3 transfer in TCP mode.'), + 'summary_labels': [ + _('Sent bytes'), + _('Received bytes'), + ], + 'unit': _(' GB'), + 'order': 290, + 'query': chart_query['transfer'], + 'colors': (DEFAULT_COLORS[2], DEFAULT_COLORS[4]), + }, + 'retransmits': { + 'type': 'bar', + 'title': _('Retransmits'), + 'colors': (DEFAULT_COLORS[4]), + 'description': _('No. 
of retransmits during Iperf3 test in TCP mode.'), + 'unit': '', + 'order': 300, + 'query': chart_query['retransmits'], + }, + }, + }, } DEFAULT_CHARTS = {} From 488990b941bab99f046237bddf061d6a665d95de Mon Sep 17 00:00:00 2001 From: Aryamanz29 Date: Thu, 16 Jun 2022 17:20:51 +0530 Subject: [PATCH 04/64] [feauture] UDP mode added --- openwisp_monitoring/check/classes/iperf.py | 97 ++++++++++++++----- .../db/backends/influxdb/queries.py | 22 +++++ .../monitoring/configuration.py | 54 ++++++++++- 3 files changed, 143 insertions(+), 30 deletions(-) diff --git a/openwisp_monitoring/check/classes/iperf.py b/openwisp_monitoring/check/classes/iperf.py index d56280174..a8a4b34b0 100644 --- a/openwisp_monitoring/check/classes/iperf.py +++ b/openwisp_monitoring/check/classes/iperf.py @@ -11,7 +11,7 @@ Chart = load_model('monitoring', 'Chart') Metric = load_model('monitoring', 'Metric') Device = load_model('config', 'Device') -DeviceData = load_model('device_monitoring', 'DeviceData') +DeviceMonitoring = load_model('device_monitoring', 'DeviceMonitoring') Credentials = load_model('connection', 'Credentials') AlertSettings = load_model('monitoring', 'AlertSettings') DeviceConnection = load_model('connection', 'DeviceConnection') @@ -30,29 +30,33 @@ def check(self, store=True): res, exit_code = device_connection.connector_instance.exec_command( command, raise_unexpected_exit=False ) - if exit_code != 0: - print('---- Command Failed ----') - if store: - self.store_result( - { - 'iperf_result': 0, - 'sent_bps': 0.0, - 'received_bps': 0.0, - 'sent_bytes': 0.0, - 'received_bytes': 0.0, - 'retransmits': 0, - } - ) + if store and exit_code != 0: + print('---- Command Failed (TCP)----') + self.store_result_fail() device_connection.disconnect() return else: - result_dict = self._get_iperf_result(res) - print('---- Command Output ----') - print(result_dict) - if store: - self.store_result(result_dict) + result_dict_tcp = self._get_iperf_result(res, mode='TCP') + print('---- Command Output (TCP) ----') + print(result_dict_tcp) + # UDP + command = f'iperf3 -c {servers[0]} -u -J' + res, exit_code = device_connection.connector_instance.exec_command( + command, raise_unexpected_exit=False + ) + if store and exit_code != 0: + print('---- Command Failed (UDP) ----') + self.store_result_fail() + device_connection.disconnect() + return + else: + result_dict_udp = self._get_iperf_result(res, mode='UDP') + print('---- Command Output (UDP) ----') + print(result_dict_udp) + if store: + self.store_result({**result_dict_tcp, **result_dict_udp}) device_connection.disconnect() - return result_dict + return {**result_dict_tcp, **result_dict_udp} else: print( f'{self.related_object}: connection not properly set, Iperf skipped!' 
@@ -69,8 +73,13 @@ def _check_device_connection(self, device): """ openwrt_ssh = UPDATE_STRATEGIES[0][0] device_connection = DeviceConnection.objects.get(device_id=device.id) + device_monitoring = DeviceMonitoring.objects.get(device_id=device.id) if device_connection.update_strategy == openwrt_ssh: - if device_connection.enabled and device_connection.is_working: + if ( + device_connection.enabled + and device_connection.is_working + and device_monitoring.status in ['ok', 'problem'] + ): return device_connection else: return False @@ -84,12 +93,12 @@ def _get_iperf_servers(self, organization): org_servers = app_settings.IPERF_SERVERS.get(str(organization)) return org_servers - def _get_iperf_result(self, res, mode=None): + def _get_iperf_result(self, res, mode): """ Get iperf test result """ res_dict = json.loads(res) - if mode is None: + if mode == 'TCP': # Gbps = Gigabits per second # GB = GigaBytes sent_json = res_dict['end']['sum_sent'] @@ -114,8 +123,18 @@ def _get_iperf_result(self, res, mode=None): } return result # For UDP - else: - pass + elif mode == 'UDP': + jitter_ms = res_dict['end']['sum']['jitter_ms'] + packets = res_dict['end']['sum']['packets'] + lost_packets = res_dict['end']['sum']['lost_packets'] + lost_percent = res_dict['end']['sum']['lost_percent'] + result = { + 'jitter': round(jitter_ms, 2), + 'packets': packets, + 'lost_packets': lost_packets, + 'lost_percent': lost_percent, + } + return result def store_result(self, result): """ @@ -126,6 +145,25 @@ def store_result(self, result): iperf_result = copied.pop('iperf_result') metric.write(iperf_result, extra_values=copied) + def store_result_fail(self): + """ + store fail result in the DB + """ + self.store_result( + { + 'iperf_result': 0, + 'sent_bps': 0.0, + 'received_bps': 0.0, + 'sent_bytes': 0.0, + 'received_bytes': 0.0, + 'retransmits': 0, + 'jitter': 0.0, + 'packets': 0, + 'lost_packets': 0, + 'lost_percent': 0, + } + ) + def _get_metric(self): """ Gets or creates metric @@ -139,7 +177,14 @@ def _create_charts(self, metric): """ Creates iperf related charts (Bandwith/Jitter) """ - charts = ['bitrate', 'transfer', 'retransmits'] + charts = [ + 'bitrate', + 'transfer', + 'retransmits', + 'jitter', + 'datagram', + 'datagram_loss', + ] for chart in charts: chart = Chart(metric=metric, configuration=chart) chart.full_clean() diff --git a/openwisp_monitoring/db/backends/influxdb/queries.py b/openwisp_monitoring/db/backends/influxdb/queries.py index a540a45d5..7352cf4c3 100644 --- a/openwisp_monitoring/db/backends/influxdb/queries.py +++ b/openwisp_monitoring/db/backends/influxdb/queries.py @@ -123,6 +123,28 @@ "AND object_id = '{object_id}' GROUP BY time(1d)" ) }, + 'jitter': { + 'influxdb': ( + "SELECT MEAN(jitter) AS jitter FROM {key} " + "WHERE time >= '{time}' AND content_type = '{content_type}' " + "AND object_id = '{object_id}' GROUP BY time(1d)" + ) + }, + 'datagram': { + 'influxdb': ( + "SELECT MEAN(lost_packets) AS lost_datagram," + "MEAN(packets) AS total_datagram FROM {key} WHERE " + "time >= '{time}' AND content_type = '{content_type}' " + "AND object_id = '{object_id}' GROUP BY time(1d)" + ) + }, + 'datagram_loss': { + 'influxdb': ( + "SELECT MEAN(lost_percent) AS datagram_loss FROM {key} " + "WHERE time >= '{time}' AND content_type = '{content_type}' " + "AND object_id = '{object_id}' GROUP BY time(1d)" + ) + }, } default_chart_query = [ diff --git a/openwisp_monitoring/monitoring/configuration.py b/openwisp_monitoring/monitoring/configuration.py index 77134fa17..fdd877bbb 100644 --- 
a/openwisp_monitoring/monitoring/configuration.py +++ b/openwisp_monitoring/monitoring/configuration.py @@ -553,12 +553,16 @@ def _get_access_tech(): 'sent_bytes', 'received_bytes', 'retransmits', + 'jitter', + 'packets', + 'lost_packets', + 'lost_percent', ], 'charts': { 'bitrate': { 'type': 'stackedbar', 'title': _('Bandwidth'), - 'description': _('Iperf3 bitrate in TCP mode.'), + 'description': _('Bitrate during Iperf3 test in TCP mode.'), 'summary_labels': [ _('Sent bitrate'), _('Received bitrate'), @@ -571,7 +575,7 @@ def _get_access_tech(): 'transfer': { 'type': 'stackedbar', 'title': _('Transfer'), - 'description': _('Iperf3 transfer in TCP mode.'), + 'description': _('Total transfer during Iperf3 test in TCP mode.'), 'summary_labels': [ _('Sent bytes'), _('Received bytes'), @@ -584,12 +588,54 @@ def _get_access_tech(): 'retransmits': { 'type': 'bar', 'title': _('Retransmits'), - 'colors': (DEFAULT_COLORS[4]), + 'colors': [DEFAULT_COLORS[-3]], 'description': _('No. of retransmits during Iperf3 test in TCP mode.'), - 'unit': '', 'order': 300, 'query': chart_query['retransmits'], }, + 'jitter': { + 'type': 'scatter', + 'title': _('Jitter'), + 'description': _( + 'Jitter is a variance in latency measured using Iperf3 utility in UDP mode' + ), + 'summary_labels': [ + _('Jitter'), + ], + 'unit': _(' ms'), + 'order': 310, + 'query': chart_query['jitter'], + 'colors': [DEFAULT_COLORS[4]], + }, + 'datagram': { + 'type': 'stackedbar', + 'title': _('Datagram'), + 'description': _( + 'Lost/total datagram ratio during Iperf3 test in UDP mode' + ), + 'summary_labels': [ + _('Lost datagram'), + _('Total datagram'), + ], + 'unit': _(''), + 'order': 320, + 'query': chart_query['datagram'], + 'colors': [DEFAULT_COLORS[3], DEFAULT_COLORS[2]], + }, + 'datagram_loss': { + 'type': 'scatter', + 'title': _('Datagram Loss'), + 'description': _( + 'Indicates datagram loss during Iperf3 test in UDP mode.' + ), + 'summary_labels': [ + _('Datagram loss'), + ], + 'unit': '%', + 'order': 330, + 'query': chart_query['datagram_loss'], + 'colors': [DEFAULT_COLORS[8]], + }, }, }, } From f890ba95c3291bff34875dad9213c00dfd345b65 Mon Sep 17 00:00:00 2001 From: Aryamanz29 Date: Mon, 20 Jun 2022 18:49:50 +0530 Subject: [PATCH 05/64] [chores] Removed unnecessary code --- openwisp_monitoring/check/classes/iperf.py | 30 +++++++------------ openwisp_monitoring/check/settings.py | 2 +- .../monitoring/configuration.py | 4 +-- 3 files changed, 14 insertions(+), 22 deletions(-) diff --git a/openwisp_monitoring/check/classes/iperf.py b/openwisp_monitoring/check/classes/iperf.py index a8a4b34b0..03830ae21 100644 --- a/openwisp_monitoring/check/classes/iperf.py +++ b/openwisp_monitoring/check/classes/iperf.py @@ -1,4 +1,5 @@ import json +import logging from django.core.exceptions import ObjectDoesNotExist from swapper import load_model @@ -8,10 +9,11 @@ from .. import settings as app_settings from .base import BaseCheck +logger = logging.getLogger(__name__) + Chart = load_model('monitoring', 'Chart') Metric = load_model('monitoring', 'Metric') Device = load_model('config', 'Device') -DeviceMonitoring = load_model('device_monitoring', 'DeviceMonitoring') Credentials = load_model('connection', 'Credentials') AlertSettings = load_model('monitoring', 'AlertSettings') DeviceConnection = load_model('connection', 'DeviceConnection') @@ -24,47 +26,38 @@ def check(self, store=True): device_connection = self._check_device_connection(device) if device_connection: device_connection.connect() - print(f'--- [{self.related_object}] is connected! 
---') servers = self._get_iperf_servers(device.organization.id) command = f'iperf3 -c {servers[0]} -J' res, exit_code = device_connection.connector_instance.exec_command( command, raise_unexpected_exit=False ) if store and exit_code != 0: - print('---- Command Failed (TCP)----') self.store_result_fail() device_connection.disconnect() return else: result_dict_tcp = self._get_iperf_result(res, mode='TCP') - print('---- Command Output (TCP) ----') - print(result_dict_tcp) # UDP command = f'iperf3 -c {servers[0]} -u -J' res, exit_code = device_connection.connector_instance.exec_command( command, raise_unexpected_exit=False ) if store and exit_code != 0: - print('---- Command Failed (UDP) ----') self.store_result_fail() device_connection.disconnect() return else: result_dict_udp = self._get_iperf_result(res, mode='UDP') - print('---- Command Output (UDP) ----') - print(result_dict_udp) if store: self.store_result({**result_dict_tcp, **result_dict_udp}) device_connection.disconnect() return {**result_dict_tcp, **result_dict_udp} else: - print( - f'{self.related_object}: connection not properly set, Iperf skipped!' - ) + logger.warning(f'{device}: connection not properly set, Iperf skipped!') return # If device have not active connection warning logged (return) except ObjectDoesNotExist: - print(f'{self.related_object}: has no active connection, Iperf skipped!') + logger.warning(f'{device}: connection not properly set, Iperf skipped!') return def _check_device_connection(self, device): @@ -73,12 +66,11 @@ def _check_device_connection(self, device): """ openwrt_ssh = UPDATE_STRATEGIES[0][0] device_connection = DeviceConnection.objects.get(device_id=device.id) - device_monitoring = DeviceMonitoring.objects.get(device_id=device.id) if device_connection.update_strategy == openwrt_ssh: if ( device_connection.enabled and device_connection.is_working - and device_monitoring.status in ['ok', 'problem'] + and device.monitoring.status in ['ok', 'problem'] ): return device_connection else: @@ -86,11 +78,11 @@ def _check_device_connection(self, device): else: return False - def _get_iperf_servers(self, organization): + def _get_iperf_servers(self, organization_id): """ Get iperf test servers """ - org_servers = app_settings.IPERF_SERVERS.get(str(organization)) + org_servers = app_settings.IPERF_SERVERS.get(str(organization_id)) return org_servers def _get_iperf_result(self, res, mode): @@ -127,12 +119,12 @@ def _get_iperf_result(self, res, mode): jitter_ms = res_dict['end']['sum']['jitter_ms'] packets = res_dict['end']['sum']['packets'] lost_packets = res_dict['end']['sum']['lost_packets'] - lost_percent = res_dict['end']['sum']['lost_percent'] + lost_percent = float(res_dict['end']['sum']['lost_percent']) result = { 'jitter': round(jitter_ms, 2), 'packets': packets, 'lost_packets': lost_packets, - 'lost_percent': lost_percent, + 'lost_percent': round(lost_percent, 2), } return result @@ -160,7 +152,7 @@ def store_result_fail(self): 'jitter': 0.0, 'packets': 0, 'lost_packets': 0, - 'lost_percent': 0, + 'lost_percent': 0.0, } ) diff --git a/openwisp_monitoring/check/settings.py b/openwisp_monitoring/check/settings.py index 46f913acc..5cdedbde6 100644 --- a/openwisp_monitoring/check/settings.py +++ b/openwisp_monitoring/check/settings.py @@ -20,7 +20,7 @@ # Running on my local # Some Public Iperf Servers : https://iperf.fr/iperf-servers.php#public-servers # 'be63c4e5-a68a-4650-bfe8-733837edb8be': ['iperf.biznetnetworks.com'], - 'be63c4e5-a68a-4650-bfe8-733837edb8be': ['192.168.5.109'], + 
'a9734710-db30-46b0-a2fc-01f01046fe4f': ['speedtest.uztelecom.uz'], # '': [''] }, ) diff --git a/openwisp_monitoring/monitoring/configuration.py b/openwisp_monitoring/monitoring/configuration.py index fdd877bbb..65db8bcfe 100644 --- a/openwisp_monitoring/monitoring/configuration.py +++ b/openwisp_monitoring/monitoring/configuration.py @@ -570,7 +570,7 @@ def _get_access_tech(): 'unit': _(' Gbps'), 'order': 280, 'query': chart_query['bitrate'], - 'colors': (DEFAULT_COLORS[5], DEFAULT_COLORS[9]), + 'colors': [DEFAULT_COLORS[5], DEFAULT_COLORS[9]], }, 'transfer': { 'type': 'stackedbar', @@ -583,7 +583,7 @@ def _get_access_tech(): 'unit': _(' GB'), 'order': 290, 'query': chart_query['transfer'], - 'colors': (DEFAULT_COLORS[2], DEFAULT_COLORS[4]), + 'colors': [DEFAULT_COLORS[2], DEFAULT_COLORS[4]], }, 'retransmits': { 'type': 'bar', From a6ec0dc06db4b7d14dce066b4210ca83f32fd8f0 Mon Sep 17 00:00:00 2001 From: Aryamanz29 Date: Tue, 21 Jun 2022 18:44:17 +0530 Subject: [PATCH 06/64] [refactor] Refactor device connection logic - Refactor device connection logic, removed try-except. - Removed redundant line from test_selenium.py. --- openwisp_monitoring/check/classes/iperf.py | 79 ++++++++++------------ openwisp_monitoring/tests/test_selenium.py | 4 +- 2 files changed, 35 insertions(+), 48 deletions(-) diff --git a/openwisp_monitoring/check/classes/iperf.py b/openwisp_monitoring/check/classes/iperf.py index 03830ae21..d63d714b0 100644 --- a/openwisp_monitoring/check/classes/iperf.py +++ b/openwisp_monitoring/check/classes/iperf.py @@ -1,7 +1,6 @@ import json import logging -from django.core.exceptions import ObjectDoesNotExist from swapper import load_model from openwisp_controller.connection.settings import UPDATE_STRATEGIES @@ -21,13 +20,26 @@ class Iperf(BaseCheck): def check(self, store=True): - try: - device = self.related_object - device_connection = self._check_device_connection(device) - if device_connection: - device_connection.connect() - servers = self._get_iperf_servers(device.organization.id) - command = f'iperf3 -c {servers[0]} -J' + device = self.related_object + device_connection = self._get_device_connection(device) + if not device_connection: + logger.warning(f'{device}: connection not properly set, Iperf skipped!') + return + else: + device_connection.connect() + servers = self._get_iperf_servers(device.organization.id) + command = f'iperf3 -c {servers[0]} -J' + res, exit_code = device_connection.connector_instance.exec_command( + command, raise_unexpected_exit=False + ) + if store and exit_code != 0: + self.store_result_fail() + device_connection.disconnect() + return + else: + result_dict_tcp = self._get_iperf_result(res, mode='TCP') + # UDP + command = f'iperf3 -c {servers[0]} -u -J' res, exit_code = device_connection.connector_instance.exec_command( command, raise_unexpected_exit=False ) @@ -36,47 +48,24 @@ def check(self, store=True): device_connection.disconnect() return else: - result_dict_tcp = self._get_iperf_result(res, mode='TCP') - # UDP - command = f'iperf3 -c {servers[0]} -u -J' - res, exit_code = device_connection.connector_instance.exec_command( - command, raise_unexpected_exit=False - ) - if store and exit_code != 0: - self.store_result_fail() - device_connection.disconnect() - return - else: - result_dict_udp = self._get_iperf_result(res, mode='UDP') - if store: - self.store_result({**result_dict_tcp, **result_dict_udp}) - device_connection.disconnect() - return {**result_dict_tcp, **result_dict_udp} - else: - logger.warning(f'{device}: connection not properly 
set, Iperf skipped!') - return - # If device have not active connection warning logged (return) - except ObjectDoesNotExist: - logger.warning(f'{device}: connection not properly set, Iperf skipped!') - return + result_dict_udp = self._get_iperf_result(res, mode='UDP') + if store: + self.store_result({**result_dict_tcp, **result_dict_udp}) + device_connection.disconnect() + return {**result_dict_tcp, **result_dict_udp} - def _check_device_connection(self, device): + def _get_device_connection(self, device): """ - Check device has an active connection with right update_strategy(ssh) + Returns an active SSH DeviceConnection for a device. """ openwrt_ssh = UPDATE_STRATEGIES[0][0] - device_connection = DeviceConnection.objects.get(device_id=device.id) - if device_connection.update_strategy == openwrt_ssh: - if ( - device_connection.enabled - and device_connection.is_working - and device.monitoring.status in ['ok', 'problem'] - ): - return device_connection - else: - return False - else: - return False + device_connection = DeviceConnection.objects.filter( + device_id=device.id, + update_strategy=openwrt_ssh, + enabled=True, + is_working=True, + ).first() + return device_connection def _get_iperf_servers(self, organization_id): """ diff --git a/openwisp_monitoring/tests/test_selenium.py b/openwisp_monitoring/tests/test_selenium.py index 8de1c8b2b..40685c836 100644 --- a/openwisp_monitoring/tests/test_selenium.py +++ b/openwisp_monitoring/tests/test_selenium.py @@ -89,8 +89,6 @@ def test_restoring_deleted_device(self): org = self._get_org() self._create_credentials(auto_add=True, organization=org) device = self._create_config(organization=org).device - # Delete iperf check to prevent unnecessary response timeout - Check.objects.filter(check_type__endswith='Iperf').delete() device_data = DeviceData.objects.get(id=device.id) device_checks = device_data.checks.all() for check in device_checks: @@ -104,7 +102,7 @@ def test_restoring_deleted_device(self): ).values_list('id', flat=True) self.assertEqual(len(device_alert_setting_ids), 2) self.assertEqual(len(device_metric_ids), 2) - self.assertEqual(len(device_checks), 2) + self.assertEqual(len(device_checks), 3) self.assertEqual(len(device_chart_ids), 3) self.login() From f3a79a5dff088d7e1d2aef7d339c9aef0b201911 Mon Sep 17 00:00:00 2001 From: Aryamanz29 Date: Wed, 22 Jun 2022 00:38:29 +0530 Subject: [PATCH 07/64] [chores] Move Iperf server settings to project settings --- openwisp_monitoring/check/settings.py | 11 +---------- tests/openwisp2/settings.py | 8 ++++++++ 2 files changed, 9 insertions(+), 10 deletions(-) diff --git a/openwisp_monitoring/check/settings.py b/openwisp_monitoring/check/settings.py index 5cdedbde6..e04061cdf 100644 --- a/openwisp_monitoring/check/settings.py +++ b/openwisp_monitoring/check/settings.py @@ -14,13 +14,4 @@ PING_CHECK_CONFIG = get_settings_value('PING_CHECK_CONFIG', {}) # By default it should be disabled. 
AUTO_IPERF = get_settings_value('AUTO_IPERF', True) -IPERF_SERVERS = get_settings_value( - 'IPERF_SERVERS', - { - # Running on my local - # Some Public Iperf Servers : https://iperf.fr/iperf-servers.php#public-servers - # 'be63c4e5-a68a-4650-bfe8-733837edb8be': ['iperf.biznetnetworks.com'], - 'a9734710-db30-46b0-a2fc-01f01046fe4f': ['speedtest.uztelecom.uz'], - # '': [''] - }, -) +IPERF_SERVERS = get_settings_value('IPERF_SERVERS', {}) diff --git a/tests/openwisp2/settings.py b/tests/openwisp2/settings.py index bedb5e401..1b7e84006 100644 --- a/tests/openwisp2/settings.py +++ b/tests/openwisp2/settings.py @@ -271,6 +271,14 @@ # Celery auto detects tasks only from INSTALLED_APPS CELERY_IMPORTS = ('openwisp_monitoring.device.tasks',) +OPENWISP_MONITORING_IPERF_SERVERS = { + # Running on my local + # Some Public Iperf Servers : https://iperf.fr/iperf-servers.php#public-servers + # 'be63c4e5-a68a-4650-bfe8-733837edb8be': ['iperf.biznetnetworks.com'], + 'a9734710-db30-46b0-a2fc-01f01046fe4f': ['speedtest.uztelecom.uz'], + # '': [''] +} + # local settings must be imported before test runner otherwise they'll be ignored try: from openwisp2.local_settings import * From 69f63fcaa77527c3a24efe88be179339a412b4dc Mon Sep 17 00:00:00 2001 From: Aryamanz29 Date: Wed, 22 Jun 2022 13:14:26 +0530 Subject: [PATCH 08/64] [chores] Remove redundant else in guard clause --- openwisp_monitoring/check/classes/iperf.py | 37 +++++++++++----------- 1 file changed, 18 insertions(+), 19 deletions(-) diff --git a/openwisp_monitoring/check/classes/iperf.py b/openwisp_monitoring/check/classes/iperf.py index d63d714b0..b9797e905 100644 --- a/openwisp_monitoring/check/classes/iperf.py +++ b/openwisp_monitoring/check/classes/iperf.py @@ -25,10 +25,20 @@ def check(self, store=True): if not device_connection: logger.warning(f'{device}: connection not properly set, Iperf skipped!') return + device_connection.connect() + servers = self._get_iperf_servers(device.organization.id) + command = f'iperf3 -c {servers[0]} -J' + res, exit_code = device_connection.connector_instance.exec_command( + command, raise_unexpected_exit=False + ) + if store and exit_code != 0: + self.store_result_fail() + device_connection.disconnect() + return else: - device_connection.connect() - servers = self._get_iperf_servers(device.organization.id) - command = f'iperf3 -c {servers[0]} -J' + result_dict_tcp = self._get_iperf_result(res, mode='TCP') + # UDP + command = f'iperf3 -c {servers[0]} -u -J' res, exit_code = device_connection.connector_instance.exec_command( command, raise_unexpected_exit=False ) @@ -37,22 +47,11 @@ def check(self, store=True): device_connection.disconnect() return else: - result_dict_tcp = self._get_iperf_result(res, mode='TCP') - # UDP - command = f'iperf3 -c {servers[0]} -u -J' - res, exit_code = device_connection.connector_instance.exec_command( - command, raise_unexpected_exit=False - ) - if store and exit_code != 0: - self.store_result_fail() - device_connection.disconnect() - return - else: - result_dict_udp = self._get_iperf_result(res, mode='UDP') - if store: - self.store_result({**result_dict_tcp, **result_dict_udp}) - device_connection.disconnect() - return {**result_dict_tcp, **result_dict_udp} + result_dict_udp = self._get_iperf_result(res, mode='UDP') + if store: + self.store_result({**result_dict_tcp, **result_dict_udp}) + device_connection.disconnect() + return {**result_dict_tcp, **result_dict_udp} def _get_device_connection(self, device): """ From 9a346efc3fafe216a30b5a0c957f594d822c704d Mon Sep 17 00:00:00 2001 
From: Aryamanz29 Date: Thu, 23 Jun 2022 15:25:31 +0530 Subject: [PATCH 09/64] [fix] Fixed iperf check execution if device connection not working #399 - We need to check device_connection is_working just right after connect(). - because it may be possible that authentication (publickey) failed. - or any other failure happened during connect(). Fixes #399 --- openwisp_monitoring/check/classes/iperf.py | 6 ++++++ tests/openwisp2/settings.py | 3 ++- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/openwisp_monitoring/check/classes/iperf.py b/openwisp_monitoring/check/classes/iperf.py index b9797e905..5c10a45c4 100644 --- a/openwisp_monitoring/check/classes/iperf.py +++ b/openwisp_monitoring/check/classes/iperf.py @@ -26,6 +26,12 @@ def check(self, store=True): logger.warning(f'{device}: connection not properly set, Iperf skipped!') return device_connection.connect() + # We need to check device_connection is_working just right after connect(). + # because it may be possible that authentication (publickey) failed. + # or any other failure happened during connect(). + if not device_connection.is_working: + logger.warning(f'{device}: SSH connection is not working, Iperf skipped!') + return servers = self._get_iperf_servers(device.organization.id) command = f'iperf3 -c {servers[0]} -J' res, exit_code = device_connection.connector_instance.exec_command( diff --git a/tests/openwisp2/settings.py b/tests/openwisp2/settings.py index 1b7e84006..21db9dab6 100644 --- a/tests/openwisp2/settings.py +++ b/tests/openwisp2/settings.py @@ -275,7 +275,8 @@ # Running on my local # Some Public Iperf Servers : https://iperf.fr/iperf-servers.php#public-servers # 'be63c4e5-a68a-4650-bfe8-733837edb8be': ['iperf.biznetnetworks.com'], - 'a9734710-db30-46b0-a2fc-01f01046fe4f': ['speedtest.uztelecom.uz'], + # 'a9734710-db30-46b0-a2fc-01f01046fe4f': ['speedtest.uztelecom.uz'], + 'a9734710-db30-46b0-a2fc-01f01046fe4f': ['192.168.5.109'], # '': [''] } From 9383f3a340945d316f3f4d20e7f3fdce7cbe1b97 Mon Sep 17 00:00:00 2001 From: Aryamanz29 Date: Fri, 24 Jun 2022 13:21:45 +0530 Subject: [PATCH 10/64] [chores] Minor changes in configuration - Removed redundant test code. --- openwisp_monitoring/device/tests/test_transactions.py | 2 -- openwisp_monitoring/monitoring/configuration.py | 3 ++- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/openwisp_monitoring/device/tests/test_transactions.py b/openwisp_monitoring/device/tests/test_transactions.py index 1cbdb5165..5fd3d9698 100644 --- a/openwisp_monitoring/device/tests/test_transactions.py +++ b/openwisp_monitoring/device/tests/test_transactions.py @@ -62,8 +62,6 @@ def test_trigger_device_recovery_task_regression( dm = self._create_device_monitoring() dm.device.management_ip = None dm.device.save() - # Delete iperf check to prevent unnecessary response timeout - Check.objects.filter(check_type__endswith='Iperf').delete() trigger_device_checks.delay(dm.device.pk) self.assertTrue(Check.objects.exists()) # we expect update_status() to be called once (by the check) diff --git a/openwisp_monitoring/monitoring/configuration.py b/openwisp_monitoring/monitoring/configuration.py index 65db8bcfe..11b8b7efb 100644 --- a/openwisp_monitoring/monitoring/configuration.py +++ b/openwisp_monitoring/monitoring/configuration.py @@ -590,6 +590,7 @@ def _get_access_tech(): 'title': _('Retransmits'), 'colors': [DEFAULT_COLORS[-3]], 'description': _('No. 
of retransmits during Iperf3 test in TCP mode.'), + 'unit': '', 'order': 300, 'query': chart_query['retransmits'], }, @@ -617,7 +618,7 @@ def _get_access_tech(): _('Lost datagram'), _('Total datagram'), ], - 'unit': _(''), + 'unit': '', 'order': 320, 'query': chart_query['datagram'], 'colors': [DEFAULT_COLORS[3], DEFAULT_COLORS[2]], From bf7dfd5fd259da12054d149fa60edee448b68eba Mon Sep 17 00:00:00 2001 From: Aryamanz29 Date: Fri, 24 Jun 2022 18:31:17 +0530 Subject: [PATCH 11/64] [requested-changes] Changed chart conf and added log warnings - Changed transfer chart configuration - Added more descriptive log warnings. - Logged non-zero exit code warnings. --- openwisp_monitoring/check/classes/iperf.py | 21 ++++++++++++------- .../db/backends/influxdb/queries.py | 5 +++-- .../monitoring/configuration.py | 15 +++++++++---- 3 files changed, 28 insertions(+), 13 deletions(-) diff --git a/openwisp_monitoring/check/classes/iperf.py b/openwisp_monitoring/check/classes/iperf.py index 5c10a45c4..38f12562b 100644 --- a/openwisp_monitoring/check/classes/iperf.py +++ b/openwisp_monitoring/check/classes/iperf.py @@ -23,14 +23,15 @@ def check(self, store=True): device = self.related_object device_connection = self._get_device_connection(device) if not device_connection: - logger.warning(f'{device}: connection not properly set, Iperf skipped!') + logger.warning( + f'DeviceConnection is not properly set for "{device}", iperf check skipped!' + ) return - device_connection.connect() - # We need to check device_connection is_working just right after connect(). - # because it may be possible that authentication (publickey) failed. - # or any other failure happened during connect(). - if not device_connection.is_working: - logger.warning(f'{device}: SSH connection is not working, Iperf skipped!') + # The DeviceConnection could fail if the management tunnel is down. + if not device_connection.connect(): + logger.warning( + f'Failed to get a working DeviceConnection for "{device}", iperf check skipped!' 
+ ) return servers = self._get_iperf_servers(device.organization.id) command = f'iperf3 -c {servers[0]} -J' @@ -38,6 +39,9 @@ def check(self, store=True): command, raise_unexpected_exit=False ) if store and exit_code != 0: + logger.warning( + f'Iperf check failed for "{device}", {json.loads(res)["error"]}' + ) self.store_result_fail() device_connection.disconnect() return @@ -49,6 +53,9 @@ def check(self, store=True): command, raise_unexpected_exit=False ) if store and exit_code != 0: + logger.warning( + f'Iperf check failed for "{device}", {json.loads(res)["error"]}' + ) self.store_result_fail() device_connection.disconnect() return diff --git a/openwisp_monitoring/db/backends/influxdb/queries.py b/openwisp_monitoring/db/backends/influxdb/queries.py index 7352cf4c3..ec4a575db 100644 --- a/openwisp_monitoring/db/backends/influxdb/queries.py +++ b/openwisp_monitoring/db/backends/influxdb/queries.py @@ -110,8 +110,9 @@ }, 'transfer': { 'influxdb': ( - "SELECT MEAN(sent_bytes) AS sent, " - "MEAN(received_bytes) AS received FROM {key} WHERE " + "SELECT SUM(received_bytes) AS received, " + "SUM(sent_bytes) AS sent," + "SUM(sent_bytes) + SUM(received_bytes) AS total FROM {key} WHERE " "time >= '{time}' AND content_type = '{content_type}' AND " "object_id = '{object_id}' GROUP BY time(1d)" ) diff --git a/openwisp_monitoring/monitoring/configuration.py b/openwisp_monitoring/monitoring/configuration.py index 11b8b7efb..0a5f8dc4a 100644 --- a/openwisp_monitoring/monitoring/configuration.py +++ b/openwisp_monitoring/monitoring/configuration.py @@ -573,17 +573,24 @@ def _get_access_tech(): 'colors': [DEFAULT_COLORS[5], DEFAULT_COLORS[9]], }, 'transfer': { - 'type': 'stackedbar', + 'type': 'stackedbar+lines', 'title': _('Transfer'), - 'description': _('Total transfer during Iperf3 test in TCP mode.'), + 'trace_type': { + 'received': 'stackedbar', + 'sent': 'stackedbar', + 'total': 'lines', + }, + 'trace_order': ['total', 'received', 'sent'], + 'description': _('Transfer during Iperf3 test in TCP mode.'), 'summary_labels': [ - _('Sent bytes'), + _('Total bytes'), _('Received bytes'), + _('Sent bytes'), ], 'unit': _(' GB'), 'order': 290, 'query': chart_query['transfer'], - 'colors': [DEFAULT_COLORS[2], DEFAULT_COLORS[4]], + 'colors': [DEFAULT_COLORS[7], DEFAULT_COLORS[2], DEFAULT_COLORS[4]], }, 'retransmits': { 'type': 'bar', From 9d98a66d46039c27f3048af2366aec579e681cc4 Mon Sep 17 00:00:00 2001 From: Aryamanz29 Date: Mon, 27 Jun 2022 01:17:55 +0530 Subject: [PATCH 12/64] [change] Handled iperf in each mode --- openwisp_monitoring/check/classes/iperf.py | 64 +++++++++++----------- 1 file changed, 33 insertions(+), 31 deletions(-) diff --git a/openwisp_monitoring/check/classes/iperf.py b/openwisp_monitoring/check/classes/iperf.py index 38f12562b..21d5f5e9c 100644 --- a/openwisp_monitoring/check/classes/iperf.py +++ b/openwisp_monitoring/check/classes/iperf.py @@ -38,33 +38,34 @@ def check(self, store=True): res, exit_code = device_connection.connector_instance.exec_command( command, raise_unexpected_exit=False ) + if exit_code != 0: + logger.warning( + f'Iperf check failed for "{device}", {json.loads(res)["error"]}' + ) + result_tcp = self._get_fail_result(mode='TCP') + else: + result_tcp = self._get_iperf_result(res, mode='TCP') + command = f'iperf3 -c {servers[0]} -u -J' + res, exit_code = device_connection.connector_instance.exec_command( + command, raise_unexpected_exit=False + ) if store and exit_code != 0: logger.warning( f'Iperf check failed for "{device}", {json.loads(res)["error"]}' ) - 
self.store_result_fail() + result_udp = self._get_fail_result(mode='UDP') + iperf_result = result_tcp['iperf_result'] + self.store_result( + {**result_tcp, **result_udp, 'iperf_result': iperf_result} + ) device_connection.disconnect() return else: - result_dict_tcp = self._get_iperf_result(res, mode='TCP') - # UDP - command = f'iperf3 -c {servers[0]} -u -J' - res, exit_code = device_connection.connector_instance.exec_command( - command, raise_unexpected_exit=False - ) - if store and exit_code != 0: - logger.warning( - f'Iperf check failed for "{device}", {json.loads(res)["error"]}' - ) - self.store_result_fail() - device_connection.disconnect() - return - else: - result_dict_udp = self._get_iperf_result(res, mode='UDP') - if store: - self.store_result({**result_dict_tcp, **result_dict_udp}) - device_connection.disconnect() - return {**result_dict_tcp, **result_dict_udp} + result_udp = self._get_iperf_result(res, mode='UDP') + if store: + self.store_result({**result_tcp, **result_udp}) + device_connection.disconnect() + return {**result_tcp, **result_udp} def _get_device_connection(self, device): """ @@ -105,8 +106,7 @@ def _get_iperf_result(self, res, mode): received_bps = recv_json['bits_per_second'] received_Gbps = received_bps / 1000000000 retransmits = sent_json['retransmits'] - - result = { + return { 'iperf_result': 1, 'sent_bps': round(sent_Gbps, 2), 'received_bps': round(received_Gbps, 2), @@ -114,20 +114,19 @@ def _get_iperf_result(self, res, mode): 'received_bytes': round(received_bytes_GB, 2), 'retransmits': retransmits, } - return result - # For UDP + elif mode == 'UDP': jitter_ms = res_dict['end']['sum']['jitter_ms'] packets = res_dict['end']['sum']['packets'] lost_packets = res_dict['end']['sum']['lost_packets'] lost_percent = float(res_dict['end']['sum']['lost_percent']) - result = { + return { + 'iperf_result': 1, 'jitter': round(jitter_ms, 2), 'packets': packets, 'lost_packets': lost_packets, 'lost_percent': round(lost_percent, 2), } - return result def store_result(self, result): """ @@ -138,24 +137,27 @@ def store_result(self, result): iperf_result = copied.pop('iperf_result') metric.write(iperf_result, extra_values=copied) - def store_result_fail(self): + def _get_fail_result(self, mode): """ - store fail result in the DB + Get fail test result in the DB """ - self.store_result( - { + if mode == 'TCP': + return { 'iperf_result': 0, 'sent_bps': 0.0, 'received_bps': 0.0, 'sent_bytes': 0.0, 'received_bytes': 0.0, 'retransmits': 0, + } + elif mode == 'UDP': + return { + 'iperf_result': 0, 'jitter': 0.0, 'packets': 0, 'lost_packets': 0, 'lost_percent': 0.0, } - ) def _get_metric(self): """ From 3050edf18d15c9679e0fe4047883d28ad502ac87 Mon Sep 17 00:00:00 2001 From: Aryamanz29 Date: Mon, 27 Jun 2022 18:05:19 +0530 Subject: [PATCH 13/64] [refactor] Iperf check class #405 Closes #405 --- openwisp_monitoring/check/classes/iperf.py | 161 ++++++++++----------- 1 file changed, 80 insertions(+), 81 deletions(-) diff --git a/openwisp_monitoring/check/classes/iperf.py b/openwisp_monitoring/check/classes/iperf.py index 21d5f5e9c..437e7cece 100644 --- a/openwisp_monitoring/check/classes/iperf.py +++ b/openwisp_monitoring/check/classes/iperf.py @@ -34,38 +34,25 @@ def check(self, store=True): ) return servers = self._get_iperf_servers(device.organization.id) + + # TCP mode command = f'iperf3 -c {servers[0]} -J' - res, exit_code = device_connection.connector_instance.exec_command( - command, raise_unexpected_exit=False - ) - if exit_code != 0: - logger.warning( - f'Iperf check failed for 
"{device}", {json.loads(res)["error"]}' - ) - result_tcp = self._get_fail_result(mode='TCP') - else: - result_tcp = self._get_iperf_result(res, mode='TCP') + res, exit_code = self._exec_command(device_connection, command) + result_tcp = self._get_iperf_result(res, exit_code, device, mode='TCP') + + # UDP mode command = f'iperf3 -c {servers[0]} -u -J' - res, exit_code = device_connection.connector_instance.exec_command( - command, raise_unexpected_exit=False - ) - if store and exit_code != 0: - logger.warning( - f'Iperf check failed for "{device}", {json.loads(res)["error"]}' - ) - result_udp = self._get_fail_result(mode='UDP') - iperf_result = result_tcp['iperf_result'] + res, exit_code = self._exec_command(device_connection, command) + result_udp = self._get_iperf_result(res, exit_code, device, mode='UDP') + + if store: + # Store iperf_result field 1 if any mode passes, store 0 when both fails + iperf_result = result_tcp['iperf_result'] | result_udp['iperf_result'] self.store_result( {**result_tcp, **result_udp, 'iperf_result': iperf_result} ) - device_connection.disconnect() - return - else: - result_udp = self._get_iperf_result(res, mode='UDP') - if store: - self.store_result({**result_tcp, **result_udp}) device_connection.disconnect() - return {**result_tcp, **result_udp} + return {**result_tcp, **result_udp, 'iperf_result': iperf_result} def _get_device_connection(self, device): """ @@ -87,46 +74,80 @@ def _get_iperf_servers(self, organization_id): org_servers = app_settings.IPERF_SERVERS.get(str(organization_id)) return org_servers - def _get_iperf_result(self, res, mode): + def _exec_command(self, dc, command): + """ + Executes device command + """ + return dc.connector_instance.exec_command(command, raise_unexpected_exit=False) + + def _get_iperf_result(self, res, exit_code, device, mode): """ Get iperf test result """ + res_dict = json.loads(res) if mode == 'TCP': - # Gbps = Gigabits per second - # GB = GigaBytes - sent_json = res_dict['end']['sum_sent'] - recv_json = res_dict['end']['sum_received'] - sent_bytes = sent_json['bytes'] - sent_bytes_GB = sent_bytes / 1000000000 - sent_bps = sent_json['bits_per_second'] - sent_Gbps = sent_bps / 1000000000 - received_bytes = recv_json['bytes'] - received_bytes_GB = received_bytes / 1000000000 - received_bps = recv_json['bits_per_second'] - received_Gbps = received_bps / 1000000000 - retransmits = sent_json['retransmits'] - return { - 'iperf_result': 1, - 'sent_bps': round(sent_Gbps, 2), - 'received_bps': round(received_Gbps, 2), - 'sent_bytes': round(sent_bytes_GB, 2), - 'received_bytes': round(received_bytes_GB, 2), - 'retransmits': retransmits, - } + if exit_code != 0: + logger.warning( + f'Iperf check failed for "{device}", {res_dict["error"]}' + ) + return { + 'iperf_result': 0, + 'sent_bps': 0.0, + 'received_bps': 0.0, + 'sent_bytes': 0.0, + 'received_bytes': 0.0, + 'retransmits': 0, + } + else: + # Gbps = Gigabits per second + # GB = GigaBytes + # Todo : Remove below coversion once + # https://github.com/openwisp/openwisp-monitoring/pull/397 get merged + sent_json = res_dict['end']['sum_sent'] + recv_json = res_dict['end']['sum_received'] + sent_bytes = sent_json['bytes'] + sent_bytes_GB = sent_bytes / 1000000000 + sent_bps = sent_json['bits_per_second'] + sent_Gbps = sent_bps / 1000000000 + received_bytes = recv_json['bytes'] + received_bytes_GB = received_bytes / 1000000000 + received_bps = recv_json['bits_per_second'] + received_Gbps = received_bps / 1000000000 + retransmits = sent_json['retransmits'] + return { + 
'iperf_result': 1, + 'sent_bps': round(sent_Gbps, 2), + 'received_bps': round(received_Gbps, 2), + 'sent_bytes': round(sent_bytes_GB, 2), + 'received_bytes': round(received_bytes_GB, 2), + 'retransmits': retransmits, + } elif mode == 'UDP': - jitter_ms = res_dict['end']['sum']['jitter_ms'] - packets = res_dict['end']['sum']['packets'] - lost_packets = res_dict['end']['sum']['lost_packets'] - lost_percent = float(res_dict['end']['sum']['lost_percent']) - return { - 'iperf_result': 1, - 'jitter': round(jitter_ms, 2), - 'packets': packets, - 'lost_packets': lost_packets, - 'lost_percent': round(lost_percent, 2), - } + if exit_code != 0: + logger.warning( + f'Iperf check failed for "{device}", {res_dict["error"]}' + ) + return { + 'iperf_result': 0, + 'jitter': 0.0, + 'packets': 0, + 'lost_packets': 0, + 'lost_percent': 0.0, + } + else: + jitter_ms = res_dict['end']['sum']['jitter_ms'] + packets = res_dict['end']['sum']['packets'] + lost_packets = res_dict['end']['sum']['lost_packets'] + lost_percent = float(res_dict['end']['sum']['lost_percent']) + return { + 'iperf_result': 1, + 'jitter': round(jitter_ms, 2), + 'packets': packets, + 'lost_packets': lost_packets, + 'lost_percent': round(lost_percent, 2), + } def store_result(self, result): """ @@ -137,28 +158,6 @@ def store_result(self, result): iperf_result = copied.pop('iperf_result') metric.write(iperf_result, extra_values=copied) - def _get_fail_result(self, mode): - """ - Get fail test result in the DB - """ - if mode == 'TCP': - return { - 'iperf_result': 0, - 'sent_bps': 0.0, - 'received_bps': 0.0, - 'sent_bytes': 0.0, - 'received_bytes': 0.0, - 'retransmits': 0, - } - elif mode == 'UDP': - return { - 'iperf_result': 0, - 'jitter': 0.0, - 'packets': 0, - 'lost_packets': 0, - 'lost_percent': 0.0, - } - def _get_metric(self): """ Gets or creates metric From efdc70a65c784586df0ef7e5e162f8c84b4c914c Mon Sep 17 00:00:00 2001 From: Aryamanz29 Date: Tue, 28 Jun 2022 19:24:37 +0530 Subject: [PATCH 14/64] [tests] Added tests for iperf check --- openwisp_monitoring/check/classes/iperf.py | 8 +- openwisp_monitoring/check/tests/test_iperf.py | 74 +++++++++++++++++++ 2 files changed, 81 insertions(+), 1 deletion(-) create mode 100644 openwisp_monitoring/check/tests/test_iperf.py diff --git a/openwisp_monitoring/check/classes/iperf.py b/openwisp_monitoring/check/classes/iperf.py index 437e7cece..2955374eb 100644 --- a/openwisp_monitoring/check/classes/iperf.py +++ b/openwisp_monitoring/check/classes/iperf.py @@ -28,7 +28,7 @@ def check(self, store=True): ) return # The DeviceConnection could fail if the management tunnel is down. - if not device_connection.connect(): + if not self._connect(device_connection): logger.warning( f'Failed to get a working DeviceConnection for "{device}", iperf check skipped!' 
) @@ -80,6 +80,12 @@ def _exec_command(self, dc, command): """ return dc.connector_instance.exec_command(command, raise_unexpected_exit=False) + def _connect(self, dc): + """ + Connects device returns its working status + """ + return dc.connect() + def _get_iperf_result(self, res, exit_code, device, mode): """ Get iperf test result diff --git a/openwisp_monitoring/check/tests/test_iperf.py b/openwisp_monitoring/check/tests/test_iperf.py new file mode 100644 index 000000000..bca6a991b --- /dev/null +++ b/openwisp_monitoring/check/tests/test_iperf.py @@ -0,0 +1,74 @@ +from unittest import mock + +from django.test import TransactionTestCase +from swapper import load_model + +from openwisp_controller.connection.tests.utils import CreateConnectionsMixin, SshServer +from openwisp_monitoring.check.classes.iperf import logger as iperf_logger + +from ...device.tests import TestDeviceMonitoringMixin +from .. import settings +from ..classes import Iperf + +Chart = load_model('monitoring', 'Chart') +AlertSettings = load_model('monitoring', 'AlertSettings') +Metric = load_model('monitoring', 'Metric') +Check = load_model('check', 'Check') + + +class TestIperf(CreateConnectionsMixin, TestDeviceMonitoringMixin, TransactionTestCase): + @classmethod + def setUpClass(cls): + super().setUpClass() + cls.mock_ssh_server = SshServer( + {'root': cls._TEST_RSA_PRIVATE_KEY_PATH} + ).__enter__() + cls.ssh_server.port = cls.mock_ssh_server.port + + @classmethod + def tearDownClass(cls): + super().tearDownClass() + cls.mock_ssh_server.__exit__() + + _IPERF = settings.CHECK_CLASSES[2][0] + _RESULT_KEYS = [ + 'iperf_result', + 'sent_bps', + 'received_bps', + 'sent_bytes', + 'received_bytes', + 'retransmits', + 'jitter', + 'packets', + 'lost_packets', + 'lost_percent', + ] + + @mock.patch.object(iperf_logger, 'warning') + def test_iperf_get_device_connection(self, mock_warn): + ckey = self._create_credentials_with_key(port=self.ssh_server.port) + device = self._create_device() + self._create_config(device=device) + dc = self._create_device_connection(device=device, credentials=ckey) + check = Check( + name='Iperf check', + check_type=self._IPERF, + content_object=device, + ) + with self.subTest('Test inactive or invalid device connection'): + check.perform_check(store=False) + mock_warn.assert_called_with( + f'DeviceConnection is not properly set for "{device}", iperf check skipped!' + ) + with self.subTest('Test active device connection when management tunnel down'): + dc.is_working = True + dc.save() + # Todo : Need to change this mock + with mock.patch.object( + Iperf, '_connect', return_value=False + ) as mocked_connect: + check.perform_check(store=False) + mock_warn.assert_called_with( + f'Failed to get a working DeviceConnection for "{device}", iperf check skipped!' + ) + self.assertEqual(mocked_connect.call_count, 1) From 0f65c6adcfd92abcf5c17706796d98dd3549a74e Mon Sep 17 00:00:00 2001 From: Aryamanz29 Date: Wed, 29 Jun 2022 00:13:36 +0530 Subject: [PATCH 15/64] [docs] Added Iperf check instructions --- README.rst | 117 ++++++++++++++++++ openwisp_monitoring/check/classes/iperf.py | 2 +- .../monitoring/configuration.py | 2 +- 3 files changed, 119 insertions(+), 2 deletions(-) diff --git a/README.rst b/README.rst index cdb642dbf..3ead77686 100644 --- a/README.rst +++ b/README.rst @@ -803,6 +803,51 @@ Mobile Access Technology in use .. 
figure:: https://github.com/openwisp/openwisp-monitoring/raw/docs/docs/access-technology.png :align: center +Iperf +~~~~~ + ++--------------------+-------------------------------------------------------------------------------------------------------------------------------------------------------------------+ +| **measurement**: | ``iperf`` | ++--------------------+-------------------------------------------------------------------------------------------------------------------------------------------------------------------+ +| **types**: | ``int`` (iperf_result, retransmits, packets, lost_packets), ``float`` (sent_bps, received_bps, sent_bytes, received_bytes, jitter, lost_percent) | ++--------------------+-------------------------------------------------------------------------------------------------------------------------------------------------------------------+ +| **fields**: | ``iperf_result``, ``sent_bps``, ``received_bps``, ``sent_bytes``, ``received_bytes``, ``retransmits``, ``jitter``, ``packets``, ``lost_packets``, ``lost_percent``| ++--------------------+-------------------------------------------------------------------------------------------------------------------------------------------------------------------+ +| **configuration**: | ``iperf`` | ++--------------------+-------------------------------------------------------------------------------------------------------------------------------------------------------------------+ +| **charts**: | ``bandwidth``, ``transfer``, ``retransmits``, ``jitter``, ``datagram``, ``datagram_loss`` | ++--------------------+-------------------------------------------------------------------------------------------------------------------------------------------------------------------+ + +**Bandwidth**: + +.. figure:: https://github.com/openwisp/openwisp-monitoring/raw/docs/docs/bandwidth.png + :align: center + +**Transfer**: + +.. figure:: https://github.com/openwisp/openwisp-monitoring/raw/docs/docs/transfer.png + :align: center + +**Retransmits**: + +.. figure:: https://github.com/openwisp/openwisp-monitoring/raw/docs/docs/retransmits.png + :align: center + +**Jitter**: + +.. figure:: https://github.com/openwisp/openwisp-monitoring/raw/docs/docs/jitter.png + :align: center + +**Datagram**: + +.. figure:: https://github.com/openwisp/openwisp-monitoring/raw/docs/docs/datagram.png + :align: center + +**Datagram loss**: + +.. figure:: https://github.com/openwisp/openwisp-monitoring/raw/docs/docs/datagram-loss.png + :align: center + Dashboard Monitoring Charts --------------------------- @@ -939,6 +984,66 @@ configuration status of a device changes, this ensures the check reacts quickly to events happening in the network and informs the user promptly if there's anything that is not working as intended. +Iperf +~~~~~ + +This check provides network performance measurements such as maximum achievable bandwidth, jitter, datagram loss etc of the device using `iperf3 utility `_. + +It also supports tuning of various parameters related to timing, buffers and protocols (TCP, UDP with IPv4 and IPv6). + +This check is ``disabled`` by **default**, but you may choose to enable auto creation of this check by setting +`OPENWISP_MONITORING_AUTO_IPERF <#OPENWISP_MONITORING_AUTO_IPERF>`_ to ``True``. + +Instructions to configure Iperf Check +------------------------------------- + +1. 
Register your device to OpenWISP +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Register your device to OpenWISP and make sure the `iperf3 openwrt package `_ is installed on the device; if it is not, run: + +.. code-block:: shell + + opkg install iperf3 + +2. Enable secure SSH access from OpenWISP to your devices +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +This is covered in detail in the `How to configure push updates section of openwisp-controller `_. + +**Note:** Make sure the device connection is enabled and working with the right update strategy, i.e. ``OpenWISP SSH``. + +.. image:: https://github.com/openwisp/openwisp-monitoring/raw/docs/docs/enable-openwisp-ssh.png + :alt: Enable ssh access from openwisp to device + :align: center + +3. Configure Iperf settings +~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Configure the iperf servers in the `openwisp settings `_. The host can be specified by hostname, IPv4 literal, or IPv6 literal. + +For example: + +.. code-block:: python + + OPENWISP_MONITORING_IPERF_SERVERS = { + # Public iperf servers also available: https://iperf.fr/iperf-servers.php#public-servers + # '': [''] + 'a9734710-db30-46b0-a2fc-01f01046fe4f': ['speedtest.uztelecom.uz'], + 'z9734710-db30-46b0-a2fc-01f01046fe4f': ['192.168.5.109'], + 'c9734710-db30-46b0-a2fc-01f01046fe4f': ['2001:db8::1'], + } + +4. Run the check +~~~~~~~~~~~~~~~~ + +This should happen automatically if you have celery running in the background. For testing, you can +run this check manually using the `run_checks <#run_checks>`_ command. After that, you should see the +iperf network measurement charts. + +.. image:: https://github.com/openwisp/openwisp-monitoring/raw/docs/docs/iperf-charts.png + :alt: Iperf network measurement charts Settings -------- @@ -1033,6 +1138,18 @@ validating custom parameters of a ``Check`` object. This setting allows you to choose whether `config_applied <#configuration-applied>`_ checks should be created automatically for newly registered devices. It's enabled by default. +``OPENWISP_MONITORING_AUTO_IPERF`` +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + ++--------------+-------------+ +| **type**: | ``bool`` | ++--------------+-------------+ +| **default**: | ``False`` | ++--------------+-------------+ + +Whether `Iperf <#iperf-1>`_ checks are created automatically for devices. The devices need to have SSH access from OpenWISP & must installed +`iperf3 openwrt package `_ on devices in order for this check to work.
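For example, a minimal and purely illustrative ``settings.py`` fragment enabling this check could look like the following (the organization UUID and the server hostname below are placeholders, not values prescribed by this patch):

.. code-block:: python

    # create the Iperf check automatically for newly registered devices
    OPENWISP_MONITORING_AUTO_IPERF = True
    # iperf3 servers reachable by the devices of each organization (keyed by organization UUID)
    OPENWISP_MONITORING_IPERF_SERVERS = {
        'a9734710-db30-46b0-a2fc-01f01046fe4f': ['iperf.example.com'],
    }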
+ ``OPENWISP_MONITORING_AUTO_CHARTS`` ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/openwisp_monitoring/check/classes/iperf.py b/openwisp_monitoring/check/classes/iperf.py index 2955374eb..cf140a1f0 100644 --- a/openwisp_monitoring/check/classes/iperf.py +++ b/openwisp_monitoring/check/classes/iperf.py @@ -178,7 +178,7 @@ def _create_charts(self, metric): Creates iperf related charts (Bandwith/Jitter) """ charts = [ - 'bitrate', + 'bandwidth', 'transfer', 'retransmits', 'jitter', diff --git a/openwisp_monitoring/monitoring/configuration.py b/openwisp_monitoring/monitoring/configuration.py index 0a5f8dc4a..dd206e5b4 100644 --- a/openwisp_monitoring/monitoring/configuration.py +++ b/openwisp_monitoring/monitoring/configuration.py @@ -559,7 +559,7 @@ def _get_access_tech(): 'lost_percent', ], 'charts': { - 'bitrate': { + 'bandwidth': { 'type': 'stackedbar', 'title': _('Bandwidth'), 'description': _('Bitrate during Iperf3 test in TCP mode.'), From 7396e79b0c9f8b7ae0c211bdeb025aba5737325f Mon Sep 17 00:00:00 2001 From: Aryamanz29 Date: Wed, 29 Jun 2022 20:25:37 +0530 Subject: [PATCH 16/64] [requested-changes] Added bandwidth and transfer charts (UDP) --- openwisp_monitoring/check/classes/iperf.py | 37 +++++++----- .../db/backends/influxdb/queries.py | 30 +++++++--- .../monitoring/configuration.py | 57 +++++++++++++------ 3 files changed, 86 insertions(+), 38 deletions(-) diff --git a/openwisp_monitoring/check/classes/iperf.py b/openwisp_monitoring/check/classes/iperf.py index cf140a1f0..e0d75e04d 100644 --- a/openwisp_monitoring/check/classes/iperf.py +++ b/openwisp_monitoring/check/classes/iperf.py @@ -99,10 +99,10 @@ def _get_iperf_result(self, res, exit_code, device, mode): ) return { 'iperf_result': 0, - 'sent_bps': 0.0, - 'received_bps': 0.0, - 'sent_bytes': 0.0, - 'received_bytes': 0.0, + 'sent_bps_tcp': 0.0, + 'received_bps_tcp': 0.0, + 'sent_bytes_tcp': 0.0, + 'received_bytes_tcp': 0.0, 'retransmits': 0, } else: @@ -123,10 +123,10 @@ def _get_iperf_result(self, res, exit_code, device, mode): retransmits = sent_json['retransmits'] return { 'iperf_result': 1, - 'sent_bps': round(sent_Gbps, 2), - 'received_bps': round(received_Gbps, 2), - 'sent_bytes': round(sent_bytes_GB, 2), - 'received_bytes': round(received_bytes_GB, 2), + 'sent_bps_tcp': round(sent_Gbps, 2), + 'received_bps_tcp': round(received_Gbps, 2), + 'sent_bytes_tcp': round(sent_bytes_GB, 2), + 'received_bytes_tcp': round(received_bytes_GB, 2), 'retransmits': retransmits, } @@ -137,20 +137,29 @@ def _get_iperf_result(self, res, exit_code, device, mode): ) return { 'iperf_result': 0, + 'sent_bps_udp': 0.0, + 'sent_bytes_udp': 0.0, 'jitter': 0.0, - 'packets': 0, + 'total_packets': 0, 'lost_packets': 0, 'lost_percent': 0.0, } else: + sent_bytes = res_dict['end']['sum']['bytes'] + sent_bytes_MB = sent_bytes / 1000000 + sent_bps = res_dict['end']['sum']['bytes'] + sent_Mbps = sent_bps / 1000000 jitter_ms = res_dict['end']['sum']['jitter_ms'] - packets = res_dict['end']['sum']['packets'] + jitter_ms = res_dict['end']['sum']['jitter_ms'] + total_packets = res_dict['end']['sum']['packets'] lost_packets = res_dict['end']['sum']['lost_packets'] lost_percent = float(res_dict['end']['sum']['lost_percent']) return { 'iperf_result': 1, + 'sent_bps_udp': round(sent_Mbps, 2), + 'sent_bytes_udp': round(sent_bytes_MB, 2), 'jitter': round(jitter_ms, 2), - 'packets': packets, + 'total_packets': total_packets, 'lost_packets': lost_packets, 'lost_percent': round(lost_percent, 2), } @@ -178,9 +187,11 @@ def _create_charts(self, metric): Creates 
iperf related charts (Bandwith/Jitter) """ charts = [ - 'bandwidth', - 'transfer', + 'bandwidth_tcp', + 'transfer_tcp', 'retransmits', + 'bandwidth_udp', + 'transfer_udp', 'jitter', 'datagram', 'datagram_loss', diff --git a/openwisp_monitoring/db/backends/influxdb/queries.py b/openwisp_monitoring/db/backends/influxdb/queries.py index ec4a575db..cf3454f55 100644 --- a/openwisp_monitoring/db/backends/influxdb/queries.py +++ b/openwisp_monitoring/db/backends/influxdb/queries.py @@ -100,19 +100,19 @@ "object_id = '{object_id}' GROUP BY time(1d)" ) }, - 'bitrate': { + 'bandwidth_tcp': { 'influxdb': ( - "SELECT MEAN(sent_bps) AS sent, " - "MEAN(received_bps) AS received FROM {key} WHERE " + "SELECT MEAN(sent_bps_tcp) AS sent, " + "MEAN(received_bps_tcp) AS received FROM {key} WHERE " "time >= '{time}' AND content_type = '{content_type}' AND " "object_id = '{object_id}' GROUP BY time(1d)" ) }, - 'transfer': { + 'transfer_tcp': { 'influxdb': ( - "SELECT SUM(received_bytes) AS received, " - "SUM(sent_bytes) AS sent," - "SUM(sent_bytes) + SUM(received_bytes) AS total FROM {key} WHERE " + "SELECT SUM(received_bytes_tcp) AS received, " + "SUM(sent_bytes_tcp) AS sent," + "SUM(sent_bytes_tcp) + SUM(received_bytes_tcp) AS total FROM {key} WHERE " "time >= '{time}' AND content_type = '{content_type}' AND " "object_id = '{object_id}' GROUP BY time(1d)" ) @@ -124,6 +124,20 @@ "AND object_id = '{object_id}' GROUP BY time(1d)" ) }, + 'bandwidth_udp': { + 'influxdb': ( + "SELECT MEAN(sent_bps_udp) AS sent FROM {key} " + "WHERE time >= '{time}' AND content_type = '{content_type}' " + "AND object_id = '{object_id}' GROUP BY time(1d)" + ) + }, + 'transfer_udp': { + 'influxdb': ( + "SELECT SUM(sent_bytes_udp) AS sent FROM {key} " + "WHERE time >= '{time}' AND content_type = '{content_type}' AND " + "object_id = '{object_id}' GROUP BY time(1d)" + ) + }, 'jitter': { 'influxdb': ( "SELECT MEAN(jitter) AS jitter FROM {key} " @@ -134,7 +148,7 @@ 'datagram': { 'influxdb': ( "SELECT MEAN(lost_packets) AS lost_datagram," - "MEAN(packets) AS total_datagram FROM {key} WHERE " + "MEAN(total_packets) AS total_datagram FROM {key} WHERE " "time >= '{time}' AND content_type = '{content_type}' " "AND object_id = '{object_id}' GROUP BY time(1d)" ) diff --git a/openwisp_monitoring/monitoring/configuration.py b/openwisp_monitoring/monitoring/configuration.py index dd206e5b4..2b47bda3c 100644 --- a/openwisp_monitoring/monitoring/configuration.py +++ b/openwisp_monitoring/monitoring/configuration.py @@ -548,20 +548,22 @@ def _get_access_tech(): 'key': 'iperf', 'field_name': 'iperf_result', 'related_fields': [ - 'sent_bps', - 'received_bps', - 'sent_bytes', - 'received_bytes', + 'sent_bps_tcp', + 'received_bps_tcp', + 'sent_bytes_tcp', + 'received_bytes_tcp', 'retransmits', + 'sent_bytes_udp', + 'sent_bps_udp', 'jitter', - 'packets', + 'total_packets', 'lost_packets', 'lost_percent', ], 'charts': { - 'bandwidth': { + 'bandwidth_tcp': { 'type': 'stackedbar', - 'title': _('Bandwidth'), + 'title': _('Bandwidth (TCP)'), 'description': _('Bitrate during Iperf3 test in TCP mode.'), 'summary_labels': [ _('Sent bitrate'), @@ -569,12 +571,12 @@ def _get_access_tech(): ], 'unit': _(' Gbps'), 'order': 280, - 'query': chart_query['bitrate'], + 'query': chart_query['bandwidth_tcp'], 'colors': [DEFAULT_COLORS[5], DEFAULT_COLORS[9]], }, - 'transfer': { + 'transfer_tcp': { 'type': 'stackedbar+lines', - 'title': _('Transfer'), + 'title': _('Transfer (TCP)'), 'trace_type': { 'received': 'stackedbar', 'sent': 'stackedbar', @@ -589,29 +591,50 @@ def 
_get_access_tech(): ], 'unit': _(' GB'), 'order': 290, - 'query': chart_query['transfer'], + 'query': chart_query['transfer_tcp'], 'colors': [DEFAULT_COLORS[7], DEFAULT_COLORS[2], DEFAULT_COLORS[4]], }, 'retransmits': { 'type': 'bar', 'title': _('Retransmits'), - 'colors': [DEFAULT_COLORS[-3]], 'description': _('No. of retransmits during Iperf3 test in TCP mode.'), + 'summary_labels': [_('Restransmits')], 'unit': '', 'order': 300, 'query': chart_query['retransmits'], + 'colors': [DEFAULT_COLORS[-3]], + }, + 'bandwidth_udp': { + 'type': 'bar', + 'title': _('Bandwidth (UDP)'), + 'description': _('Bitrate during Iperf3 test in UDP mode.'), + 'summary_labels': [_('Sent bitrate')], + 'unit': _(' Mbps'), + 'order': 310, + 'query': chart_query['bandwidth_udp'], + 'colors': [DEFAULT_COLORS[5]], + }, + 'transfer_udp': { + 'type': 'bar', + 'title': _('Transfer (UDP)'), + 'description': _('Transfer during Iperf3 test in UDP mode.'), + 'summary_labels': [_('Sent bytes')], + 'unit': _(' MB'), + 'order': 320, + 'query': chart_query['transfer_udp'], + 'colors': [DEFAULT_COLORS[4]], }, 'jitter': { 'type': 'scatter', 'title': _('Jitter'), 'description': _( - 'Jitter is a variance in latency measured using Iperf3 utility in UDP mode' + 'Jitter is a variance in latency measured using Iperf3 utility in UDP mode.' ), 'summary_labels': [ _('Jitter'), ], 'unit': _(' ms'), - 'order': 310, + 'order': 330, 'query': chart_query['jitter'], 'colors': [DEFAULT_COLORS[4]], }, @@ -619,14 +642,14 @@ def _get_access_tech(): 'type': 'stackedbar', 'title': _('Datagram'), 'description': _( - 'Lost/total datagram ratio during Iperf3 test in UDP mode' + 'Lost/Total datagram ratio measured by Iperf3 test in UDP mode.' ), 'summary_labels': [ _('Lost datagram'), _('Total datagram'), ], 'unit': '', - 'order': 320, + 'order': 340, 'query': chart_query['datagram'], 'colors': [DEFAULT_COLORS[3], DEFAULT_COLORS[2]], }, @@ -640,7 +663,7 @@ def _get_access_tech(): _('Datagram loss'), ], 'unit': '%', - 'order': 330, + 'order': 350, 'query': chart_query['datagram_loss'], 'colors': [DEFAULT_COLORS[8]], }, From 2b881e0616d770710cf5500dd460e5db7fd7e48b Mon Sep 17 00:00:00 2001 From: Aryamanz29 Date: Wed, 29 Jun 2022 20:51:32 +0530 Subject: [PATCH 17/64] [docs] Added transfer & bandwidth charts (UDP) instructions --- README.rst | 46 ++++++++++++++++++++++++++++------------------ 1 file changed, 28 insertions(+), 18 deletions(-) diff --git a/README.rst b/README.rst index 3ead77686..4604c28b8 100644 --- a/README.rst +++ b/README.rst @@ -806,26 +806,26 @@ Mobile Access Technology in use Iperf ~~~~~ -+--------------------+-------------------------------------------------------------------------------------------------------------------------------------------------------------------+ -| **measurement**: | ``iperf`` | -+--------------------+-------------------------------------------------------------------------------------------------------------------------------------------------------------------+ -| **types**: | ``int`` (iperf_result, retransmits, packets, lost_packets), ``float`` (sent_bps, received_bps, sent_bytes, received_bytes, jitter, lost_percent) | -+--------------------+-------------------------------------------------------------------------------------------------------------------------------------------------------------------+ -| **fields**: | ``iperf_result``, ``sent_bps``, ``received_bps``, ``sent_bytes``, ``received_bytes``, ``retransmits``, ``jitter``, ``packets``, ``lost_packets``, ``lost_percent``| 
-+--------------------+-------------------------------------------------------------------------------------------------------------------------------------------------------------------+ -| **configuration**: | ``iperf`` | -+--------------------+-------------------------------------------------------------------------------------------------------------------------------------------------------------------+ -| **charts**: | ``bandwidth``, ``transfer``, ``retransmits``, ``jitter``, ``datagram``, ``datagram_loss`` | -+--------------------+-------------------------------------------------------------------------------------------------------------------------------------------------------------------+ - -**Bandwidth**: - -.. figure:: https://github.com/openwisp/openwisp-monitoring/raw/docs/docs/bandwidth.png ++--------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ +| **measurement**: | ``iperf`` | ++--------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ +| **types**: | ``int`` (iperf_result, retransmits, total_packets, lost_packets), ``float`` (sent_bps_tcp, received_bps_tcp, sent_bytes_tcp, received_bytes_tcp, sent_bps_udp, sent_bytes_udp, jitter, lost_percent) | ++--------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ +| **fields**: | ``iperf_result``, ``sent_bps_tcp``, ``received_bps_tcp``, ``sent_bytes_tcp``, ``received_bytes_tcp``, ``retransmits``, ``sent_bps_udp``, ``sent_bytes_udp``, ``jitter``, ``total_packets``, ``lost_packets``, ``lost_percent`` | ++--------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ +| **configuration**: | ``iperf`` | ++--------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ +| **charts**: | ``bandwidth_tcp``, ``transfer_tcp``, ``retransmits``, ``bandwidth_udp``, ``transfer_udp``, ``jitter``, ``datagram``, ``datagram_loss`` | ++--------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ + +**Bandwidth (TCP)**: + +.. figure:: https://github.com/openwisp/openwisp-monitoring/raw/docs/docs/bandwidth-tcp.png :align: center -**Transfer**: +**Transfer (TCP)**: -.. figure:: https://github.com/openwisp/openwisp-monitoring/raw/docs/docs/transfer.png +.. figure:: https://github.com/openwisp/openwisp-monitoring/raw/docs/docs/transfer-tcp.png :align: center **Retransmits**: @@ -833,6 +833,16 @@ Iperf .. figure:: https://github.com/openwisp/openwisp-monitoring/raw/docs/docs/retransmits.png :align: center +**Bandwidth (UDP)**: + +.. 
figure:: https://github.com/openwisp/openwisp-monitoring/raw/docs/docs/bandwidth-udp.png + :align: center + +**Transfer (UDP)**: + +.. figure:: https://github.com/openwisp/openwisp-monitoring/raw/docs/docs/transfer-udp.png + :align: center + **Jitter**: .. figure:: https://github.com/openwisp/openwisp-monitoring/raw/docs/docs/jitter.png @@ -1006,7 +1016,7 @@ Register your device to OpenWISP and make sure `iperf3 openwrt package `_. From 4d732bb3310db85781acc5c198541386f1373ba6 Mon Sep 17 00:00:00 2001 From: Aryamanz29 Date: Thu, 30 Jun 2022 23:09:11 +0530 Subject: [PATCH 18/64] [requested-changes] Logger warning changed --- openwisp_monitoring/check/classes/iperf.py | 12 ++--- openwisp_monitoring/check/tests/test_iperf.py | 53 +++++++++---------- 2 files changed, 28 insertions(+), 37 deletions(-) diff --git a/openwisp_monitoring/check/classes/iperf.py b/openwisp_monitoring/check/classes/iperf.py index e0d75e04d..51642f0dd 100644 --- a/openwisp_monitoring/check/classes/iperf.py +++ b/openwisp_monitoring/check/classes/iperf.py @@ -24,13 +24,13 @@ def check(self, store=True): device_connection = self._get_device_connection(device) if not device_connection: logger.warning( - f'DeviceConnection is not properly set for "{device}", iperf check skipped!' + f'Failed to get a working DeviceConnection for "{device}", iperf check skipped!' ) return # The DeviceConnection could fail if the management tunnel is down. - if not self._connect(device_connection): + if not device_connection.connect(): logger.warning( - f'Failed to get a working DeviceConnection for "{device}", iperf check skipped!' + f'DeviceConnection for "{device}" is not working, iperf check skipped!' ) return servers = self._get_iperf_servers(device.organization.id) @@ -80,12 +80,6 @@ def _exec_command(self, dc, command): """ return dc.connector_instance.exec_command(command, raise_unexpected_exit=False) - def _connect(self, dc): - """ - Connects device returns its working status - """ - return dc.connect() - def _get_iperf_result(self, res, exit_code, device, mode): """ Get iperf test result diff --git a/openwisp_monitoring/check/tests/test_iperf.py b/openwisp_monitoring/check/tests/test_iperf.py index bca6a991b..66259b2a6 100644 --- a/openwisp_monitoring/check/tests/test_iperf.py +++ b/openwisp_monitoring/check/tests/test_iperf.py @@ -8,7 +8,6 @@ from ...device.tests import TestDeviceMonitoringMixin from .. 
import settings -from ..classes import Iperf Chart = load_model('monitoring', 'Chart') AlertSettings = load_model('monitoring', 'AlertSettings') @@ -33,13 +32,15 @@ def tearDownClass(cls): _IPERF = settings.CHECK_CLASSES[2][0] _RESULT_KEYS = [ 'iperf_result', - 'sent_bps', - 'received_bps', - 'sent_bytes', - 'received_bytes', + 'sent_bps_tcp', + 'received_bps_tcp', + 'sent_bytes_tcp', + 'received_bytes_tcp', 'retransmits', + 'sent_bps_udp', + 'sent_bytes_udp', 'jitter', - 'packets', + 'total_packets', 'lost_packets', 'lost_percent', ] @@ -47,28 +48,24 @@ def tearDownClass(cls): @mock.patch.object(iperf_logger, 'warning') def test_iperf_get_device_connection(self, mock_warn): ckey = self._create_credentials_with_key(port=self.ssh_server.port) - device = self._create_device() - self._create_config(device=device) - dc = self._create_device_connection(device=device, credentials=ckey) - check = Check( - name='Iperf check', - check_type=self._IPERF, - content_object=device, - ) - with self.subTest('Test inactive or invalid device connection'): + dc = self._create_device_connection(credentials=ckey) + device = dc.device + check = Check.objects.get(check_type=self._IPERF) + + with self.subTest('Test device connection not working'): + dc.is_working = False + dc.save() check.perform_check(store=False) mock_warn.assert_called_with( - f'DeviceConnection is not properly set for "{device}", iperf check skipped!' + f'Failed to get a working DeviceConnection for "{device}", iperf check skipped!' ) - with self.subTest('Test active device connection when management tunnel down'): - dc.is_working = True - dc.save() - # Todo : Need to change this mock - with mock.patch.object( - Iperf, '_connect', return_value=False - ) as mocked_connect: - check.perform_check(store=False) - mock_warn.assert_called_with( - f'Failed to get a working DeviceConnection for "{device}", iperf check skipped!' - ) - self.assertEqual(mocked_connect.call_count, 1) + + # with self.subTest('Test active device connection when management tunnel down'): + # dc.is_working = True + # dc.save() + # auth_failed = AuthenticationException('Authentication failed.') + # with mock.patch( + # 'openwisp_monitoring.check.classes.iperf.Iperf.connect', + # side_effect=auth_failed, + # ) as mocked_device_connection: + # check.perform_check(store=False) From 8c488abc5a9ba560c03b0a649b0502600624911a Mon Sep 17 00:00:00 2001 From: Aryamanz29 Date: Fri, 1 Jul 2022 04:23:26 +0530 Subject: [PATCH 19/64] [tests] Added test for Iperf check --- openwisp_monitoring/check/classes/iperf.py | 9 +- .../check/tests/iperf_test_result.py | 662 ++++++++++++++++++ openwisp_monitoring/check/tests/test_iperf.py | 92 ++- 3 files changed, 750 insertions(+), 13 deletions(-) create mode 100644 openwisp_monitoring/check/tests/iperf_test_result.py diff --git a/openwisp_monitoring/check/classes/iperf.py b/openwisp_monitoring/check/classes/iperf.py index 51642f0dd..8d265e6cb 100644 --- a/openwisp_monitoring/check/classes/iperf.py +++ b/openwisp_monitoring/check/classes/iperf.py @@ -28,7 +28,7 @@ def check(self, store=True): ) return # The DeviceConnection could fail if the management tunnel is down. - if not device_connection.connect(): + if not self._connect(device_connection): logger.warning( f'DeviceConnection for "{device}" is not working, iperf check skipped!' 
) @@ -80,11 +80,16 @@ def _exec_command(self, dc, command): """ return dc.connector_instance.exec_command(command, raise_unexpected_exit=False) + def _connect(self, dc): + """ + Connects device returns its working status (easier to mock) + """ + return dc.connect() + def _get_iperf_result(self, res, exit_code, device, mode): """ Get iperf test result """ - res_dict = json.loads(res) if mode == 'TCP': if exit_code != 0: diff --git a/openwisp_monitoring/check/tests/iperf_test_result.py b/openwisp_monitoring/check/tests/iperf_test_result.py new file mode 100644 index 000000000..20b8a2ded --- /dev/null +++ b/openwisp_monitoring/check/tests/iperf_test_result.py @@ -0,0 +1,662 @@ +# flake8: noqa + +RESULT_TCP = """{ + "start": { + "connected": [{ + "socket": 5, + "local_host": "127.0.0.1", + "local_port": 54966, + "remote_host": "127.0.0.1", + "remote_port": 5201 + }], + "version": "iperf 3.9", + "system_info": "Linux openwisp-desktop 5.11.2-51-generic #58~20.04.1-Ubuntu SMP Tue Jun 14 11:29:12 UTC 2022 x86_64", + "timestamp": { + "time": "Thu, 30 Jun 2022 21:39:55 GMT", + "timesecs": 1656625195 + }, + "connecting_to": { + "host": "localhost", + "port": 5201 + }, + "cookie": "npx4ad65t3j4wginxr4a7mqedmkhhspx3sob", + "tcp_mss_default": 32768, + "sock_bufsize": 0, + "sndbuf_actual": 16384, + "rcvbuf_actual": 131072, + "test_start": { + "protocol": "TCP", + "num_streams": 1, + "blksize": 131072, + "omit": 0, + "duration": 10, + "bytes": 0, + "blocks": 0, + "reverse": 0, + "tos": 0 + } + }, + "intervals": [{ + "streams": [{ + "socket": 5, + "start": 0, + "end": 1.000048, + "seconds": 1.000048041343689, + "bytes": 5790760960, + "bits_per_second": 46323862219.414116, + "retransmits": 0, + "snd_cwnd": 1506109, + "rtt": 22, + "rttvar": 3, + "pmtu": 65535, + "omitted": false, + "sender": true + }], + "sum": { + "start": 0, + "end": 1.000048, + "seconds": 1.000048041343689, + "bytes": 5790760960, + "bits_per_second": 46323862219.414116, + "retransmits": 0, + "omitted": false, + "sender": true + } + }, { + "streams": [{ + "socket": 5, + "start": 1.000048, + "end": 2.000185, + "seconds": 1.0001369714736938, + "bytes": 5463080960, + "bits_per_second": 43698662209.838669, + "retransmits": 0, + "snd_cwnd": 2160939, + "rtt": 22, + "rttvar": 3, + "pmtu": 65535, + "omitted": false, + "sender": true + }], + "sum": { + "start": 1.000048, + "end": 2.000185, + "seconds": 1.0001369714736938, + "bytes": 5463080960, + "bits_per_second": 43698662209.838669, + "retransmits": 0, + "omitted": false, + "sender": true + } + }, { + "streams": [{ + "socket": 5, + "start": 2.000185, + "end": 3.00019, + "seconds": 1.0000050067901611, + "bytes": 5679349760, + "bits_per_second": 45434570598.638954, + "retransmits": 0, + "snd_cwnd": 2553837, + "rtt": 21, + "rttvar": 1, + "pmtu": 65535, + "omitted": false, + "sender": true + }], + "sum": { + "start": 2.000185, + "end": 3.00019, + "seconds": 1.0000050067901611, + "bytes": 5679349760, + "bits_per_second": 45434570598.638954, + "retransmits": 0, + "omitted": false, + "sender": true + } + }, { + "streams": [{ + "socket": 5, + "start": 3.00019, + "end": 4.000232, + "seconds": 1.0000419616699219, + "bytes": 5710807040, + "bits_per_second": 45684539320.4405, + "retransmits": 0, + "snd_cwnd": 2553837, + "rtt": 24, + "rttvar": 5, + "pmtu": 65535, + "omitted": false, + "sender": true + }], + "sum": { + "start": 3.00019, + "end": 4.000232, + "seconds": 1.0000419616699219, + "bytes": 5710807040, + "bits_per_second": 45684539320.4405, + "retransmits": 0, + "omitted": false, + "sender": true + } + 
}, { + "streams": [{ + "socket": 5, + "start": 4.000232, + "end": 5.000158, + "seconds": 0.999925971031189, + "bytes": 5307105280, + "bits_per_second": 42459985508.942955, + "retransmits": 0, + "snd_cwnd": 3208667, + "rtt": 27, + "rttvar": 4, + "pmtu": 65535, + "omitted": false, + "sender": true + }], + "sum": { + "start": 4.000232, + "end": 5.000158, + "seconds": 0.999925971031189, + "bytes": 5307105280, + "bits_per_second": 42459985508.942955, + "retransmits": 0, + "omitted": false, + "sender": true + } + }, { + "streams": [{ + "socket": 5, + "start": 5.000158, + "end": 6.000229, + "seconds": 1.0000710487365723, + "bytes": 5308416000, + "bits_per_second": 42464310964.356567, + "retransmits": 0, + "snd_cwnd": 3208667, + "rtt": 28, + "rttvar": 1, + "pmtu": 65535, + "omitted": false, + "sender": true + }], + "sum": { + "start": 5.000158, + "end": 6.000229, + "seconds": 1.0000710487365723, + "bytes": 5308416000, + "bits_per_second": 42464310964.356567, + "retransmits": 0, + "omitted": false, + "sender": true + } + }, { + "streams": [{ + "socket": 5, + "start": 6.000229, + "end": 7.000056, + "seconds": 0.99982702732086182, + "bytes": 5241569280, + "bits_per_second": 41939808681.0701, + "retransmits": 0, + "snd_cwnd": 3208667, + "rtt": 23, + "rttvar": 4, + "pmtu": 65535, + "omitted": false, + "sender": true + }], + "sum": { + "start": 6.000229, + "end": 7.000056, + "seconds": 0.99982702732086182, + "bytes": 5241569280, + "bits_per_second": 41939808681.0701, + "retransmits": 0, + "omitted": false, + "sender": true + } + }, { + "streams": [{ + "socket": 5, + "start": 7.000056, + "end": 8.000202, + "seconds": 1.0001460313797, + "bytes": 5734400000, + "bits_per_second": 45868501759.403313, + "retransmits": 0, + "snd_cwnd": 3208667, + "rtt": 22, + "rttvar": 1, + "pmtu": 65535, + "omitted": false, + "sender": true + }], + "sum": { + "start": 7.000056, + "end": 8.000202, + "seconds": 1.0001460313797, + "bytes": 5734400000, + "bits_per_second": 45868501759.403313, + "retransmits": 0, + "omitted": false, + "sender": true + } + }, { + "streams": [{ + "socket": 5, + "start": 8.000202, + "end": 9.0003, + "seconds": 1.0000979900360107, + "bytes": 5415895040, + "bits_per_second": 43322915105.98867, + "retransmits": 0, + "snd_cwnd": 3208667, + "rtt": 35, + "rttvar": 12, + "pmtu": 65535, + "omitted": false, + "sender": true + }], + "sum": { + "start": 8.000202, + "end": 9.0003, + "seconds": 1.0000979900360107, + "bytes": 5415895040, + "bits_per_second": 43322915105.98867, + "retransmits": 0, + "omitted": false, + "sender": true + } + }, { + "streams": [{ + "socket": 5, + "start": 9.0003, + "end": 10.000218, + "seconds": 0.999917984008789, + "bytes": 5402787840, + "bits_per_second": 43225847930.763977, + "retransmits": 0, + "snd_cwnd": 3208667, + "rtt": 26, + "rttvar": 17, + "pmtu": 65535, + "omitted": false, + "sender": true + }], + "sum": { + "start": 9.0003, + "end": 10.000218, + "seconds": 0.999917984008789, + "bytes": 5402787840, + "bits_per_second": 43225847930.763977, + "retransmits": 0, + "omitted": false, + "sender": true + } + }], + "end": { + "streams": [{ + "sender": { + "socket": 5, + "start": 0, + "end": 10.000218, + "seconds": 10.000218, + "bytes": 55054172160, + "bits_per_second": 44042377604.168228, + "retransmits": 0, + "max_snd_cwnd": 3208667, + "max_rtt": 35, + "min_rtt": 21, + "mean_rtt": 25, + "sender": true + }, + "receiver": { + "socket": 5, + "start": 0, + "end": 10.000272, + "seconds": 10.000218, + "bytes": 55054172160, + "bits_per_second": 44042139781.797935, + "sender": true + } + 
}], + "sum_sent": { + "start": 0, + "end": 10.000218, + "seconds": 10.000218, + "bytes": 55054172160, + "bits_per_second": 44042377604.168228, + "retransmits": 0, + "sender": true + }, + "sum_received": { + "start": 0, + "end": 10.000272, + "seconds": 10.000272, + "bytes": 55054172160, + "bits_per_second": 44042139781.797935, + "sender": true + }, + "cpu_utilization_percent": { + "host_total": 99.498820810699755, + "host_user": 0.66204905391509139, + "host_system": 98.83676176238454, + "remote_total": 0.377797593572381, + "remote_user": 0.02174276147834767, + "remote_system": 0.35605477540538377 + }, + "sender_tcp_congestion": "cubic", + "receiver_tcp_congestion": "cubic" + } +}""" + +RESULT_UDP = """{ + "start": { + "connected": [{ + "socket": 5, + "local_host": "127.0.0.1", + "local_port": 54477, + "remote_host": "127.0.0.1", + "remote_port": 5201 + }], + "version": "iperf 3.9", + "system_info": "openwisp-desktop 5.11.2-51-generic #58~20.04.1-Ubuntu SMP Tue Jun 14 11:29:12 UTC 2022 x86_64", + "timestamp": { + "time": "Thu, 30 Jun 2022 21:10:31 GMT", + "timesecs": 1656623431 + }, + "connecting_to": { + "host": "localhost", + "port": 5201 + }, + "cookie": "kvuxkz3ncutquvpl2evufmdkn726molzocot", + "sock_bufsize": 0, + "sndbuf_actual": 212992, + "rcvbuf_actual": 212992, + "test_start": { + "protocol": "UDP", + "num_streams": 1, + "blksize": 32768, + "omit": 0, + "duration": 10, + "bytes": 0, + "blocks": 0, + "reverse": 0, + "tos": 0 + } + }, + "intervals": [{ + "streams": [{ + "socket": 5, + "start": 0, + "end": 1.000057, + "seconds": 1.0000569820404053, + "bytes": 131072, + "bits_per_second": 1048516.253404483, + "packets": 4, + "omitted": false, + "sender": true + }], + "sum": { + "start": 0, + "end": 1.000057, + "seconds": 1.0000569820404053, + "bytes": 131072, + "bits_per_second": 1048516.253404483, + "packets": 4, + "omitted": false, + "sender": true + } + }, { + "streams": [{ + "socket": 5, + "start": 1.000057, + "end": 2.000079, + "seconds": 1.0000220537185669, + "bytes": 131072, + "bits_per_second": 1048552.8755099809, + "packets": 4, + "omitted": false, + "sender": true + }], + "sum": { + "start": 1.000057, + "end": 2.000079, + "seconds": 1.0000220537185669, + "bytes": 131072, + "bits_per_second": 1048552.8755099809, + "packets": 4, + "omitted": false, + "sender": true + } + }, { + "streams": [{ + "socket": 5, + "start": 2.000079, + "end": 3.000079, + "seconds": 1, + "bytes": 131072, + "bits_per_second": 1048576, + "packets": 4, + "omitted": false, + "sender": true + }], + "sum": { + "start": 2.000079, + "end": 3.000079, + "seconds": 1, + "bytes": 131072, + "bits_per_second": 1048576, + "packets": 4, + "omitted": false, + "sender": true + } + }, { + "streams": [{ + "socket": 5, + "start": 3.000079, + "end": 4.000079, + "seconds": 1, + "bytes": 131072, + "bits_per_second": 1048576, + "packets": 4, + "omitted": false, + "sender": true + }], + "sum": { + "start": 3.000079, + "end": 4.000079, + "seconds": 1, + "bytes": 131072, + "bits_per_second": 1048576, + "packets": 4, + "omitted": false, + "sender": true + } + }, { + "streams": [{ + "socket": 5, + "start": 4.000079, + "end": 5.000182, + "seconds": 1.0001029968261719, + "bytes": 131072, + "bits_per_second": 1048468.0111225117, + "packets": 4, + "omitted": false, + "sender": true + }], + "sum": { + "start": 4.000079, + "end": 5.000182, + "seconds": 1.0001029968261719, + "bytes": 131072, + "bits_per_second": 1048468.0111225117, + "packets": 4, + "omitted": false, + "sender": true + } + }, { + "streams": [{ + "socket": 5, + "start": 
5.000182, + "end": 6.000056, + "seconds": 0.99987399578094482, + "bytes": 131072, + "bits_per_second": 1048708.1416504055, + "packets": 4, + "omitted": false, + "sender": true + }], + "sum": { + "start": 5.000182, + "end": 6.000056, + "seconds": 0.99987399578094482, + "bytes": 131072, + "bits_per_second": 1048708.1416504055, + "packets": 4, + "omitted": false, + "sender": true + } + }, { + "streams": [{ + "socket": 5, + "start": 6.000056, + "end": 7.000056, + "seconds": 1, + "bytes": 131072, + "bits_per_second": 1048576, + "packets": 4, + "omitted": false, + "sender": true + }], + "sum": { + "start": 6.000056, + "end": 7.000056, + "seconds": 1, + "bytes": 131072, + "bits_per_second": 1048576, + "packets": 4, + "omitted": false, + "sender": true + } + }, { + "streams": [{ + "socket": 5, + "start": 7.000056, + "end": 8.000056, + "seconds": 1, + "bytes": 131072, + "bits_per_second": 1048576, + "packets": 4, + "omitted": false, + "sender": true + }], + "sum": { + "start": 7.000056, + "end": 8.000056, + "seconds": 1, + "bytes": 131072, + "bits_per_second": 1048576, + "packets": 4, + "omitted": false, + "sender": true + } + }, { + "streams": [{ + "socket": 5, + "start": 8.000056, + "end": 9.000057, + "seconds": 1.0000009536743164, + "bytes": 131072, + "bits_per_second": 1048575.0000009537, + "packets": 4, + "omitted": false, + "sender": true + }], + "sum": { + "start": 8.000056, + "end": 9.000057, + "seconds": 1.0000009536743164, + "bytes": 131072, + "bits_per_second": 1048575.0000009537, + "packets": 4, + "omitted": false, + "sender": true + } + }, { + "streams": [{ + "socket": 5, + "start": 9.000057, + "end": 10.00006, + "seconds": 1.0000029802322388, + "bytes": 131072, + "bits_per_second": 1048572.8750093132, + "packets": 4, + "omitted": false, + "sender": true + }], + "sum": { + "start": 9.000057, + "end": 10.00006, + "seconds": 1.0000029802322388, + "bytes": 131072, + "bits_per_second": 1048572.8750093132, + "packets": 4, + "omitted": false, + "sender": true + } + }], + "end": { + "streams": [{ + "udp": { + "socket": 5, + "start": 0, + "end": 10.00006, + "seconds": 10.00006, + "bytes": 1310720, + "bits_per_second": 1048569.7085817485, + "jitter_ms": 0.011259258240784126, + "lost_packets": 0, + "packets": 40, + "lost_percent": 0, + "out_of_order": 0, + "sender": true + } + }], + "sum": { + "start": 0, + "end": 10.000115, + "seconds": 10.000115, + "bytes": 1310720, + "bits_per_second": 1048569.7085817485, + "jitter_ms": 0.011259258240784126, + "lost_packets": 0, + "packets": 40, + "lost_percent": 0, + "sender": true + }, + "cpu_utilization_percent": { + "host_total": 0.6057128493969417, + "host_user": 0, + "host_system": 0.6057128493969417, + "remote_total": 0.016163250220207454, + "remote_user": 0.01616789349806445, + "remote_system": 0 + } + } +}""" + +RESULT_FAIL = """{ + "start": { + "connected": [], + "version": "iperf 3.7", + "system_info": "Linux vm-openwrt 4.14.171 #0 SMP Thu Feb 27 21:05:12 2020 x86_64" + }, + "intervals": [], + "end": { + }, + "error": "error - unable to connect to server: Connection refused" +}""" diff --git a/openwisp_monitoring/check/tests/test_iperf.py b/openwisp_monitoring/check/tests/test_iperf.py index 66259b2a6..4d7779dd6 100644 --- a/openwisp_monitoring/check/tests/test_iperf.py +++ b/openwisp_monitoring/check/tests/test_iperf.py @@ -1,4 +1,4 @@ -from unittest import mock +from unittest.mock import call, patch from django.test import TransactionTestCase from swapper import load_model @@ -8,6 +8,8 @@ from ...device.tests import TestDeviceMonitoringMixin from 
.. import settings +from ..classes import Iperf +from .iperf_test_result import RESULT_FAIL, RESULT_TCP, RESULT_UDP Chart = load_model('monitoring', 'Chart') AlertSettings = load_model('monitoring', 'AlertSettings') @@ -45,13 +47,24 @@ def tearDownClass(cls): 'lost_percent', ] - @mock.patch.object(iperf_logger, 'warning') + @patch.object(iperf_logger, 'warning') def test_iperf_get_device_connection(self, mock_warn): ckey = self._create_credentials_with_key(port=self.ssh_server.port) dc = self._create_device_connection(credentials=ckey) device = dc.device check = Check.objects.get(check_type=self._IPERF) + with self.subTest('Test active device connection when management tunnel down'): + dc.is_working = True + dc.save() + with patch.object(Iperf, '_connect', return_value=False) as mocked_connect: + check.perform_check(store=False) + mock_warn.assert_called_with( + f'DeviceConnection for "{device}" is not working, iperf check skipped!' + ) + mocked_connect.assert_called_once_with(dc) + self.assertEqual(mocked_connect.call_count, 1) + with self.subTest('Test device connection not working'): dc.is_working = False dc.save() @@ -60,12 +73,69 @@ def test_iperf_get_device_connection(self, mock_warn): f'Failed to get a working DeviceConnection for "{device}", iperf check skipped!' ) - # with self.subTest('Test active device connection when management tunnel down'): - # dc.is_working = True - # dc.save() - # auth_failed = AuthenticationException('Authentication failed.') - # with mock.patch( - # 'openwisp_monitoring.check.classes.iperf.Iperf.connect', - # side_effect=auth_failed, - # ) as mocked_device_connection: - # check.perform_check(store=False) + @patch.object( + Iperf, '_exec_command', side_effect=[(RESULT_TCP, 0), (RESULT_UDP, 0)] + ) + @patch.object( + Iperf, '_get_iperf_servers', return_value=['iperf.openwisptestserver.com'] + ) + def test_iperf_check(self, mock_get_iperf_servers, mock_exec_command): + + ckey = self._create_credentials_with_key(port=self.ssh_server.port) + dc = self._create_device_connection(credentials=ckey) + dc.connect() + device = dc.device + check = Check.objects.get(check_type=self._IPERF) + expected_exec_command_calls = [ + call(dc, 'iperf3 -c iperf.openwisptestserver.com -J'), + call(dc, 'iperf3 -c iperf.openwisptestserver.com -u -J'), + ] + self.assertEqual(Chart.objects.count(), 2) + self.assertEqual(Metric.objects.count(), 2) + check.perform_check(store=False) + iperf_metric = Metric.objects.get(key='iperf').read()[0] + self.assertEqual(iperf_metric['iperf_result'], 1) + self.assertEqual(Chart.objects.count(), 10) + self.assertEqual(Metric.objects.count(), 3) + mock_get_iperf_servers.assert_called_once_with(device.organization.id) + self.assertEqual(mock_exec_command.call_count, 2) + mock_exec_command.assert_has_calls(expected_exec_command_calls) + + @patch.object(Iperf, '_exec_command') + @patch.object( + Iperf, '_get_iperf_servers', return_value=['iperf.openwisptestserver.com'] + ) + @patch.object(iperf_logger, 'warning') + def test_iperf_check_fail( + self, mock_warn, mock_get_iperf_servers, mock_exec_command + ): + mock_exec_command.side_effect = [(RESULT_FAIL, 1), (RESULT_FAIL, 1)] + ckey = self._create_credentials_with_key(port=self.ssh_server.port) + dc = self._create_device_connection(credentials=ckey) + dc.connect() + device = dc.device + check = Check.objects.get(check_type=self._IPERF) + expected_exec_command_calls = [ + call(dc, 'iperf3 -c iperf.openwisptestserver.com -J'), + call(dc, 'iperf3 -c iperf.openwisptestserver.com -u -J'), + ] + 
expected_mock_warns = [ + call( + f'Iperf check failed for "{device}", error - unable to connect to server: Connection refused' + ), + call( + f'Iperf check failed for "{device}", error - unable to connect to server: Connection refused' + ), + ] + self.assertEqual(Chart.objects.count(), 0) + self.assertEqual(Metric.objects.count(), 0) + check.perform_check(store=False) + self.assertEqual(mock_warn.call_count, 2) + mock_warn.assert_has_calls(expected_mock_warns) + iperf_metric = Metric.objects.get(key='iperf').read()[0] + self.assertEqual(iperf_metric['iperf_result'], 0) + self.assertEqual(Chart.objects.count(), 8) + self.assertEqual(Metric.objects.count(), 1) + mock_get_iperf_servers.assert_called_once_with(device.organization.id) + self.assertEqual(mock_exec_command.call_count, 2) + mock_exec_command.assert_has_calls(expected_exec_command_calls) From f89563034ee0244f5849b1e9ae1f922079968ad2 Mon Sep 17 00:00:00 2001 From: Aryamanz29 Date: Fri, 1 Jul 2022 17:40:27 +0530 Subject: [PATCH 20/64] [requested-changes] Set AUTO_IPERF to false --- openwisp_monitoring/check/settings.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/openwisp_monitoring/check/settings.py b/openwisp_monitoring/check/settings.py index e04061cdf..97c6189a4 100644 --- a/openwisp_monitoring/check/settings.py +++ b/openwisp_monitoring/check/settings.py @@ -12,6 +12,5 @@ AUTO_CONFIG_CHECK = get_settings_value('AUTO_DEVICE_CONFIG_CHECK', True) MANAGEMENT_IP_ONLY = get_settings_value('MANAGEMENT_IP_ONLY', True) PING_CHECK_CONFIG = get_settings_value('PING_CHECK_CONFIG', {}) -# By default it should be disabled. -AUTO_IPERF = get_settings_value('AUTO_IPERF', True) +AUTO_IPERF = get_settings_value('AUTO_IPERF', False) IPERF_SERVERS = get_settings_value('IPERF_SERVERS', {}) From c75428e79debde7016665a23b9e88383c848d2f9 Mon Sep 17 00:00:00 2001 From: Aryamanz29 Date: Fri, 1 Jul 2022 18:04:20 +0530 Subject: [PATCH 21/64] [docs] Updated iperf table structure & minor changes --- README.rst | 28 +++++++++++++++------------- 1 file changed, 15 insertions(+), 13 deletions(-) diff --git a/README.rst b/README.rst index 4604c28b8..836c25822 100644 --- a/README.rst +++ b/README.rst @@ -806,17 +806,19 @@ Mobile Access Technology in use Iperf ~~~~~ -+--------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ -| **measurement**: | ``iperf`` | -+--------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ -| **types**: | ``int`` (iperf_result, retransmits, total_packets, lost_packets), ``float`` (sent_bps_tcp, received_bps_tcp, sent_bytes_tcp, received_bytes_tcp, sent_bps_udp, sent_bytes_udp, jitter, lost_percent) | -+--------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ -| **fields**: | ``iperf_result``, ``sent_bps_tcp``, ``received_bps_tcp``, ``sent_bytes_tcp``, ``received_bytes_tcp``, ``retransmits``, ``sent_bps_udp``, ``sent_bytes_udp``, ``jitter``, ``total_packets``, ``lost_packets``, ``lost_percent`` | 
-+--------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ -| **configuration**: | ``iperf`` | -+--------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ -| **charts**: | ``bandwidth_tcp``, ``transfer_tcp``, ``retransmits``, ``bandwidth_udp``, ``transfer_udp``, ``jitter``, ``datagram``, ``datagram_loss`` | -+--------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ ++--------------------+----------------------------------------------------------------------------------------------------------------------------------------+ +| **measurement**: | ``iperf`` | ++--------------------+----------------------------------------------------------------------------------------------------------------------------------------+ +| **types**: | ``int`` (iperf_result, retransmits, total_packets, lost_packets), | +| | ``float`` (sent_bps_tcp, received_bps_tcp, sent_bytes_tcp, received_bytes_tcp, sent_bps_udp, sent_bytes_udp, jitter, lost_percent) | ++--------------------+----------------------------------------------------------------------------------------------------------------------------------------+ +| **fields**: | ``iperf_result``, ``sent_bps_tcp``, ``received_bps_tcp``, ``sent_bytes_tcp``, ``received_bytes_tcp``, ``retransmits``, | +| **fields**: | ``sent_bps_udp``, ``sent_bytes_udp``, ``jitter``, ``total_packets``, ``lost_packets``, ``lost_percent`` | ++--------------------+----------------------------------------------------------------------------------------------------------------------------------------+ +| **configuration**: | ``iperf`` | ++--------------------+----------------------------------------------------------------------------------------------------------------------------------------+ +| **charts**: | ``bandwidth_tcp``, ``transfer_tcp``, ``retransmits``, ``bandwidth_udp``, ``transfer_udp``, ``jitter``, ``datagram``, ``datagram_loss`` | ++--------------------+----------------------------------------------------------------------------------------------------------------------------------------+ **Bandwidth (TCP)**: @@ -1157,8 +1159,8 @@ created automatically for newly registered devices. It's enabled by default. | **default**: | ``False`` | +--------------+-------------+ -Whether `Iperf <#iperf-1>`_ checks are created automatically for devices. The devices need to have SSH access from OpenWISP & must installed -`iperf3 openwrt package `_ on devices in order for this check to work. +Whether `Iperf <#iperf-1>`_ checks are created automatically for devices. The devices must have ``SSH`` `access credential `_ & +`iperf3 openwrt package `_ must be installed on the devices in order for this check to work. 
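+
+For example, to opt in to automatic creation of this check, enable the flag in
+the Django settings module (a minimal sketch; this only toggles automatic check
+creation, the check itself still needs a working SSH connection and ``iperf3``
+installed on the device as explained above):
+
+.. code-block:: python
+
+    # settings.py
+    OPENWISP_MONITORING_AUTO_IPERF = True
+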
``OPENWISP_MONITORING_AUTO_CHARTS`` ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ From 72a9eef80a7f2d0619ef806e8f6880c9d569762a Mon Sep 17 00:00:00 2001 From: Aryamanz29 Date: Fri, 1 Jul 2022 23:22:25 +0530 Subject: [PATCH 22/64] [tests] Improved Iperf tests --- openwisp_monitoring/check/tests/test_iperf.py | 157 ++++++++++++------ 1 file changed, 108 insertions(+), 49 deletions(-) diff --git a/openwisp_monitoring/check/tests/test_iperf.py b/openwisp_monitoring/check/tests/test_iperf.py index 4d7779dd6..7792ce4f2 100644 --- a/openwisp_monitoring/check/tests/test_iperf.py +++ b/openwisp_monitoring/check/tests/test_iperf.py @@ -73,43 +73,7 @@ def test_iperf_get_device_connection(self, mock_warn): f'Failed to get a working DeviceConnection for "{device}", iperf check skipped!' ) - @patch.object( - Iperf, '_exec_command', side_effect=[(RESULT_TCP, 0), (RESULT_UDP, 0)] - ) - @patch.object( - Iperf, '_get_iperf_servers', return_value=['iperf.openwisptestserver.com'] - ) - def test_iperf_check(self, mock_get_iperf_servers, mock_exec_command): - - ckey = self._create_credentials_with_key(port=self.ssh_server.port) - dc = self._create_device_connection(credentials=ckey) - dc.connect() - device = dc.device - check = Check.objects.get(check_type=self._IPERF) - expected_exec_command_calls = [ - call(dc, 'iperf3 -c iperf.openwisptestserver.com -J'), - call(dc, 'iperf3 -c iperf.openwisptestserver.com -u -J'), - ] - self.assertEqual(Chart.objects.count(), 2) - self.assertEqual(Metric.objects.count(), 2) - check.perform_check(store=False) - iperf_metric = Metric.objects.get(key='iperf').read()[0] - self.assertEqual(iperf_metric['iperf_result'], 1) - self.assertEqual(Chart.objects.count(), 10) - self.assertEqual(Metric.objects.count(), 3) - mock_get_iperf_servers.assert_called_once_with(device.organization.id) - self.assertEqual(mock_exec_command.call_count, 2) - mock_exec_command.assert_has_calls(expected_exec_command_calls) - - @patch.object(Iperf, '_exec_command') - @patch.object( - Iperf, '_get_iperf_servers', return_value=['iperf.openwisptestserver.com'] - ) - @patch.object(iperf_logger, 'warning') - def test_iperf_check_fail( - self, mock_warn, mock_get_iperf_servers, mock_exec_command - ): - mock_exec_command.side_effect = [(RESULT_FAIL, 1), (RESULT_FAIL, 1)] + def test_iperf_check(self): ckey = self._create_credentials_with_key(port=self.ssh_server.port) dc = self._create_device_connection(credentials=ckey) dc.connect() @@ -127,15 +91,110 @@ def test_iperf_check_fail( f'Iperf check failed for "{device}", error - unable to connect to server: Connection refused' ), ] - self.assertEqual(Chart.objects.count(), 0) - self.assertEqual(Metric.objects.count(), 0) - check.perform_check(store=False) - self.assertEqual(mock_warn.call_count, 2) - mock_warn.assert_has_calls(expected_mock_warns) - iperf_metric = Metric.objects.get(key='iperf').read()[0] - self.assertEqual(iperf_metric['iperf_result'], 0) - self.assertEqual(Chart.objects.count(), 8) - self.assertEqual(Metric.objects.count(), 1) - mock_get_iperf_servers.assert_called_once_with(device.organization.id) - self.assertEqual(mock_exec_command.call_count, 2) - mock_exec_command.assert_has_calls(expected_exec_command_calls) + + with self.subTest('Test iperf check passes in both TCP & UDP'): + with patch.object( + Iperf, '_exec_command' + ) as mock_exec_command, patch.object( + Iperf, + '_get_iperf_servers', + return_value=['iperf.openwisptestserver.com'], + ) as mock_get_iperf_servers: + mock_exec_command.side_effect = [(RESULT_TCP, 0), (RESULT_UDP, 0)] + + 
self.assertEqual(Chart.objects.count(), 2) + self.assertEqual(Metric.objects.count(), 2) + result = check.perform_check(store=False) + for key in self._RESULT_KEYS: + self.assertIn(key, result) + self.assertEqual(result['iperf_result'], 1) + self.assertEqual(result['sent_bps_tcp'], 44.04) + self.assertEqual(result['received_bytes_tcp'], 55.05) + self.assertEqual(result['jitter'], 0.01) + self.assertEqual(result['total_packets'], 40) + self.assertEqual(Chart.objects.count(), 10) + self.assertEqual(Metric.objects.count(), 3) + self.assertEqual(mock_exec_command.call_count, 2) + mock_exec_command.assert_has_calls(expected_exec_command_calls) + mock_get_iperf_servers.assert_called_once_with(device.organization.id) + + with self.subTest('Test iperf check fails in both TCP & UDP'): + with patch.object( + Iperf, '_exec_command' + ) as mock_exec_command, patch.object( + Iperf, + '_get_iperf_servers', + return_value=['iperf.openwisptestserver.com'], + ) as mock_get_iperf_servers, patch.object( + iperf_logger, 'warning' + ) as mock_warn: + mock_exec_command.side_effect = [(RESULT_FAIL, 1), (RESULT_FAIL, 1)] + + result = check.perform_check(store=False) + for key in self._RESULT_KEYS: + self.assertIn(key, result) + self.assertEqual(result['iperf_result'], 0) + self.assertEqual(result['sent_bps_tcp'], 0.0) + self.assertEqual(result['received_bps_tcp'], 0.0) + self.assertEqual(result['jitter'], 0.0) + self.assertEqual(result['total_packets'], 0) + self.assertEqual(Chart.objects.count(), 10) + self.assertEqual(Metric.objects.count(), 3) + self.assertEqual(mock_exec_command.call_count, 2) + mock_warn.assert_has_calls(expected_mock_warns) + mock_exec_command.assert_has_calls(expected_exec_command_calls) + mock_get_iperf_servers.assert_called_once_with(device.organization.id) + + with self.subTest('Test iperf check TCP pass UDP fail'): + with patch.object( + Iperf, '_exec_command' + ) as mock_exec_command, patch.object( + Iperf, + '_get_iperf_servers', + return_value=['iperf.openwisptestserver.com'], + ) as mock_get_iperf_servers, patch.object( + iperf_logger, 'warning' + ) as mock_warn: + mock_exec_command.side_effect = [(RESULT_TCP, 0), (RESULT_FAIL, 1)] + + result = check.perform_check(store=False) + for key in self._RESULT_KEYS: + self.assertIn(key, result) + self.assertEqual(result['iperf_result'], 1) + self.assertEqual(result['sent_bps_tcp'], 44.04) + self.assertEqual(result['sent_bytes_tcp'], 55.05) + self.assertEqual(result['jitter'], 0.0) + self.assertEqual(result['total_packets'], 0) + self.assertEqual(Chart.objects.count(), 10) + self.assertEqual(Metric.objects.count(), 3) + self.assertEqual(mock_exec_command.call_count, 2) + mock_warn.assert_has_calls(expected_mock_warns[1:]) + mock_exec_command.assert_has_calls(expected_exec_command_calls) + mock_get_iperf_servers.assert_called_once_with(device.organization.id) + + with self.subTest('Test iperf check TCP fail UDP pass'): + with patch.object( + Iperf, '_exec_command' + ) as mock_exec_command, patch.object( + Iperf, + '_get_iperf_servers', + return_value=['iperf.openwisptestserver.com'], + ) as mock_get_iperf_servers, patch.object( + iperf_logger, 'warning' + ) as mock_warn: + mock_exec_command.side_effect = [(RESULT_FAIL, 1), (RESULT_UDP, 0)] + + result = check.perform_check(store=False) + for key in self._RESULT_KEYS: + self.assertIn(key, result) + self.assertEqual(result['iperf_result'], 1) + self.assertEqual(result['sent_bps_tcp'], 0.0) + self.assertEqual(result['received_bps_tcp'], 0.0) + self.assertEqual(result['jitter'], 0.01) + 
self.assertEqual(result['total_packets'], 40) + self.assertEqual(Chart.objects.count(), 10) + self.assertEqual(Metric.objects.count(), 3) + self.assertEqual(mock_exec_command.call_count, 2) + mock_warn.assert_has_calls(expected_mock_warns[1:]) + mock_exec_command.assert_has_calls(expected_exec_command_calls) + mock_get_iperf_servers.assert_called_once_with(device.organization.id) From 59089944b285b38ec16847926ea16bb035c1c5c4 Mon Sep 17 00:00:00 2001 From: Aryamanz29 Date: Mon, 4 Jul 2022 02:59:51 +0530 Subject: [PATCH 23/64] [change] Made Iperf check command configurable #398 - Made Iperf check command configurable - Added some more tests and improved previous ones. Closes #398 --- openwisp_monitoring/check/classes/iperf.py | 64 ++++- openwisp_monitoring/check/settings.py | 1 + openwisp_monitoring/check/tests/test_iperf.py | 260 +++++++++++++++--- 3 files changed, 283 insertions(+), 42 deletions(-) diff --git a/openwisp_monitoring/check/classes/iperf.py b/openwisp_monitoring/check/classes/iperf.py index 8d265e6cb..02ca5b45b 100644 --- a/openwisp_monitoring/check/classes/iperf.py +++ b/openwisp_monitoring/check/classes/iperf.py @@ -1,9 +1,13 @@ import json import logging +from django.core.exceptions import ValidationError +from jsonschema import draft7_format_checker, validate +from jsonschema.exceptions import ValidationError as SchemaError from swapper import load_model from openwisp_controller.connection.settings import UPDATE_STRATEGIES +from openwisp_utils.utils import deep_merge_dicts from .. import settings as app_settings from .base import BaseCheck @@ -17,9 +21,57 @@ AlertSettings = load_model('monitoring', 'AlertSettings') DeviceConnection = load_model('connection', 'DeviceConnection') +DEFAULT_IPERF_CHECK_CONFIG = { + 'port': { + 'type': 'integer', + 'default': 5201, + # max, min port chosen from iperf3 docs + 'minimum': 1, + 'maximum': 65535, + }, + 'time': { + 'type': 'integer', + # Sets the interval time in seconds + # between periodic bandwidth, jitter, and loss reports. + # If zero, no periodic reports are printed. 
+ 'default': 10, + 'minimum': 1, + # arbitrary chosen to avoid slowing down the queue (30min) + 'maximum': 1800, + }, +} + + +def get_iperf_schema(): + schema = { + '$schema': 'http://json-schema.org/draft-07/schema#', + 'type': 'object', + 'additionalProperties': False, + } + schema['properties'] = deep_merge_dicts( + DEFAULT_IPERF_CHECK_CONFIG, app_settings.IPERF_CHECK_CONFIG + ) + return schema + class Iperf(BaseCheck): + + schema = get_iperf_schema() + + def validate_params(self): + try: + validate(self.params, self.schema, format_checker=draft7_format_checker) + except SchemaError as e: + message = 'Invalid param' + path = '/'.join(e.path) + if path: + message = '{0} in "{1}"'.format(message, path) + message = '{0}: {1}'.format(message, e.message) + raise ValidationError({'params': message}) from e + def check(self, store=True): + port = self._get_param('port') + time = self._get_param('time') device = self.related_object device_connection = self._get_device_connection(device) if not device_connection: @@ -36,12 +88,12 @@ def check(self, store=True): servers = self._get_iperf_servers(device.organization.id) # TCP mode - command = f'iperf3 -c {servers[0]} -J' + command = f'iperf3 -c {servers[0]} -p {port} -t {time} -J' res, exit_code = self._exec_command(device_connection, command) result_tcp = self._get_iperf_result(res, exit_code, device, mode='TCP') # UDP mode - command = f'iperf3 -c {servers[0]} -u -J' + command = f'iperf3 -c {servers[0]} -p {port} -t {time} -u -J' res, exit_code = self._exec_command(device_connection, command) result_udp = self._get_iperf_result(res, exit_code, device, mode='UDP') @@ -86,6 +138,12 @@ def _connect(self, dc): """ return dc.connect() + def _get_param(self, param): + """ + Gets specified param or its default value according to the schema + """ + return self.params.get(param, self.schema['properties'][param]['default']) + def _get_iperf_result(self, res, exit_code, device, mode): """ Get iperf test result @@ -146,7 +204,7 @@ def _get_iperf_result(self, res, exit_code, device, mode): else: sent_bytes = res_dict['end']['sum']['bytes'] sent_bytes_MB = sent_bytes / 1000000 - sent_bps = res_dict['end']['sum']['bytes'] + sent_bps = res_dict['end']['sum']['bits_per_second'] sent_Mbps = sent_bps / 1000000 jitter_ms = res_dict['end']['sum']['jitter_ms'] jitter_ms = res_dict['end']['sum']['jitter_ms'] diff --git a/openwisp_monitoring/check/settings.py b/openwisp_monitoring/check/settings.py index 97c6189a4..3f99a881c 100644 --- a/openwisp_monitoring/check/settings.py +++ b/openwisp_monitoring/check/settings.py @@ -14,3 +14,4 @@ PING_CHECK_CONFIG = get_settings_value('PING_CHECK_CONFIG', {}) AUTO_IPERF = get_settings_value('AUTO_IPERF', False) IPERF_SERVERS = get_settings_value('IPERF_SERVERS', {}) +IPERF_CHECK_CONFIG = get_settings_value('IPERF_CHECK_CONFIG', {}) diff --git a/openwisp_monitoring/check/tests/test_iperf.py b/openwisp_monitoring/check/tests/test_iperf.py index 7792ce4f2..b283e8a03 100644 --- a/openwisp_monitoring/check/tests/test_iperf.py +++ b/openwisp_monitoring/check/tests/test_iperf.py @@ -1,9 +1,11 @@ from unittest.mock import call, patch +from django.core.exceptions import ValidationError from django.test import TransactionTestCase from swapper import load_model from openwisp_controller.connection.tests.utils import CreateConnectionsMixin, SshServer +from openwisp_monitoring.check.classes.iperf import get_iperf_schema from openwisp_monitoring.check.classes.iperf import logger as iperf_logger from ...device.tests import 
TestDeviceMonitoringMixin @@ -31,6 +33,26 @@ def tearDownClass(cls): super().tearDownClass() cls.mock_ssh_server.__exit__() + def _create_iperf_test_env(self): + ckey = self._create_credentials_with_key(port=self.ssh_server.port) + dc = self._create_device_connection(credentials=ckey) + dc.connect() + self.device = dc.device + self._EXPECTED_COMMAND_CALLS = [ + call(dc, 'iperf3 -c iperf.openwisptestserver.com -p 5201 -t 10 -J'), + call(dc, 'iperf3 -c iperf.openwisptestserver.com -p 5201 -t 10 -u -J'), + ] + self._EXPECTED_WARN_CALLS = [ + call( + f'Iperf check failed for "{self.device}", error - unable to connect to server: Connection refused' # noqa + ), + call( + f'Iperf check failed for "{self.device}", error - unable to connect to server: Connection refused' # noqa + ), + ] + check = Check.objects.get(check_type=self._IPERF) + return check, dc + _IPERF = settings.CHECK_CLASSES[2][0] _RESULT_KEYS = [ 'iperf_result', @@ -47,20 +69,114 @@ def tearDownClass(cls): 'lost_percent', ] + @patch.object(Iperf, '_exec_command') + @patch.object( + Iperf, '_get_iperf_servers', return_value=['iperf.openwisptestserver.com'] + ) @patch.object(iperf_logger, 'warning') - def test_iperf_get_device_connection(self, mock_warn): - ckey = self._create_credentials_with_key(port=self.ssh_server.port) - dc = self._create_device_connection(credentials=ckey) - device = dc.device - check = Check.objects.get(check_type=self._IPERF) + def test_iperf_check_no_params( + self, mock_warn, mock_get_iperf_servers, mock_exec_command + ): + mock_exec_command.side_effect = [(RESULT_TCP, 0), (RESULT_UDP, 0)] + + # By default check params {} + check, _ = self._create_iperf_test_env() + result = check.perform_check(store=False) + for key in self._RESULT_KEYS: + self.assertIn(key, result) + self.assertEqual(result['iperf_result'], 1) + self.assertEqual(result['sent_bps_tcp'], 44.04) + self.assertEqual(result['received_bytes_tcp'], 55.05) + self.assertEqual(result['jitter'], 0.01) + self.assertEqual(result['total_packets'], 40) + self.assertEqual(mock_warn.call_count, 0) + self.assertEqual(mock_exec_command.call_count, 2) + mock_exec_command.assert_has_calls(self._EXPECTED_COMMAND_CALLS) + mock_get_iperf_servers.assert_called_once_with(self.device.organization.id) + + @patch.object(Iperf, '_exec_command') + @patch.object( + Iperf, '_get_iperf_servers', return_value=['iperf.openwisptestserver.com'] + ) + @patch.object(iperf_logger, 'warning') + def test_iperf_check_params( + self, mock_warn, mock_get_iperf_servers, mock_exec_command + ): + mock_exec_command.side_effect = [(RESULT_TCP, 0), (RESULT_UDP, 0)] + + check, dc = self._create_iperf_test_env() + test_params = {'port': 6201, 'time': 20} + check.params = test_params + check.save() + self._EXPECTED_COMMAND_CALLS = [ + call( + dc, + f'iperf3 -c iperf.openwisptestserver.com -p {test_params["port"]} -t {test_params["time"]} -J', # noqa + ), + call( + dc, + f'iperf3 -c iperf.openwisptestserver.com -p {test_params["port"]} -t {test_params["time"]} -u -J', # noqa + ), + ] + result = check.perform_check(store=False) + for key in self._RESULT_KEYS: + self.assertIn(key, result) + self.assertEqual(result['iperf_result'], 1) + self.assertEqual(result['sent_bps_tcp'], 44.04) + self.assertEqual(result['received_bytes_tcp'], 55.05) + self.assertEqual(result['jitter'], 0.01) + self.assertEqual(result['total_packets'], 40) + self.assertEqual(mock_warn.call_count, 0) + self.assertEqual(mock_exec_command.call_count, 2) + mock_exec_command.assert_has_calls(self._EXPECTED_COMMAND_CALLS) + 
mock_get_iperf_servers.assert_called_once_with(self.device.organization.id) + + @patch.object(Iperf, '_exec_command') + @patch.object( + Iperf, '_get_iperf_servers', return_value=['iperf.openwisptestserver.com'] + ) + @patch.object(iperf_logger, 'warning') + @patch.object( + settings, + 'IPERF_CHECK_CONFIG', + { + 'port': {'default': 9201}, + 'time': {'default': 120}, + }, + ) + def test_iperf_check_config( + self, mock_warn, mock_get_iperf_servers, mock_exec_command, *args + ): + mock_exec_command.side_effect = [(RESULT_TCP, 0), (RESULT_UDP, 0)] + + check, dc = self._create_iperf_test_env() + self._EXPECTED_COMMAND_CALLS = [ + call(dc, 'iperf3 -c iperf.openwisptestserver.com -p 9201 -t 120 -J'), + call(dc, 'iperf3 -c iperf.openwisptestserver.com -p 9201 -t 120 -u -J'), + ] + with patch.object(Iperf, 'schema', get_iperf_schema()): + result = check.perform_check(store=False) + for key in self._RESULT_KEYS: + self.assertIn(key, result) + self.assertEqual(result['iperf_result'], 1) + self.assertEqual(result['sent_bps_tcp'], 44.04) + self.assertEqual(result['received_bytes_tcp'], 55.05) + self.assertEqual(result['jitter'], 0.01) + self.assertEqual(result['total_packets'], 40) + self.assertEqual(mock_warn.call_count, 0) + self.assertEqual(mock_exec_command.call_count, 2) + mock_exec_command.assert_has_calls(self._EXPECTED_COMMAND_CALLS) + mock_get_iperf_servers.assert_called_once_with(self.device.organization.id) + + @patch.object(iperf_logger, 'warning') + def test_iperf_device_connection(self, mock_warn): + check, dc = self._create_iperf_test_env() with self.subTest('Test active device connection when management tunnel down'): - dc.is_working = True - dc.save() with patch.object(Iperf, '_connect', return_value=False) as mocked_connect: check.perform_check(store=False) mock_warn.assert_called_with( - f'DeviceConnection for "{device}" is not working, iperf check skipped!' + f'DeviceConnection for "{self.device}" is not working, iperf check skipped!' ) mocked_connect.assert_called_once_with(dc) self.assertEqual(mocked_connect.call_count, 1) @@ -70,28 +186,59 @@ def test_iperf_get_device_connection(self, mock_warn): dc.save() check.perform_check(store=False) mock_warn.assert_called_with( - f'Failed to get a working DeviceConnection for "{device}", iperf check skipped!' + f'Failed to get a working DeviceConnection for "{self.device}", iperf check skipped!' 
) - def test_iperf_check(self): - ckey = self._create_credentials_with_key(port=self.ssh_server.port) - dc = self._create_device_connection(credentials=ckey) - dc.connect() - device = dc.device - check = Check.objects.get(check_type=self._IPERF) - expected_exec_command_calls = [ - call(dc, 'iperf3 -c iperf.openwisptestserver.com -J'), - call(dc, 'iperf3 -c iperf.openwisptestserver.com -u -J'), - ] - expected_mock_warns = [ - call( - f'Iperf check failed for "{device}", error - unable to connect to server: Connection refused' - ), - call( - f'Iperf check failed for "{device}", error - unable to connect to server: Connection refused' - ), + def test_iperf_check_content_object_none(self): + check = Check(name='Iperf check', check_type=self._IPERF, params={}) + try: + check.check_instance.validate() + except ValidationError as e: + self.assertIn('device', str(e)) + else: + self.fail('ValidationError not raised') + + def test_iperf_check_content_object_not_device(self): + check = Check( + name='Iperf check', + check_type=self._IPERF, + content_object=self._create_user(), + params={}, + ) + try: + check.check_instance.validate() + except ValidationError as e: + self.assertIn('device', str(e)) + else: + self.fail('ValidationError not raised') + + def test_iperf_check_schema_violation(self): + check, _ = self._create_iperf_test_env() + invalid_params = [ + {'port': -1232}, + {'time': 0}, + {'port': 'invalid port'}, + {'time': 'invalid time'}, + {'port': 'invalid port'}, + {'time': 'invalid time'}, + {'port': 0}, + {'port': 797979}, + {'time': 36000}, + {'port': ''}, + {'time': ''}, ] + for invalid_params in invalid_params: + check.params = invalid_params + check.save() + try: + check.check_instance.validate() + except ValidationError as e: + self.assertIn('Invalid param', str(e)) + else: + self.fail('ValidationError not raised') + def test_iperf_check(self): + check, _ = self._create_iperf_test_env() with self.subTest('Test iperf check passes in both TCP & UDP'): with patch.object( Iperf, '_exec_command' @@ -99,24 +246,53 @@ def test_iperf_check(self): Iperf, '_get_iperf_servers', return_value=['iperf.openwisptestserver.com'], - ) as mock_get_iperf_servers: + ) as mock_get_iperf_servers, patch.object( + iperf_logger, 'warning' + ) as mock_warn: mock_exec_command.side_effect = [(RESULT_TCP, 0), (RESULT_UDP, 0)] - self.assertEqual(Chart.objects.count(), 2) self.assertEqual(Metric.objects.count(), 2) result = check.perform_check(store=False) for key in self._RESULT_KEYS: self.assertIn(key, result) + self.assertEqual(result['iperf_result'], 1) self.assertEqual(result['sent_bps_tcp'], 44.04) self.assertEqual(result['received_bytes_tcp'], 55.05) + self.assertEqual(result['retransmits'], 0) + self.assertEqual(result['sent_bps_udp'], 1.05) + self.assertEqual(result['sent_bytes_udp'], 1.31) self.assertEqual(result['jitter'], 0.01) self.assertEqual(result['total_packets'], 40) + self.assertEqual(result['lost_packets'], 0) + self.assertEqual(result['lost_percent'], 0.0) self.assertEqual(Chart.objects.count(), 10) + self.assertEqual(Check.objects.count(), 3) + + iperf_metric = Metric.objects.get(key='iperf') self.assertEqual(Metric.objects.count(), 3) + self.assertEqual(iperf_metric.content_object, self.device) + points = iperf_metric.read(limit=None, extra_fields=list(result.keys())) + self.assertEqual(len(points), 1) + self.assertEqual(points[0]['iperf_result'], result['iperf_result']) + self.assertEqual(points[0]['sent_bps_tcp'], result['sent_bps_tcp']) + self.assertEqual( + 
points[0]['received_bytes_tcp'], result['received_bytes_tcp'] + ) + self.assertEqual(points[0]['retransmits'], result['retransmits']) + self.assertEqual(points[0]['sent_bps_udp'], result['sent_bps_udp']) + self.assertEqual(points[0]['sent_bytes_udp'], result['sent_bytes_udp']) + self.assertEqual(points[0]['jitter'], result['jitter']) + self.assertEqual(points[0]['total_packets'], result['total_packets']) + self.assertEqual(points[0]['lost_packets'], result['lost_packets']) + self.assertEqual(points[0]['lost_percent'], result['lost_percent']) + + self.assertEqual(mock_warn.call_count, 0) self.assertEqual(mock_exec_command.call_count, 2) - mock_exec_command.assert_has_calls(expected_exec_command_calls) - mock_get_iperf_servers.assert_called_once_with(device.organization.id) + mock_exec_command.assert_has_calls(self._EXPECTED_COMMAND_CALLS) + mock_get_iperf_servers.assert_called_once_with( + self.device.organization.id + ) with self.subTest('Test iperf check fails in both TCP & UDP'): with patch.object( @@ -141,9 +317,11 @@ def test_iperf_check(self): self.assertEqual(Chart.objects.count(), 10) self.assertEqual(Metric.objects.count(), 3) self.assertEqual(mock_exec_command.call_count, 2) - mock_warn.assert_has_calls(expected_mock_warns) - mock_exec_command.assert_has_calls(expected_exec_command_calls) - mock_get_iperf_servers.assert_called_once_with(device.organization.id) + mock_warn.assert_has_calls(self._EXPECTED_WARN_CALLS) + mock_exec_command.assert_has_calls(self._EXPECTED_COMMAND_CALLS) + mock_get_iperf_servers.assert_called_once_with( + self.device.organization.id + ) with self.subTest('Test iperf check TCP pass UDP fail'): with patch.object( @@ -168,9 +346,11 @@ def test_iperf_check(self): self.assertEqual(Chart.objects.count(), 10) self.assertEqual(Metric.objects.count(), 3) self.assertEqual(mock_exec_command.call_count, 2) - mock_warn.assert_has_calls(expected_mock_warns[1:]) - mock_exec_command.assert_has_calls(expected_exec_command_calls) - mock_get_iperf_servers.assert_called_once_with(device.organization.id) + mock_warn.assert_has_calls(self._EXPECTED_WARN_CALLS[1:]) + mock_exec_command.assert_has_calls(self._EXPECTED_COMMAND_CALLS) + mock_get_iperf_servers.assert_called_once_with( + self.device.organization.id + ) with self.subTest('Test iperf check TCP fail UDP pass'): with patch.object( @@ -195,6 +375,8 @@ def test_iperf_check(self): self.assertEqual(Chart.objects.count(), 10) self.assertEqual(Metric.objects.count(), 3) self.assertEqual(mock_exec_command.call_count, 2) - mock_warn.assert_has_calls(expected_mock_warns[1:]) - mock_exec_command.assert_has_calls(expected_exec_command_calls) - mock_get_iperf_servers.assert_called_once_with(device.organization.id) + mock_warn.assert_has_calls(self._EXPECTED_WARN_CALLS[1:]) + mock_exec_command.assert_has_calls(self._EXPECTED_COMMAND_CALLS) + mock_get_iperf_servers.assert_called_once_with( + self.device.organization.id + ) From ec80f2f7db65f89252a328f617cd565e0b46c07c Mon Sep 17 00:00:00 2001 From: Aryamanz29 Date: Tue, 5 Jul 2022 18:20:22 +0530 Subject: [PATCH 24/64] [docs] Added OPENWISP_MONITORING_IPERF_CHECK_CONFIG - Added Iperf configurable command in docs. - Minor changes in tests. 
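
Note: the configuration documented here is merged into the check's JSON schema
and validated with jsonschema. Below is a simplified, self-contained sketch of
that behaviour; the real implementation uses
``openwisp_utils.utils.deep_merge_dicts`` inside ``get_iperf_schema()`` and
``validate_params()`` added in the previous patch, and the ``build_schema``
helper shown here is an illustrative stand-in only.

.. code-block:: python

    # sketch: how IPERF_CHECK_CONFIG overrides the defaults and how check
    # params are validated against the resulting schema
    from jsonschema import draft7_format_checker, validate

    DEFAULT_IPERF_CHECK_CONFIG = {
        'port': {'type': 'integer', 'default': 5201, 'minimum': 1, 'maximum': 65535},
        'time': {'type': 'integer', 'default': 10, 'minimum': 1, 'maximum': 1800},
    }

    def build_schema(overrides):
        # merge user overrides into the defaults, property by property
        properties = {
            key: {**value, **overrides.get(key, {})}
            for key, value in DEFAULT_IPERF_CHECK_CONFIG.items()
        }
        return {
            '$schema': 'http://json-schema.org/draft-07/schema#',
            'type': 'object',
            'additionalProperties': False,
            'properties': properties,
        }

    schema = build_schema({'port': {'default': 6209}})
    # valid params pass silently
    validate({'port': 6209, 'time': 20}, schema, format_checker=draft7_format_checker)
    # out-of-range params (e.g. {'port': 797979}) raise a jsonschema
    # ValidationError, which validate_params() turns into a Django ValidationError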
--- README.rst | 44 +++++++++++++++++++ openwisp_monitoring/check/tests/test_iperf.py | 23 +++++++++- 2 files changed, 65 insertions(+), 2 deletions(-) diff --git a/README.rst b/README.rst index 836c25822..208636807 100644 --- a/README.rst +++ b/README.rst @@ -1006,6 +1006,9 @@ It also supports tuning of various parameters related to timing, buffers and pro This check is ``disabled`` by **default**, but you may choose to enable auto creation of this check by setting `OPENWISP_MONITORING_AUTO_IPERF <#OPENWISP_MONITORING_AUTO_IPERF>`_ to ``True``. +You can also change the default values used for iperf checks using +`OPENWISP_MONITORING_IPERF_CHECK_CONFIG <#OPENWISP_MONITORING_IPERF_CHECK_CONFIG>`_ setting. + Instructions to configure Iperf Check ------------------------------------- @@ -1162,6 +1165,47 @@ created automatically for newly registered devices. It's enabled by default. Whether `Iperf <#iperf-1>`_ checks are created automatically for devices. The devices must have ``SSH`` `access credential `_ & `iperf3 openwrt package `_ must be installed on the devices in order for this check to work. +``OPENWISP_MONITORING_IPERF_CHECK_CONFIG`` +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + ++--------------+-------------+ +| **type**: | ``dict`` | ++--------------+-------------+ +| **default**: | ``{}`` | ++--------------+-------------+ + +This setting allows to override the default iperf check configuration defined in +``openwisp_monitoring.check.classes.iperf.DEFAULT_IPERF_CHECK_CONFIG``. + +For example, if you want to change only the **port number** of +``iperf`` check you can use: + +.. code-block:: python + + OPENWISP_MONITORING_IPERF_CHECK_CONFIG = { + 'port': { + 'default': 6209, + }, + } + +Similar to `OPENWISP_MONITORING_PING_CHECK_CONFIG <#OPENWISP_MONITORING_PING_CHECK_CONFIG>`_ , +If you want to overriding the default value for any parameter beyond the maximum or minimum value defined in +``openwisp_monitoring.check.classes.iperf.DEFAULT_IPERF_CHECK_CONFIG``, +you will also need to override the ``maximum`` or ``minimum`` fields as following: + +.. code-block:: python + + OPENWISP_MONITORING_IPERF_CHECK_CONFIG = { + 'time': { + 'default': 2000, + 'minimum': 2100, + 'maximum': 3600, + }, + } + +**Note:** Above ``maximum`` and ``minimum`` values are only used for +validating custom parameters of a ``Check`` object. + ``OPENWISP_MONITORING_AUTO_CHARTS`` ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/openwisp_monitoring/check/tests/test_iperf.py b/openwisp_monitoring/check/tests/test_iperf.py index b283e8a03..00c69c643 100644 --- a/openwisp_monitoring/check/tests/test_iperf.py +++ b/openwisp_monitoring/check/tests/test_iperf.py @@ -4,6 +4,7 @@ from django.test import TransactionTestCase from swapper import load_model +from openwisp_controller.connection.settings import UPDATE_STRATEGIES from openwisp_controller.connection.tests.utils import CreateConnectionsMixin, SshServer from openwisp_monitoring.check.classes.iperf import get_iperf_schema from openwisp_monitoring.check.classes.iperf import logger as iperf_logger @@ -181,6 +182,14 @@ def test_iperf_device_connection(self, mock_warn): mocked_connect.assert_called_once_with(dc) self.assertEqual(mocked_connect.call_count, 1) + with self.subTest('Test device connection is not enabled'): + dc.enabled = False + dc.save() + check.perform_check(store=False) + mock_warn.assert_called_with( + f'Failed to get a working DeviceConnection for "{self.device}", iperf check skipped!' 
+ ) + with self.subTest('Test device connection not working'): dc.is_working = False dc.save() @@ -189,6 +198,16 @@ def test_iperf_device_connection(self, mock_warn): f'Failed to get a working DeviceConnection for "{self.device}", iperf check skipped!' ) + with self.subTest('Test device connection is not with right update strategy'): + dc.update_strategy = UPDATE_STRATEGIES[1][0] + dc.is_working = True + dc.enabled = True + dc.save() + check.perform_check(store=False) + mock_warn.assert_called_with( + f'Failed to get a working DeviceConnection for "{self.device}", iperf check skipped!' + ) + def test_iperf_check_content_object_none(self): check = Check(name='Iperf check', check_type=self._IPERF, params={}) try: @@ -219,8 +238,8 @@ def test_iperf_check_schema_violation(self): {'time': 0}, {'port': 'invalid port'}, {'time': 'invalid time'}, - {'port': 'invalid port'}, - {'time': 'invalid time'}, + {'port': '-12a'}, + {'time': '3test22'}, {'port': 0}, {'port': 797979}, {'time': 36000}, From 50921cbff514ff58034a6e5954890e9e5b69eca2 Mon Sep 17 00:00:00 2001 From: Aryamanz29 Date: Fri, 8 Jul 2022 01:00:44 +0530 Subject: [PATCH 25/64] [fix] Fixed JSONDecodeError if iperf3 is not installed #412 Fixes #412 --- openwisp_monitoring/check/classes/iperf.py | 16 +++++++++++++- openwisp_monitoring/check/tests/test_iperf.py | 22 +++++++++++++++++++ 2 files changed, 37 insertions(+), 1 deletion(-) diff --git a/openwisp_monitoring/check/classes/iperf.py b/openwisp_monitoring/check/classes/iperf.py index 02ca5b45b..0aea24806 100644 --- a/openwisp_monitoring/check/classes/iperf.py +++ b/openwisp_monitoring/check/classes/iperf.py @@ -90,6 +90,14 @@ def check(self, store=True): # TCP mode command = f'iperf3 -c {servers[0]} -p {port} -t {time} -J' res, exit_code = self._exec_command(device_connection, command) + + # Exit code 127 : command doesn't exist + if exit_code == 127: + logger.warning( + f'Iperf3 is not installed on the "{device}", error - {res.strip()}' + ) + return + result_tcp = self._get_iperf_result(res, exit_code, device, mode='TCP') # UDP mode @@ -148,7 +156,13 @@ def _get_iperf_result(self, res, exit_code, device, mode): """ Get iperf test result """ - res_dict = json.loads(res) + + try: + res_dict = json.loads(res) + except json.decoder.JSONDecodeError: + # Errors other than iperf3 test errors + res_dict = {'error': f'error - {res.strip()}'} + if mode == 'TCP': if exit_code != 0: logger.warning( diff --git a/openwisp_monitoring/check/tests/test_iperf.py b/openwisp_monitoring/check/tests/test_iperf.py index 00c69c643..31dab96be 100644 --- a/openwisp_monitoring/check/tests/test_iperf.py +++ b/openwisp_monitoring/check/tests/test_iperf.py @@ -258,6 +258,28 @@ def test_iperf_check_schema_violation(self): def test_iperf_check(self): check, _ = self._create_iperf_test_env() + error = "ash: iperf3: not found" + + with self.subTest('Test iperf3 is not installed on the device'): + with patch.object( + Iperf, '_exec_command' + ) as mock_exec_command, patch.object( + Iperf, + '_get_iperf_servers', + return_value=['iperf.openwisptestserver.com'], + ) as mock_get_iperf_servers: + mock_exec_command.side_effect = [(error, 127)] + with patch.object(iperf_logger, 'warning') as mock_warn: + check.perform_check(store=False) + mock_warn.assert_called_with( + f'Iperf3 is not installed on the "{self.device}", error - {error}' + ) + self.assertEqual(mock_warn.call_count, 1) + self.assertEqual(mock_exec_command.call_count, 1) + mock_get_iperf_servers.assert_called_once_with( + self.device.organization.id + ) + with 
self.subTest('Test iperf check passes in both TCP & UDP'): with patch.object( Iperf, '_exec_command' From 56543bf191c7df6d649eeff734fe4e98d8b9a0af Mon Sep 17 00:00:00 2001 From: Aryamanz29 Date: Fri, 8 Jul 2022 01:51:10 +0530 Subject: [PATCH 26/64] [requested-changes] Docs improvement --- README.rst | 34 ++++++++++++++++++---------------- 1 file changed, 18 insertions(+), 16 deletions(-) diff --git a/README.rst b/README.rst index 208636807..1ad85ac83 100644 --- a/README.rst +++ b/README.rst @@ -809,11 +809,11 @@ Iperf +--------------------+----------------------------------------------------------------------------------------------------------------------------------------+ | **measurement**: | ``iperf`` | +--------------------+----------------------------------------------------------------------------------------------------------------------------------------+ -| **types**: | ``int`` (iperf_result, retransmits, total_packets, lost_packets), | -| | ``float`` (sent_bps_tcp, received_bps_tcp, sent_bytes_tcp, received_bytes_tcp, sent_bps_udp, sent_bytes_udp, jitter, lost_percent) | +| **types**: | | ``int`` (iperf_result, retransmits, total_packets, lost_packets), | +| | | ``float`` (sent_bps_tcp, received_bps_tcp, sent_bytes_tcp, received_bytes_tcp, sent_bps_udp, sent_bytes_udp, jitter, lost_percent) | +--------------------+----------------------------------------------------------------------------------------------------------------------------------------+ -| **fields**: | ``iperf_result``, ``sent_bps_tcp``, ``received_bps_tcp``, ``sent_bytes_tcp``, ``received_bytes_tcp``, ``retransmits``, | -| **fields**: | ``sent_bps_udp``, ``sent_bytes_udp``, ``jitter``, ``total_packets``, ``lost_packets``, ``lost_percent`` | +| **fields**: | | ``iperf_result``, ``sent_bps_tcp``, ``received_bps_tcp``, ``sent_bytes_tcp``, ``received_bytes_tcp``, ``retransmits``, | +| | | ``sent_bps_udp``, ``sent_bytes_udp``, ``jitter``, ``total_packets``, ``lost_packets``, ``lost_percent`` | +--------------------+----------------------------------------------------------------------------------------------------------------------------------------+ | **configuration**: | ``iperf`` | +--------------------+----------------------------------------------------------------------------------------------------------------------------------------+ @@ -999,14 +999,15 @@ if there's anything that is not working as intended. Iperf ~~~~~ -This check provides network performance measurements such as maximum achievable bandwidth, jitter, datagram loss etc of the device using `iperf3 utility `_. +This check provides network performance measurements such as maximum achievable bandwidth, +jitter, datagram loss etc of the device using `iperf3 utility `_. -It also supports tuning of various parameters related to timing, buffers and protocols (TCP, UDP with IPv4 and IPv6). - -This check is ``disabled`` by **default**, but you may choose to enable auto creation of this check by setting +This check is **disabled by default**. You can enable auto creation of this check by setting the `OPENWISP_MONITORING_AUTO_IPERF <#OPENWISP_MONITORING_AUTO_IPERF>`_ to ``True``. -You can also change the default values used for iperf checks using +It also supports tuning of various parameters. + +You can also change the parameters used for iperf checks (e.g. timing, port, buffer, etc) using the `OPENWISP_MONITORING_IPERF_CHECK_CONFIG <#OPENWISP_MONITORING_IPERF_CHECK_CONFIG>`_ setting. 
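+
+For reference, the check runs ``iperf3`` in JSON mode against the first
+configured server, once for TCP and once for UDP. The commands below mirror the
+command construction in the check class, with ``port`` and ``time`` resolved
+from the check parameters or their schema defaults; the helper name
+``build_iperf_commands`` is illustrative only:
+
+.. code-block:: python
+
+    def build_iperf_commands(server, port=5201, time=10):
+        # mirrors the command strings built by the iperf check class
+        tcp = f'iperf3 -c {server} -p {port} -t {time} -J'
+        udp = f'iperf3 -c {server} -p {port} -t {time} -u -J'
+        return tcp, udp
+
+    print(build_iperf_commands('iperf.openwisptestserver.com'))
+    # ('iperf3 -c iperf.openwisptestserver.com -p 5201 -t 10 -J',
+    #  'iperf3 -c iperf.openwisptestserver.com -p 5201 -t 10 -u -J')
+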
Instructions to configure Iperf Check @@ -1024,9 +1025,10 @@ Register your device to OpenWISP and make sure `iperf3 openwrt package `_. +Follow the steps in `"How to configure push updates" section of the openwisp-controller documentation `_ +to allow SSH access to you device from OpenWISP. -**Note:** Make sure device connection is enabled & working with right update strategy ie. ``OpenWISP SSH``. +**Note:** Make sure device connection is enabled & working with right update strategy i.e. ``OpenWRT SSH``. .. image:: https://github.com/openwisp/openwisp-monitoring/raw/docs/docs/enable-openwisp-ssh.png :alt: Enable ssh access from openwisp to device @@ -1035,9 +1037,10 @@ We have already well covered this in `How to configure push updates section of o 3. Configure Iperf settings ~~~~~~~~~~~~~~~~~~~~~~~~~~~ -Configure iperf servers in `openwisp settings `_ , The host can be specified by hostname, IPv4 literal, or IPv6 literal. +Configure iperf servers in `openwisp settings `_ , +The host can be specified by hostname, IPv4 literal, or IPv6 literal. -For eg. +For example. .. code-block:: python @@ -1162,8 +1165,8 @@ created automatically for newly registered devices. It's enabled by default. | **default**: | ``False`` | +--------------+-------------+ -Whether `Iperf <#iperf-1>`_ checks are created automatically for devices. The devices must have ``SSH`` `access credential `_ & -`iperf3 openwrt package `_ must be installed on the devices in order for this check to work. +This setting allows you to choose whether `iperf <#iperf-1>`_ checks should be +created automatically for newly registered devices. It's disabled by default. ``OPENWISP_MONITORING_IPERF_CHECK_CONFIG`` ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -1188,7 +1191,6 @@ For example, if you want to change only the **port number** of }, } -Similar to `OPENWISP_MONITORING_PING_CHECK_CONFIG <#OPENWISP_MONITORING_PING_CHECK_CONFIG>`_ , If you want to overriding the default value for any parameter beyond the maximum or minimum value defined in ``openwisp_monitoring.check.classes.iperf.DEFAULT_IPERF_CHECK_CONFIG``, you will also need to override the ``maximum`` or ``minimum`` fields as following: From ef433eaa6b4f59e9ea9ceea05c7d742a2d8301bc Mon Sep 17 00:00:00 2001 From: Aryamanz29 Date: Fri, 8 Jul 2022 19:29:32 +0530 Subject: [PATCH 27/64] [change] Configured celery beat for Iperf check --- .../check/management/commands/__init__.py | 9 ++- .../management/commands/run_iperf_checks.py | 5 ++ openwisp_monitoring/check/tasks.py | 26 ++++++++- openwisp_monitoring/check/tests/test_iperf.py | 56 +++++++++++++++++++ openwisp_monitoring/check/utils.py | 10 +++- tests/openwisp2/settings.py | 16 +++++- 6 files changed, 118 insertions(+), 4 deletions(-) create mode 100644 openwisp_monitoring/check/management/commands/run_iperf_checks.py diff --git a/openwisp_monitoring/check/management/commands/__init__.py b/openwisp_monitoring/check/management/commands/__init__.py index 2cc25609d..92060622b 100644 --- a/openwisp_monitoring/check/management/commands/__init__.py +++ b/openwisp_monitoring/check/management/commands/__init__.py @@ -1,6 +1,6 @@ from django.core.management.base import BaseCommand -from ...utils import run_checks_async +from ...utils import run_checks_async, run_iperf_checks_async class BaseRunChecksCommand(BaseCommand): @@ -8,3 +8,10 @@ class BaseRunChecksCommand(BaseCommand): def handle(self, *args, **options): run_checks_async() + + +class BaseRunIperfChecksCommand(BaseCommand): + help = 'Run all iperf monitoring checks asynchronously' + 
+ def handle(self, *args, **options): + run_iperf_checks_async() diff --git a/openwisp_monitoring/check/management/commands/run_iperf_checks.py b/openwisp_monitoring/check/management/commands/run_iperf_checks.py new file mode 100644 index 000000000..7b43145b7 --- /dev/null +++ b/openwisp_monitoring/check/management/commands/run_iperf_checks.py @@ -0,0 +1,5 @@ +from . import BaseRunIperfChecksCommand + + +class Command(BaseRunIperfChecksCommand): + pass diff --git a/openwisp_monitoring/check/tasks.py b/openwisp_monitoring/check/tasks.py index 9a509ea2b..22c2cd1ba 100644 --- a/openwisp_monitoring/check/tasks.py +++ b/openwisp_monitoring/check/tasks.py @@ -23,9 +23,33 @@ def run_checks(): This allows to enqueue all the checks that need to be performed and execute them in parallel with multiple workers if needed. """ + # Exclude iperf check from run_checks + iperf_check_path = 'openwisp_monitoring.check.classes.Iperf' + iterator = ( + get_check_model() + .objects.exclude(check_type=iperf_check_path) + .filter(is_active=True) + .only('id') + .values('id') + .iterator() + ) + for check in iterator: + perform_check.delay(check['id']) + + +@shared_task +def run_iperf_checks(): + """ + Retrieves the id of all active iperf checks in chunks of 2000 items + and calls the ``perform_check`` task (defined below) for each of them. + + This allows to enqueue all the checks that need to be performed + and execute them in parallel with multiple workers if needed. + """ + iperf_check_path = 'openwisp_monitoring.check.classes.Iperf' iterator = ( get_check_model() - .objects.filter(is_active=True) + .objects.filter(is_active=True, check_type=iperf_check_path) .only('id') .values('id') .iterator() diff --git a/openwisp_monitoring/check/tests/test_iperf.py b/openwisp_monitoring/check/tests/test_iperf.py index 31dab96be..9d1f7a92f 100644 --- a/openwisp_monitoring/check/tests/test_iperf.py +++ b/openwisp_monitoring/check/tests/test_iperf.py @@ -1,5 +1,6 @@ from unittest.mock import call, patch +from django.core import management from django.core.exceptions import ValidationError from django.test import TransactionTestCase from swapper import load_model @@ -12,6 +13,7 @@ from ...device.tests import TestDeviceMonitoringMixin from .. 
import settings from ..classes import Iperf +from ..utils import run_iperf_checks_async from .iperf_test_result import RESULT_FAIL, RESULT_TCP, RESULT_UDP Chart = load_model('monitoring', 'Chart') @@ -421,3 +423,57 @@ def test_iperf_check(self): mock_get_iperf_servers.assert_called_once_with( self.device.organization.id ) + + def test_iperf_check_utils(self): + check, _ = self._create_iperf_test_env() + + with self.subTest('Test celery task run_iperf_checks'): + with patch.object( + Iperf, '_exec_command' + ) as mock_exec_command, patch.object( + Iperf, + '_get_iperf_servers', + return_value=['iperf.openwisptestserver.com'], + ) as mock_get_iperf_servers, patch.object( + iperf_logger, 'warning' + ) as mock_warn: + mock_exec_command.side_effect = [(RESULT_TCP, 0), (RESULT_UDP, 0)] + + run_iperf_checks_async() + iperf_metric = Metric.objects.get(key='iperf') + self.assertEqual(iperf_metric.content_object, self.device) + points = iperf_metric.read(limit=None, extra_fields=self._RESULT_KEYS) + for key in self._RESULT_KEYS: + self.assertIn(key, points[0]) + self.assertEqual(points[0]['iperf_result'], 1) + self.assertEqual(mock_warn.call_count, 0) + self.assertEqual(mock_exec_command.call_count, 2) + mock_exec_command.assert_has_calls(self._EXPECTED_COMMAND_CALLS) + mock_get_iperf_servers.assert_called_once_with( + self.device.organization.id + ) + + with self.subTest('Test management command run_iperf_checks'): + with patch.object( + Iperf, '_exec_command' + ) as mock_exec_command, patch.object( + Iperf, + '_get_iperf_servers', + return_value=['iperf.openwisptestserver.com'], + ) as mock_get_iperf_servers, patch.object( + iperf_logger, 'warning' + ) as mock_warn: + mock_exec_command.side_effect = [(RESULT_TCP, 0), (RESULT_UDP, 0)] + + management.call_command('run_iperf_checks') + iperf_metric = Metric.objects.get(key='iperf') + self.assertEqual(iperf_metric.content_object, self.device) + points = iperf_metric.read(limit=None, extra_fields=self._RESULT_KEYS) + self.assertEqual(len(points[0]), 14) + self.assertEqual(points[0]['iperf_result'], 1) + self.assertEqual(mock_warn.call_count, 0) + self.assertEqual(mock_exec_command.call_count, 2) + mock_exec_command.assert_has_calls(self._EXPECTED_COMMAND_CALLS) + mock_get_iperf_servers.assert_called_once_with( + self.device.organization.id + ) diff --git a/openwisp_monitoring/check/utils.py b/openwisp_monitoring/check/utils.py index 4897c77a9..6fad185dd 100644 --- a/openwisp_monitoring/check/utils.py +++ b/openwisp_monitoring/check/utils.py @@ -1,4 +1,4 @@ -from .tasks import run_checks +from .tasks import run_checks, run_iperf_checks def run_checks_async(): @@ -7,3 +7,11 @@ def run_checks_async(): is run in a background worker """ run_checks.delay() + + +def run_iperf_checks_async(): + """ + Calls celery task run_iperf_checks + is run in a background worker + """ + run_iperf_checks.delay() diff --git a/tests/openwisp2/settings.py b/tests/openwisp2/settings.py index 21db9dab6..1d9d241f9 100644 --- a/tests/openwisp2/settings.py +++ b/tests/openwisp2/settings.py @@ -2,6 +2,8 @@ import sys from datetime import timedelta +from celery.schedules import crontab + TESTING = 'test' in sys.argv SHELL = 'shell' in sys.argv or 'shell_plus' in sys.argv BASE_DIR = os.path.dirname(os.path.abspath(__file__)) @@ -168,13 +170,25 @@ CELERY_TASK_EAGER_PROPAGATES = True CELERY_BROKER_URL = 'memory://' +# Celery TIME_ZONE should be equal to django TIME_ZONE +# In order to schedule run_iperf_checks on the correct time intervals +CELERY_TIMEZONE = TIME_ZONE + 
CELERY_BEAT_SCHEDULE = { 'run_checks': { 'task': 'openwisp_monitoring.check.tasks.run_checks', 'schedule': timedelta(minutes=5), 'args': None, 'relative': True, - } + }, + 'run_iperf_checks': { + 'task': 'openwisp_monitoring.check.tasks.run_iperf_checks', + # https://docs.celeryq.dev/en/latest/userguide/periodic-tasks.html#crontab-schedules + # Every 5 mins from 00:00 AM to 6:00 AM (night) + 'schedule': crontab(minute='*/1', hour='0-6'), + 'args': None, + 'relative': True, + }, } EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend' From 38991a9914a3abab5f00be688624b59706734d59 Mon Sep 17 00:00:00 2001 From: Aryamanz29 Date: Fri, 8 Jul 2022 20:16:11 +0530 Subject: [PATCH 28/64] [docs] Added Iperf check celery beat configuration #390 Closes #390 --- README.rst | 43 +++++++++++++++++++++++++++++-------- tests/openwisp2/settings.py | 2 +- 2 files changed, 35 insertions(+), 10 deletions(-) diff --git a/README.rst b/README.rst index 1ad85ac83..4d3af9a20 100644 --- a/README.rst +++ b/README.rst @@ -334,6 +334,7 @@ Follow the setup instructions of `openwisp-controller .. code-block:: python from django.conf import settings + from celery.schedules import crontab from django.conf.urls import include, url from django.contrib.staticfiles.urls import staticfiles_urlpatterns @@ -372,12 +373,19 @@ Configure celery (you may use a different broker if you want): # here we show how to configure celery with redis but you can # use other brokers if you want, consult the celery docs + CELERY_TIMEZONE = TIME_ZONE CELERY_BROKER_URL = 'redis://localhost/1' CELERY_BEAT_SCHEDULE = { 'run_checks': { 'task': 'openwisp_monitoring.check.tasks.run_checks', 'schedule': timedelta(minutes=5), }, + 'run_iperf_checks': { + 'task': 'openwisp_monitoring.check.tasks.run_iperf_checks', + # https://docs.celeryq.dev/en/latest/userguide/periodic-tasks.html#crontab-schedules + # Every 5 mins from 00:00 AM to 6:00 AM (night) + 'schedule': crontab(minute='*/5', hour='0-6'), + }, # Delete old WifiSession 'delete_wifi_clients_and_sessions': { 'task': 'openwisp_monitoring.monitoring.tasks.delete_wifi_clients_and_sessions', @@ -1010,11 +1018,14 @@ It also supports tuning of various parameters. You can also change the parameters used for iperf checks (e.g. timing, port, buffer, etc) using the `OPENWISP_MONITORING_IPERF_CHECK_CONFIG <#OPENWISP_MONITORING_IPERF_CHECK_CONFIG>`_ setting. -Instructions to configure Iperf Check -------------------------------------- +Usage Instructions +------------------ + +How to configure iperf check +~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 1. Register your device to OpenWISP -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +################################### Register your device to OpenWISP and make sure `iperf3 openwrt package `_ is installed on the device if not run : @@ -1023,7 +1034,7 @@ Register your device to OpenWISP and make sure `iperf3 openwrt package `_ to allow SSH access to you device from OpenWISP. @@ -1035,7 +1046,7 @@ to allow SSH access to you device from OpenWISP. :align: center 3. Configure Iperf settings -~~~~~~~~~~~~~~~~~~~~~~~~~~~ +########################### Configure iperf servers in `openwisp settings `_ , The host can be specified by hostname, IPv4 literal, or IPv6 literal. @@ -1053,10 +1064,10 @@ For example. } 4. Run the check -~~~~~~~~~~~~~~~~ +################ This should happen automatically if you have celery running in the background. For testing, you can -run this check manually using the `run_checks <#run_checks>`_ command. 
After that, you should see the +run this check manually using the `run_iperf_checks <#run_iperf_checks>`_ command. After that, you should see the iperf network measurements charts. .. image:: https://github.com/openwisp/openwisp-monitoring/raw/docs/docs/iperf-charts.png @@ -2098,8 +2109,8 @@ Management commands ``run_checks`` ~~~~~~~~~~~~~~ -This command will execute all the `available checks <#available-checks>`_ for all the devices. -By default checks are run periodically by *celery beat*. You can learn more +This command will execute all the `available checks <#available-checks>`_ except `iperf checks <#iperf-1>`_ +for all the devices. By default checks are run periodically by *celery beat*. You can learn more about this in `Setup <#setup-integrate-in-an-existing-django-project>`_. Example usage: @@ -2109,6 +2120,20 @@ Example usage: cd tests/ ./manage.py run_checks +``run_iperf_checks`` +~~~~~~~~~~~~~~~~~~~~ + +This command will execute all the `available iperf checks <#iperf-1>`_ for all the devices. +By default iperf checks are run periodically between 00:00 AM to 6:00 AM every night by *celery beat*. +You can learn more about this in `Setup <#setup-integrate-in-an-existing-django-project>`_. + +Example usage: + +.. code-block:: shell + + cd tests/ + ./manage.py run_iperf_checks + ``migrate_timeseries`` ~~~~~~~~~~~~~~~~~~~~~~ diff --git a/tests/openwisp2/settings.py b/tests/openwisp2/settings.py index 1d9d241f9..c8c6f36be 100644 --- a/tests/openwisp2/settings.py +++ b/tests/openwisp2/settings.py @@ -185,7 +185,7 @@ 'task': 'openwisp_monitoring.check.tasks.run_iperf_checks', # https://docs.celeryq.dev/en/latest/userguide/periodic-tasks.html#crontab-schedules # Every 5 mins from 00:00 AM to 6:00 AM (night) - 'schedule': crontab(minute='*/1', hour='0-6'), + 'schedule': crontab(minute='*/5', hour='0-6'), 'args': None, 'relative': True, }, From 64b607da46f3041735fcbce771c9733847af4444 Mon Sep 17 00:00:00 2001 From: Aryamanz29 Date: Sat, 9 Jul 2022 00:30:26 +0530 Subject: [PATCH 29/64] [change] Configured celery beat for Iperf check in settings #390 Closes #390 --- .../check/management/commands/__init__.py | 9 +-- .../management/commands/run_iperf_checks.py | 5 -- openwisp_monitoring/check/tasks.py | 29 +--------- openwisp_monitoring/check/tests/test_iperf.py | 56 ------------------- openwisp_monitoring/check/utils.py | 10 +--- tests/openwisp2/settings.py | 14 +++-- 6 files changed, 15 insertions(+), 108 deletions(-) delete mode 100644 openwisp_monitoring/check/management/commands/run_iperf_checks.py diff --git a/openwisp_monitoring/check/management/commands/__init__.py b/openwisp_monitoring/check/management/commands/__init__.py index 92060622b..2cc25609d 100644 --- a/openwisp_monitoring/check/management/commands/__init__.py +++ b/openwisp_monitoring/check/management/commands/__init__.py @@ -1,6 +1,6 @@ from django.core.management.base import BaseCommand -from ...utils import run_checks_async, run_iperf_checks_async +from ...utils import run_checks_async class BaseRunChecksCommand(BaseCommand): @@ -8,10 +8,3 @@ class BaseRunChecksCommand(BaseCommand): def handle(self, *args, **options): run_checks_async() - - -class BaseRunIperfChecksCommand(BaseCommand): - help = 'Run all iperf monitoring checks asynchronously' - - def handle(self, *args, **options): - run_iperf_checks_async() diff --git a/openwisp_monitoring/check/management/commands/run_iperf_checks.py b/openwisp_monitoring/check/management/commands/run_iperf_checks.py deleted file mode 100644 index 7b43145b7..000000000 --- 
a/openwisp_monitoring/check/management/commands/run_iperf_checks.py +++ /dev/null @@ -1,5 +0,0 @@ -from . import BaseRunIperfChecksCommand - - -class Command(BaseRunIperfChecksCommand): - pass diff --git a/openwisp_monitoring/check/tasks.py b/openwisp_monitoring/check/tasks.py index 22c2cd1ba..a368b4639 100644 --- a/openwisp_monitoring/check/tasks.py +++ b/openwisp_monitoring/check/tasks.py @@ -8,6 +8,7 @@ from swapper import load_model logger = logging.getLogger(__name__) +checks = settings.OPENWISP_MONITORING_CHECKS def get_check_model(): @@ -15,7 +16,7 @@ def get_check_model(): @shared_task -def run_checks(): +def run_checks(checks=checks): """ Retrieves the id of all active checks in chunks of 2000 items and calls the ``perform_check`` task (defined below) for each of them. @@ -23,33 +24,9 @@ def run_checks(): This allows to enqueue all the checks that need to be performed and execute them in parallel with multiple workers if needed. """ - # Exclude iperf check from run_checks - iperf_check_path = 'openwisp_monitoring.check.classes.Iperf' - iterator = ( - get_check_model() - .objects.exclude(check_type=iperf_check_path) - .filter(is_active=True) - .only('id') - .values('id') - .iterator() - ) - for check in iterator: - perform_check.delay(check['id']) - - -@shared_task -def run_iperf_checks(): - """ - Retrieves the id of all active iperf checks in chunks of 2000 items - and calls the ``perform_check`` task (defined below) for each of them. - - This allows to enqueue all the checks that need to be performed - and execute them in parallel with multiple workers if needed. - """ - iperf_check_path = 'openwisp_monitoring.check.classes.Iperf' iterator = ( get_check_model() - .objects.filter(is_active=True, check_type=iperf_check_path) + .objects.filter(is_active=True, check_type__in=checks) .only('id') .values('id') .iterator() diff --git a/openwisp_monitoring/check/tests/test_iperf.py b/openwisp_monitoring/check/tests/test_iperf.py index 9d1f7a92f..31dab96be 100644 --- a/openwisp_monitoring/check/tests/test_iperf.py +++ b/openwisp_monitoring/check/tests/test_iperf.py @@ -1,6 +1,5 @@ from unittest.mock import call, patch -from django.core import management from django.core.exceptions import ValidationError from django.test import TransactionTestCase from swapper import load_model @@ -13,7 +12,6 @@ from ...device.tests import TestDeviceMonitoringMixin from .. 
import settings from ..classes import Iperf -from ..utils import run_iperf_checks_async from .iperf_test_result import RESULT_FAIL, RESULT_TCP, RESULT_UDP Chart = load_model('monitoring', 'Chart') @@ -423,57 +421,3 @@ def test_iperf_check(self): mock_get_iperf_servers.assert_called_once_with( self.device.organization.id ) - - def test_iperf_check_utils(self): - check, _ = self._create_iperf_test_env() - - with self.subTest('Test celery task run_iperf_checks'): - with patch.object( - Iperf, '_exec_command' - ) as mock_exec_command, patch.object( - Iperf, - '_get_iperf_servers', - return_value=['iperf.openwisptestserver.com'], - ) as mock_get_iperf_servers, patch.object( - iperf_logger, 'warning' - ) as mock_warn: - mock_exec_command.side_effect = [(RESULT_TCP, 0), (RESULT_UDP, 0)] - - run_iperf_checks_async() - iperf_metric = Metric.objects.get(key='iperf') - self.assertEqual(iperf_metric.content_object, self.device) - points = iperf_metric.read(limit=None, extra_fields=self._RESULT_KEYS) - for key in self._RESULT_KEYS: - self.assertIn(key, points[0]) - self.assertEqual(points[0]['iperf_result'], 1) - self.assertEqual(mock_warn.call_count, 0) - self.assertEqual(mock_exec_command.call_count, 2) - mock_exec_command.assert_has_calls(self._EXPECTED_COMMAND_CALLS) - mock_get_iperf_servers.assert_called_once_with( - self.device.organization.id - ) - - with self.subTest('Test management command run_iperf_checks'): - with patch.object( - Iperf, '_exec_command' - ) as mock_exec_command, patch.object( - Iperf, - '_get_iperf_servers', - return_value=['iperf.openwisptestserver.com'], - ) as mock_get_iperf_servers, patch.object( - iperf_logger, 'warning' - ) as mock_warn: - mock_exec_command.side_effect = [(RESULT_TCP, 0), (RESULT_UDP, 0)] - - management.call_command('run_iperf_checks') - iperf_metric = Metric.objects.get(key='iperf') - self.assertEqual(iperf_metric.content_object, self.device) - points = iperf_metric.read(limit=None, extra_fields=self._RESULT_KEYS) - self.assertEqual(len(points[0]), 14) - self.assertEqual(points[0]['iperf_result'], 1) - self.assertEqual(mock_warn.call_count, 0) - self.assertEqual(mock_exec_command.call_count, 2) - mock_exec_command.assert_has_calls(self._EXPECTED_COMMAND_CALLS) - mock_get_iperf_servers.assert_called_once_with( - self.device.organization.id - ) diff --git a/openwisp_monitoring/check/utils.py b/openwisp_monitoring/check/utils.py index 6fad185dd..4897c77a9 100644 --- a/openwisp_monitoring/check/utils.py +++ b/openwisp_monitoring/check/utils.py @@ -1,4 +1,4 @@ -from .tasks import run_checks, run_iperf_checks +from .tasks import run_checks def run_checks_async(): @@ -7,11 +7,3 @@ def run_checks_async(): is run in a background worker """ run_checks.delay() - - -def run_iperf_checks_async(): - """ - Calls celery task run_iperf_checks - is run in a background worker - """ - run_iperf_checks.delay() diff --git a/tests/openwisp2/settings.py b/tests/openwisp2/settings.py index c8c6f36be..36804b498 100644 --- a/tests/openwisp2/settings.py +++ b/tests/openwisp2/settings.py @@ -173,20 +173,26 @@ # Celery TIME_ZONE should be equal to django TIME_ZONE # In order to schedule run_iperf_checks on the correct time intervals CELERY_TIMEZONE = TIME_ZONE +OPENWISP_MONITORING_CHECKS = [ + 'openwisp_monitoring.check.classes.Ping', + 'openwisp_monitoring.check.classes.ConfigApplied', + 'openwisp_monitoring.check.classes.Iperf', +] CELERY_BEAT_SCHEDULE = { 'run_checks': { 'task': 'openwisp_monitoring.check.tasks.run_checks', 'schedule': timedelta(minutes=5), - 'args': None, + # 
Executes only ping and config check every 5 mins + 'args': (OPENWISP_MONITORING_CHECKS[:2],), 'relative': True, }, 'run_iperf_checks': { - 'task': 'openwisp_monitoring.check.tasks.run_iperf_checks', + 'task': 'openwisp_monitoring.check.tasks.run_checks', # https://docs.celeryq.dev/en/latest/userguide/periodic-tasks.html#crontab-schedules - # Every 5 mins from 00:00 AM to 6:00 AM (night) + # Executes only iperf check every 5 mins from 00:00 AM to 6:00 AM (night) 'schedule': crontab(minute='*/5', hour='0-6'), - 'args': None, + 'args': (OPENWISP_MONITORING_CHECKS[2:],), 'relative': True, }, } From ce23bd0c38068dfcad9b993da39a83c7f5566197 Mon Sep 17 00:00:00 2001 From: Aryamanz29 Date: Sat, 9 Jul 2022 01:58:53 +0530 Subject: [PATCH 30/64] [docs] Added iperf celery beat configuration - Added iperf celery beat configuration. - Reverted previous docs changes. --- README.rst | 62 +++++++++++++++++++++++++++++++----------------------- 1 file changed, 36 insertions(+), 26 deletions(-) diff --git a/README.rst b/README.rst index 4d3af9a20..535bf3f12 100644 --- a/README.rst +++ b/README.rst @@ -373,23 +373,30 @@ Configure celery (you may use a different broker if you want): # here we show how to configure celery with redis but you can # use other brokers if you want, consult the celery docs + # Celery TIME_ZONE should be equal to django TIME_ZONE + # In order to schedule run_iperf_checks on the correct time intervals CELERY_TIMEZONE = TIME_ZONE + OPENWISP_MONITORING_CHECKS = [ + 'openwisp_monitoring.check.classes.Ping', + 'openwisp_monitoring.check.classes.ConfigApplied', + 'openwisp_monitoring.check.classes.Iperf', + ] CELERY_BROKER_URL = 'redis://localhost/1' CELERY_BEAT_SCHEDULE = { 'run_checks': { 'task': 'openwisp_monitoring.check.tasks.run_checks', 'schedule': timedelta(minutes=5), + # Executes only ping and config check every 5 mins + 'args': (OPENWISP_MONITORING_CHECKS[:2],), + 'relative': True, }, 'run_iperf_checks': { - 'task': 'openwisp_monitoring.check.tasks.run_iperf_checks', - # https://docs.celeryq.dev/en/latest/userguide/periodic-tasks.html#crontab-schedules - # Every 5 mins from 00:00 AM to 6:00 AM (night) - 'schedule': crontab(minute='*/5', hour='0-6'), - }, - # Delete old WifiSession - 'delete_wifi_clients_and_sessions': { - 'task': 'openwisp_monitoring.monitoring.tasks.delete_wifi_clients_and_sessions', - 'schedule': timedelta(days=180), + 'task': 'openwisp_monitoring.check.tasks.run_checks', + # https://docs.celeryq.dev/en/latest/userguide/periodic-tasks.html#crontab-schedules + # Executes only iperf check every 5 mins from 00:00 AM to 6:00 AM (night) + 'schedule': crontab(minute='*/5', hour='0-6'), + 'args': (OPENWISP_MONITORING_CHECKS[2:],), + 'relative': True, }, } @@ -1051,6 +1058,9 @@ to allow SSH access to you device from OpenWISP. Configure iperf servers in `openwisp settings `_ , The host can be specified by hostname, IPv4 literal, or IPv6 literal. +**Note:** By default iperf checks are run periodically between **00:00 AM to 6:00 AM** every night by *celery beat*. +You can learn more about this in `Setup <#setup-integrate-in-an-existing-django-project>`_. + For example. .. code-block:: python @@ -1067,7 +1077,7 @@ For example. ################ This should happen automatically if you have celery running in the background. For testing, you can -run this check manually using the `run_iperf_checks <#run_iperf_checks>`_ command. After that, you should see the +run this check manually using the `run_checks <#run_checks>`_ command. 
After that, you should see the iperf network measurements charts. .. image:: https://github.com/openwisp/openwisp-monitoring/raw/docs/docs/iperf-charts.png @@ -1663,6 +1673,20 @@ domain, you can use this option to change the base of the url, this will enable you to point all the API urls to your openwisp-monitoring API server's domain, example: ``https://mymonitoring.myapp.com``. +``OPENWISP_MONITORING_CHECKS`` +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + ++--------------+---------------------------------------------------------+ +| **type**: | ``list`` | ++--------------+---------------------------------------------------------+ +| **default**: |``['openwisp_monitoring.check.classes.Ping',`` | +| |``'openwisp_monitoring.check.classes.ConfigApplied',`` | +| |``'openwisp_monitoring.check.classes.Iperf'],`` | ++--------------+---------------------------------------------------------+ + +This list will allows you to configure celery beat configuration for the checks. +You can learn more about this in `Setup <#setup-integrate-in-an-existing-django-project>`_. + Registering / Unregistering Metric Configuration ------------------------------------------------ @@ -2109,8 +2133,8 @@ Management commands ``run_checks`` ~~~~~~~~~~~~~~ -This command will execute all the `available checks <#available-checks>`_ except `iperf checks <#iperf-1>`_ -for all the devices. By default checks are run periodically by *celery beat*. You can learn more +This command will execute all the `available checks <#available-checks>`_ for all the devices. +By default checks are run periodically by *celery beat*. You can learn more about this in `Setup <#setup-integrate-in-an-existing-django-project>`_. Example usage: @@ -2120,20 +2144,6 @@ Example usage: cd tests/ ./manage.py run_checks -``run_iperf_checks`` -~~~~~~~~~~~~~~~~~~~~ - -This command will execute all the `available iperf checks <#iperf-1>`_ for all the devices. -By default iperf checks are run periodically between 00:00 AM to 6:00 AM every night by *celery beat*. -You can learn more about this in `Setup <#setup-integrate-in-an-existing-django-project>`_. - -Example usage: - -.. 
code-block:: shell - - cd tests/ - ./manage.py run_iperf_checks - ``migrate_timeseries`` ~~~~~~~~~~~~~~~~~~~~~~ From 93f9220243c4a3481bef065e5a0b48fec9da00db Mon Sep 17 00:00:00 2001 From: Aryamanz29 Date: Mon, 11 Jul 2022 19:56:23 +0530 Subject: [PATCH 31/64] [requested-changes] Moved iperf celery beat settings --- README.rst | 59 ++++++++++++--------------- openwisp_monitoring/check/settings.py | 1 + openwisp_monitoring/check/tasks.py | 17 ++++++-- tests/openwisp2/settings.py | 11 ++--- 4 files changed, 45 insertions(+), 43 deletions(-) diff --git a/README.rst b/README.rst index 535bf3f12..805abd37e 100644 --- a/README.rst +++ b/README.rst @@ -376,26 +376,13 @@ Configure celery (you may use a different broker if you want): # Celery TIME_ZONE should be equal to django TIME_ZONE # In order to schedule run_iperf_checks on the correct time intervals CELERY_TIMEZONE = TIME_ZONE - OPENWISP_MONITORING_CHECKS = [ - 'openwisp_monitoring.check.classes.Ping', - 'openwisp_monitoring.check.classes.ConfigApplied', - 'openwisp_monitoring.check.classes.Iperf', - ] CELERY_BROKER_URL = 'redis://localhost/1' CELERY_BEAT_SCHEDULE = { + # Celery beat configuration for auto checks ie ping & config applied 'run_checks': { 'task': 'openwisp_monitoring.check.tasks.run_checks', 'schedule': timedelta(minutes=5), - # Executes only ping and config check every 5 mins - 'args': (OPENWISP_MONITORING_CHECKS[:2],), - 'relative': True, - }, - 'run_iperf_checks': { - 'task': 'openwisp_monitoring.check.tasks.run_checks', - # https://docs.celeryq.dev/en/latest/userguide/periodic-tasks.html#crontab-schedules - # Executes only iperf check every 5 mins from 00:00 AM to 6:00 AM (night) - 'schedule': crontab(minute='*/5', hour='0-6'), - 'args': (OPENWISP_MONITORING_CHECKS[2:],), + 'args': (None,), 'relative': True, }, } @@ -1058,9 +1045,6 @@ to allow SSH access to you device from OpenWISP. Configure iperf servers in `openwisp settings `_ , The host can be specified by hostname, IPv4 literal, or IPv6 literal. -**Note:** By default iperf checks are run periodically between **00:00 AM to 6:00 AM** every night by *celery beat*. -You can learn more about this in `Setup <#setup-integrate-in-an-existing-django-project>`_. - For example. .. code-block:: python @@ -1073,6 +1057,31 @@ For example. 'c9734710-db30-46b0-a2fc-01f01046fe4f': ['2001:db8::1'], } +Add celery beat configuration for iperf check in `openwisp settings `_ + +.. code-block:: python + + CELERY_BEAT_SCHEDULE = { + # Celery beat configuration for auto checks ie ping & config applied + 'run_checks': { + 'task': 'openwisp_monitoring.check.tasks.run_checks', + 'schedule': timedelta(minutes=5), + 'args': (None,), + 'relative': True, + }, + # Celery beat configuration for iperf check + 'run_iperf_checks': { + 'task': 'openwisp_monitoring.check.tasks.run_checks', + # https://docs.celeryq.dev/en/latest/userguide/periodic-tasks.html#crontab-schedules + # Executes check every 5 mins from 00:00 AM to 6:00 AM (night) + 'schedule': crontab(minute='*/5', hour='0-6'), + # Iperf check path + 'args': (['openwisp_monitoring.check.classes.Iperf'],), + 'relative': True, + }, + +**Note:** We recommended to configure this check for night or during non peak traffic times to not interfere with standard traffic. + 4. Run the check ################ @@ -1673,20 +1682,6 @@ domain, you can use this option to change the base of the url, this will enable you to point all the API urls to your openwisp-monitoring API server's domain, example: ``https://mymonitoring.myapp.com``. 
-``OPENWISP_MONITORING_CHECKS`` -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -+--------------+---------------------------------------------------------+ -| **type**: | ``list`` | -+--------------+---------------------------------------------------------+ -| **default**: |``['openwisp_monitoring.check.classes.Ping',`` | -| |``'openwisp_monitoring.check.classes.ConfigApplied',`` | -| |``'openwisp_monitoring.check.classes.Iperf'],`` | -+--------------+---------------------------------------------------------+ - -This list will allows you to configure celery beat configuration for the checks. -You can learn more about this in `Setup <#setup-integrate-in-an-existing-django-project>`_. - Registering / Unregistering Metric Configuration ------------------------------------------------ diff --git a/openwisp_monitoring/check/settings.py b/openwisp_monitoring/check/settings.py index 3f99a881c..e8f0a1b3a 100644 --- a/openwisp_monitoring/check/settings.py +++ b/openwisp_monitoring/check/settings.py @@ -15,3 +15,4 @@ AUTO_IPERF = get_settings_value('AUTO_IPERF', False) IPERF_SERVERS = get_settings_value('IPERF_SERVERS', {}) IPERF_CHECK_CONFIG = get_settings_value('IPERF_CHECK_CONFIG', {}) +CHECKS_LIST = get_settings_value('CHECK_LIST', list(dict(CHECK_CLASSES).keys())) diff --git a/openwisp_monitoring/check/tasks.py b/openwisp_monitoring/check/tasks.py index a368b4639..52b568992 100644 --- a/openwisp_monitoring/check/tasks.py +++ b/openwisp_monitoring/check/tasks.py @@ -4,11 +4,12 @@ from celery import shared_task from django.conf import settings from django.contrib.contenttypes.models import ContentType -from django.core.exceptions import ObjectDoesNotExist +from django.core.exceptions import ImproperlyConfigured, ObjectDoesNotExist from swapper import load_model +from .settings import CHECKS_LIST + logger = logging.getLogger(__name__) -checks = settings.OPENWISP_MONITORING_CHECKS def get_check_model(): @@ -16,7 +17,7 @@ def get_check_model(): @shared_task -def run_checks(checks=checks): +def run_checks(checks=CHECKS_LIST): """ Retrieves the id of all active checks in chunks of 2000 items and calls the ``perform_check`` task (defined below) for each of them. @@ -24,6 +25,16 @@ def run_checks(checks=checks): This allows to enqueue all the checks that need to be performed and execute them in parallel with multiple workers if needed. """ + if checks is None: + # Executes only auto checks ie. 
ping and config applied + checks = CHECKS_LIST[:2] + + # Checks that are defined in celery beat configuration + if not isinstance(checks, list): + raise ImproperlyConfigured(f'Check path {checks} should be of type "list"') + if not all(check_path in CHECKS_LIST for check_path in checks): + raise ImproperlyConfigured(f'Check path {checks} should be in {CHECKS_LIST}') + iterator = ( get_check_model() .objects.filter(is_active=True, check_type__in=checks) diff --git a/tests/openwisp2/settings.py b/tests/openwisp2/settings.py index 36804b498..5066dadcf 100644 --- a/tests/openwisp2/settings.py +++ b/tests/openwisp2/settings.py @@ -173,18 +173,12 @@ # Celery TIME_ZONE should be equal to django TIME_ZONE # In order to schedule run_iperf_checks on the correct time intervals CELERY_TIMEZONE = TIME_ZONE -OPENWISP_MONITORING_CHECKS = [ - 'openwisp_monitoring.check.classes.Ping', - 'openwisp_monitoring.check.classes.ConfigApplied', - 'openwisp_monitoring.check.classes.Iperf', -] CELERY_BEAT_SCHEDULE = { 'run_checks': { 'task': 'openwisp_monitoring.check.tasks.run_checks', 'schedule': timedelta(minutes=5), - # Executes only ping and config check every 5 mins - 'args': (OPENWISP_MONITORING_CHECKS[:2],), + 'args': (None,), 'relative': True, }, 'run_iperf_checks': { @@ -192,7 +186,8 @@ # https://docs.celeryq.dev/en/latest/userguide/periodic-tasks.html#crontab-schedules # Executes only iperf check every 5 mins from 00:00 AM to 6:00 AM (night) 'schedule': crontab(minute='*/5', hour='0-6'), - 'args': (OPENWISP_MONITORING_CHECKS[2:],), + # Check path + 'args': (['openwisp_monitoring.check.classes.Iperf'],), 'relative': True, }, } From 840029189f6fa20ae0d371cabaa915ba25288b75 Mon Sep 17 00:00:00 2001 From: Aryamanz29 Date: Tue, 12 Jul 2022 00:09:05 +0530 Subject: [PATCH 32/64] [change] Removed manual conversion - Removed manual conversion of iperf test result. - Improved tests. 
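For context on the diff below: after this change the check stores the raw values reported by ``iperf3 -J`` and leaves unit conversion to the timeseries queries. The following is a minimal, illustrative sketch of that parsing approach (the JSON fragment is heavily truncated and invented for illustration; field names mirror the patch):

.. code-block:: python

    from json import loads

    # Illustrative, truncated fragment of ``iperf3 -c <server> -J`` output;
    # real output contains many more keys (start, intervals, etc.).
    sample_output = '''
    {"end": {"sum_sent": {"bytes": 55050000000,
                          "bits_per_second": 44040000000.0,
                          "retransmits": 0},
             "sum_received": {"bytes": 55050000000,
                              "bits_per_second": 44040000000.0}}}
    '''

    result = loads(sample_output)
    sent = result['end']['sum_sent']
    received = result['end']['sum_received']
    # Raw bits/bytes are stored as-is; the InfluxDB chart queries divide by 10**9
    # (Gbps / GB) or 10**6 (Mbps / MB) at read time instead of rounding here.
    tcp_fields = {
        'iperf_result': 1,
        'sent_bps_tcp': float(sent['bits_per_second']),
        'received_bps_tcp': float(received['bits_per_second']),
        'sent_bytes_tcp': sent['bytes'],
        'received_bytes_tcp': received['bytes'],
        'retransmits': sent['retransmits'],
    }
    print(tcp_fields)
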
--- openwisp_monitoring/check/classes/iperf.py | 85 +++++------- openwisp_monitoring/check/tests/test_iperf.py | 122 +++++++++++++----- .../db/backends/influxdb/queries.py | 14 +- 3 files changed, 125 insertions(+), 96 deletions(-) diff --git a/openwisp_monitoring/check/classes/iperf.py b/openwisp_monitoring/check/classes/iperf.py index 0aea24806..cf01e2c50 100644 --- a/openwisp_monitoring/check/classes/iperf.py +++ b/openwisp_monitoring/check/classes/iperf.py @@ -1,5 +1,6 @@ -import json import logging +from json import loads +from json.decoder import JSONDecodeError from django.core.exceptions import ValidationError from jsonschema import draft7_format_checker, validate @@ -89,21 +90,21 @@ def check(self, store=True): # TCP mode command = f'iperf3 -c {servers[0]} -p {port} -t {time} -J' - res, exit_code = self._exec_command(device_connection, command) + result, exit_code = self._exec_command(device_connection, command) # Exit code 127 : command doesn't exist if exit_code == 127: logger.warning( - f'Iperf3 is not installed on the "{device}", error - {res.strip()}' + f'Iperf3 is not installed on the "{device}", error - {result.strip()}' ) return - result_tcp = self._get_iperf_result(res, exit_code, device, mode='TCP') + result_tcp = self._get_iperf_result(result, exit_code, device, mode='TCP') # UDP mode command = f'iperf3 -c {servers[0]} -p {port} -t {time} -u -J' - res, exit_code = self._exec_command(device_connection, command) - result_udp = self._get_iperf_result(res, exit_code, device, mode='UDP') + result, exit_code = self._exec_command(device_connection, command) + result_udp = self._get_iperf_result(result, exit_code, device, mode='UDP') if store: # Store iperf_result field 1 if any mode passes, store 0 when both fails @@ -152,87 +153,61 @@ def _get_param(self, param): """ return self.params.get(param, self.schema['properties'][param]['default']) - def _get_iperf_result(self, res, exit_code, device, mode): + def _get_iperf_result(self, result, exit_code, device, mode): """ - Get iperf test result + Returns iperf test result """ try: - res_dict = json.loads(res) - except json.decoder.JSONDecodeError: + result = loads(result) + except JSONDecodeError: # Errors other than iperf3 test errors - res_dict = {'error': f'error - {res.strip()}'} + result = {'error': f'error - {result.strip()}'} if mode == 'TCP': if exit_code != 0: - logger.warning( - f'Iperf check failed for "{device}", {res_dict["error"]}' - ) + logger.warning(f'Iperf check failed for "{device}", {result["error"]}') return { 'iperf_result': 0, 'sent_bps_tcp': 0.0, 'received_bps_tcp': 0.0, - 'sent_bytes_tcp': 0.0, - 'received_bytes_tcp': 0.0, + 'sent_bytes_tcp': 0, + 'received_bytes_tcp': 0, 'retransmits': 0, } else: - # Gbps = Gigabits per second - # GB = GigaBytes - # Todo : Remove below coversion once - # https://github.com/openwisp/openwisp-monitoring/pull/397 get merged - sent_json = res_dict['end']['sum_sent'] - recv_json = res_dict['end']['sum_received'] - sent_bytes = sent_json['bytes'] - sent_bytes_GB = sent_bytes / 1000000000 - sent_bps = sent_json['bits_per_second'] - sent_Gbps = sent_bps / 1000000000 - received_bytes = recv_json['bytes'] - received_bytes_GB = received_bytes / 1000000000 - received_bps = recv_json['bits_per_second'] - received_Gbps = received_bps / 1000000000 - retransmits = sent_json['retransmits'] + sent = result['end']['sum_sent'] + received = result['end']['sum_received'] return { 'iperf_result': 1, - 'sent_bps_tcp': round(sent_Gbps, 2), - 'received_bps_tcp': round(received_Gbps, 2), - 
'sent_bytes_tcp': round(sent_bytes_GB, 2), - 'received_bytes_tcp': round(received_bytes_GB, 2), - 'retransmits': retransmits, + 'sent_bps_tcp': float(sent['bits_per_second']), + 'received_bps_tcp': float(received['bits_per_second']), + 'sent_bytes_tcp': sent['bytes'], + 'received_bytes_tcp': received['bytes'], + 'retransmits': sent['retransmits'], } elif mode == 'UDP': if exit_code != 0: - logger.warning( - f'Iperf check failed for "{device}", {res_dict["error"]}' - ) + logger.warning(f'Iperf check failed for "{device}", {result["error"]}') return { 'iperf_result': 0, 'sent_bps_udp': 0.0, - 'sent_bytes_udp': 0.0, + 'sent_bytes_udp': 0, 'jitter': 0.0, 'total_packets': 0, 'lost_packets': 0, 'lost_percent': 0.0, } else: - sent_bytes = res_dict['end']['sum']['bytes'] - sent_bytes_MB = sent_bytes / 1000000 - sent_bps = res_dict['end']['sum']['bits_per_second'] - sent_Mbps = sent_bps / 1000000 - jitter_ms = res_dict['end']['sum']['jitter_ms'] - jitter_ms = res_dict['end']['sum']['jitter_ms'] - total_packets = res_dict['end']['sum']['packets'] - lost_packets = res_dict['end']['sum']['lost_packets'] - lost_percent = float(res_dict['end']['sum']['lost_percent']) return { 'iperf_result': 1, - 'sent_bps_udp': round(sent_Mbps, 2), - 'sent_bytes_udp': round(sent_bytes_MB, 2), - 'jitter': round(jitter_ms, 2), - 'total_packets': total_packets, - 'lost_packets': lost_packets, - 'lost_percent': round(lost_percent, 2), + 'sent_bps_udp': float(result['end']['sum']['bits_per_second']), + 'sent_bytes_udp': result['end']['sum']['bytes'], + 'jitter': float(result['end']['sum']['jitter_ms']), + 'total_packets': result['end']['sum']['packets'], + 'lost_packets': result['end']['sum']['lost_packets'], + 'lost_percent': float(result['end']['sum']['lost_percent']), } def store_result(self, result): diff --git a/openwisp_monitoring/check/tests/test_iperf.py b/openwisp_monitoring/check/tests/test_iperf.py index 31dab96be..d4cb8e1d3 100644 --- a/openwisp_monitoring/check/tests/test_iperf.py +++ b/openwisp_monitoring/check/tests/test_iperf.py @@ -1,3 +1,4 @@ +from json import loads from unittest.mock import call, patch from django.core.exceptions import ValidationError @@ -82,14 +83,20 @@ def test_iperf_check_no_params( # By default check params {} check, _ = self._create_iperf_test_env() + tcp_result = loads(RESULT_TCP)['end'] + udp_result = loads(RESULT_UDP)['end']['sum'] result = check.perform_check(store=False) for key in self._RESULT_KEYS: self.assertIn(key, result) self.assertEqual(result['iperf_result'], 1) - self.assertEqual(result['sent_bps_tcp'], 44.04) - self.assertEqual(result['received_bytes_tcp'], 55.05) - self.assertEqual(result['jitter'], 0.01) - self.assertEqual(result['total_packets'], 40) + self.assertEqual( + result['sent_bps_tcp'], tcp_result['sum_sent']['bits_per_second'] + ) + self.assertEqual( + result['received_bytes_tcp'], tcp_result['sum_received']['bytes'] + ) + self.assertEqual(result['jitter'], udp_result['jitter_ms']) + self.assertEqual(result['total_packets'], udp_result['packets']) self.assertEqual(mock_warn.call_count, 0) self.assertEqual(mock_exec_command.call_count, 2) mock_exec_command.assert_has_calls(self._EXPECTED_COMMAND_CALLS) @@ -104,7 +111,8 @@ def test_iperf_check_params( self, mock_warn, mock_get_iperf_servers, mock_exec_command ): mock_exec_command.side_effect = [(RESULT_TCP, 0), (RESULT_UDP, 0)] - + tcp_result = loads(RESULT_TCP)['end'] + udp_result = loads(RESULT_UDP)['end']['sum'] check, dc = self._create_iperf_test_env() test_params = {'port': 6201, 'time': 20} 
check.params = test_params @@ -123,10 +131,14 @@ def test_iperf_check_params( for key in self._RESULT_KEYS: self.assertIn(key, result) self.assertEqual(result['iperf_result'], 1) - self.assertEqual(result['sent_bps_tcp'], 44.04) - self.assertEqual(result['received_bytes_tcp'], 55.05) - self.assertEqual(result['jitter'], 0.01) - self.assertEqual(result['total_packets'], 40) + self.assertEqual( + result['sent_bps_tcp'], tcp_result['sum_sent']['bits_per_second'] + ) + self.assertEqual( + result['received_bytes_tcp'], tcp_result['sum_received']['bytes'] + ) + self.assertEqual(result['jitter'], udp_result['jitter_ms']) + self.assertEqual(result['total_packets'], udp_result['packets']) self.assertEqual(mock_warn.call_count, 0) self.assertEqual(mock_exec_command.call_count, 2) mock_exec_command.assert_has_calls(self._EXPECTED_COMMAND_CALLS) @@ -136,7 +148,6 @@ def test_iperf_check_params( @patch.object( Iperf, '_get_iperf_servers', return_value=['iperf.openwisptestserver.com'] ) - @patch.object(iperf_logger, 'warning') @patch.object( settings, 'IPERF_CHECK_CONFIG', @@ -145,11 +156,10 @@ def test_iperf_check_params( 'time': {'default': 120}, }, ) - def test_iperf_check_config( - self, mock_warn, mock_get_iperf_servers, mock_exec_command, *args - ): + def test_iperf_check_config(self, mock_get_iperf_servers, mock_exec_command, *args): mock_exec_command.side_effect = [(RESULT_TCP, 0), (RESULT_UDP, 0)] - + tcp_result = loads(RESULT_TCP)['end'] + udp_result = loads(RESULT_UDP)['end']['sum'] check, dc = self._create_iperf_test_env() self._EXPECTED_COMMAND_CALLS = [ call(dc, 'iperf3 -c iperf.openwisptestserver.com -p 9201 -t 120 -J'), @@ -160,11 +170,14 @@ def test_iperf_check_config( for key in self._RESULT_KEYS: self.assertIn(key, result) self.assertEqual(result['iperf_result'], 1) - self.assertEqual(result['sent_bps_tcp'], 44.04) - self.assertEqual(result['received_bytes_tcp'], 55.05) - self.assertEqual(result['jitter'], 0.01) - self.assertEqual(result['total_packets'], 40) - self.assertEqual(mock_warn.call_count, 0) + self.assertEqual( + result['sent_bps_tcp'], tcp_result['sum_sent']['bits_per_second'] + ) + self.assertEqual( + result['received_bytes_tcp'], tcp_result['sum_received']['bytes'] + ) + self.assertEqual(result['jitter'], udp_result['jitter_ms']) + self.assertEqual(result['total_packets'], udp_result['packets']) self.assertEqual(mock_exec_command.call_count, 2) mock_exec_command.assert_has_calls(self._EXPECTED_COMMAND_CALLS) mock_get_iperf_servers.assert_called_once_with(self.device.organization.id) @@ -259,6 +272,8 @@ def test_iperf_check_schema_violation(self): def test_iperf_check(self): check, _ = self._create_iperf_test_env() error = "ash: iperf3: not found" + tcp_result = loads(RESULT_TCP)['end'] + udp_result = loads(RESULT_UDP)['end']['sum'] with self.subTest('Test iperf3 is not installed on the device'): with patch.object( @@ -296,17 +311,28 @@ def test_iperf_check(self): result = check.perform_check(store=False) for key in self._RESULT_KEYS: self.assertIn(key, result) - self.assertEqual(result['iperf_result'], 1) - self.assertEqual(result['sent_bps_tcp'], 44.04) - self.assertEqual(result['received_bytes_tcp'], 55.05) - self.assertEqual(result['retransmits'], 0) - self.assertEqual(result['sent_bps_udp'], 1.05) - self.assertEqual(result['sent_bytes_udp'], 1.31) - self.assertEqual(result['jitter'], 0.01) - self.assertEqual(result['total_packets'], 40) - self.assertEqual(result['lost_packets'], 0) - self.assertEqual(result['lost_percent'], 0.0) + self.assertEqual( + 
result['sent_bps_tcp'], tcp_result['sum_sent']['bits_per_second'] + ) + self.assertEqual( + result['received_bps_tcp'], + tcp_result['sum_received']['bits_per_second'], + ) + self.assertEqual( + result['sent_bytes_tcp'], tcp_result['sum_sent']['bytes'] + ) + self.assertEqual( + result['received_bytes_tcp'], tcp_result['sum_received']['bytes'] + ) + self.assertEqual( + result['retransmits'], tcp_result['sum_sent']['retransmits'] + ) + self.assertEqual(result['sent_bps_udp'], udp_result['bits_per_second']) + self.assertEqual(result['sent_bytes_udp'], udp_result['bytes']) + self.assertEqual(result['jitter'], udp_result['jitter_ms']) + self.assertEqual(result['total_packets'], udp_result['packets']) + self.assertEqual(result['lost_percent'], udp_result['lost_percent']) self.assertEqual(Chart.objects.count(), 10) self.assertEqual(Check.objects.count(), 3) @@ -353,8 +379,14 @@ def test_iperf_check(self): self.assertEqual(result['iperf_result'], 0) self.assertEqual(result['sent_bps_tcp'], 0.0) self.assertEqual(result['received_bps_tcp'], 0.0) + self.assertEqual(result['sent_bytes_tcp'], 0) + self.assertEqual(result['received_bytes_tcp'], 0) + self.assertEqual(result['retransmits'], 0) + self.assertEqual(result['sent_bps_udp'], 0.0) + self.assertEqual(result['sent_bytes_udp'], 0) self.assertEqual(result['jitter'], 0.0) self.assertEqual(result['total_packets'], 0) + self.assertEqual(result['lost_percent'], 0.0) self.assertEqual(Chart.objects.count(), 10) self.assertEqual(Metric.objects.count(), 3) self.assertEqual(mock_exec_command.call_count, 2) @@ -380,11 +412,27 @@ def test_iperf_check(self): for key in self._RESULT_KEYS: self.assertIn(key, result) self.assertEqual(result['iperf_result'], 1) - self.assertEqual(result['sent_bps_tcp'], 44.04) - self.assertEqual(result['sent_bytes_tcp'], 55.05) + self.assertEqual( + result['sent_bps_tcp'], tcp_result['sum_sent']['bits_per_second'] + ) + self.assertEqual( + result['received_bps_tcp'], + tcp_result['sum_received']['bits_per_second'], + ) + self.assertEqual( + result['sent_bytes_tcp'], tcp_result['sum_sent']['bytes'] + ) + self.assertEqual( + result['received_bytes_tcp'], tcp_result['sum_received']['bytes'] + ) + self.assertEqual( + result['retransmits'], tcp_result['sum_sent']['retransmits'] + ) + self.assertEqual(result['sent_bps_udp'], 0.0) + self.assertEqual(result['sent_bytes_udp'], 0) self.assertEqual(result['jitter'], 0.0) self.assertEqual(result['total_packets'], 0) - self.assertEqual(Chart.objects.count(), 10) + self.assertEqual(result['lost_percent'], 0.0) self.assertEqual(Metric.objects.count(), 3) self.assertEqual(mock_exec_command.call_count, 2) mock_warn.assert_has_calls(self._EXPECTED_WARN_CALLS[1:]) @@ -411,8 +459,14 @@ def test_iperf_check(self): self.assertEqual(result['iperf_result'], 1) self.assertEqual(result['sent_bps_tcp'], 0.0) self.assertEqual(result['received_bps_tcp'], 0.0) - self.assertEqual(result['jitter'], 0.01) - self.assertEqual(result['total_packets'], 40) + self.assertEqual(result['sent_bytes_tcp'], 0) + self.assertEqual(result['received_bytes_tcp'], 0) + self.assertEqual(result['retransmits'], 0) + self.assertEqual(result['sent_bps_udp'], udp_result['bits_per_second']) + self.assertEqual(result['sent_bytes_udp'], udp_result['bytes']) + self.assertEqual(result['jitter'], udp_result['jitter_ms']) + self.assertEqual(result['total_packets'], udp_result['packets']) + self.assertEqual(result['lost_percent'], udp_result['lost_percent']) self.assertEqual(Chart.objects.count(), 10) self.assertEqual(Metric.objects.count(), 3) 
self.assertEqual(mock_exec_command.call_count, 2) diff --git a/openwisp_monitoring/db/backends/influxdb/queries.py b/openwisp_monitoring/db/backends/influxdb/queries.py index cf3454f55..48310c05b 100644 --- a/openwisp_monitoring/db/backends/influxdb/queries.py +++ b/openwisp_monitoring/db/backends/influxdb/queries.py @@ -102,17 +102,17 @@ }, 'bandwidth_tcp': { 'influxdb': ( - "SELECT MEAN(sent_bps_tcp) AS sent, " - "MEAN(received_bps_tcp) AS received FROM {key} WHERE " + "SELECT MEAN(sent_bps_tcp) / 1000000000 AS sent, " + "MEAN(received_bps_tcp) / 1000000000 AS received FROM {key} WHERE " "time >= '{time}' AND content_type = '{content_type}' AND " "object_id = '{object_id}' GROUP BY time(1d)" ) }, 'transfer_tcp': { 'influxdb': ( - "SELECT SUM(received_bytes_tcp) AS received, " - "SUM(sent_bytes_tcp) AS sent," - "SUM(sent_bytes_tcp) + SUM(received_bytes_tcp) AS total FROM {key} WHERE " + "SELECT SUM(received_bytes_tcp) / 1000000000 AS received, " + "SUM(sent_bytes_tcp) / 1000000000 AS sent," + "((SUM(sent_bytes_tcp) + SUM(received_bytes_tcp)) / 1000000000) AS total FROM {key} WHERE " "time >= '{time}' AND content_type = '{content_type}' AND " "object_id = '{object_id}' GROUP BY time(1d)" ) @@ -126,14 +126,14 @@ }, 'bandwidth_udp': { 'influxdb': ( - "SELECT MEAN(sent_bps_udp) AS sent FROM {key} " + "SELECT MEAN(sent_bps_udp) / 1000000 AS sent FROM {key} " "WHERE time >= '{time}' AND content_type = '{content_type}' " "AND object_id = '{object_id}' GROUP BY time(1d)" ) }, 'transfer_udp': { 'influxdb': ( - "SELECT SUM(sent_bytes_udp) AS sent FROM {key} " + "SELECT SUM(sent_bytes_udp) / 1000000 AS sent FROM {key} " "WHERE time >= '{time}' AND content_type = '{content_type}' AND " "object_id = '{object_id}' GROUP BY time(1d)" ) From f2b31104284172bb059d7d4f11956f7f1a8417e1 Mon Sep 17 00:00:00 2001 From: Aryamanz29 Date: Tue, 12 Jul 2022 16:43:41 +0530 Subject: [PATCH 33/64] [docs] Minor changes --- README.rst | 27 +++++++++++----------- openwisp_monitoring/check/classes/iperf.py | 1 - openwisp_monitoring/check/tasks.py | 2 +- 3 files changed, 15 insertions(+), 15 deletions(-) diff --git a/README.rst b/README.rst index 805abd37e..6e3d28f56 100644 --- a/README.rst +++ b/README.rst @@ -808,19 +808,20 @@ Mobile Access Technology in use Iperf ~~~~~ -+--------------------+----------------------------------------------------------------------------------------------------------------------------------------+ -| **measurement**: | ``iperf`` | -+--------------------+----------------------------------------------------------------------------------------------------------------------------------------+ -| **types**: | | ``int`` (iperf_result, retransmits, total_packets, lost_packets), | -| | | ``float`` (sent_bps_tcp, received_bps_tcp, sent_bytes_tcp, received_bytes_tcp, sent_bps_udp, sent_bytes_udp, jitter, lost_percent) | -+--------------------+----------------------------------------------------------------------------------------------------------------------------------------+ -| **fields**: | | ``iperf_result``, ``sent_bps_tcp``, ``received_bps_tcp``, ``sent_bytes_tcp``, ``received_bytes_tcp``, ``retransmits``, | -| | | ``sent_bps_udp``, ``sent_bytes_udp``, ``jitter``, ``total_packets``, ``lost_packets``, ``lost_percent`` | -+--------------------+----------------------------------------------------------------------------------------------------------------------------------------+ -| **configuration**: | ``iperf`` | 
-+--------------------+----------------------------------------------------------------------------------------------------------------------------------------+ -| **charts**: | ``bandwidth_tcp``, ``transfer_tcp``, ``retransmits``, ``bandwidth_udp``, ``transfer_udp``, ``jitter``, ``datagram``, ``datagram_loss`` | -+--------------------+----------------------------------------------------------------------------------------------------------------------------------------+ ++--------------------+---------------------------------------------------------------------------------------------------------------------------+ +| **measurement**: | ``iperf`` | ++--------------------+---------------------------------------------------------------------------------------------------------------------------+ +| **types**: | | ``int`` (iperf_result, sent_bytes_tcp, received_bytes_tcp, retransmits, sent_bytes_udp, total_packets, lost_packets), | +| | | ``float`` (sent_bps_tcp, received_bps_tcp, sent_bps_udp, jitter, lost_percent) | ++--------------------+---------------------------------------------------------------------------------------------------------------------------+ +| **fields**: | | ``iperf_result``, ``sent_bps_tcp``, ``received_bps_tcp``, ``sent_bytes_tcp``, ``received_bytes_tcp``, ``retransmits``, | +| | | ``sent_bps_udp``, ``sent_bytes_udp``, ``jitter``, ``total_packets``, ``lost_packets``, ``lost_percent`` | ++--------------------+---------------------------------------------------------------------------------------------------------------------------+ +| **configuration**: | ``iperf`` | ++--------------------+---------------------------------------------------------------------------------------------------------------------------+ +| **charts**: | | ``bandwidth_tcp``, ``transfer_tcp``, ``retransmits``, ``bandwidth_udp`` | +| | | ``transfer_udp``, ``jitter``, ``datagram``, ``datagram_loss`` | ++--------------------+---------------------------------------------------------------------------------------------------------------------------+ **Bandwidth (TCP)**: diff --git a/openwisp_monitoring/check/classes/iperf.py b/openwisp_monitoring/check/classes/iperf.py index cf01e2c50..09f88fb2d 100644 --- a/openwisp_monitoring/check/classes/iperf.py +++ b/openwisp_monitoring/check/classes/iperf.py @@ -34,7 +34,6 @@ 'type': 'integer', # Sets the interval time in seconds # between periodic bandwidth, jitter, and loss reports. - # If zero, no periodic reports are printed. 'default': 10, 'minimum': 1, # arbitrary chosen to avoid slowing down the queue (30min) diff --git a/openwisp_monitoring/check/tasks.py b/openwisp_monitoring/check/tasks.py index 52b568992..49cec5073 100644 --- a/openwisp_monitoring/check/tasks.py +++ b/openwisp_monitoring/check/tasks.py @@ -29,7 +29,7 @@ def run_checks(checks=CHECKS_LIST): # Executes only auto checks ie. ping and config applied checks = CHECKS_LIST[:2] - # Checks that are defined in celery beat configuration + # Checks that are included celery beat configuration if not isinstance(checks, list): raise ImproperlyConfigured(f'Check path {checks} should be of type "list"') if not all(check_path in CHECKS_LIST for check_path in checks): From 91d472aa1eaa9b312c391847094dc3c05b270101 Mon Sep 17 00:00:00 2001 From: Aryamanz29 Date: Tue, 12 Jul 2022 23:07:15 +0530 Subject: [PATCH 34/64] [requested-changes] Removed CHECK_LIST from run_checks args - Updated docs. - Moved iperf test constants. 
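Before the diff below, a minimal usage sketch of the reworked ``run_checks`` task may help: the celery beat entries pass either ``None`` (auto checks) or an explicit list of check paths, and anything else is rejected. This snippet assumes a Django shell in a project where openwisp-monitoring is installed and a celery broker is configured:

.. code-block:: python

    from django.core.exceptions import ImproperlyConfigured
    from openwisp_monitoring.check.tasks import run_checks

    # night-time beat entry: enqueue only the iperf check
    run_checks.delay(checks=['openwisp_monitoring.check.classes.Iperf'])

    # passing None (the default beat args) falls back to the auto checks,
    # i.e. Ping and ConfigApplied
    run_checks.delay(checks=None)

    # a value that is not a list of registered check paths is rejected
    try:
        run_checks(checks='openwisp_monitoring.check.classes.Iperf')
    except ImproperlyConfigured as error:
        print(error)
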
--- README.rst | 35 ++++++++++--------- openwisp_monitoring/check/tasks.py | 3 +- openwisp_monitoring/check/tests/test_iperf.py | 33 ++++++++--------- tests/openwisp2/settings.py | 2 +- 4 files changed, 37 insertions(+), 36 deletions(-) diff --git a/README.rst b/README.rst index 6e3d28f56..b45405b7a 100644 --- a/README.rst +++ b/README.rst @@ -385,6 +385,11 @@ Configure celery (you may use a different broker if you want): 'args': (None,), 'relative': True, }, + # Delete old WifiSession + 'delete_wifi_clients_and_sessions': { + 'task': 'openwisp_monitoring.monitoring.tasks.delete_wifi_clients_and_sessions', + 'schedule': timedelta(days=180), + }, } INSTALLED_APPS.append('djcelery_email') @@ -1063,23 +1068,19 @@ Add celery beat configuration for iperf check in `openwisp settings Date: Wed, 13 Jul 2022 17:57:07 +0530 Subject: [PATCH 35/64] [feature] Added initial alert settings for iperf check - Added alert settings for iperf check on the iperf_result field just for testing purpose. --- openwisp_monitoring/check/classes/iperf.py | 7 ++--- .../monitoring/configuration.py | 27 +++++++++++++++++++ 2 files changed, 31 insertions(+), 3 deletions(-) diff --git a/openwisp_monitoring/check/classes/iperf.py b/openwisp_monitoring/check/classes/iperf.py index 09f88fb2d..dbd77a6c2 100644 --- a/openwisp_monitoring/check/classes/iperf.py +++ b/openwisp_monitoring/check/classes/iperf.py @@ -17,8 +17,6 @@ Chart = load_model('monitoring', 'Chart') Metric = load_model('monitoring', 'Metric') -Device = load_model('config', 'Device') -Credentials = load_model('connection', 'Credentials') AlertSettings = load_model('monitoring', 'AlertSettings') DeviceConnection = load_model('connection', 'DeviceConnection') @@ -224,6 +222,7 @@ def _get_metric(self): """ metric, created = self._get_or_create_metric() if created: + self._create_alert_settings(metric) self._create_charts(metric) return metric @@ -247,4 +246,6 @@ def _create_charts(self, metric): chart.save() def _create_alert_settings(self, metric): - pass + alert_settings = AlertSettings(metric=metric) + alert_settings.full_clean() + alert_settings.save() diff --git a/openwisp_monitoring/monitoring/configuration.py b/openwisp_monitoring/monitoring/configuration.py index 2b47bda3c..e2f5a340f 100644 --- a/openwisp_monitoring/monitoring/configuration.py +++ b/openwisp_monitoring/monitoring/configuration.py @@ -668,6 +668,33 @@ def _get_access_tech(): 'colors': [DEFAULT_COLORS[8]], }, }, + 'alert_settings': {'operator': '<', 'threshold': 1, 'tolerance': 0}, + 'notification': { + 'problem': { + 'verbose_name': 'Iperf PROBLEM', + 'verb': _('iperf test is experiencing some problem'), + 'level': 'warning', + 'email_subject': _( + '[{site.name}] PROBLEM: {notification.target} {notification.verb}' + ), + 'message': _( + 'The device [{notification.target}]({notification.target_link}) ' + '{notification.verb}.' + ), + }, + 'recovery': { + 'verbose_name': 'Iperf RECOVERY', + 'verb': _('iperf test now back to normal'), + 'level': 'info', + 'email_subject': _( + '[{site.name}] RECOVERY: {notification.target} {notification.verb}' + ), + 'message': _( + 'The device [{notification.target}]({notification.target_link}) ' + '{notification.verb}.' 
+ ), + }, + }, }, } From 391537e80877680fd06f7894aee7fa742c94a4e4 Mon Sep 17 00:00:00 2001 From: Aryamanz29 Date: Wed, 13 Jul 2022 21:47:41 +0530 Subject: [PATCH 36/64] [tests] Added test_iperf_check_alert_notification --- openwisp_monitoring/check/tests/test_iperf.py | 32 +++++++++++++++++++ 1 file changed, 32 insertions(+) diff --git a/openwisp_monitoring/check/tests/test_iperf.py b/openwisp_monitoring/check/tests/test_iperf.py index 91bb93ea9..1d1f4b155 100644 --- a/openwisp_monitoring/check/tests/test_iperf.py +++ b/openwisp_monitoring/check/tests/test_iperf.py @@ -19,6 +19,7 @@ AlertSettings = load_model('monitoring', 'AlertSettings') Metric = load_model('monitoring', 'Metric') Check = load_model('check', 'Check') +Notification = load_model('openwisp_notifications', 'Notification') class TestIperf(CreateConnectionsMixin, TestDeviceMonitoringMixin, TransactionTestCase): @@ -476,3 +477,34 @@ def test_iperf_check(self): mock_get_iperf_servers.assert_called_once_with( self.device.organization.id ) + + @patch.object(Iperf, '_exec_command') + @patch.object( + Iperf, '_get_iperf_servers', return_value=['iperf.openwisptestserver.com'] + ) + @patch.object(iperf_logger, 'warning') + def test_iperf_check_alert_notification( + self, mock_warn, mock_get_iperf_servers, mock_exec_command + ): + mock_exec_command.side_effect = [(RESULT_TCP, 0), (RESULT_UDP, 0)] + check, _ = self._create_iperf_test_env() + device = self.device + self.assertEqual(Notification.objects.count(), 0) + self.assertEqual(AlertSettings.objects.count(), 0) + check.perform_check() + self.assertEqual(Notification.objects.count(), 0) + self.assertEqual(AlertSettings.objects.count(), 1) + self.assertEqual(device.monitoring.status, 'unknown') + iperf_metric = Metric.objects.get(key='iperf') + # write value less than threshold + iperf_metric.write(0) + device.monitoring.refresh_from_db() + self.assertEqual(device.monitoring.status, 'problem') + # No alert notification ('problem') + self.assertEqual(Notification.objects.count(), 0) + # write within threshold + iperf_metric.write(1) + device.monitoring.refresh_from_db() + self.assertEqual(device.monitoring.status, 'ok') + # No alert notification ('recovery') + self.assertEqual(Notification.objects.count(), 0) From 9810d2470b788385018e57fd93a34c8536a761fb Mon Sep 17 00:00:00 2001 From: Aryamanz29 Date: Thu, 14 Jul 2022 18:36:35 +0530 Subject: [PATCH 37/64] [feature] Added alert for related_field (Iperf jitter) #406 Closes #406 --- openwisp_monitoring/check/tests/test_iperf.py | 61 ++++++++++--------- openwisp_monitoring/monitoring/base/models.py | 26 +++++++- .../monitoring/configuration.py | 7 ++- 3 files changed, 60 insertions(+), 34 deletions(-) diff --git a/openwisp_monitoring/check/tests/test_iperf.py b/openwisp_monitoring/check/tests/test_iperf.py index 1d1f4b155..39b46c924 100644 --- a/openwisp_monitoring/check/tests/test_iperf.py +++ b/openwisp_monitoring/check/tests/test_iperf.py @@ -478,33 +478,34 @@ def test_iperf_check(self): self.device.organization.id ) - @patch.object(Iperf, '_exec_command') - @patch.object( - Iperf, '_get_iperf_servers', return_value=['iperf.openwisptestserver.com'] - ) - @patch.object(iperf_logger, 'warning') - def test_iperf_check_alert_notification( - self, mock_warn, mock_get_iperf_servers, mock_exec_command - ): - mock_exec_command.side_effect = [(RESULT_TCP, 0), (RESULT_UDP, 0)] - check, _ = self._create_iperf_test_env() - device = self.device - self.assertEqual(Notification.objects.count(), 0) - self.assertEqual(AlertSettings.objects.count(), 0) - 
check.perform_check() - self.assertEqual(Notification.objects.count(), 0) - self.assertEqual(AlertSettings.objects.count(), 1) - self.assertEqual(device.monitoring.status, 'unknown') - iperf_metric = Metric.objects.get(key='iperf') - # write value less than threshold - iperf_metric.write(0) - device.monitoring.refresh_from_db() - self.assertEqual(device.monitoring.status, 'problem') - # No alert notification ('problem') - self.assertEqual(Notification.objects.count(), 0) - # write within threshold - iperf_metric.write(1) - device.monitoring.refresh_from_db() - self.assertEqual(device.monitoring.status, 'ok') - # No alert notification ('recovery') - self.assertEqual(Notification.objects.count(), 0) + # @patch.object(Iperf, '_exec_command') + # @patch.object( + # Iperf, '_get_iperf_servers', return_value=['iperf.openwisptestserver.com'] + # ) + # @patch.object(iperf_logger, 'warning') + # def test_iperf_check_alert_notification( + # self, mock_warn, mock_get_iperf_servers, mock_exec_command + # ): + # mock_exec_command.side_effect = [(RESULT_TCP, 0), (RESULT_UDP, 0)] + # admin = self._create_admin() + # check, _ = self._create_iperf_test_env() + # device = self.device + # self.assertEqual(Notification.objects.count(), 0) + # self.assertEqual(AlertSettings.objects.count(), 0) + # check.perform_check() + # self.assertEqual(Notification.objects.count(), 0) + # self.assertEqual(AlertSettings.objects.count(), 1) + # self.assertEqual(device.monitoring.status, 'unknown') + # iperf_metric = Metric.objects.get(key='iperf') + # # write value less than threshold + # iperf_metric.write(0) + # device.monitoring.refresh_from_db() + # self.assertEqual(device.monitoring.status, 'problem') + # # No alert notification ('problem') + # self.assertEqual(Notification.objects.count(), 1) + # # write within threshold + # iperf_metric.write(1) + # device.monitoring.refresh_from_db() + # self.assertEqual(device.monitoring.status, 'ok') + # # No alert notification ('recovery') + # self.assertEqual(Notification.objects.count(), 2) diff --git a/openwisp_monitoring/monitoring/base/models.py b/openwisp_monitoring/monitoring/base/models.py index 863e46a3f..7f8d2f5e6 100644 --- a/openwisp_monitoring/monitoring/base/models.py +++ b/openwisp_monitoring/monitoring/base/models.py @@ -355,6 +355,16 @@ def write( 'send_alert': send_alert, } options['metric_pk'] = self.pk + + if extra_values and isinstance(extra_values, dict): + for key in extra_values.keys(): + if not self.related_fields or key not in self.related_fields: + raise ValueError(f'"{key}" not defined in metric configuration') + if 'alert_on_related_field' in self.config_dict: + related_field = self.config_dict['alert_on_related_field'] + options['check_threshold_kwargs'].update( + {'value': extra_values[related_field]} + ) timeseries_write.delay(name=self.key, values=values, **options) def read(self, **kwargs): @@ -748,6 +758,10 @@ def _is_crossed_by(self, current_value, time=None, retention_policy=None): return value_crossed # tolerance is set, we must go back in time # to ensure the threshold is trepassed for enough time + if 'alert_on_related_field' in self.config_dict: + alert_on_related_field = [self.metric.config_dict['alert_on_related_field']] + else: + alert_on_related_field = [] if time is None: # retrieves latest measurements, ordered by most recent first points = self.metric.read( @@ -755,6 +769,7 @@ def _is_crossed_by(self, current_value, time=None, retention_policy=None): limit=None, order='-time', retention_policy=retention_policy, + 
extra_values=alert_on_related_field, ) # store a list with the results results = [value_crossed] @@ -766,7 +781,16 @@ def _is_crossed_by(self, current_value, time=None, retention_policy=None): continue utc_time = utc.localize(datetime.utcfromtimestamp(point['time'])) # did this point cross the threshold? Append to result list - results.append(self._value_crossed(point[self.metric.field_name])) + # alert_on_related_field + + if 'alert_on_related_field' in self.config_dict: + results.append( + self._value_crossed( + point[self.metric.config_dict['alert_on_related_field']] + ) + ) + else: + results.append(self._value_crossed(point[self.metric.field_name])) # tolerance is trepassed if self._time_crossed(utc_time): # if the latest results are consistent, the metric being diff --git a/openwisp_monitoring/monitoring/configuration.py b/openwisp_monitoring/monitoring/configuration.py index e2f5a340f..03d08821d 100644 --- a/openwisp_monitoring/monitoring/configuration.py +++ b/openwisp_monitoring/monitoring/configuration.py @@ -560,6 +560,7 @@ def _get_access_tech(): 'lost_packets', 'lost_percent', ], + 'alert_on_related_field': 'jitter', 'charts': { 'bandwidth_tcp': { 'type': 'stackedbar', @@ -668,11 +669,11 @@ def _get_access_tech(): 'colors': [DEFAULT_COLORS[8]], }, }, - 'alert_settings': {'operator': '<', 'threshold': 1, 'tolerance': 0}, + 'alert_settings': {'operator': '>', 'threshold': 5, 'tolerance': 0}, 'notification': { 'problem': { 'verbose_name': 'Iperf PROBLEM', - 'verb': _('iperf test is experiencing some problem'), + 'verb': _('iperf test jitter is greater than normal value'), 'level': 'warning', 'email_subject': _( '[{site.name}] PROBLEM: {notification.target} {notification.verb}' @@ -684,7 +685,7 @@ def _get_access_tech(): }, 'recovery': { 'verbose_name': 'Iperf RECOVERY', - 'verb': _('iperf test now back to normal'), + 'verb': _('iperf test jitter now back to normal'), 'level': 'info', 'email_subject': _( '[{site.name}] RECOVERY: {notification.target} {notification.verb}' From d219e9f667377d616e7935613b4f0c6039dc675a Mon Sep 17 00:00:00 2001 From: Aryamanz29 Date: Thu, 14 Jul 2022 23:57:40 +0530 Subject: [PATCH 38/64] [changes] Improved alert_on_related field --- openwisp_monitoring/monitoring/base/models.py | 21 ++++++++++--------- .../monitoring/configuration.py | 4 ++++ 2 files changed, 15 insertions(+), 10 deletions(-) diff --git a/openwisp_monitoring/monitoring/base/models.py b/openwisp_monitoring/monitoring/base/models.py index 7f8d2f5e6..917ea29df 100644 --- a/openwisp_monitoring/monitoring/base/models.py +++ b/openwisp_monitoring/monitoring/base/models.py @@ -356,15 +356,16 @@ def write( } options['metric_pk'] = self.pk - if extra_values and isinstance(extra_values, dict): - for key in extra_values.keys(): - if not self.related_fields or key not in self.related_fields: - raise ValueError(f'"{key}" not defined in metric configuration') - if 'alert_on_related_field' in self.config_dict: - related_field = self.config_dict['alert_on_related_field'] - options['check_threshold_kwargs'].update( - {'value': extra_values[related_field]} + # check if alert_on_related_field is present in metric configuration + if 'alert_on_related_field' in self.config_dict: + related_field = self.config_dict['alert_on_related_field'] + if not extra_values: + raise ValueError( + 'write() missing positional argument: "extra_values" required for alert on related field' ) + options['check_threshold_kwargs'].update( + {'value': extra_values[related_field]} + ) timeseries_write.delay(name=self.key, 
values=values, **options) def read(self, **kwargs): @@ -758,6 +759,7 @@ def _is_crossed_by(self, current_value, time=None, retention_policy=None): return value_crossed # tolerance is set, we must go back in time # to ensure the threshold is trepassed for enough time + # check if alert_on_related_field is present in metric configuration if 'alert_on_related_field' in self.config_dict: alert_on_related_field = [self.metric.config_dict['alert_on_related_field']] else: @@ -781,8 +783,7 @@ def _is_crossed_by(self, current_value, time=None, retention_policy=None): continue utc_time = utc.localize(datetime.utcfromtimestamp(point['time'])) # did this point cross the threshold? Append to result list - # alert_on_related_field - + # check if alert_on_related_field is present in metric configuration if 'alert_on_related_field' in self.config_dict: results.append( self._value_crossed( diff --git a/openwisp_monitoring/monitoring/configuration.py b/openwisp_monitoring/monitoring/configuration.py index 03d08821d..44c5d792c 100644 --- a/openwisp_monitoring/monitoring/configuration.py +++ b/openwisp_monitoring/monitoring/configuration.py @@ -709,6 +709,10 @@ def _validate_metric_configuration(metric_config): assert 'name' in metric_config assert 'key' in metric_config assert 'field_name' in metric_config + if 'alert_on_related_field' in metric_config: + assert ( + metric_config['alert_on_related_field'] in metric_config['related_fields'] + ) def _validate_chart_configuration(chart_config): From 808288118df92cff2bc444e9235af5d84746ff46 Mon Sep 17 00:00:00 2001 From: Aryamanz29 Date: Fri, 15 Jul 2022 01:47:40 +0530 Subject: [PATCH 39/64] [tests] Added tests for alert_on_related_field --- openwisp_monitoring/monitoring/base/models.py | 4 + .../monitoring/tests/__init__.py | 9 +++ .../tests/test_monitoring_notifications.py | 80 +++++++++++++++++++ 3 files changed, 93 insertions(+) diff --git a/openwisp_monitoring/monitoring/base/models.py b/openwisp_monitoring/monitoring/base/models.py index 917ea29df..6bb707159 100644 --- a/openwisp_monitoring/monitoring/base/models.py +++ b/openwisp_monitoring/monitoring/base/models.py @@ -363,6 +363,10 @@ def write( raise ValueError( 'write() missing positional argument: "extra_values" required for alert on related field' ) + if related_field not in extra_values.keys(): + raise ValueError( + f'"{key}" is not defined for alert_on_related_field in metric configuration' + ) options['check_threshold_kwargs'].update( {'value': extra_values[related_field]} ) diff --git a/openwisp_monitoring/monitoring/tests/__init__.py b/openwisp_monitoring/monitoring/tests/__init__.py index 83ccfd5b1..bff66abc5 100644 --- a/openwisp_monitoring/monitoring/tests/__init__.py +++ b/openwisp_monitoring/monitoring/tests/__init__.py @@ -51,6 +51,15 @@ 'label': 'Test Metric', 'notification': test_notification, }, + 'test_alert_on_rf': { + 'name': 'test_alert_related', + 'key': '{key}', + 'field_name': '{field_name}', + 'label': 'Test alert related', + 'notification': test_notification, + 'related_fields': ['test_related_1', 'test_related_2', 'test_related_3'], + 'alert_on_related_field': 'test_related_2', + }, 'top_fields_mean': { 'name': 'top_fields_mean_test', 'key': '{key}', diff --git a/openwisp_monitoring/monitoring/tests/test_monitoring_notifications.py b/openwisp_monitoring/monitoring/tests/test_monitoring_notifications.py index 81ffac53e..5883425fa 100644 --- a/openwisp_monitoring/monitoring/tests/test_monitoring_notifications.py +++ 
b/openwisp_monitoring/monitoring/tests/test_monitoring_notifications.py @@ -402,6 +402,86 @@ def test_alerts_disabled(self): self.assertEqual(d.monitoring.status, 'problem') self.assertEqual(Notification.objects.count(), 0) + def test_alert_on_related_field(self): + admin = self._create_admin() + m = self._create_general_metric(configuration='test_alert_on_rf') + self._create_alert_settings( + metric=m, custom_operator='>', custom_threshold=30, custom_tolerance=0 + ) + + with self.subTest('Test notification for metric without related field'): + with self.assertRaises(ValueError) as err: + m.write(1) + self.assertEqual( + str(err.exception), + 'write() missing positional argument: "extra_values" required for alert on related field', + ) + m.refresh_from_db() + self.assertEqual(m.is_healthy, True) + self.assertEqual(m.is_healthy_tolerant, True) + self.assertEqual(Notification.objects.count(), 0) + + with self.subTest('Test notification for metric on different related field'): + with self.assertRaises(ValueError) as err: + m.write(10, extra_values={'test_related_3': 40}) + self.assertEqual( + str(err.exception), + '"test_related_3" is not defined for alert_on_related_field in metric configuration', + ) + m.refresh_from_db() + self.assertEqual(m.is_healthy, True) + self.assertEqual(m.is_healthy_tolerant, True) + self.assertEqual(Notification.objects.count(), 0) + + with self.subTest('Test notification for metric with multiple related fields'): + m.write(10, extra_values={'test_related_2': 40, 'test_related_3': 20}) + m.refresh_from_db() + self.assertEqual(m.is_healthy, False) + self.assertEqual(m.is_healthy_tolerant, False) + self.assertEqual(Notification.objects.count(), 1) + n = notification_queryset.first() + self.assertEqual(n.recipient, admin) + self.assertEqual(n.actor, m) + self.assertEqual(n.action_object, m.alertsettings) + self.assertEqual(n.level, 'warning') + + with self.subTest( + 'Test notification for metric exceeding related field alert settings' + ): + m.write(10, extra_values={'test_related_2': 40}) + m.refresh_from_db() + self.assertEqual(m.is_healthy, False) + self.assertEqual(m.is_healthy_tolerant, False) + self.assertEqual(Notification.objects.count(), 1) + n = notification_queryset.first() + self.assertEqual(n.recipient, admin) + self.assertEqual(n.actor, m) + self.assertEqual(n.action_object, m.alertsettings) + self.assertEqual(n.level, 'warning') + + with self.subTest( + 'Test no double alarm for metric exceeding related field alert settings' + ): + m.write(20, extra_values={'test_related_2': 35}) + m.refresh_from_db() + self.assertEqual(m.is_healthy, False) + self.assertEqual(m.is_healthy_tolerant, False) + self.assertEqual(Notification.objects.count(), 1) + + with self.subTest( + 'Test notification for metric falling behind related field alert settings' + ): + m.write(30, extra_values={'test_related_2': 25}) + m.refresh_from_db() + self.assertEqual(m.is_healthy, True) + self.assertEqual(m.is_healthy_tolerant, True) + self.assertEqual(Notification.objects.count(), 2) + n = notification_queryset.last() + self.assertEqual(n.recipient, admin) + self.assertEqual(n.actor, m) + self.assertEqual(n.action_object, m.alertsettings) + self.assertEqual(n.level, 'info') + class TestTransactionMonitoringNotifications(DeviceMonitoringTransactionTestcase): device_model = Device From e4d6061e4d6bee24e7e6d9c92a86f18e2f596bd1 Mon Sep 17 00:00:00 2001 From: Aryamanz29 Date: Fri, 15 Jul 2022 02:11:04 +0530 Subject: [PATCH 40/64] [docs] Added alert_on_related_field --- README.rst | 6 ++++ 
openwisp_monitoring/check/tests/test_iperf.py | 32 ------------------- 2 files changed, 6 insertions(+), 32 deletions(-) diff --git a/README.rst b/README.rst index b45405b7a..e8f7d42e7 100644 --- a/README.rst +++ b/README.rst @@ -1821,6 +1821,12 @@ The ``AlertSettings`` of ``ping`` metric will by default use ``threshold`` and ` defined in the ``alert_settings`` key. You can always override them and define your own custom values via the *admin*. +You can also use ``alert_on_related_field`` key in metric configuration +which allows ``AlertSettings`` to use ``related_field`` as value to check ``threshold`` instead +of default ``field_name`` key. A real world example of this can be seen in +`Iperf metric configuration `_, +Where we used ``jitter`` (related_field) for alerts. + **Note**: It will raise ``ImproperlyConfigured`` exception if a metric configuration is already registered with same name (not to be confused with verbose_name). diff --git a/openwisp_monitoring/check/tests/test_iperf.py b/openwisp_monitoring/check/tests/test_iperf.py index 39b46c924..e1a053081 100644 --- a/openwisp_monitoring/check/tests/test_iperf.py +++ b/openwisp_monitoring/check/tests/test_iperf.py @@ -477,35 +477,3 @@ def test_iperf_check(self): mock_get_iperf_servers.assert_called_once_with( self.device.organization.id ) - - # @patch.object(Iperf, '_exec_command') - # @patch.object( - # Iperf, '_get_iperf_servers', return_value=['iperf.openwisptestserver.com'] - # ) - # @patch.object(iperf_logger, 'warning') - # def test_iperf_check_alert_notification( - # self, mock_warn, mock_get_iperf_servers, mock_exec_command - # ): - # mock_exec_command.side_effect = [(RESULT_TCP, 0), (RESULT_UDP, 0)] - # admin = self._create_admin() - # check, _ = self._create_iperf_test_env() - # device = self.device - # self.assertEqual(Notification.objects.count(), 0) - # self.assertEqual(AlertSettings.objects.count(), 0) - # check.perform_check() - # self.assertEqual(Notification.objects.count(), 0) - # self.assertEqual(AlertSettings.objects.count(), 1) - # self.assertEqual(device.monitoring.status, 'unknown') - # iperf_metric = Metric.objects.get(key='iperf') - # # write value less than threshold - # iperf_metric.write(0) - # device.monitoring.refresh_from_db() - # self.assertEqual(device.monitoring.status, 'problem') - # # No alert notification ('problem') - # self.assertEqual(Notification.objects.count(), 1) - # # write within threshold - # iperf_metric.write(1) - # device.monitoring.refresh_from_db() - # self.assertEqual(device.monitoring.status, 'ok') - # # No alert notification ('recovery') - # self.assertEqual(Notification.objects.count(), 2) From df31203af6be0dc0876d38b683d67037163eda89 Mon Sep 17 00:00:00 2001 From: Aryamanz29 Date: Mon, 18 Jul 2022 12:45:12 +0530 Subject: [PATCH 41/64] [requested-changes] Remove is_working filter, formatted test json - Remove is_working filter from getdeviceconneciton method. - Formatted iperf test json. - Added line breaks in docs. --- README.rst | 10 +- openwisp_monitoring/check/classes/iperf.py | 1 - .../check/tests/iperf_test_result.py | 1389 +++++++++-------- openwisp_monitoring/check/tests/test_iperf.py | 8 - 4 files changed, 738 insertions(+), 670 deletions(-) diff --git a/README.rst b/README.rst index e8f7d42e7..60bfcbb17 100644 --- a/README.rst +++ b/README.rst @@ -1027,7 +1027,8 @@ How to configure iperf check 1. 
Register your device to OpenWISP ################################### -Register your device to OpenWISP and make sure `iperf3 openwrt package `_ is installed on the device if not run : +Register your device to OpenWISP and make sure `iperf3 openwrt package +`_ is installed on the device if not run : .. code-block:: shell @@ -1036,8 +1037,8 @@ Register your device to OpenWISP and make sure `iperf3 openwrt package `_ -to allow SSH access to you device from OpenWISP. +Follow the steps in `"How to configure push updates" section of the openwisp-controller documentation +`_ to allow SSH access to you device from OpenWISP. **Note:** Make sure device connection is enabled & working with right update strategy i.e. ``OpenWRT SSH``. @@ -1063,7 +1064,8 @@ For example. 'c9734710-db30-46b0-a2fc-01f01046fe4f': ['2001:db8::1'], } -Add celery beat configuration for iperf check in `openwisp settings `_ +Add celery beat configuration for iperf check in `openwisp settings +`_ .. code-block:: python diff --git a/openwisp_monitoring/check/classes/iperf.py b/openwisp_monitoring/check/classes/iperf.py index dbd77a6c2..1a0f23975 100644 --- a/openwisp_monitoring/check/classes/iperf.py +++ b/openwisp_monitoring/check/classes/iperf.py @@ -121,7 +121,6 @@ def _get_device_connection(self, device): device_id=device.id, update_strategy=openwrt_ssh, enabled=True, - is_working=True, ).first() return device_connection diff --git a/openwisp_monitoring/check/tests/iperf_test_result.py b/openwisp_monitoring/check/tests/iperf_test_result.py index 20b8a2ded..aa98c0763 100644 --- a/openwisp_monitoring/check/tests/iperf_test_result.py +++ b/openwisp_monitoring/check/tests/iperf_test_result.py @@ -1,662 +1,737 @@ # flake8: noqa -RESULT_TCP = """{ - "start": { - "connected": [{ - "socket": 5, - "local_host": "127.0.0.1", - "local_port": 54966, - "remote_host": "127.0.0.1", - "remote_port": 5201 - }], - "version": "iperf 3.9", - "system_info": "Linux openwisp-desktop 5.11.2-51-generic #58~20.04.1-Ubuntu SMP Tue Jun 14 11:29:12 UTC 2022 x86_64", - "timestamp": { - "time": "Thu, 30 Jun 2022 21:39:55 GMT", - "timesecs": 1656625195 - }, - "connecting_to": { - "host": "localhost", - "port": 5201 - }, - "cookie": "npx4ad65t3j4wginxr4a7mqedmkhhspx3sob", - "tcp_mss_default": 32768, - "sock_bufsize": 0, - "sndbuf_actual": 16384, - "rcvbuf_actual": 131072, - "test_start": { - "protocol": "TCP", - "num_streams": 1, - "blksize": 131072, - "omit": 0, - "duration": 10, - "bytes": 0, - "blocks": 0, - "reverse": 0, - "tos": 0 - } - }, - "intervals": [{ - "streams": [{ - "socket": 5, - "start": 0, - "end": 1.000048, - "seconds": 1.000048041343689, - "bytes": 5790760960, - "bits_per_second": 46323862219.414116, - "retransmits": 0, - "snd_cwnd": 1506109, - "rtt": 22, - "rttvar": 3, - "pmtu": 65535, - "omitted": false, - "sender": true - }], - "sum": { - "start": 0, - "end": 1.000048, - "seconds": 1.000048041343689, - "bytes": 5790760960, - "bits_per_second": 46323862219.414116, - "retransmits": 0, - "omitted": false, - "sender": true - } - }, { - "streams": [{ - "socket": 5, - "start": 1.000048, - "end": 2.000185, - "seconds": 1.0001369714736938, - "bytes": 5463080960, - "bits_per_second": 43698662209.838669, - "retransmits": 0, - "snd_cwnd": 2160939, - "rtt": 22, - "rttvar": 3, - "pmtu": 65535, - "omitted": false, - "sender": true - }], - "sum": { - "start": 1.000048, - "end": 2.000185, - "seconds": 1.0001369714736938, - "bytes": 5463080960, - "bits_per_second": 43698662209.838669, - "retransmits": 0, - "omitted": false, - "sender": true - } - }, { - 
"streams": [{ - "socket": 5, - "start": 2.000185, - "end": 3.00019, - "seconds": 1.0000050067901611, - "bytes": 5679349760, - "bits_per_second": 45434570598.638954, - "retransmits": 0, - "snd_cwnd": 2553837, - "rtt": 21, - "rttvar": 1, - "pmtu": 65535, - "omitted": false, - "sender": true - }], - "sum": { - "start": 2.000185, - "end": 3.00019, - "seconds": 1.0000050067901611, - "bytes": 5679349760, - "bits_per_second": 45434570598.638954, - "retransmits": 0, - "omitted": false, - "sender": true - } - }, { - "streams": [{ - "socket": 5, - "start": 3.00019, - "end": 4.000232, - "seconds": 1.0000419616699219, - "bytes": 5710807040, - "bits_per_second": 45684539320.4405, - "retransmits": 0, - "snd_cwnd": 2553837, - "rtt": 24, - "rttvar": 5, - "pmtu": 65535, - "omitted": false, - "sender": true - }], - "sum": { - "start": 3.00019, - "end": 4.000232, - "seconds": 1.0000419616699219, - "bytes": 5710807040, - "bits_per_second": 45684539320.4405, - "retransmits": 0, - "omitted": false, - "sender": true - } - }, { - "streams": [{ - "socket": 5, - "start": 4.000232, - "end": 5.000158, - "seconds": 0.999925971031189, - "bytes": 5307105280, - "bits_per_second": 42459985508.942955, - "retransmits": 0, - "snd_cwnd": 3208667, - "rtt": 27, - "rttvar": 4, - "pmtu": 65535, - "omitted": false, - "sender": true - }], - "sum": { - "start": 4.000232, - "end": 5.000158, - "seconds": 0.999925971031189, - "bytes": 5307105280, - "bits_per_second": 42459985508.942955, - "retransmits": 0, - "omitted": false, - "sender": true - } - }, { - "streams": [{ - "socket": 5, - "start": 5.000158, - "end": 6.000229, - "seconds": 1.0000710487365723, - "bytes": 5308416000, - "bits_per_second": 42464310964.356567, - "retransmits": 0, - "snd_cwnd": 3208667, - "rtt": 28, - "rttvar": 1, - "pmtu": 65535, - "omitted": false, - "sender": true - }], - "sum": { - "start": 5.000158, - "end": 6.000229, - "seconds": 1.0000710487365723, - "bytes": 5308416000, - "bits_per_second": 42464310964.356567, - "retransmits": 0, - "omitted": false, - "sender": true - } - }, { - "streams": [{ - "socket": 5, - "start": 6.000229, - "end": 7.000056, - "seconds": 0.99982702732086182, - "bytes": 5241569280, - "bits_per_second": 41939808681.0701, - "retransmits": 0, - "snd_cwnd": 3208667, - "rtt": 23, - "rttvar": 4, - "pmtu": 65535, - "omitted": false, - "sender": true - }], - "sum": { - "start": 6.000229, - "end": 7.000056, - "seconds": 0.99982702732086182, - "bytes": 5241569280, - "bits_per_second": 41939808681.0701, - "retransmits": 0, - "omitted": false, - "sender": true - } - }, { - "streams": [{ - "socket": 5, - "start": 7.000056, - "end": 8.000202, - "seconds": 1.0001460313797, - "bytes": 5734400000, - "bits_per_second": 45868501759.403313, - "retransmits": 0, - "snd_cwnd": 3208667, - "rtt": 22, - "rttvar": 1, - "pmtu": 65535, - "omitted": false, - "sender": true - }], - "sum": { - "start": 7.000056, - "end": 8.000202, - "seconds": 1.0001460313797, - "bytes": 5734400000, - "bits_per_second": 45868501759.403313, - "retransmits": 0, - "omitted": false, - "sender": true - } - }, { - "streams": [{ - "socket": 5, - "start": 8.000202, - "end": 9.0003, - "seconds": 1.0000979900360107, - "bytes": 5415895040, - "bits_per_second": 43322915105.98867, - "retransmits": 0, - "snd_cwnd": 3208667, - "rtt": 35, - "rttvar": 12, - "pmtu": 65535, - "omitted": false, - "sender": true - }], - "sum": { - "start": 8.000202, - "end": 9.0003, - "seconds": 1.0000979900360107, - "bytes": 5415895040, - "bits_per_second": 43322915105.98867, - "retransmits": 0, - "omitted": false, - 
"sender": true - } - }, { - "streams": [{ - "socket": 5, - "start": 9.0003, - "end": 10.000218, - "seconds": 0.999917984008789, - "bytes": 5402787840, - "bits_per_second": 43225847930.763977, - "retransmits": 0, - "snd_cwnd": 3208667, - "rtt": 26, - "rttvar": 17, - "pmtu": 65535, - "omitted": false, - "sender": true - }], - "sum": { - "start": 9.0003, - "end": 10.000218, - "seconds": 0.999917984008789, - "bytes": 5402787840, - "bits_per_second": 43225847930.763977, - "retransmits": 0, - "omitted": false, - "sender": true - } - }], - "end": { - "streams": [{ - "sender": { - "socket": 5, - "start": 0, - "end": 10.000218, - "seconds": 10.000218, - "bytes": 55054172160, - "bits_per_second": 44042377604.168228, - "retransmits": 0, - "max_snd_cwnd": 3208667, - "max_rtt": 35, - "min_rtt": 21, - "mean_rtt": 25, - "sender": true - }, - "receiver": { - "socket": 5, - "start": 0, - "end": 10.000272, - "seconds": 10.000218, - "bytes": 55054172160, - "bits_per_second": 44042139781.797935, - "sender": true - } - }], - "sum_sent": { - "start": 0, - "end": 10.000218, - "seconds": 10.000218, - "bytes": 55054172160, - "bits_per_second": 44042377604.168228, - "retransmits": 0, - "sender": true - }, - "sum_received": { - "start": 0, - "end": 10.000272, - "seconds": 10.000272, - "bytes": 55054172160, - "bits_per_second": 44042139781.797935, - "sender": true - }, - "cpu_utilization_percent": { - "host_total": 99.498820810699755, - "host_user": 0.66204905391509139, - "host_system": 98.83676176238454, - "remote_total": 0.377797593572381, - "remote_user": 0.02174276147834767, - "remote_system": 0.35605477540538377 - }, - "sender_tcp_congestion": "cubic", - "receiver_tcp_congestion": "cubic" - } -}""" - -RESULT_UDP = """{ - "start": { - "connected": [{ - "socket": 5, - "local_host": "127.0.0.1", - "local_port": 54477, - "remote_host": "127.0.0.1", - "remote_port": 5201 - }], - "version": "iperf 3.9", - "system_info": "openwisp-desktop 5.11.2-51-generic #58~20.04.1-Ubuntu SMP Tue Jun 14 11:29:12 UTC 2022 x86_64", - "timestamp": { - "time": "Thu, 30 Jun 2022 21:10:31 GMT", - "timesecs": 1656623431 - }, - "connecting_to": { - "host": "localhost", - "port": 5201 - }, - "cookie": "kvuxkz3ncutquvpl2evufmdkn726molzocot", - "sock_bufsize": 0, - "sndbuf_actual": 212992, - "rcvbuf_actual": 212992, - "test_start": { - "protocol": "UDP", - "num_streams": 1, - "blksize": 32768, - "omit": 0, - "duration": 10, - "bytes": 0, - "blocks": 0, - "reverse": 0, - "tos": 0 - } - }, - "intervals": [{ - "streams": [{ - "socket": 5, - "start": 0, - "end": 1.000057, - "seconds": 1.0000569820404053, - "bytes": 131072, - "bits_per_second": 1048516.253404483, - "packets": 4, - "omitted": false, - "sender": true - }], - "sum": { - "start": 0, - "end": 1.000057, - "seconds": 1.0000569820404053, - "bytes": 131072, - "bits_per_second": 1048516.253404483, - "packets": 4, - "omitted": false, - "sender": true - } - }, { - "streams": [{ - "socket": 5, - "start": 1.000057, - "end": 2.000079, - "seconds": 1.0000220537185669, - "bytes": 131072, - "bits_per_second": 1048552.8755099809, - "packets": 4, - "omitted": false, - "sender": true - }], - "sum": { - "start": 1.000057, - "end": 2.000079, - "seconds": 1.0000220537185669, - "bytes": 131072, - "bits_per_second": 1048552.8755099809, - "packets": 4, - "omitted": false, - "sender": true - } - }, { - "streams": [{ - "socket": 5, - "start": 2.000079, - "end": 3.000079, - "seconds": 1, - "bytes": 131072, - "bits_per_second": 1048576, - "packets": 4, - "omitted": false, - "sender": true - }], - "sum": { - 
"start": 2.000079, - "end": 3.000079, - "seconds": 1, - "bytes": 131072, - "bits_per_second": 1048576, - "packets": 4, - "omitted": false, - "sender": true - } - }, { - "streams": [{ - "socket": 5, - "start": 3.000079, - "end": 4.000079, - "seconds": 1, - "bytes": 131072, - "bits_per_second": 1048576, - "packets": 4, - "omitted": false, - "sender": true - }], - "sum": { - "start": 3.000079, - "end": 4.000079, - "seconds": 1, - "bytes": 131072, - "bits_per_second": 1048576, - "packets": 4, - "omitted": false, - "sender": true - } - }, { - "streams": [{ - "socket": 5, - "start": 4.000079, - "end": 5.000182, - "seconds": 1.0001029968261719, - "bytes": 131072, - "bits_per_second": 1048468.0111225117, - "packets": 4, - "omitted": false, - "sender": true - }], - "sum": { - "start": 4.000079, - "end": 5.000182, - "seconds": 1.0001029968261719, - "bytes": 131072, - "bits_per_second": 1048468.0111225117, - "packets": 4, - "omitted": false, - "sender": true - } - }, { - "streams": [{ - "socket": 5, - "start": 5.000182, - "end": 6.000056, - "seconds": 0.99987399578094482, - "bytes": 131072, - "bits_per_second": 1048708.1416504055, - "packets": 4, - "omitted": false, - "sender": true - }], - "sum": { - "start": 5.000182, - "end": 6.000056, - "seconds": 0.99987399578094482, - "bytes": 131072, - "bits_per_second": 1048708.1416504055, - "packets": 4, - "omitted": false, - "sender": true - } - }, { - "streams": [{ - "socket": 5, - "start": 6.000056, - "end": 7.000056, - "seconds": 1, - "bytes": 131072, - "bits_per_second": 1048576, - "packets": 4, - "omitted": false, - "sender": true - }], - "sum": { - "start": 6.000056, - "end": 7.000056, - "seconds": 1, - "bytes": 131072, - "bits_per_second": 1048576, - "packets": 4, - "omitted": false, - "sender": true - } - }, { - "streams": [{ - "socket": 5, - "start": 7.000056, - "end": 8.000056, - "seconds": 1, - "bytes": 131072, - "bits_per_second": 1048576, - "packets": 4, - "omitted": false, - "sender": true - }], - "sum": { - "start": 7.000056, - "end": 8.000056, - "seconds": 1, - "bytes": 131072, - "bits_per_second": 1048576, - "packets": 4, - "omitted": false, - "sender": true - } - }, { - "streams": [{ - "socket": 5, - "start": 8.000056, - "end": 9.000057, - "seconds": 1.0000009536743164, - "bytes": 131072, - "bits_per_second": 1048575.0000009537, - "packets": 4, - "omitted": false, - "sender": true - }], - "sum": { - "start": 8.000056, - "end": 9.000057, - "seconds": 1.0000009536743164, - "bytes": 131072, - "bits_per_second": 1048575.0000009537, - "packets": 4, - "omitted": false, - "sender": true - } - }, { - "streams": [{ - "socket": 5, - "start": 9.000057, - "end": 10.00006, - "seconds": 1.0000029802322388, - "bytes": 131072, - "bits_per_second": 1048572.8750093132, - "packets": 4, - "omitted": false, - "sender": true - }], - "sum": { - "start": 9.000057, - "end": 10.00006, - "seconds": 1.0000029802322388, - "bytes": 131072, - "bits_per_second": 1048572.8750093132, - "packets": 4, - "omitted": false, - "sender": true - } - }], - "end": { - "streams": [{ - "udp": { - "socket": 5, - "start": 0, - "end": 10.00006, - "seconds": 10.00006, - "bytes": 1310720, - "bits_per_second": 1048569.7085817485, - "jitter_ms": 0.011259258240784126, - "lost_packets": 0, - "packets": 40, - "lost_percent": 0, - "out_of_order": 0, - "sender": true - } - }], - "sum": { - "start": 0, - "end": 10.000115, - "seconds": 10.000115, - "bytes": 1310720, - "bits_per_second": 1048569.7085817485, - "jitter_ms": 0.011259258240784126, - "lost_packets": 0, - "packets": 40, - "lost_percent": 
0, - "sender": true - }, - "cpu_utilization_percent": { - "host_total": 0.6057128493969417, - "host_user": 0, - "host_system": 0.6057128493969417, - "remote_total": 0.016163250220207454, - "remote_user": 0.01616789349806445, - "remote_system": 0 - } - } -}""" +RESULT_TCP = """ +{ + "start": { + "connected": [ + { + "socket": 5, + "local_host": "127.0.0.1", + "local_port": 54966, + "remote_host": "127.0.0.1", + "remote_port": 5201 + } + ], + "version": "iperf 3.9", + "system_info": "Linux openwisp-desktop 5.11.2-51-generic #58~20.04.1-Ubuntu SMP Tue Jun 14 11:29:12 UTC 2022 x86_64", + "timestamp": { + "time": "Thu, 30 Jun 2022 21:39:55 GMT", + "timesecs": 1656625195 + }, + "connecting_to": { + "host": "localhost", + "port": 5201 + }, + "cookie": "npx4ad65t3j4wginxr4a7mqedmkhhspx3sob", + "tcp_mss_default": 32768, + "sock_bufsize": 0, + "sndbuf_actual": 16384, + "rcvbuf_actual": 131072, + "test_start": { + "protocol": "TCP", + "num_streams": 1, + "blksize": 131072, + "omit": 0, + "duration": 10, + "bytes": 0, + "blocks": 0, + "reverse": 0, + "tos": 0 + } + }, + "intervals": [ + { + "streams": [ + { + "socket": 5, + "start": 0, + "end": 1.000048, + "seconds": 1.000048041343689, + "bytes": 5790760960, + "bits_per_second": 46323862219.414116, + "retransmits": 0, + "snd_cwnd": 1506109, + "rtt": 22, + "rttvar": 3, + "pmtu": 65535, + "omitted": false, + "sender": true + } + ], + "sum": { + "start": 0, + "end": 1.000048, + "seconds": 1.000048041343689, + "bytes": 5790760960, + "bits_per_second": 46323862219.414116, + "retransmits": 0, + "omitted": false, + "sender": true + } + }, + { + "streams": [ + { + "socket": 5, + "start": 1.000048, + "end": 2.000185, + "seconds": 1.0001369714736938, + "bytes": 5463080960, + "bits_per_second": 43698662209.83867, + "retransmits": 0, + "snd_cwnd": 2160939, + "rtt": 22, + "rttvar": 3, + "pmtu": 65535, + "omitted": false, + "sender": true + } + ], + "sum": { + "start": 1.000048, + "end": 2.000185, + "seconds": 1.0001369714736938, + "bytes": 5463080960, + "bits_per_second": 43698662209.83867, + "retransmits": 0, + "omitted": false, + "sender": true + } + }, + { + "streams": [ + { + "socket": 5, + "start": 2.000185, + "end": 3.00019, + "seconds": 1.0000050067901611, + "bytes": 5679349760, + "bits_per_second": 45434570598.638954, + "retransmits": 0, + "snd_cwnd": 2553837, + "rtt": 21, + "rttvar": 1, + "pmtu": 65535, + "omitted": false, + "sender": true + } + ], + "sum": { + "start": 2.000185, + "end": 3.00019, + "seconds": 1.0000050067901611, + "bytes": 5679349760, + "bits_per_second": 45434570598.638954, + "retransmits": 0, + "omitted": false, + "sender": true + } + }, + { + "streams": [ + { + "socket": 5, + "start": 3.00019, + "end": 4.000232, + "seconds": 1.0000419616699219, + "bytes": 5710807040, + "bits_per_second": 45684539320.4405, + "retransmits": 0, + "snd_cwnd": 2553837, + "rtt": 24, + "rttvar": 5, + "pmtu": 65535, + "omitted": false, + "sender": true + } + ], + "sum": { + "start": 3.00019, + "end": 4.000232, + "seconds": 1.0000419616699219, + "bytes": 5710807040, + "bits_per_second": 45684539320.4405, + "retransmits": 0, + "omitted": false, + "sender": true + } + }, + { + "streams": [ + { + "socket": 5, + "start": 4.000232, + "end": 5.000158, + "seconds": 0.999925971031189, + "bytes": 5307105280, + "bits_per_second": 42459985508.942955, + "retransmits": 0, + "snd_cwnd": 3208667, + "rtt": 27, + "rttvar": 4, + "pmtu": 65535, + "omitted": false, + "sender": true + } + ], + "sum": { + "start": 4.000232, + "end": 5.000158, + "seconds": 0.999925971031189, + 
"bytes": 5307105280, + "bits_per_second": 42459985508.942955, + "retransmits": 0, + "omitted": false, + "sender": true + } + }, + { + "streams": [ + { + "socket": 5, + "start": 5.000158, + "end": 6.000229, + "seconds": 1.0000710487365723, + "bytes": 5308416000, + "bits_per_second": 42464310964.35657, + "retransmits": 0, + "snd_cwnd": 3208667, + "rtt": 28, + "rttvar": 1, + "pmtu": 65535, + "omitted": false, + "sender": true + } + ], + "sum": { + "start": 5.000158, + "end": 6.000229, + "seconds": 1.0000710487365723, + "bytes": 5308416000, + "bits_per_second": 42464310964.35657, + "retransmits": 0, + "omitted": false, + "sender": true + } + }, + { + "streams": [ + { + "socket": 5, + "start": 6.000229, + "end": 7.000056, + "seconds": 0.9998270273208618, + "bytes": 5241569280, + "bits_per_second": 41939808681.0701, + "retransmits": 0, + "snd_cwnd": 3208667, + "rtt": 23, + "rttvar": 4, + "pmtu": 65535, + "omitted": false, + "sender": true + } + ], + "sum": { + "start": 6.000229, + "end": 7.000056, + "seconds": 0.9998270273208618, + "bytes": 5241569280, + "bits_per_second": 41939808681.0701, + "retransmits": 0, + "omitted": false, + "sender": true + } + }, + { + "streams": [ + { + "socket": 5, + "start": 7.000056, + "end": 8.000202, + "seconds": 1.0001460313797, + "bytes": 5734400000, + "bits_per_second": 45868501759.40331, + "retransmits": 0, + "snd_cwnd": 3208667, + "rtt": 22, + "rttvar": 1, + "pmtu": 65535, + "omitted": false, + "sender": true + } + ], + "sum": { + "start": 7.000056, + "end": 8.000202, + "seconds": 1.0001460313797, + "bytes": 5734400000, + "bits_per_second": 45868501759.40331, + "retransmits": 0, + "omitted": false, + "sender": true + } + }, + { + "streams": [ + { + "socket": 5, + "start": 8.000202, + "end": 9.0003, + "seconds": 1.0000979900360107, + "bytes": 5415895040, + "bits_per_second": 43322915105.98867, + "retransmits": 0, + "snd_cwnd": 3208667, + "rtt": 35, + "rttvar": 12, + "pmtu": 65535, + "omitted": false, + "sender": true + } + ], + "sum": { + "start": 8.000202, + "end": 9.0003, + "seconds": 1.0000979900360107, + "bytes": 5415895040, + "bits_per_second": 43322915105.98867, + "retransmits": 0, + "omitted": false, + "sender": true + } + }, + { + "streams": [ + { + "socket": 5, + "start": 9.0003, + "end": 10.000218, + "seconds": 0.999917984008789, + "bytes": 5402787840, + "bits_per_second": 43225847930.76398, + "retransmits": 0, + "snd_cwnd": 3208667, + "rtt": 26, + "rttvar": 17, + "pmtu": 65535, + "omitted": false, + "sender": true + } + ], + "sum": { + "start": 9.0003, + "end": 10.000218, + "seconds": 0.999917984008789, + "bytes": 5402787840, + "bits_per_second": 43225847930.76398, + "retransmits": 0, + "omitted": false, + "sender": true + } + } + ], + "end": { + "streams": [ + { + "sender": { + "socket": 5, + "start": 0, + "end": 10.000218, + "seconds": 10.000218, + "bytes": 55054172160, + "bits_per_second": 44042377604.16823, + "retransmits": 0, + "max_snd_cwnd": 3208667, + "max_rtt": 35, + "min_rtt": 21, + "mean_rtt": 25, + "sender": true + }, + "receiver": { + "socket": 5, + "start": 0, + "end": 10.000272, + "seconds": 10.000218, + "bytes": 55054172160, + "bits_per_second": 44042139781.797935, + "sender": true + } + } + ], + "sum_sent": { + "start": 0, + "end": 10.000218, + "seconds": 10.000218, + "bytes": 55054172160, + "bits_per_second": 44042377604.16823, + "retransmits": 0, + "sender": true + }, + "sum_received": { + "start": 0, + "end": 10.000272, + "seconds": 10.000272, + "bytes": 55054172160, + "bits_per_second": 44042139781.797935, + "sender": true + }, 
+ "cpu_utilization_percent": { + "host_total": 99.49882081069975, + "host_user": 0.6620490539150914, + "host_system": 98.83676176238454, + "remote_total": 0.377797593572381, + "remote_user": 0.02174276147834767, + "remote_system": 0.35605477540538377 + }, + "sender_tcp_congestion": "cubic", + "receiver_tcp_congestion": "cubic" + } +} +""" -RESULT_FAIL = """{ - "start": { - "connected": [], - "version": "iperf 3.7", - "system_info": "Linux vm-openwrt 4.14.171 #0 SMP Thu Feb 27 21:05:12 2020 x86_64" +RESULT_UDP = """ +{ + "start": { + "connected": [ + { + "socket": 5, + "local_host": "127.0.0.1", + "local_port": 54477, + "remote_host": "127.0.0.1", + "remote_port": 5201 + } + ], + "version": "iperf 3.9", + "system_info": "openwisp-desktop 5.11.2-51-generic #58~20.04.1-Ubuntu SMP Tue Jun 14 11:29:12 UTC 2022 x86_64", + "timestamp": { + "time": "Thu, 30 Jun 2022 21:10:31 GMT", + "timesecs": 1656623431 + }, + "connecting_to": { + "host": "localhost", + "port": 5201 + }, + "cookie": "kvuxkz3ncutquvpl2evufmdkn726molzocot", + "sock_bufsize": 0, + "sndbuf_actual": 212992, + "rcvbuf_actual": 212992, + "test_start": { + "protocol": "UDP", + "num_streams": 1, + "blksize": 32768, + "omit": 0, + "duration": 10, + "bytes": 0, + "blocks": 0, + "reverse": 0, + "tos": 0 + } + }, + "intervals": [ + { + "streams": [ + { + "socket": 5, + "start": 0, + "end": 1.000057, + "seconds": 1.0000569820404053, + "bytes": 131072, + "bits_per_second": 1048516.253404483, + "packets": 4, + "omitted": false, + "sender": true + } + ], + "sum": { + "start": 0, + "end": 1.000057, + "seconds": 1.0000569820404053, + "bytes": 131072, + "bits_per_second": 1048516.253404483, + "packets": 4, + "omitted": false, + "sender": true + } + }, + { + "streams": [ + { + "socket": 5, + "start": 1.000057, + "end": 2.000079, + "seconds": 1.000022053718567, + "bytes": 131072, + "bits_per_second": 1048552.875509981, + "packets": 4, + "omitted": false, + "sender": true + } + ], + "sum": { + "start": 1.000057, + "end": 2.000079, + "seconds": 1.000022053718567, + "bytes": 131072, + "bits_per_second": 1048552.875509981, + "packets": 4, + "omitted": false, + "sender": true + } + }, + { + "streams": [ + { + "socket": 5, + "start": 2.000079, + "end": 3.000079, + "seconds": 1, + "bytes": 131072, + "bits_per_second": 1048576, + "packets": 4, + "omitted": false, + "sender": true + } + ], + "sum": { + "start": 2.000079, + "end": 3.000079, + "seconds": 1, + "bytes": 131072, + "bits_per_second": 1048576, + "packets": 4, + "omitted": false, + "sender": true + } + }, + { + "streams": [ + { + "socket": 5, + "start": 3.000079, + "end": 4.000079, + "seconds": 1, + "bytes": 131072, + "bits_per_second": 1048576, + "packets": 4, + "omitted": false, + "sender": true + } + ], + "sum": { + "start": 3.000079, + "end": 4.000079, + "seconds": 1, + "bytes": 131072, + "bits_per_second": 1048576, + "packets": 4, + "omitted": false, + "sender": true + } }, - "intervals": [], - "end": { + { + "streams": [ + { + "socket": 5, + "start": 4.000079, + "end": 5.000182, + "seconds": 1.0001029968261719, + "bytes": 131072, + "bits_per_second": 1048468.0111225117, + "packets": 4, + "omitted": false, + "sender": true + } + ], + "sum": { + "start": 4.000079, + "end": 5.000182, + "seconds": 1.0001029968261719, + "bytes": 131072, + "bits_per_second": 1048468.0111225117, + "packets": 4, + "omitted": false, + "sender": true + } }, - "error": "error - unable to connect to server: Connection refused" -}""" + { + "streams": [ + { + "socket": 5, + "start": 5.000182, + "end": 6.000056, + "seconds": 
0.9998739957809448, + "bytes": 131072, + "bits_per_second": 1048708.1416504055, + "packets": 4, + "omitted": false, + "sender": true + } + ], + "sum": { + "start": 5.000182, + "end": 6.000056, + "seconds": 0.9998739957809448, + "bytes": 131072, + "bits_per_second": 1048708.1416504055, + "packets": 4, + "omitted": false, + "sender": true + } + }, + { + "streams": [ + { + "socket": 5, + "start": 6.000056, + "end": 7.000056, + "seconds": 1, + "bytes": 131072, + "bits_per_second": 1048576, + "packets": 4, + "omitted": false, + "sender": true + } + ], + "sum": { + "start": 6.000056, + "end": 7.000056, + "seconds": 1, + "bytes": 131072, + "bits_per_second": 1048576, + "packets": 4, + "omitted": false, + "sender": true + } + }, + { + "streams": [ + { + "socket": 5, + "start": 7.000056, + "end": 8.000056, + "seconds": 1, + "bytes": 131072, + "bits_per_second": 1048576, + "packets": 4, + "omitted": false, + "sender": true + } + ], + "sum": { + "start": 7.000056, + "end": 8.000056, + "seconds": 1, + "bytes": 131072, + "bits_per_second": 1048576, + "packets": 4, + "omitted": false, + "sender": true + } + }, + { + "streams": [ + { + "socket": 5, + "start": 8.000056, + "end": 9.000057, + "seconds": 1.0000009536743164, + "bytes": 131072, + "bits_per_second": 1048575.0000009537, + "packets": 4, + "omitted": false, + "sender": true + } + ], + "sum": { + "start": 8.000056, + "end": 9.000057, + "seconds": 1.0000009536743164, + "bytes": 131072, + "bits_per_second": 1048575.0000009537, + "packets": 4, + "omitted": false, + "sender": true + } + }, + { + "streams": [ + { + "socket": 5, + "start": 9.000057, + "end": 10.00006, + "seconds": 1.0000029802322388, + "bytes": 131072, + "bits_per_second": 1048572.8750093132, + "packets": 4, + "omitted": false, + "sender": true + } + ], + "sum": { + "start": 9.000057, + "end": 10.00006, + "seconds": 1.0000029802322388, + "bytes": 131072, + "bits_per_second": 1048572.8750093132, + "packets": 4, + "omitted": false, + "sender": true + } + } + ], + "end": { + "streams": [ + { + "udp": { + "socket": 5, + "start": 0, + "end": 10.00006, + "seconds": 10.00006, + "bytes": 1310720, + "bits_per_second": 1048569.7085817485, + "jitter_ms": 0.011259258240784126, + "lost_packets": 0, + "packets": 40, + "lost_percent": 0, + "out_of_order": 0, + "sender": true + } + } + ], + "sum": { + "start": 0, + "end": 10.000115, + "seconds": 10.000115, + "bytes": 1310720, + "bits_per_second": 1048569.7085817485, + "jitter_ms": 0.011259258240784126, + "lost_packets": 0, + "packets": 40, + "lost_percent": 0, + "sender": true + }, + "cpu_utilization_percent": { + "host_total": 0.6057128493969417, + "host_user": 0, + "host_system": 0.6057128493969417, + "remote_total": 0.016163250220207454, + "remote_user": 0.01616789349806445, + "remote_system": 0 + } + } +} +""" + +RESULT_FAIL = """ +{ + "start": { + "connected": [], + "version": "iperf 3.7", + "system_info": "Linux vm-openwrt 4.14.171 #0 SMP Thu Feb 27 21:05:12 2020 x86_64" + }, + "intervals": [], + "end": {}, + "error": "error - unable to connect to server: Connection refused" +} +""" diff --git a/openwisp_monitoring/check/tests/test_iperf.py b/openwisp_monitoring/check/tests/test_iperf.py index e1a053081..41a15c9af 100644 --- a/openwisp_monitoring/check/tests/test_iperf.py +++ b/openwisp_monitoring/check/tests/test_iperf.py @@ -205,14 +205,6 @@ def test_iperf_device_connection(self, mock_warn): f'Failed to get a working DeviceConnection for "{self.device}", iperf check skipped!' 
) - with self.subTest('Test device connection not working'): - dc.is_working = False - dc.save() - check.perform_check(store=False) - mock_warn.assert_called_with( - f'Failed to get a working DeviceConnection for "{self.device}", iperf check skipped!' - ) - with self.subTest('Test device connection is not with right update strategy'): dc.update_strategy = UPDATE_STRATEGIES[1][0] dc.is_working = True From 02100af795a1c46c876aadfacf284eb1b43e1c2f Mon Sep 17 00:00:00 2001 From: Aryamanz29 Date: Mon, 18 Jul 2022 13:39:52 +0530 Subject: [PATCH 42/64] [requested-changes] Explicitly specify ping & config checks - Added ping config checks in celery beat configuration. - If args is None executes all checks. --- openwisp_monitoring/check/tasks.py | 4 ++-- tests/openwisp2/settings.py | 9 +++++++-- 2 files changed, 9 insertions(+), 4 deletions(-) diff --git a/openwisp_monitoring/check/tasks.py b/openwisp_monitoring/check/tasks.py index dbe4e6722..0a5ed2010 100644 --- a/openwisp_monitoring/check/tasks.py +++ b/openwisp_monitoring/check/tasks.py @@ -25,9 +25,9 @@ def run_checks(checks=None): This allows to enqueue all the checks that need to be performed and execute them in parallel with multiple workers if needed. """ + # If checks is None, We should execute all the checks if checks is None: - # Executes only auto checks ie. ping and config applied - checks = CHECKS_LIST[:2] + checks = CHECKS_LIST if not isinstance(checks, list): raise ImproperlyConfigured(f'Check path {checks} should be of type "list"') diff --git a/tests/openwisp2/settings.py b/tests/openwisp2/settings.py index 1d5421e6f..cbedcca60 100644 --- a/tests/openwisp2/settings.py +++ b/tests/openwisp2/settings.py @@ -177,8 +177,14 @@ CELERY_BEAT_SCHEDULE = { 'run_checks': { 'task': 'openwisp_monitoring.check.tasks.run_checks', + # Executes only ping & config check every 5 min 'schedule': timedelta(minutes=5), - 'args': None, + 'args': ( + [ # Checks path + 'openwisp_monitoring.check.classes.Ping', + 'openwisp_monitoring.check.classes.ConfigApplied', + ], + ), 'relative': True, }, 'run_iperf_checks': { @@ -186,7 +192,6 @@ # https://docs.celeryq.dev/en/latest/userguide/periodic-tasks.html#crontab-schedules # Executes only iperf check every 5 mins from 00:00 AM to 6:00 AM (night) 'schedule': crontab(minute='*/5', hour='0-6'), - # Check path 'args': (['openwisp_monitoring.check.classes.Iperf'],), 'relative': True, }, From 27732e6652cbca6ccc41e455161a2891f9bb1948 Mon Sep 17 00:00:00 2001 From: Aryamanz29 Date: Mon, 18 Jul 2022 14:09:17 +0530 Subject: [PATCH 43/64] [docs] Moved images to docs/1.1 --- README.rst | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/README.rst b/README.rst index 60bfcbb17..fb90f95a8 100644 --- a/README.rst +++ b/README.rst @@ -830,42 +830,42 @@ Iperf **Bandwidth (TCP)**: -.. figure:: https://github.com/openwisp/openwisp-monitoring/raw/docs/docs/bandwidth-tcp.png +.. figure:: https://github.com/openwisp/openwisp-monitoring/raw/docs/docs/1.1/bandwidth-tcp.png :align: center **Transfer (TCP)**: -.. figure:: https://github.com/openwisp/openwisp-monitoring/raw/docs/docs/transfer-tcp.png +.. figure:: https://github.com/openwisp/openwisp-monitoring/raw/docs/docs/1.1/transfer-tcp.png :align: center **Retransmits**: -.. figure:: https://github.com/openwisp/openwisp-monitoring/raw/docs/docs/retransmits.png +.. figure:: https://github.com/openwisp/openwisp-monitoring/raw/docs/docs/1.1/retransmits.png :align: center **Bandwidth (UDP)**: -.. 
figure:: https://github.com/openwisp/openwisp-monitoring/raw/docs/docs/bandwidth-udp.png +.. figure:: https://github.com/openwisp/openwisp-monitoring/raw/docs/docs/1.1/bandwidth-udp.png :align: center **Transfer (UDP)**: -.. figure:: https://github.com/openwisp/openwisp-monitoring/raw/docs/docs/transfer-udp.png +.. figure:: https://github.com/openwisp/openwisp-monitoring/raw/docs/docs/1.1/transfer-udp.png :align: center **Jitter**: -.. figure:: https://github.com/openwisp/openwisp-monitoring/raw/docs/docs/jitter.png +.. figure:: https://github.com/openwisp/openwisp-monitoring/raw/docs/docs/1.1/jitter.png :align: center **Datagram**: -.. figure:: https://github.com/openwisp/openwisp-monitoring/raw/docs/docs/datagram.png +.. figure:: https://github.com/openwisp/openwisp-monitoring/raw/docs/docs/1.1/datagram.png :align: center **Datagram loss**: -.. figure:: https://github.com/openwisp/openwisp-monitoring/raw/docs/docs/datagram-loss.png +.. figure:: https://github.com/openwisp/openwisp-monitoring/raw/docs/docs/1.1/datagram-loss.png :align: center Dashboard Monitoring Charts @@ -1042,7 +1042,7 @@ Follow the steps in `"How to configure push updates" section of the openwisp-con **Note:** Make sure device connection is enabled & working with right update strategy i.e. ``OpenWRT SSH``. -.. image:: https://github.com/openwisp/openwisp-monitoring/raw/docs/docs/enable-openwisp-ssh.png +.. image:: https://github.com/openwisp/openwisp-monitoring/raw/docs/docs/1.1/enable-openwrt-ssh.png :alt: Enable ssh access from openwisp to device :align: center @@ -1093,7 +1093,7 @@ This should happen automatically if you have celery running in the background. F run this check manually using the `run_checks <#run_checks>`_ command. After that, you should see the iperf network measurements charts. -.. image:: https://github.com/openwisp/openwisp-monitoring/raw/docs/docs/iperf-charts.png +.. image:: https://github.com/openwisp/openwisp-monitoring/raw/docs/docs/1.1/iperf-charts.png :alt: Iperf network measurement charts Settings From 8a9b82670d654d62ea86147a65904701e5a2bdf9 Mon Sep 17 00:00:00 2001 From: Aryamanz29 Date: Tue, 19 Jul 2022 01:37:32 +0530 Subject: [PATCH 44/64] [feature] Added support for iperf3 auth #414 - Added username, password, rsa-public-key-path parameter to DEFAULT_IPERF_CHECK_CONFIG. 
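As an illustrative sketch (placeholder values, assuming the usual OPENWISP_MONITORING_ settings prefix used by this module), the three new parameters can be overridden together through the iperf check configuration:

    OPENWISP_MONITORING_IPERF_CHECK_CONFIG = {
        # hypothetical credentials for an authenticated iperf3 server
        'username': {'default': 'iperf_user'},
        'password': {'default': 'iperf_password'},
        'rsa-public-key-path': {'default': '/etc/iperf3/public_key.pem'},
    }

The JSON schema added below declares username, password and rsa-public-key-path as mutually dependent, so all three must be supplied together for authentication to be used.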
Closes #414 --- openwisp_monitoring/check/classes/iperf.py | 33 +++++++++++++++++----- 1 file changed, 26 insertions(+), 7 deletions(-) diff --git a/openwisp_monitoring/check/classes/iperf.py b/openwisp_monitoring/check/classes/iperf.py index 1a0f23975..263f58607 100644 --- a/openwisp_monitoring/check/classes/iperf.py +++ b/openwisp_monitoring/check/classes/iperf.py @@ -37,6 +37,13 @@ # arbitrary chosen to avoid slowing down the queue (30min) 'maximum': 1800, }, + 'username': {'type': 'string', 'default': '', 'minLength': 1, 'maxLength': 20}, + 'password': {'type': 'string', 'default': '', 'minLength': 1, 'maxLength': 20}, + 'rsa-public-key-path': { + 'type': 'string', + 'default': '', + 'pattern': '^(.*/)([^/]*)$', + }, } @@ -45,6 +52,11 @@ def get_iperf_schema(): '$schema': 'http://json-schema.org/draft-07/schema#', 'type': 'object', 'additionalProperties': False, + "dependencies": { + "username": ["password", "rsa-public-key-path"], + "password": ["username", "rsa-public-key-path"], + "rsa-public-key-path": ["username", "password"], + }, } schema['properties'] = deep_merge_dicts( DEFAULT_IPERF_CHECK_CONFIG, app_settings.IPERF_CHECK_CONFIG @@ -70,6 +82,7 @@ def validate_params(self): def check(self, store=True): port = self._get_param('port') time = self._get_param('time') + username = self._get_param('username') device = self.related_object device_connection = self._get_device_connection(device) if not device_connection: @@ -84,12 +97,20 @@ def check(self, store=True): ) return servers = self._get_iperf_servers(device.organization.id) + command_tcp = f'iperf3 -c {servers[0]} -p {port} -t {time} -J' + command_udp = f'iperf3 -c {servers[0]} -p {port} -t {time} -u -J' - # TCP mode - command = f'iperf3 -c {servers[0]} -p {port} -t {time} -J' - result, exit_code = self._exec_command(device_connection, command) + if username: + password = self._get_param('password') + rsa_public_key_path = self._get_param('rsa-public-key-path') + command_tcp = f'IPERF3_PASSWORD="{password}" iperf3 -c {servers[0]} -p {port} -t {time} \ + --username "{username}" --rsa-public-key-path {rsa_public_key_path} -J' + command_udp = f'IPERF3_PASSWORD="{password}" iperf3 -c {servers[0]} -p {port} -t {time} \ + --username "{username}" --rsa-public-key-path {rsa_public_key_path} -u -J' - # Exit code 127 : command doesn't exist + # TCP mode + result, exit_code = self._exec_command(device_connection, command_tcp) + # # Exit code 127 : command doesn't exist if exit_code == 127: logger.warning( f'Iperf3 is not installed on the "{device}", error - {result.strip()}' @@ -97,10 +118,8 @@ def check(self, store=True): return result_tcp = self._get_iperf_result(result, exit_code, device, mode='TCP') - # UDP mode - command = f'iperf3 -c {servers[0]} -p {port} -t {time} -u -J' - result, exit_code = self._exec_command(device_connection, command) + result, exit_code = self._exec_command(device_connection, command_udp) result_udp = self._get_iperf_result(result, exit_code, device, mode='UDP') if store: From fb1bc269b0950f0f34fbaaacd5b7704ee3fa2c2e Mon Sep 17 00:00:00 2001 From: Aryamanz29 Date: Tue, 19 Jul 2022 19:55:42 +0530 Subject: [PATCH 45/64] [tests] Added tests for iperf auth --- .../check/tests/iperf_test_result.py | 22 ++ openwisp_monitoring/check/tests/test_iperf.py | 214 +++++++++++++++++- 2 files changed, 228 insertions(+), 8 deletions(-) diff --git a/openwisp_monitoring/check/tests/iperf_test_result.py b/openwisp_monitoring/check/tests/iperf_test_result.py index aa98c0763..ac029e156 100644 --- 
a/openwisp_monitoring/check/tests/iperf_test_result.py +++ b/openwisp_monitoring/check/tests/iperf_test_result.py @@ -735,3 +735,25 @@ "error": "error - unable to connect to server: Connection refused" } """ +RESULT_AUTH_FAIL = """ +{ + "start": { + "connected": [], + "version": "iperf 3.7", + "system_info": "Linux vm-openwrt 4.14.171 #0 SMP Thu Feb 27 21:05:12 2020 x86_64", + "timestamp": { + "time": "Tue, 19 Jul 2022 12:23:38 UTC", + "timesecs": 1658233418 + }, + "connecting_to": { + "host": "192.168.5.109", + "port": 5201 + }, + "cookie": "llz5f6akwyonbtcj3fx4phvfaflohdlvxr4z", + "tcp_mss_default": 1460 + }, + "intervals": [], + "end": {}, + "error": "error - test authorization failed" +} +""" diff --git a/openwisp_monitoring/check/tests/test_iperf.py b/openwisp_monitoring/check/tests/test_iperf.py index 41a15c9af..7185dd490 100644 --- a/openwisp_monitoring/check/tests/test_iperf.py +++ b/openwisp_monitoring/check/tests/test_iperf.py @@ -13,7 +13,7 @@ from ...device.tests import TestDeviceMonitoringMixin from .. import settings from ..classes import Iperf -from .iperf_test_result import RESULT_FAIL, RESULT_TCP, RESULT_UDP +from .iperf_test_result import RESULT_AUTH_FAIL, RESULT_FAIL, RESULT_TCP, RESULT_UDP Chart = load_model('monitoring', 'Chart') AlertSettings = load_model('monitoring', 'AlertSettings') @@ -73,6 +73,24 @@ def _create_iperf_test_env(self): check = Check.objects.get(check_type=self._IPERF) return check, dc + def _set_auth_expected_calls(self, dc, config): + password = config['password']['default'] + username = config['username']['default'] + rsa_publickey_path = config['rsa-public-key-path']['default'] + + self._EXPECTED_COMMAND_CALLS = [ + call( + dc, + f'IPERF3_PASSWORD="{password}" iperf3 -c iperf.openwisptestserver.com -p 5201 -t 10 \ + --username "{username}" --rsa-public-key-path {rsa_publickey_path} -J', + ), + call( + dc, + f'IPERF3_PASSWORD="{password}" iperf3 -c iperf.openwisptestserver.com -p 5201 -t 10 \ + --username "{username}" --rsa-public-key-path {rsa_publickey_path} -u -J', + ), + ] + @patch.object(Iperf, '_exec_command') @patch.object( Iperf, '_get_iperf_servers', return_value=['iperf.openwisptestserver.com'] @@ -116,17 +134,32 @@ def test_iperf_check_params( tcp_result = loads(RESULT_TCP)['end'] udp_result = loads(RESULT_UDP)['end']['sum'] check, dc = self._create_iperf_test_env() - test_params = {'port': 6201, 'time': 20} + server = 'iperf.openwisptestserver.com' + test_params = { + 'port': 6201, + 'time': 20, + 'username': 'openwisp-test-user', + 'password': 'openwisp_pass', + 'rsa-public-key-path': './public.pem', + } + time = test_params['time'] + port = test_params['port'] + username = test_params['username'] + password = test_params['password'] + rsa_publickey_path = test_params['rsa-public-key-path'] check.params = test_params check.save() + self._EXPECTED_COMMAND_CALLS = [ call( dc, - f'iperf3 -c iperf.openwisptestserver.com -p {test_params["port"]} -t {test_params["time"]} -J', # noqa + f'IPERF3_PASSWORD="{password}" iperf3 -c {server} -p {port} -t {time} \ + --username "{username}" --rsa-public-key-path {rsa_publickey_path} -J', ), call( dc, - f'iperf3 -c iperf.openwisptestserver.com -p {test_params["port"]} -t {test_params["time"]} -u -J', # noqa + f'IPERF3_PASSWORD="{password}" iperf3 -c {server} -p {port} -t {time} \ + --username "{username}" --rsa-public-key-path {rsa_publickey_path} -u -J', ), ] result = check.perform_check(store=False) @@ -239,7 +272,7 @@ def test_iperf_check_content_object_not_device(self): self.fail('ValidationError 
not raised') def test_iperf_check_schema_violation(self): - check, _ = self._create_iperf_test_env() + device = self._create_device(organization=self._create_org()) invalid_params = [ {'port': -1232}, {'time': 0}, @@ -252,10 +285,35 @@ def test_iperf_check_schema_violation(self): {'time': 36000}, {'port': ''}, {'time': ''}, + {'username': 121}, + {'password': -323}, + {'rsa-public-key-path': 112}, + {'username': ''}, + {'password': 0}, + {'rsa-public-key-path': '/only_path.pem'}, + { + 'username': 'openwisp-test-user', + 'password': 'open-pass', + 'rsa-public-key-path': '\dir\wrong_path.pem', + }, + { + 'username': 1123, + 'password': 'rossi', + 'rsa-public-key-path': 'root/public.pem', + }, + { + 'username': 'openwisp-test-user', + 'password': -214, + 'rsa-public-key-path': 'root/public.pem', + }, ] - for invalid_params in invalid_params: - check.params = invalid_params - check.save() + for invalid_param in invalid_params: + check = Check( + name='Iperf check', + check_type=self._IPERF, + content_object=device, + params=invalid_param, + ) try: check.check_instance.validate() except ValidationError as e: @@ -469,3 +527,143 @@ def test_iperf_check(self): mock_get_iperf_servers.assert_called_once_with( self.device.organization.id ) + + def test_iperf_check_auth_config(self): + iperf_config = { + 'username': {'default': 'test'}, + 'password': {'default': 'testpass'}, + 'rsa-public-key-path': {'default': '/test.pem'}, + } + iperf_conf_wrong_pass = { + 'username': {'default': 'test'}, + 'password': {'default': 'wrongpass'}, + 'rsa-public-key-path': {'default': '/test.pem'}, + } + iperf_conf_wrong_user = { + 'username': {'default': 'wronguser'}, + 'password': {'default': 'testpass'}, + 'rsa-public-key-path': {'default': '/test.pem'}, + } + check, dc = self._create_iperf_test_env() + auth_error = "test authorization failed" + tcp_result = loads(RESULT_TCP)['end'] + udp_result = loads(RESULT_UDP)['end']['sum'] + + self._EXPECTED_WARN_CALLS = [ + call(f'Iperf check failed for "{self.device}", error - {auth_error}'), + call(f'Iperf check failed for "{self.device}", error - {auth_error}'), + ] + with self.subTest('Test iperf check with right config'): + with patch.object( + Iperf, '_exec_command' + ) as mock_exec_command, patch.object( + Iperf, + '_get_iperf_servers', + return_value=['iperf.openwisptestserver.com'], + ) as mock_get_iperf_servers, patch.object( + settings, 'IPERF_CHECK_CONFIG', iperf_config + ), patch.object( + Iperf, 'schema', get_iperf_schema() + ): + self._set_auth_expected_calls(dc, iperf_config) + mock_exec_command.side_effect = [(RESULT_TCP, 0), (RESULT_UDP, 0)] + + result = check.perform_check(store=False) + for key in self._RESULT_KEYS: + self.assertIn(key, result) + self.assertEqual(result['iperf_result'], 1) + self.assertEqual( + result['sent_bps_tcp'], tcp_result['sum_sent']['bits_per_second'] + ) + self.assertEqual( + result['received_bytes_tcp'], tcp_result['sum_received']['bytes'] + ) + self.assertEqual(result['jitter'], udp_result['jitter_ms']) + self.assertEqual(result['total_packets'], udp_result['packets']) + self.assertEqual(mock_exec_command.call_count, 2) + mock_exec_command.assert_has_calls(self._EXPECTED_COMMAND_CALLS) + mock_get_iperf_servers.assert_called_once_with( + self.device.organization.id + ) + + with self.subTest('Test iperf check with wrong password'): + with patch.object( + Iperf, '_exec_command' + ) as mock_exec_command, patch.object( + Iperf, + '_get_iperf_servers', + return_value=['iperf.openwisptestserver.com'], + ) as mock_get_iperf_servers, 
patch.object( + iperf_logger, 'warning' + ) as mock_warn, patch.object( + settings, 'IPERF_CHECK_CONFIG', iperf_conf_wrong_pass + ), patch.object( + Iperf, 'schema', get_iperf_schema() + ): + self._set_auth_expected_calls(dc, iperf_conf_wrong_pass) + mock_exec_command.side_effect = [ + (RESULT_AUTH_FAIL, 1), + (RESULT_AUTH_FAIL, 1), + ] + + result = check.perform_check(store=False) + for key in self._RESULT_KEYS: + self.assertIn(key, result) + self.assertEqual(result['iperf_result'], 0) + self.assertEqual(result['sent_bps_tcp'], 0.0) + self.assertEqual(result['received_bps_tcp'], 0.0) + self.assertEqual(result['sent_bytes_tcp'], 0) + self.assertEqual(result['received_bytes_tcp'], 0) + self.assertEqual(result['retransmits'], 0) + self.assertEqual(result['sent_bps_udp'], 0.0) + self.assertEqual(result['sent_bytes_udp'], 0) + self.assertEqual(result['jitter'], 0.0) + self.assertEqual(result['total_packets'], 0) + self.assertEqual(result['lost_percent'], 0.0) + self.assertEqual(mock_exec_command.call_count, 2) + mock_warn.assert_has_calls(self._EXPECTED_WARN_CALLS) + mock_exec_command.assert_has_calls(self._EXPECTED_COMMAND_CALLS) + mock_get_iperf_servers.assert_called_once_with( + self.device.organization.id + ) + + with self.subTest('Test iperf check with wrong username'): + with patch.object( + Iperf, '_exec_command' + ) as mock_exec_command, patch.object( + Iperf, + '_get_iperf_servers', + return_value=['iperf.openwisptestserver.com'], + ) as mock_get_iperf_servers, patch.object( + iperf_logger, 'warning' + ) as mock_warn, patch.object( + settings, 'IPERF_CHECK_CONFIG', iperf_conf_wrong_user + ), patch.object( + Iperf, 'schema', get_iperf_schema() + ): + self._set_auth_expected_calls(dc, iperf_conf_wrong_user) + mock_exec_command.side_effect = [ + (RESULT_AUTH_FAIL, 1), + (RESULT_AUTH_FAIL, 1), + ] + + result = check.perform_check(store=False) + for key in self._RESULT_KEYS: + self.assertIn(key, result) + self.assertEqual(result['iperf_result'], 0) + self.assertEqual(result['sent_bps_tcp'], 0.0) + self.assertEqual(result['received_bps_tcp'], 0.0) + self.assertEqual(result['sent_bytes_tcp'], 0) + self.assertEqual(result['received_bytes_tcp'], 0) + self.assertEqual(result['retransmits'], 0) + self.assertEqual(result['sent_bps_udp'], 0.0) + self.assertEqual(result['sent_bytes_udp'], 0) + self.assertEqual(result['jitter'], 0.0) + self.assertEqual(result['total_packets'], 0) + self.assertEqual(result['lost_percent'], 0.0) + self.assertEqual(mock_exec_command.call_count, 2) + mock_warn.assert_has_calls(self._EXPECTED_WARN_CALLS) + mock_exec_command.assert_has_calls(self._EXPECTED_COMMAND_CALLS) + mock_get_iperf_servers.assert_called_once_with( + self.device.organization.id + ) From 87ddce9d5df2bf136cbff09037aab5bc3fa67ecc Mon Sep 17 00:00:00 2001 From: Aryamanz29 Date: Wed, 20 Jul 2022 20:59:14 +0530 Subject: [PATCH 46/64] [requested-changes] Changed iperf auth logic & improved tests - Changed iperf auth logic (RSA key in params instead of path). - Added IPERF_CHECK_RSA_KEY_DELETE (If true RSA will be delete from the device). - Added IPERF_CHECK_RSA_KEY_PATH (To set default iperf RSA public key). - Impoved existing tests as per review suggestion. 
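Illustrative settings sketch (setting names come from check/settings.py in this patch, values are examples only, assuming the usual OPENWISP_MONITORING_ settings prefix):

    # where the PEM public key is written on the device before running iperf3
    OPENWISP_MONITORING_IPERF_CHECK_RSA_KEY_PATH = '/tmp/iperf-rsa-public.pem'
    # remove the key from the device once the check has finished (default: True)
    OPENWISP_MONITORING_IPERF_CHECK_RSA_KEY_DELETE = True

With this change the rsa_public_key check parameter holds only the base64 key body; the check wraps it with the "-----BEGIN/END PUBLIC KEY-----" markers and writes it to IPERF_CHECK_RSA_KEY_PATH on the device before invoking iperf3.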
--- openwisp_monitoring/check/classes/iperf.py | 31 +- openwisp_monitoring/check/settings.py | 4 + ...erf_test_result.py => iperf_test_utils.py} | 8 + openwisp_monitoring/check/tests/test_iperf.py | 478 ++++++++---------- 4 files changed, 245 insertions(+), 276 deletions(-) rename openwisp_monitoring/check/tests/{iperf_test_result.py => iperf_test_utils.py} (97%) diff --git a/openwisp_monitoring/check/classes/iperf.py b/openwisp_monitoring/check/classes/iperf.py index 263f58607..aa7ba8142 100644 --- a/openwisp_monitoring/check/classes/iperf.py +++ b/openwisp_monitoring/check/classes/iperf.py @@ -39,10 +39,9 @@ }, 'username': {'type': 'string', 'default': '', 'minLength': 1, 'maxLength': 20}, 'password': {'type': 'string', 'default': '', 'minLength': 1, 'maxLength': 20}, - 'rsa-public-key-path': { + 'rsa_public_key': { 'type': 'string', 'default': '', - 'pattern': '^(.*/)([^/]*)$', }, } @@ -53,9 +52,9 @@ def get_iperf_schema(): 'type': 'object', 'additionalProperties': False, "dependencies": { - "username": ["password", "rsa-public-key-path"], - "password": ["username", "rsa-public-key-path"], - "rsa-public-key-path": ["username", "password"], + "username": ["password", "rsa_public_key"], + "password": ["username", "rsa_public_key"], + "rsa_public_key": ["username", "password"], }, } schema['properties'] = deep_merge_dicts( @@ -100,17 +99,27 @@ def check(self, store=True): command_tcp = f'iperf3 -c {servers[0]} -p {port} -t {time} -J' command_udp = f'iperf3 -c {servers[0]} -p {port} -t {time} -u -J' + # All three parameters ie. username, password and rsa_public_key is required + # for authentication to work, checking only username here if username: password = self._get_param('password') - rsa_public_key_path = self._get_param('rsa-public-key-path') - command_tcp = f'IPERF3_PASSWORD="{password}" iperf3 -c {servers[0]} -p {port} -t {time} \ + key = self._get_param('rsa_public_key') + rsa_public_key = self._get_compelete_rsa_key(key) + rsa_public_key_path = app_settings.IPERF_CHECK_RSA_KEY_PATH + + command_tcp = f'echo "{rsa_public_key}" > {rsa_public_key_path} && \ + IPERF3_PASSWORD="{password}" iperf3 -c {servers[0]} -p {port} -t {time} \ --username "{username}" --rsa-public-key-path {rsa_public_key_path} -J' + command_udp = f'IPERF3_PASSWORD="{password}" iperf3 -c {servers[0]} -p {port} -t {time} \ --username "{username}" --rsa-public-key-path {rsa_public_key_path} -u -J' + # If IPERF_CHECK_DELETE_RSA_KEY, delete rsa_public_key from the device + if app_settings.IPERF_CHECK_RSA_KEY_DELETE: + command_udp = f'{command_udp} && rm {rsa_public_key_path}' # TCP mode result, exit_code = self._exec_command(device_connection, command_tcp) - # # Exit code 127 : command doesn't exist + # Exit code 127 : command doesn't exist if exit_code == 127: logger.warning( f'Iperf3 is not installed on the "{device}", error - {result.strip()}' @@ -131,6 +140,12 @@ def check(self, store=True): device_connection.disconnect() return {**result_tcp, **result_udp, 'iperf_result': iperf_result} + def _get_compelete_rsa_key(self, key): + pem_prefix = '-----BEGIN PUBLIC KEY-----\n' + pem_suffix = '\n-----END PUBLIC KEY-----' + key = key.strip() + return f'{pem_prefix}{key}{pem_suffix}' + def _get_device_connection(self, device): """ Returns an active SSH DeviceConnection for a device. 
diff --git a/openwisp_monitoring/check/settings.py b/openwisp_monitoring/check/settings.py index e8f0a1b3a..62bae6af5 100644 --- a/openwisp_monitoring/check/settings.py +++ b/openwisp_monitoring/check/settings.py @@ -15,4 +15,8 @@ AUTO_IPERF = get_settings_value('AUTO_IPERF', False) IPERF_SERVERS = get_settings_value('IPERF_SERVERS', {}) IPERF_CHECK_CONFIG = get_settings_value('IPERF_CHECK_CONFIG', {}) +IPERF_CHECK_RSA_KEY_PATH = get_settings_value( + 'IPERF_CHECK_RSA_KEY_PATH', '/tmp/iperf-rsa-public.pem' +) +IPERF_CHECK_RSA_KEY_DELETE = get_settings_value('IPERF_CHECK_RSA_KEY_DELETE', True) CHECKS_LIST = get_settings_value('CHECK_LIST', list(dict(CHECK_CLASSES).keys())) diff --git a/openwisp_monitoring/check/tests/iperf_test_result.py b/openwisp_monitoring/check/tests/iperf_test_utils.py similarity index 97% rename from openwisp_monitoring/check/tests/iperf_test_result.py rename to openwisp_monitoring/check/tests/iperf_test_utils.py index ac029e156..6bdd735eb 100644 --- a/openwisp_monitoring/check/tests/iperf_test_result.py +++ b/openwisp_monitoring/check/tests/iperf_test_utils.py @@ -757,3 +757,11 @@ "error": "error - test authorization failed" } """ + +TEST_RSA_KEY = """MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAwuEm+iYrfSWJOupy6X3N +dxZvUCxvmoL3uoGAs0O0Y32unUQrwcTIxudy38JSuCccD+k2Rf8S4WuZSiTxaoea +6Du99YQGVZeY67uJ21SWFqWU+w6ONUj3TrNNWoICN7BXGLE2BbSBz9YaXefE3aqw +GhEjQz364Itwm425vHn2MntSp0weWb4hUCjQUyyooRXPrFUGBOuY+VvAvMyAG4Uk +msapnWnBSxXt7Tbb++A5XbOMdM2mwNYDEtkD5ksC/x3EVBrI9FvENsH9+u/8J9Mf +2oPl4MnlCMY86MQypkeUn7eVWfDnseNky7TyC0/IgCXve/iaydCCFdkjyo1MTAA4 +BQIDAQAB""" diff --git a/openwisp_monitoring/check/tests/test_iperf.py b/openwisp_monitoring/check/tests/test_iperf.py index 7185dd490..1aaeff43f 100644 --- a/openwisp_monitoring/check/tests/test_iperf.py +++ b/openwisp_monitoring/check/tests/test_iperf.py @@ -11,9 +11,15 @@ from openwisp_monitoring.check.classes.iperf import logger as iperf_logger from ...device.tests import TestDeviceMonitoringMixin -from .. import settings +from .. 
import settings as app_settings from ..classes import Iperf -from .iperf_test_result import RESULT_AUTH_FAIL, RESULT_FAIL, RESULT_TCP, RESULT_UDP +from .iperf_test_utils import ( + RESULT_AUTH_FAIL, + RESULT_FAIL, + RESULT_TCP, + RESULT_UDP, + TEST_RSA_KEY, +) Chart = load_model('monitoring', 'Chart') AlertSettings = load_model('monitoring', 'AlertSettings') @@ -24,7 +30,7 @@ class TestIperf(CreateConnectionsMixin, TestDeviceMonitoringMixin, TransactionTestCase): - _IPERF = settings.CHECK_CLASSES[2][0] + _IPERF = app_settings.CHECK_CLASSES[2][0] _RESULT_KEYS = [ 'iperf_result', 'sent_bps_tcp', @@ -76,21 +82,41 @@ def _create_iperf_test_env(self): def _set_auth_expected_calls(self, dc, config): password = config['password']['default'] username = config['username']['default'] - rsa_publickey_path = config['rsa-public-key-path']['default'] + server = 'iperf.openwisptestserver.com' + test_prefix = '-----BEGIN PUBLIC KEY-----\n' + test_suffix = '\n-----END PUBLIC KEY-----' + key = config['rsa_public_key']['default'] + rsa_key_path = app_settings.IPERF_CHECK_RSA_KEY_PATH self._EXPECTED_COMMAND_CALLS = [ call( dc, - f'IPERF3_PASSWORD="{password}" iperf3 -c iperf.openwisptestserver.com -p 5201 -t 10 \ - --username "{username}" --rsa-public-key-path {rsa_publickey_path} -J', + f'echo "{test_prefix}{key}{test_suffix}" > {rsa_key_path} && \ + IPERF3_PASSWORD="{password}" iperf3 -c {server} -p 5201 -t 10 \ + --username "{username}" --rsa-public-key-path {rsa_key_path} -J', ), call( dc, - f'IPERF3_PASSWORD="{password}" iperf3 -c iperf.openwisptestserver.com -p 5201 -t 10 \ - --username "{username}" --rsa-public-key-path {rsa_publickey_path} -u -J', + f'IPERF3_PASSWORD="{password}" iperf3 -c {server} -p 5201 -t 10 \ + --username "{username}" --rsa-public-key-path {rsa_key_path} -u -J && rm {rsa_key_path}', ), ] + def _assert_iperf_fail_result(self, result): + for key in self._RESULT_KEYS: + self.assertIn(key, result) + self.assertEqual(result['iperf_result'], 0) + self.assertEqual(result['sent_bps_tcp'], 0.0) + self.assertEqual(result['received_bps_tcp'], 0.0) + self.assertEqual(result['sent_bytes_tcp'], 0) + self.assertEqual(result['received_bytes_tcp'], 0) + self.assertEqual(result['retransmits'], 0) + self.assertEqual(result['sent_bps_udp'], 0.0) + self.assertEqual(result['sent_bytes_udp'], 0) + self.assertEqual(result['jitter'], 0.0) + self.assertEqual(result['total_packets'], 0) + self.assertEqual(result['lost_percent'], 0.0) + @patch.object(Iperf, '_exec_command') @patch.object( Iperf, '_get_iperf_servers', return_value=['iperf.openwisptestserver.com'] @@ -135,31 +161,34 @@ def test_iperf_check_params( udp_result = loads(RESULT_UDP)['end']['sum'] check, dc = self._create_iperf_test_env() server = 'iperf.openwisptestserver.com' + test_prefix = '-----BEGIN PUBLIC KEY-----\n' + test_suffix = '\n-----END PUBLIC KEY-----' + rsa_key_path = app_settings.IPERF_CHECK_RSA_KEY_PATH test_params = { 'port': 6201, 'time': 20, 'username': 'openwisp-test-user', 'password': 'openwisp_pass', - 'rsa-public-key-path': './public.pem', + 'rsa_public_key': TEST_RSA_KEY, } time = test_params['time'] port = test_params['port'] username = test_params['username'] password = test_params['password'] - rsa_publickey_path = test_params['rsa-public-key-path'] + key = test_params['rsa_public_key'] check.params = test_params check.save() - self._EXPECTED_COMMAND_CALLS = [ call( dc, - f'IPERF3_PASSWORD="{password}" iperf3 -c {server} -p {port} -t {time} \ - --username "{username}" --rsa-public-key-path {rsa_publickey_path} -J', + 
f'echo "{test_prefix}{key}{test_suffix}" > {rsa_key_path} && \ + IPERF3_PASSWORD="{password}" iperf3 -c {server} -p {port} -t {time} \ + --username "{username}" --rsa-public-key-path {rsa_key_path} -J', ), call( dc, f'IPERF3_PASSWORD="{password}" iperf3 -c {server} -p {port} -t {time} \ - --username "{username}" --rsa-public-key-path {rsa_publickey_path} -u -J', + --username "{username}" --rsa-public-key-path {rsa_key_path} -u -J && rm {rsa_key_path}', ), ] result = check.perform_check(store=False) @@ -184,7 +213,7 @@ def test_iperf_check_params( Iperf, '_get_iperf_servers', return_value=['iperf.openwisptestserver.com'] ) @patch.object( - settings, + app_settings, 'IPERF_CHECK_CONFIG', { 'port': {'default': 9201}, @@ -321,228 +350,183 @@ def test_iperf_check_schema_violation(self): else: self.fail('ValidationError not raised') - def test_iperf_check(self): + @patch.object(Iperf, '_exec_command') + @patch.object( + Iperf, '_get_iperf_servers', return_value=['iperf.openwisptestserver.com'] + ) + @patch.object(iperf_logger, 'warning') + def test_iperf_check(self, mock_warn, mock_get_iperf_servers, mock_exec_command): check, _ = self._create_iperf_test_env() error = "ash: iperf3: not found" tcp_result = loads(RESULT_TCP)['end'] udp_result = loads(RESULT_UDP)['end']['sum'] with self.subTest('Test iperf3 is not installed on the device'): - with patch.object( - Iperf, '_exec_command' - ) as mock_exec_command, patch.object( - Iperf, - '_get_iperf_servers', - return_value=['iperf.openwisptestserver.com'], - ) as mock_get_iperf_servers: - mock_exec_command.side_effect = [(error, 127)] - with patch.object(iperf_logger, 'warning') as mock_warn: - check.perform_check(store=False) - mock_warn.assert_called_with( - f'Iperf3 is not installed on the "{self.device}", error - {error}' - ) - self.assertEqual(mock_warn.call_count, 1) - self.assertEqual(mock_exec_command.call_count, 1) - mock_get_iperf_servers.assert_called_once_with( - self.device.organization.id - ) + mock_exec_command.side_effect = [(error, 127)] + check.perform_check(store=False) + mock_warn.assert_called_with( + f'Iperf3 is not installed on the "{self.device}", error - {error}' + ) + self.assertEqual(mock_warn.call_count, 1) + self.assertEqual(mock_exec_command.call_count, 1) + mock_get_iperf_servers.assert_called_once_with(self.device.organization.id) + mock_exec_command.reset_mock() + mock_get_iperf_servers.reset_mock() + mock_warn.reset_mock() with self.subTest('Test iperf check passes in both TCP & UDP'): - with patch.object( - Iperf, '_exec_command' - ) as mock_exec_command, patch.object( - Iperf, - '_get_iperf_servers', - return_value=['iperf.openwisptestserver.com'], - ) as mock_get_iperf_servers, patch.object( - iperf_logger, 'warning' - ) as mock_warn: - mock_exec_command.side_effect = [(RESULT_TCP, 0), (RESULT_UDP, 0)] - self.assertEqual(Chart.objects.count(), 2) - self.assertEqual(Metric.objects.count(), 2) - result = check.perform_check(store=False) - for key in self._RESULT_KEYS: - self.assertIn(key, result) - self.assertEqual(result['iperf_result'], 1) - self.assertEqual( - result['sent_bps_tcp'], tcp_result['sum_sent']['bits_per_second'] - ) - self.assertEqual( - result['received_bps_tcp'], - tcp_result['sum_received']['bits_per_second'], - ) - self.assertEqual( - result['sent_bytes_tcp'], tcp_result['sum_sent']['bytes'] - ) - self.assertEqual( - result['received_bytes_tcp'], tcp_result['sum_received']['bytes'] - ) - self.assertEqual( - result['retransmits'], tcp_result['sum_sent']['retransmits'] - ) - 
self.assertEqual(result['sent_bps_udp'], udp_result['bits_per_second']) - self.assertEqual(result['sent_bytes_udp'], udp_result['bytes']) - self.assertEqual(result['jitter'], udp_result['jitter_ms']) - self.assertEqual(result['total_packets'], udp_result['packets']) - self.assertEqual(result['lost_percent'], udp_result['lost_percent']) - self.assertEqual(Chart.objects.count(), 10) - self.assertEqual(Check.objects.count(), 3) - - iperf_metric = Metric.objects.get(key='iperf') - self.assertEqual(Metric.objects.count(), 3) - self.assertEqual(iperf_metric.content_object, self.device) - points = iperf_metric.read(limit=None, extra_fields=list(result.keys())) - self.assertEqual(len(points), 1) - self.assertEqual(points[0]['iperf_result'], result['iperf_result']) - self.assertEqual(points[0]['sent_bps_tcp'], result['sent_bps_tcp']) - self.assertEqual( - points[0]['received_bytes_tcp'], result['received_bytes_tcp'] - ) - self.assertEqual(points[0]['retransmits'], result['retransmits']) - self.assertEqual(points[0]['sent_bps_udp'], result['sent_bps_udp']) - self.assertEqual(points[0]['sent_bytes_udp'], result['sent_bytes_udp']) - self.assertEqual(points[0]['jitter'], result['jitter']) - self.assertEqual(points[0]['total_packets'], result['total_packets']) - self.assertEqual(points[0]['lost_packets'], result['lost_packets']) - self.assertEqual(points[0]['lost_percent'], result['lost_percent']) - - self.assertEqual(mock_warn.call_count, 0) - self.assertEqual(mock_exec_command.call_count, 2) - mock_exec_command.assert_has_calls(self._EXPECTED_COMMAND_CALLS) - mock_get_iperf_servers.assert_called_once_with( - self.device.organization.id - ) + mock_exec_command.side_effect = [(RESULT_TCP, 0), (RESULT_UDP, 0)] + self.assertEqual(Chart.objects.count(), 2) + self.assertEqual(Metric.objects.count(), 2) + result = check.perform_check(store=False) + for key in self._RESULT_KEYS: + self.assertIn(key, result) + self.assertEqual(result['iperf_result'], 1) + self.assertEqual( + result['sent_bps_tcp'], tcp_result['sum_sent']['bits_per_second'] + ) + self.assertEqual( + result['received_bps_tcp'], + tcp_result['sum_received']['bits_per_second'], + ) + self.assertEqual(result['sent_bytes_tcp'], tcp_result['sum_sent']['bytes']) + self.assertEqual( + result['received_bytes_tcp'], tcp_result['sum_received']['bytes'] + ) + self.assertEqual( + result['retransmits'], tcp_result['sum_sent']['retransmits'] + ) + self.assertEqual(result['sent_bps_udp'], udp_result['bits_per_second']) + self.assertEqual(result['sent_bytes_udp'], udp_result['bytes']) + self.assertEqual(result['jitter'], udp_result['jitter_ms']) + self.assertEqual(result['total_packets'], udp_result['packets']) + self.assertEqual(result['lost_percent'], udp_result['lost_percent']) + self.assertEqual(Chart.objects.count(), 10) + self.assertEqual(Check.objects.count(), 3) + + iperf_metric = Metric.objects.get(key='iperf') + self.assertEqual(Metric.objects.count(), 3) + self.assertEqual(iperf_metric.content_object, self.device) + points = iperf_metric.read(limit=None, extra_fields=list(result.keys())) + self.assertEqual(len(points), 1) + self.assertEqual(points[0]['iperf_result'], result['iperf_result']) + self.assertEqual(points[0]['sent_bps_tcp'], result['sent_bps_tcp']) + self.assertEqual( + points[0]['received_bytes_tcp'], result['received_bytes_tcp'] + ) + self.assertEqual(points[0]['retransmits'], result['retransmits']) + self.assertEqual(points[0]['sent_bps_udp'], result['sent_bps_udp']) + self.assertEqual(points[0]['sent_bytes_udp'], 
result['sent_bytes_udp']) + self.assertEqual(points[0]['jitter'], result['jitter']) + self.assertEqual(points[0]['total_packets'], result['total_packets']) + self.assertEqual(points[0]['lost_packets'], result['lost_packets']) + self.assertEqual(points[0]['lost_percent'], result['lost_percent']) + + self.assertEqual(mock_warn.call_count, 0) + self.assertEqual(mock_exec_command.call_count, 2) + mock_exec_command.assert_has_calls(self._EXPECTED_COMMAND_CALLS) + mock_get_iperf_servers.assert_called_once_with(self.device.organization.id) + mock_exec_command.reset_mock() + mock_get_iperf_servers.reset_mock() + mock_warn.reset_mock() with self.subTest('Test iperf check fails in both TCP & UDP'): - with patch.object( - Iperf, '_exec_command' - ) as mock_exec_command, patch.object( - Iperf, - '_get_iperf_servers', - return_value=['iperf.openwisptestserver.com'], - ) as mock_get_iperf_servers, patch.object( - iperf_logger, 'warning' - ) as mock_warn: - mock_exec_command.side_effect = [(RESULT_FAIL, 1), (RESULT_FAIL, 1)] + mock_exec_command.side_effect = [(RESULT_FAIL, 1), (RESULT_FAIL, 1)] - result = check.perform_check(store=False) - for key in self._RESULT_KEYS: - self.assertIn(key, result) - self.assertEqual(result['iperf_result'], 0) - self.assertEqual(result['sent_bps_tcp'], 0.0) - self.assertEqual(result['received_bps_tcp'], 0.0) - self.assertEqual(result['sent_bytes_tcp'], 0) - self.assertEqual(result['received_bytes_tcp'], 0) - self.assertEqual(result['retransmits'], 0) - self.assertEqual(result['sent_bps_udp'], 0.0) - self.assertEqual(result['sent_bytes_udp'], 0) - self.assertEqual(result['jitter'], 0.0) - self.assertEqual(result['total_packets'], 0) - self.assertEqual(result['lost_percent'], 0.0) - self.assertEqual(Chart.objects.count(), 10) - self.assertEqual(Metric.objects.count(), 3) - self.assertEqual(mock_exec_command.call_count, 2) - mock_warn.assert_has_calls(self._EXPECTED_WARN_CALLS) - mock_exec_command.assert_has_calls(self._EXPECTED_COMMAND_CALLS) - mock_get_iperf_servers.assert_called_once_with( - self.device.organization.id - ) + result = check.perform_check(store=False) + self._assert_iperf_fail_result(result) + self.assertEqual(Chart.objects.count(), 10) + self.assertEqual(Metric.objects.count(), 3) + self.assertEqual(mock_exec_command.call_count, 2) + mock_warn.assert_has_calls(self._EXPECTED_WARN_CALLS) + mock_exec_command.assert_has_calls(self._EXPECTED_COMMAND_CALLS) + mock_get_iperf_servers.assert_called_once_with(self.device.organization.id) + mock_exec_command.reset_mock() + mock_get_iperf_servers.reset_mock() + mock_warn.reset_mock() with self.subTest('Test iperf check TCP pass UDP fail'): - with patch.object( - Iperf, '_exec_command' - ) as mock_exec_command, patch.object( - Iperf, - '_get_iperf_servers', - return_value=['iperf.openwisptestserver.com'], - ) as mock_get_iperf_servers, patch.object( - iperf_logger, 'warning' - ) as mock_warn: - mock_exec_command.side_effect = [(RESULT_TCP, 0), (RESULT_FAIL, 1)] + mock_exec_command.side_effect = [(RESULT_TCP, 0), (RESULT_FAIL, 1)] - result = check.perform_check(store=False) - for key in self._RESULT_KEYS: - self.assertIn(key, result) - self.assertEqual(result['iperf_result'], 1) - self.assertEqual( - result['sent_bps_tcp'], tcp_result['sum_sent']['bits_per_second'] - ) - self.assertEqual( - result['received_bps_tcp'], - tcp_result['sum_received']['bits_per_second'], - ) - self.assertEqual( - result['sent_bytes_tcp'], tcp_result['sum_sent']['bytes'] - ) - self.assertEqual( - result['received_bytes_tcp'], 
tcp_result['sum_received']['bytes'] - ) - self.assertEqual( - result['retransmits'], tcp_result['sum_sent']['retransmits'] - ) - self.assertEqual(result['sent_bps_udp'], 0.0) - self.assertEqual(result['sent_bytes_udp'], 0) - self.assertEqual(result['jitter'], 0.0) - self.assertEqual(result['total_packets'], 0) - self.assertEqual(result['lost_percent'], 0.0) - self.assertEqual(Metric.objects.count(), 3) - self.assertEqual(mock_exec_command.call_count, 2) - mock_warn.assert_has_calls(self._EXPECTED_WARN_CALLS[1:]) - mock_exec_command.assert_has_calls(self._EXPECTED_COMMAND_CALLS) - mock_get_iperf_servers.assert_called_once_with( - self.device.organization.id - ) + result = check.perform_check(store=False) + for key in self._RESULT_KEYS: + self.assertIn(key, result) + self.assertEqual(result['iperf_result'], 1) + self.assertEqual( + result['sent_bps_tcp'], tcp_result['sum_sent']['bits_per_second'] + ) + self.assertEqual( + result['received_bps_tcp'], + tcp_result['sum_received']['bits_per_second'], + ) + self.assertEqual(result['sent_bytes_tcp'], tcp_result['sum_sent']['bytes']) + self.assertEqual( + result['received_bytes_tcp'], tcp_result['sum_received']['bytes'] + ) + self.assertEqual( + result['retransmits'], tcp_result['sum_sent']['retransmits'] + ) + self.assertEqual(result['sent_bps_udp'], 0.0) + self.assertEqual(result['sent_bytes_udp'], 0) + self.assertEqual(result['jitter'], 0.0) + self.assertEqual(result['total_packets'], 0) + self.assertEqual(result['lost_percent'], 0.0) + self.assertEqual(Chart.objects.count(), 10) + self.assertEqual(Metric.objects.count(), 3) + self.assertEqual(mock_exec_command.call_count, 2) + mock_warn.assert_has_calls(self._EXPECTED_WARN_CALLS[1:]) + mock_exec_command.assert_has_calls(self._EXPECTED_COMMAND_CALLS) + mock_get_iperf_servers.assert_called_once_with(self.device.organization.id) + mock_exec_command.reset_mock() + mock_get_iperf_servers.reset_mock() + mock_warn.reset_mock() with self.subTest('Test iperf check TCP fail UDP pass'): - with patch.object( - Iperf, '_exec_command' - ) as mock_exec_command, patch.object( - Iperf, - '_get_iperf_servers', - return_value=['iperf.openwisptestserver.com'], - ) as mock_get_iperf_servers, patch.object( - iperf_logger, 'warning' - ) as mock_warn: - mock_exec_command.side_effect = [(RESULT_FAIL, 1), (RESULT_UDP, 0)] + mock_exec_command.side_effect = [(RESULT_FAIL, 1), (RESULT_UDP, 0)] - result = check.perform_check(store=False) - for key in self._RESULT_KEYS: - self.assertIn(key, result) - self.assertEqual(result['iperf_result'], 1) - self.assertEqual(result['sent_bps_tcp'], 0.0) - self.assertEqual(result['received_bps_tcp'], 0.0) - self.assertEqual(result['sent_bytes_tcp'], 0) - self.assertEqual(result['received_bytes_tcp'], 0) - self.assertEqual(result['retransmits'], 0) - self.assertEqual(result['sent_bps_udp'], udp_result['bits_per_second']) - self.assertEqual(result['sent_bytes_udp'], udp_result['bytes']) - self.assertEqual(result['jitter'], udp_result['jitter_ms']) - self.assertEqual(result['total_packets'], udp_result['packets']) - self.assertEqual(result['lost_percent'], udp_result['lost_percent']) - self.assertEqual(Chart.objects.count(), 10) - self.assertEqual(Metric.objects.count(), 3) - self.assertEqual(mock_exec_command.call_count, 2) - mock_warn.assert_has_calls(self._EXPECTED_WARN_CALLS[1:]) - mock_exec_command.assert_has_calls(self._EXPECTED_COMMAND_CALLS) - mock_get_iperf_servers.assert_called_once_with( - self.device.organization.id - ) + result = check.perform_check(store=False) + for key in 
self._RESULT_KEYS: + self.assertIn(key, result) + self.assertEqual(result['iperf_result'], 1) + self.assertEqual(result['sent_bps_tcp'], 0.0) + self.assertEqual(result['received_bps_tcp'], 0.0) + self.assertEqual(result['sent_bytes_tcp'], 0) + self.assertEqual(result['received_bytes_tcp'], 0) + self.assertEqual(result['retransmits'], 0) + self.assertEqual(result['sent_bps_udp'], udp_result['bits_per_second']) + self.assertEqual(result['sent_bytes_udp'], udp_result['bytes']) + self.assertEqual(result['jitter'], udp_result['jitter_ms']) + self.assertEqual(result['total_packets'], udp_result['packets']) + self.assertEqual(result['lost_percent'], udp_result['lost_percent']) + self.assertEqual(Chart.objects.count(), 10) + self.assertEqual(Metric.objects.count(), 3) + self.assertEqual(mock_exec_command.call_count, 2) + mock_warn.assert_has_calls(self._EXPECTED_WARN_CALLS[1:]) + mock_exec_command.assert_has_calls(self._EXPECTED_COMMAND_CALLS) + mock_get_iperf_servers.assert_called_once_with(self.device.organization.id) - def test_iperf_check_auth_config(self): + @patch.object(Iperf, '_exec_command') + @patch.object( + Iperf, '_get_iperf_servers', return_value=['iperf.openwisptestserver.com'] + ) + @patch.object(iperf_logger, 'warning') + def test_iperf_check_auth_config( + self, mock_warn, mock_get_iperf_servers, mock_exec_command + ): iperf_config = { 'username': {'default': 'test'}, 'password': {'default': 'testpass'}, - 'rsa-public-key-path': {'default': '/test.pem'}, + 'rsa_public_key': {'default': TEST_RSA_KEY}, } iperf_conf_wrong_pass = { 'username': {'default': 'test'}, 'password': {'default': 'wrongpass'}, - 'rsa-public-key-path': {'default': '/test.pem'}, + 'rsa_public_key': {'default': TEST_RSA_KEY}, } iperf_conf_wrong_user = { 'username': {'default': 'wronguser'}, 'password': {'default': 'testpass'}, - 'rsa-public-key-path': {'default': '/test.pem'}, + 'rsa_public_key': {'default': TEST_RSA_KEY}, } check, dc = self._create_iperf_test_env() auth_error = "test authorization failed" @@ -555,16 +539,12 @@ def test_iperf_check_auth_config(self): ] with self.subTest('Test iperf check with right config'): with patch.object( - Iperf, '_exec_command' - ) as mock_exec_command, patch.object( - Iperf, - '_get_iperf_servers', - return_value=['iperf.openwisptestserver.com'], - ) as mock_get_iperf_servers, patch.object( - settings, 'IPERF_CHECK_CONFIG', iperf_config - ), patch.object( - Iperf, 'schema', get_iperf_schema() - ): + app_settings, + 'IPERF_CHECK_CONFIG', + iperf_config + # It is required to mock "Iperf.schema" here so that it + # uses the updated configuration from "IPERF_CHECK_CONFIG" setting. 
+ ), patch.object(Iperf, 'schema', get_iperf_schema()): self._set_auth_expected_calls(dc, iperf_config) mock_exec_command.side_effect = [(RESULT_TCP, 0), (RESULT_UDP, 0)] @@ -585,21 +565,14 @@ def test_iperf_check_auth_config(self): mock_get_iperf_servers.assert_called_once_with( self.device.organization.id ) + mock_exec_command.reset_mock() + mock_get_iperf_servers.reset_mock() + mock_warn.reset_mock() with self.subTest('Test iperf check with wrong password'): with patch.object( - Iperf, '_exec_command' - ) as mock_exec_command, patch.object( - Iperf, - '_get_iperf_servers', - return_value=['iperf.openwisptestserver.com'], - ) as mock_get_iperf_servers, patch.object( - iperf_logger, 'warning' - ) as mock_warn, patch.object( - settings, 'IPERF_CHECK_CONFIG', iperf_conf_wrong_pass - ), patch.object( - Iperf, 'schema', get_iperf_schema() - ): + app_settings, 'IPERF_CHECK_CONFIG', iperf_conf_wrong_pass + ), patch.object(Iperf, 'schema', get_iperf_schema()): self._set_auth_expected_calls(dc, iperf_conf_wrong_pass) mock_exec_command.side_effect = [ (RESULT_AUTH_FAIL, 1), @@ -607,40 +580,21 @@ def test_iperf_check_auth_config(self): ] result = check.perform_check(store=False) - for key in self._RESULT_KEYS: - self.assertIn(key, result) - self.assertEqual(result['iperf_result'], 0) - self.assertEqual(result['sent_bps_tcp'], 0.0) - self.assertEqual(result['received_bps_tcp'], 0.0) - self.assertEqual(result['sent_bytes_tcp'], 0) - self.assertEqual(result['received_bytes_tcp'], 0) - self.assertEqual(result['retransmits'], 0) - self.assertEqual(result['sent_bps_udp'], 0.0) - self.assertEqual(result['sent_bytes_udp'], 0) - self.assertEqual(result['jitter'], 0.0) - self.assertEqual(result['total_packets'], 0) - self.assertEqual(result['lost_percent'], 0.0) + self._assert_iperf_fail_result(result) self.assertEqual(mock_exec_command.call_count, 2) mock_warn.assert_has_calls(self._EXPECTED_WARN_CALLS) mock_exec_command.assert_has_calls(self._EXPECTED_COMMAND_CALLS) mock_get_iperf_servers.assert_called_once_with( self.device.organization.id ) + mock_exec_command.reset_mock() + mock_get_iperf_servers.reset_mock() + mock_warn.reset_mock() with self.subTest('Test iperf check with wrong username'): with patch.object( - Iperf, '_exec_command' - ) as mock_exec_command, patch.object( - Iperf, - '_get_iperf_servers', - return_value=['iperf.openwisptestserver.com'], - ) as mock_get_iperf_servers, patch.object( - iperf_logger, 'warning' - ) as mock_warn, patch.object( - settings, 'IPERF_CHECK_CONFIG', iperf_conf_wrong_user - ), patch.object( - Iperf, 'schema', get_iperf_schema() - ): + app_settings, 'IPERF_CHECK_CONFIG', iperf_conf_wrong_user + ), patch.object(Iperf, 'schema', get_iperf_schema()): self._set_auth_expected_calls(dc, iperf_conf_wrong_user) mock_exec_command.side_effect = [ (RESULT_AUTH_FAIL, 1), @@ -648,19 +602,7 @@ def test_iperf_check_auth_config(self): ] result = check.perform_check(store=False) - for key in self._RESULT_KEYS: - self.assertIn(key, result) - self.assertEqual(result['iperf_result'], 0) - self.assertEqual(result['sent_bps_tcp'], 0.0) - self.assertEqual(result['received_bps_tcp'], 0.0) - self.assertEqual(result['sent_bytes_tcp'], 0) - self.assertEqual(result['received_bytes_tcp'], 0) - self.assertEqual(result['retransmits'], 0) - self.assertEqual(result['sent_bps_udp'], 0.0) - self.assertEqual(result['sent_bytes_udp'], 0) - self.assertEqual(result['jitter'], 0.0) - self.assertEqual(result['total_packets'], 0) - self.assertEqual(result['lost_percent'], 0.0) + 
self._assert_iperf_fail_result(result) self.assertEqual(mock_exec_command.call_count, 2) mock_warn.assert_has_calls(self._EXPECTED_WARN_CALLS) mock_exec_command.assert_has_calls(self._EXPECTED_COMMAND_CALLS) From fa74e9960a59bb83d7daf1a9a7568b78af77451f Mon Sep 17 00:00:00 2001 From: Aryamanz29 Date: Thu, 21 Jul 2022 18:38:24 +0530 Subject: [PATCH 47/64] [docs] Added iperf auth --- README.rst | 143 +++++++++++++++++++-- openwisp_monitoring/check/classes/iperf.py | 4 +- openwisp_monitoring/check/settings.py | 2 +- 3 files changed, 138 insertions(+), 11 deletions(-) diff --git a/README.rst b/README.rst index fb90f95a8..0480f0cd5 100644 --- a/README.rst +++ b/README.rst @@ -378,12 +378,17 @@ Configure celery (you may use a different broker if you want): CELERY_TIMEZONE = TIME_ZONE CELERY_BROKER_URL = 'redis://localhost/1' CELERY_BEAT_SCHEDULE = { - # Celery beat configuration for auto checks ie ping & config applied - 'run_checks': { - 'task': 'openwisp_monitoring.check.tasks.run_checks', - 'schedule': timedelta(minutes=5), - 'args': (None,), - 'relative': True, + 'run_checks': { + 'task': 'openwisp_monitoring.check.tasks.run_checks', + # Executes only ping & config check every 5 min + 'schedule': timedelta(minutes=5), + 'args': ( + [ # Checks path + 'openwisp_monitoring.check.classes.Ping', + 'openwisp_monitoring.check.classes.ConfigApplied', + ], + ), + 'relative': True, }, # Delete old WifiSession 'delete_wifi_clients_and_sessions': { @@ -1015,8 +1020,8 @@ This check is **disabled by default**. You can enable auto creation of this chec It also supports tuning of various parameters. -You can also change the parameters used for iperf checks (e.g. timing, port, buffer, etc) using the -`OPENWISP_MONITORING_IPERF_CHECK_CONFIG <#OPENWISP_MONITORING_IPERF_CHECK_CONFIG>`_ setting. +You can also change the parameters used for iperf checks (e.g. timing, port, username, password, rsa_publc_key etc) +using the `OPENWISP_MONITORING_IPERF_CHECK_CONFIG <#OPENWISP_MONITORING_IPERF_CHECK_CONFIG>`_ setting. Usage Instructions ------------------ @@ -1096,6 +1101,102 @@ iperf network measurements charts. .. image:: https://github.com/openwisp/openwisp-monitoring/raw/docs/docs/1.1/iperf-charts.png :alt: Iperf network measurement charts + +Configure Iperf check for authentication +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +By default iperf check runs without any kind of **authentication**, But we can configure it to use **RSA authentication** +between the **client** and the **server** to restrict the connections to the server & only allow legitimate clients. + +At Iperf server +############### + +1. Generate RSA keypair +^^^^^^^^^^^^^^^^^^^^^^^ + +.. code-block:: shell + + openssl genrsa -des3 -out private.pem 2048 + openssl rsa -in private.pem -outform PEM -pubout -out public.pem + openssl rsa -in private.pem -out private_not_protected.pem -outform PEM + +After running above mention commands, the public key will be contained in the +file ``public.pem`` which will be used in **rsa_public_key** parameter +in `OPENWISP_MONITORING_IPERF_CHECK_CONFIG <#OPENWISP_MONITORING_IPERF_CHECK_CONFIG>`_ +and the private key will be contained in the file ``private_not_protected.pem`` +which will be used with **--rsa-private-key-path** command option at iperf server. + +2. Create user credentials +^^^^^^^^^^^^^^^^^^^^^^^^^^ + +.. 
code-block:: shell + + USER=iperfuser PASSWD=iperfpass + echo -n "{$USER}$PASSWD" | sha256sum | awk '{ print $1 }' + ---- + ee17a7f98cc87a6424fb52682396b2b6c058e9ab70e946188faa0714905771d7 #This is the hash of "iperfuser" + +Add the above hash with username in ``credentials.csv`` + +.. code-block:: shell + + # file format: username,sha256 + iperfuser,ee17a7f98cc87a6424fb52682396b2b6c058e9ab70e946188faa0714905771d7 + +3. Now start the iperf server with auth options +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +.. code-block:: shell + + iperf3 -s --rsa-private-key-path ./private_not_protected.pem --authorized-users-path ./credentials.csv + +At client (openwrt device) +########################## + +1. Install iperf3-ssl +^^^^^^^^^^^^^^^^^^^^^ + +Install `iperf3-ssl openwrt package `_ instead of normal +`iperf3 openwrt package `_ which comes without any authentication. + +You may also check your installed **iperf3 openwrt package** features: + +.. code-block:: shell + + root@vm-openwrt:~ iperf3 -v + iperf 3.7 (cJSON 1.5.2) + Linux vm-openwrt 4.14.171 #0 SMP Thu Feb 27 21:05:12 2020 x86_64 + Optional features available: CPU affinity setting, IPv6 flow label, TCP congestion algorithm setting, + sendfile / zerocopy, socket pacing, authentication # contains 'authentication' + +2. Configure iperf check auth parameters +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Now, add the following iperf authentication parameters +to `OPENWISP_MONITORING_IPERF_CHECK_CONFIG <#OPENWISP_MONITORING_IPERF_CHECK_CONFIG>`_ +in `openwisp settings `_ + +.. code-block:: python + + OPENWISP_MONITORING_IPERF_CHECK_CONFIG = { + # All three parameters are required + 'username': {'default': 'iperfuser'}, + 'password': {'default': 'iperfpass'}, + # RSA public key without any headers + # ie. -----BEGIN PUBLIC KEY-----, -----BEGIN END KEY----- + 'rsa_public_key': { + 'default': """ + MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAwuEm+iYrfSWJOupy6X3N + dxZvUCxvmoL3uoGAs0O0Y32unUQrwcTIxudy38JSuCccD+k2Rf8S4WuZSiTxaoea + 6Du99YQGVZeY67uJ21SWFqWU+w6ONUj3TrNNWoICN7BXGLE2BbSBz9YaXefE3aqw + GhEjQz364Itwm425vHn2MntSp0weWb4hUCjQUyyooRXPrFUGBOuY+VvAvMyAG4Uk + msapnWnBSxXt7Tbb++A5XbOMdM2mwNYDEtkD5ksC/x3EVBrI9FvENsH9+u/8J9Mf + 2oPl4MnlCMY86MQypkeUn7eVWfDnseNky7TyC0/IgCXve/iaydCCFdkjyo1MTAA4 + BQIDAQAB + """ + }, + } + Settings -------- @@ -1202,6 +1303,32 @@ created automatically for newly registered devices. It's enabled by default. This setting allows you to choose whether `iperf <#iperf-1>`_ checks should be created automatically for newly registered devices. It's disabled by default. +``OPENWISP_MONITORING_IPERF_CHECK_RSA_KEY_PATH`` +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + ++--------------+-------------------------------+ +| **type**: | ``str`` | ++--------------+-------------------------------+ +| **default**: | ``/tmp/iperf-rsa-public.pem`` | ++--------------+-------------------------------+ + +This setting allows you to choose RSA public key path (relative to ``/root``) +for `iperf check running with authentication <#configure-iperf-check-for-authentication>`_. 
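Editor's illustration (not part of the patch): based on the command construction visible in ``iperf.py`` earlier in this series, the check writes the configured public key to this path on the device before the TCP run and, when key deletion is enabled, removes it after the UDP run. The server name and credentials below are placeholders.

.. code-block:: python

    # Hypothetical sketch of the commands the check assembles; the server,
    # username and password are placeholders, not real defaults.
    rsa_public_key = (
        '-----BEGIN PUBLIC KEY-----\n<key body>\n-----END PUBLIC KEY-----'
    )
    rsa_public_key_path = '/tmp/iperf-rsa-public.pem'  # IPERF_CHECK_RSA_KEY_PATH
    command_tcp = (
        f'echo "{rsa_public_key}" > {rsa_public_key_path} && '
        f'IPERF3_PASSWORD="iperfpass" iperf3 -c iperf.example.com -p 5201 -t 10 '
        f'--username "iperfuser" --rsa-public-key-path {rsa_public_key_path} -J'
    )
    command_udp = (
        f'IPERF3_PASSWORD="iperfpass" iperf3 -c iperf.example.com -p 5201 -t 10 '
        f'--username "iperfuser" --rsa-public-key-path {rsa_public_key_path} -u -J'
        f' && rm {rsa_public_key_path}'  # appended only when key deletion is enabled
    )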
+ +``OPENWISP_MONITORING_IPERF_CHECK_DELETE_RSA_KEY`` +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + ++--------------+-------------------------------+ +| **type**: | ``bool`` | ++--------------+-------------------------------+ +| **default**: | ``True`` | ++--------------+-------------------------------+ + +This setting allows you to set whether +`iperf check RSA public key <#configure-iperf-check-for-authentication>`_ will be deleted from +`OPENWISP_MONITORING_IPERF_CHECK_RSA_KEY_PATH <#OPENWISP_MONITORING_IPERF_CHECK_RSA_KEY_PATH>`_ +after successful completion of the check or not. + ``OPENWISP_MONITORING_IPERF_CHECK_CONFIG`` ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/openwisp_monitoring/check/classes/iperf.py b/openwisp_monitoring/check/classes/iperf.py index aa7ba8142..0f8a3cb42 100644 --- a/openwisp_monitoring/check/classes/iperf.py +++ b/openwisp_monitoring/check/classes/iperf.py @@ -113,8 +113,8 @@ def check(self, store=True): command_udp = f'IPERF3_PASSWORD="{password}" iperf3 -c {servers[0]} -p {port} -t {time} \ --username "{username}" --rsa-public-key-path {rsa_public_key_path} -u -J' - # If IPERF_CHECK_DELETE_RSA_KEY, delete rsa_public_key from the device - if app_settings.IPERF_CHECK_RSA_KEY_DELETE: + # If IPERF_CHECK_DELETE_RSA_KEY, remove rsa_public_key from the device + if app_settings.IPERF_CHECK_DELETE_RSA_KEY: command_udp = f'{command_udp} && rm {rsa_public_key_path}' # TCP mode diff --git a/openwisp_monitoring/check/settings.py b/openwisp_monitoring/check/settings.py index 62bae6af5..3afee16f2 100644 --- a/openwisp_monitoring/check/settings.py +++ b/openwisp_monitoring/check/settings.py @@ -18,5 +18,5 @@ IPERF_CHECK_RSA_KEY_PATH = get_settings_value( 'IPERF_CHECK_RSA_KEY_PATH', '/tmp/iperf-rsa-public.pem' ) -IPERF_CHECK_RSA_KEY_DELETE = get_settings_value('IPERF_CHECK_RSA_KEY_DELETE', True) +IPERF_CHECK_DELETE_RSA_KEY = get_settings_value('IPERF_CHECK_DELETE_RSA_KEY', True) CHECKS_LIST = get_settings_value('CHECK_LIST', list(dict(CHECK_CLASSES).keys())) From 1d71d7c6009d30b285069fdb06ad8dfc543e2b91 Mon Sep 17 00:00:00 2001 From: Aryamanz29 Date: Fri, 22 Jul 2022 18:04:12 +0530 Subject: [PATCH 48/64] [change] Changed chart types for iperf check #417 - Connected data points with new bar+lines chart type. - Added fill(0) to the chart query to fill blank data points(ie.N/A). 
Closes #417 --- .../db/backends/influxdb/queries.py | 20 ++++----- .../monitoring/configuration.py | 43 ++++++++++++++----- .../monitoring/static/monitoring/js/chart.js | 12 +++++- 3 files changed, 54 insertions(+), 21 deletions(-) diff --git a/openwisp_monitoring/db/backends/influxdb/queries.py b/openwisp_monitoring/db/backends/influxdb/queries.py index 48310c05b..083f02050 100644 --- a/openwisp_monitoring/db/backends/influxdb/queries.py +++ b/openwisp_monitoring/db/backends/influxdb/queries.py @@ -102,10 +102,10 @@ }, 'bandwidth_tcp': { 'influxdb': ( - "SELECT MEAN(sent_bps_tcp) / 1000000000 AS sent, " - "MEAN(received_bps_tcp) / 1000000000 AS received FROM {key} WHERE " + "SELECT MEAN(received_bps_tcp) / 1000000000 AS received, " + "MEAN(sent_bps_tcp) / 1000000000 AS sent FROM {key} WHERE " "time >= '{time}' AND content_type = '{content_type}' AND " - "object_id = '{object_id}' GROUP BY time(1d)" + "object_id = '{object_id}' GROUP BY time(1d) fill(0)" ) }, 'transfer_tcp': { @@ -114,35 +114,35 @@ "SUM(sent_bytes_tcp) / 1000000000 AS sent," "((SUM(sent_bytes_tcp) + SUM(received_bytes_tcp)) / 1000000000) AS total FROM {key} WHERE " "time >= '{time}' AND content_type = '{content_type}' AND " - "object_id = '{object_id}' GROUP BY time(1d)" + "object_id = '{object_id}' GROUP BY time(1d) fill(0)" ) }, 'retransmits': { 'influxdb': ( "SELECT MEAN(retransmits) AS retransmits FROM {key} " "WHERE time >= '{time}' AND content_type = '{content_type}' " - "AND object_id = '{object_id}' GROUP BY time(1d)" + "AND object_id = '{object_id}' GROUP BY time(1d) fill(0)" ) }, 'bandwidth_udp': { 'influxdb': ( "SELECT MEAN(sent_bps_udp) / 1000000 AS sent FROM {key} " "WHERE time >= '{time}' AND content_type = '{content_type}' " - "AND object_id = '{object_id}' GROUP BY time(1d)" + "AND object_id = '{object_id}' GROUP BY time(1d) fill(0)" ) }, 'transfer_udp': { 'influxdb': ( "SELECT SUM(sent_bytes_udp) / 1000000 AS sent FROM {key} " "WHERE time >= '{time}' AND content_type = '{content_type}' AND " - "object_id = '{object_id}' GROUP BY time(1d)" + "object_id = '{object_id}' GROUP BY time(1d) fill(0)" ) }, 'jitter': { 'influxdb': ( "SELECT MEAN(jitter) AS jitter FROM {key} " "WHERE time >= '{time}' AND content_type = '{content_type}' " - "AND object_id = '{object_id}' GROUP BY time(1d)" + "AND object_id = '{object_id}' GROUP BY time(1d) fill(0)" ) }, 'datagram': { @@ -150,14 +150,14 @@ "SELECT MEAN(lost_packets) AS lost_datagram," "MEAN(total_packets) AS total_datagram FROM {key} WHERE " "time >= '{time}' AND content_type = '{content_type}' " - "AND object_id = '{object_id}' GROUP BY time(1d)" + "AND object_id = '{object_id}' GROUP BY time(1d) fill(0)" ) }, 'datagram_loss': { 'influxdb': ( "SELECT MEAN(lost_percent) AS datagram_loss FROM {key} " "WHERE time >= '{time}' AND content_type = '{content_type}' " - "AND object_id = '{object_id}' GROUP BY time(1d)" + "AND object_id = '{object_id}' GROUP BY time(1d) fill(0)" ) }, } diff --git a/openwisp_monitoring/monitoring/configuration.py b/openwisp_monitoring/monitoring/configuration.py index 44c5d792c..293d80c22 100644 --- a/openwisp_monitoring/monitoring/configuration.py +++ b/openwisp_monitoring/monitoring/configuration.py @@ -226,6 +226,7 @@ def _get_access_tech(): 'charts': { 'general_traffic': { 'type': 'stackedbar+lines', + 'fill': 'none', 'trace_type': { 'download': 'stackedbar', 'upload': 'stackedbar', @@ -563,7 +564,12 @@ def _get_access_tech(): 'alert_on_related_field': 'jitter', 'charts': { 'bandwidth_tcp': { - 'type': 'stackedbar', + 'type': 
'stackedbar+lines', + 'trace_type': { + 'received': 'lines', + 'sent': 'lines', + }, + 'fill': 'none', 'title': _('Bandwidth (TCP)'), 'description': _('Bitrate during Iperf3 test in TCP mode.'), 'summary_labels': [ @@ -573,7 +579,7 @@ def _get_access_tech(): 'unit': _(' Gbps'), 'order': 280, 'query': chart_query['bandwidth_tcp'], - 'colors': [DEFAULT_COLORS[5], DEFAULT_COLORS[9]], + 'colors': [DEFAULT_COLORS[5], DEFAULT_COLORS[0]], }, 'transfer_tcp': { 'type': 'stackedbar+lines', @@ -596,7 +602,10 @@ def _get_access_tech(): 'colors': [DEFAULT_COLORS[7], DEFAULT_COLORS[2], DEFAULT_COLORS[4]], }, 'retransmits': { - 'type': 'bar', + 'type': 'bar+lines', + 'trace_type': { + 'retransmits': 'lines', + }, 'title': _('Retransmits'), 'description': _('No. of retransmits during Iperf3 test in TCP mode.'), 'summary_labels': [_('Restransmits')], @@ -606,27 +615,36 @@ def _get_access_tech(): 'colors': [DEFAULT_COLORS[-3]], }, 'bandwidth_udp': { - 'type': 'bar', + 'type': 'bar+lines', + 'trace_type': { + 'sent': 'lines', + }, 'title': _('Bandwidth (UDP)'), 'description': _('Bitrate during Iperf3 test in UDP mode.'), 'summary_labels': [_('Sent bitrate')], 'unit': _(' Mbps'), 'order': 310, 'query': chart_query['bandwidth_udp'], - 'colors': [DEFAULT_COLORS[5]], + 'colors': [DEFAULT_COLORS[0]], }, 'transfer_udp': { - 'type': 'bar', + 'type': 'bar+lines', + 'trace_type': { + 'sent': 'lines', + }, 'title': _('Transfer (UDP)'), 'description': _('Transfer during Iperf3 test in UDP mode.'), 'summary_labels': [_('Sent bytes')], 'unit': _(' MB'), 'order': 320, 'query': chart_query['transfer_udp'], - 'colors': [DEFAULT_COLORS[4]], + 'colors': [DEFAULT_COLORS[5]], }, 'jitter': { - 'type': 'scatter', + 'type': 'bar+lines', + 'trace_type': { + 'jitter': 'lines', + }, 'title': _('Jitter'), 'description': _( 'Jitter is a variance in latency measured using Iperf3 utility in UDP mode.' @@ -640,7 +658,12 @@ def _get_access_tech(): 'colors': [DEFAULT_COLORS[4]], }, 'datagram': { - 'type': 'stackedbar', + 'type': 'stackedbar+lines', + 'fill': 'none', + 'trace_type': { + 'total_datagram': 'lines', + 'lost_datagram': 'lines', + }, 'title': _('Datagram'), 'description': _( 'Lost/Total datagram ratio measured by Iperf3 test in UDP mode.' 
@@ -666,7 +689,7 @@ def _get_access_tech(): 'unit': '%', 'order': 350, 'query': chart_query['datagram_loss'], - 'colors': [DEFAULT_COLORS[8]], + 'colors': [DEFAULT_COLORS[3]], }, }, 'alert_settings': {'operator': '>', 'threshold': 5, 'tolerance': 0}, diff --git a/openwisp_monitoring/monitoring/static/monitoring/js/chart.js b/openwisp_monitoring/monitoring/static/monitoring/js/chart.js index ee6933116..a09d58da0 100644 --- a/openwisp_monitoring/monitoring/static/monitoring/js/chart.js +++ b/openwisp_monitoring/monitoring/static/monitoring/js/chart.js @@ -188,6 +188,16 @@ options.x = ['']; options.histfunc = 'sum'; } + if (type.includes('bar')) { + options.type = 'bar'; + if (type === 'bar+lines') { + if (data.trace_type[key] === 'lines') { + options.type = 'scatter'; + options.mode = 'lines+markers'; + options.line = {shape: 'hvh'}; + } + } + } if (type.includes('stackedbar')) { layout.barmode = 'stack'; options.type = 'bar'; @@ -196,7 +206,7 @@ options.type = 'scatter'; options.mode = 'lines+markers'; options.line = {shape: 'hvh'}; - options.fill = "none"; + options.fill = data.fill; } } } From 50256d20778eec1cc747ced27d54aa686a1e4f6b Mon Sep 17 00:00:00 2001 From: Aryamanz29 Date: Fri, 22 Jul 2022 19:56:17 +0530 Subject: [PATCH 49/64] [change] Changed chart look according to feedback --- .../db/backends/influxdb/queries.py | 16 ++++----- .../monitoring/configuration.py | 35 ++++++------------- .../monitoring/static/monitoring/js/chart.js | 10 ------ 3 files changed, 19 insertions(+), 42 deletions(-) diff --git a/openwisp_monitoring/db/backends/influxdb/queries.py b/openwisp_monitoring/db/backends/influxdb/queries.py index 083f02050..55559f8de 100644 --- a/openwisp_monitoring/db/backends/influxdb/queries.py +++ b/openwisp_monitoring/db/backends/influxdb/queries.py @@ -105,7 +105,7 @@ "SELECT MEAN(received_bps_tcp) / 1000000000 AS received, " "MEAN(sent_bps_tcp) / 1000000000 AS sent FROM {key} WHERE " "time >= '{time}' AND content_type = '{content_type}' AND " - "object_id = '{object_id}' GROUP BY time(1d) fill(0)" + "object_id = '{object_id}' GROUP BY time(1d) fill(none)" ) }, 'transfer_tcp': { @@ -114,35 +114,35 @@ "SUM(sent_bytes_tcp) / 1000000000 AS sent," "((SUM(sent_bytes_tcp) + SUM(received_bytes_tcp)) / 1000000000) AS total FROM {key} WHERE " "time >= '{time}' AND content_type = '{content_type}' AND " - "object_id = '{object_id}' GROUP BY time(1d) fill(0)" + "object_id = '{object_id}' GROUP BY time(1d) fill(none)" ) }, 'retransmits': { 'influxdb': ( "SELECT MEAN(retransmits) AS retransmits FROM {key} " "WHERE time >= '{time}' AND content_type = '{content_type}' " - "AND object_id = '{object_id}' GROUP BY time(1d) fill(0)" + "AND object_id = '{object_id}' GROUP BY time(1d) fill(none)" ) }, 'bandwidth_udp': { 'influxdb': ( "SELECT MEAN(sent_bps_udp) / 1000000 AS sent FROM {key} " "WHERE time >= '{time}' AND content_type = '{content_type}' " - "AND object_id = '{object_id}' GROUP BY time(1d) fill(0)" + "AND object_id = '{object_id}' GROUP BY time(1d) fill(none)" ) }, 'transfer_udp': { 'influxdb': ( "SELECT SUM(sent_bytes_udp) / 1000000 AS sent FROM {key} " "WHERE time >= '{time}' AND content_type = '{content_type}' AND " - "object_id = '{object_id}' GROUP BY time(1d) fill(0)" + "object_id = '{object_id}' GROUP BY time(1d) fill(none)" ) }, 'jitter': { 'influxdb': ( "SELECT MEAN(jitter) AS jitter FROM {key} " "WHERE time >= '{time}' AND content_type = '{content_type}' " - "AND object_id = '{object_id}' GROUP BY time(1d) fill(0)" + "AND object_id = '{object_id}' GROUP BY time(1d) 
fill(none)" ) }, 'datagram': { @@ -150,14 +150,14 @@ "SELECT MEAN(lost_packets) AS lost_datagram," "MEAN(total_packets) AS total_datagram FROM {key} WHERE " "time >= '{time}' AND content_type = '{content_type}' " - "AND object_id = '{object_id}' GROUP BY time(1d) fill(0)" + "AND object_id = '{object_id}' GROUP BY time(1d) fill(none)" ) }, 'datagram_loss': { 'influxdb': ( "SELECT MEAN(lost_percent) AS datagram_loss FROM {key} " "WHERE time >= '{time}' AND content_type = '{content_type}' " - "AND object_id = '{object_id}' GROUP BY time(1d) fill(0)" + "AND object_id = '{object_id}' GROUP BY time(1d) fill(none)" ) }, } diff --git a/openwisp_monitoring/monitoring/configuration.py b/openwisp_monitoring/monitoring/configuration.py index 293d80c22..df71dd638 100644 --- a/openwisp_monitoring/monitoring/configuration.py +++ b/openwisp_monitoring/monitoring/configuration.py @@ -564,11 +564,7 @@ def _get_access_tech(): 'alert_on_related_field': 'jitter', 'charts': { 'bandwidth_tcp': { - 'type': 'stackedbar+lines', - 'trace_type': { - 'received': 'lines', - 'sent': 'lines', - }, + 'type': 'stackedbar', 'fill': 'none', 'title': _('Bandwidth (TCP)'), 'description': _('Bitrate during Iperf3 test in TCP mode.'), @@ -599,10 +595,14 @@ def _get_access_tech(): 'unit': _(' GB'), 'order': 290, 'query': chart_query['transfer_tcp'], - 'colors': [DEFAULT_COLORS[7], DEFAULT_COLORS[2], DEFAULT_COLORS[4]], + 'colors': [ + DEFAULT_COLORS[7], + DEFAULT_COLORS[0], + DEFAULT_COLORS[1], + ], }, 'retransmits': { - 'type': 'bar+lines', + 'type': 'scatter', 'trace_type': { 'retransmits': 'lines', }, @@ -615,10 +615,7 @@ def _get_access_tech(): 'colors': [DEFAULT_COLORS[-3]], }, 'bandwidth_udp': { - 'type': 'bar+lines', - 'trace_type': { - 'sent': 'lines', - }, + 'type': 'scatter', 'title': _('Bandwidth (UDP)'), 'description': _('Bitrate during Iperf3 test in UDP mode.'), 'summary_labels': [_('Sent bitrate')], @@ -628,10 +625,7 @@ def _get_access_tech(): 'colors': [DEFAULT_COLORS[0]], }, 'transfer_udp': { - 'type': 'bar+lines', - 'trace_type': { - 'sent': 'lines', - }, + 'type': 'scatter', 'title': _('Transfer (UDP)'), 'description': _('Transfer during Iperf3 test in UDP mode.'), 'summary_labels': [_('Sent bytes')], @@ -641,10 +635,7 @@ def _get_access_tech(): 'colors': [DEFAULT_COLORS[5]], }, 'jitter': { - 'type': 'bar+lines', - 'trace_type': { - 'jitter': 'lines', - }, + 'type': 'scatter', 'title': _('Jitter'), 'description': _( 'Jitter is a variance in latency measured using Iperf3 utility in UDP mode.' @@ -658,12 +649,8 @@ def _get_access_tech(): 'colors': [DEFAULT_COLORS[4]], }, 'datagram': { - 'type': 'stackedbar+lines', + 'type': 'scatter', 'fill': 'none', - 'trace_type': { - 'total_datagram': 'lines', - 'lost_datagram': 'lines', - }, 'title': _('Datagram'), 'description': _( 'Lost/Total datagram ratio measured by Iperf3 test in UDP mode.' 
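Editorial aside (not part of the patch): the switch from ``fill(0)`` to ``fill(none)`` in the queries above changes how InfluxDB reports empty 1-day buckets. A hedged illustration using a simplified jitter query (measurement name, filters and time are placeholders):

.. code-block:: python

    # With fill(0), days without iperf data come back as 0 and are drawn as
    # misleading zero points; with fill(none), empty buckets are omitted
    # entirely, so the scatter charts simply skip days without measurements.
    query_fill_zero = (
        "SELECT MEAN(jitter) AS jitter FROM iperf "
        "WHERE time >= '2022-07-01' GROUP BY time(1d) fill(0)"
    )
    query_fill_none = (
        "SELECT MEAN(jitter) AS jitter FROM iperf "
        "WHERE time >= '2022-07-01' GROUP BY time(1d) fill(none)"
    )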
diff --git a/openwisp_monitoring/monitoring/static/monitoring/js/chart.js b/openwisp_monitoring/monitoring/static/monitoring/js/chart.js index a09d58da0..0baa83483 100644 --- a/openwisp_monitoring/monitoring/static/monitoring/js/chart.js +++ b/openwisp_monitoring/monitoring/static/monitoring/js/chart.js @@ -188,16 +188,6 @@ options.x = ['']; options.histfunc = 'sum'; } - if (type.includes('bar')) { - options.type = 'bar'; - if (type === 'bar+lines') { - if (data.trace_type[key] === 'lines') { - options.type = 'scatter'; - options.mode = 'lines+markers'; - options.line = {shape: 'hvh'}; - } - } - } if (type.includes('stackedbar')) { layout.barmode = 'stack'; options.type = 'bar'; From 01cd70e1de0202efcbc4fac054249258605e2e41 Mon Sep 17 00:00:00 2001 From: Aryamanz29 Date: Mon, 25 Jul 2022 19:40:13 +0530 Subject: [PATCH 50/64] [change] Combined all iperf settings into one setting #418 - Combined all iperf settings into one setting. - Removed default from OPENWISP_MONITORING_IPERF_CHECK_CONFIG - The order of getting params for iperf check is: check_params(through admin panel)-> OPENWISP_MONITORING_IPERF_CHECK_CONFIG-> DEFAULT_IPERF_CHECK_CONFIG - Updated iperf tests. Closes #418 --- openwisp_monitoring/check/classes/iperf.py | 162 ++++++++++++------ openwisp_monitoring/check/tests/test_iperf.py | 154 ++++++++--------- tests/openwisp2/settings.py | 9 - 3 files changed, 188 insertions(+), 137 deletions(-) diff --git a/openwisp_monitoring/check/classes/iperf.py b/openwisp_monitoring/check/classes/iperf.py index 0f8a3cb42..3dfc6fc6e 100644 --- a/openwisp_monitoring/check/classes/iperf.py +++ b/openwisp_monitoring/check/classes/iperf.py @@ -1,4 +1,5 @@ import logging +from functools import reduce from json import loads from json.decoder import JSONDecodeError @@ -8,7 +9,6 @@ from swapper import load_model from openwisp_controller.connection.settings import UPDATE_STRATEGIES -from openwisp_utils.utils import deep_merge_dicts from .. import settings as app_settings from .base import BaseCheck @@ -21,21 +21,12 @@ DeviceConnection = load_model('connection', 'DeviceConnection') DEFAULT_IPERF_CHECK_CONFIG = { - 'port': { - 'type': 'integer', - 'default': 5201, - # max, min port chosen from iperf3 docs - 'minimum': 1, - 'maximum': 65535, - }, - 'time': { - 'type': 'integer', - # Sets the interval time in seconds - # between periodic bandwidth, jitter, and loss reports. - 'default': 10, - 'minimum': 1, - # arbitrary chosen to avoid slowing down the queue (30min) - 'maximum': 1800, + 'host': { + 'type': 'array', + 'items': { + 'type': 'string', + }, + 'default': [], }, 'username': {'type': 'string', 'default': '', 'minLength': 1, 'maxLength': 20}, 'password': {'type': 'string', 'default': '', 'minLength': 1, 'maxLength': 20}, @@ -43,6 +34,45 @@ 'type': 'string', 'default': '', }, + 'client_options': { + 'type': 'object', + 'properties': { + 'port': { + 'type': 'integer', + 'default': 5201, + # max, min port chosen from iperf3 docs + 'minimum': 1, + 'maximum': 65535, + }, + 'time': { + 'type': 'integer', + # Sets the interval time in seconds + # between periodic bandwidth, jitter, and loss reports. 
+ 'default': 10, + 'minimum': 1, + # arbitrary chosen to avoid slowing down the queue (30min) + 'maximum': 1800, + }, + 'udp': { + 'type': 'object', + 'properties': { + 'bitrate': { + 'type': 'string', + 'default': '10M', + } + }, + }, + 'tcp': { + 'type': 'object', + 'properties': { + 'bitrate': { + 'type': 'string', + 'default': '0', + } + }, + }, + }, + }, } @@ -50,16 +80,15 @@ def get_iperf_schema(): schema = { '$schema': 'http://json-schema.org/draft-07/schema#', 'type': 'object', - 'additionalProperties': False, - "dependencies": { - "username": ["password", "rsa_public_key"], - "password": ["username", "rsa_public_key"], - "rsa_public_key": ["username", "password"], + 'additionalProperties': True, + 'dependencies': { + 'client_options': ['host'], + 'username': ['password', 'rsa_public_key', 'host'], + 'password': ['username', 'rsa_public_key', 'host'], + 'rsa_public_key': ['username', 'password', 'host'], }, } - schema['properties'] = deep_merge_dicts( - DEFAULT_IPERF_CHECK_CONFIG, app_settings.IPERF_CHECK_CONFIG - ) + schema['properties'] = DEFAULT_IPERF_CHECK_CONFIG return schema @@ -69,6 +98,11 @@ class Iperf(BaseCheck): def validate_params(self): try: + params = self.params + org_id = str(self.related_object.organization.id) + iperf_config = app_settings.IPERF_CHECK_CONFIG + if not params and iperf_config: + params = iperf_config[org_id] validate(self.params, self.schema, format_checker=draft7_format_checker) except SchemaError as e: message = 'Invalid param' @@ -79,39 +113,42 @@ def validate_params(self): raise ValidationError({'params': message}) from e def check(self, store=True): - port = self._get_param('port') - time = self._get_param('time') - username = self._get_param('username') - device = self.related_object - device_connection = self._get_device_connection(device) + port = self._get_param( + 'client_options.port', 'client_options.properties.port.default' + ) + time = self._get_param( + 'client_options.time', 'client_options.properties.time.default' + ) + username = self._get_param('username', 'username.default') + device_connection = self._get_device_connection() if not device_connection: logger.warning( - f'Failed to get a working DeviceConnection for "{device}", iperf check skipped!' + f'Failed to get a working DeviceConnection for "{self.related_object}", iperf check skipped!' ) return # The DeviceConnection could fail if the management tunnel is down. if not self._connect(device_connection): logger.warning( - f'DeviceConnection for "{device}" is not working, iperf check skipped!' + f'DeviceConnection for "{self.related_object}" is not working, iperf check skipped!' ) return - servers = self._get_iperf_servers(device.organization.id) - command_tcp = f'iperf3 -c {servers[0]} -p {port} -t {time} -J' - command_udp = f'iperf3 -c {servers[0]} -p {port} -t {time} -u -J' + server = self._get_iperf_servers()[0] + command_tcp = f'iperf3 -c {server} -p {port} -t {time} -J' + command_udp = f'iperf3 -c {server} -p {port} -t {time} -u -J' # All three parameters ie. 
username, password and rsa_public_key is required # for authentication to work, checking only username here if username: - password = self._get_param('password') - key = self._get_param('rsa_public_key') + password = self._get_param('password', 'password.default') + key = self._get_param('rsa_public_key', 'rsa_public_key.default') rsa_public_key = self._get_compelete_rsa_key(key) rsa_public_key_path = app_settings.IPERF_CHECK_RSA_KEY_PATH command_tcp = f'echo "{rsa_public_key}" > {rsa_public_key_path} && \ - IPERF3_PASSWORD="{password}" iperf3 -c {servers[0]} -p {port} -t {time} \ + IPERF3_PASSWORD="{password}" iperf3 -c {server} -p {port} -t {time} \ --username "{username}" --rsa-public-key-path {rsa_public_key_path} -J' - command_udp = f'IPERF3_PASSWORD="{password}" iperf3 -c {servers[0]} -p {port} -t {time} \ + command_udp = f'IPERF3_PASSWORD="{password}" iperf3 -c {server} -p {port} -t {time} \ --username "{username}" --rsa-public-key-path {rsa_public_key_path} -u -J' # If IPERF_CHECK_DELETE_RSA_KEY, remove rsa_public_key from the device if app_settings.IPERF_CHECK_DELETE_RSA_KEY: @@ -122,14 +159,14 @@ def check(self, store=True): # Exit code 127 : command doesn't exist if exit_code == 127: logger.warning( - f'Iperf3 is not installed on the "{device}", error - {result.strip()}' + f'Iperf3 is not installed on the "{self.related_object}", error - {result.strip()}' ) return - result_tcp = self._get_iperf_result(result, exit_code, device, mode='TCP') + result_tcp = self._get_iperf_result(result, exit_code, mode='TCP') # UDP mode result, exit_code = self._exec_command(device_connection, command_udp) - result_udp = self._get_iperf_result(result, exit_code, device, mode='UDP') + result_udp = self._get_iperf_result(result, exit_code, mode='UDP') if store: # Store iperf_result field 1 if any mode passes, store 0 when both fails @@ -146,23 +183,23 @@ def _get_compelete_rsa_key(self, key): key = key.strip() return f'{pem_prefix}{key}{pem_suffix}' - def _get_device_connection(self, device): + def _get_device_connection(self): """ Returns an active SSH DeviceConnection for a device. 
""" openwrt_ssh = UPDATE_STRATEGIES[0][0] device_connection = DeviceConnection.objects.filter( - device_id=device.id, + device_id=self.related_object.id, update_strategy=openwrt_ssh, enabled=True, ).first() return device_connection - def _get_iperf_servers(self, organization_id): + def _get_iperf_servers(self): """ Get iperf test servers """ - org_servers = app_settings.IPERF_SERVERS.get(str(organization_id)) + org_servers = self._get_param('host', 'host.default') return org_servers def _exec_command(self, dc, command): @@ -177,13 +214,34 @@ def _connect(self, dc): """ return dc.connect() - def _get_param(self, param): + def _deep_get(self, dictionary, keys, default=None): + return reduce( + lambda d, key: d.get(key, default) if isinstance(d, dict) else default, + keys.split("."), + dictionary, + ) + + def _get_param(self, conf_key, default_conf_key): """ Gets specified param or its default value according to the schema """ - return self.params.get(param, self.schema['properties'][param]['default']) + org_id = str(self.related_object.organization.id) + iperf_config = app_settings.IPERF_CHECK_CONFIG + + if self.params: + check_params = self._deep_get(self.params, conf_key) + if check_params: + return check_params + + if iperf_config: + iperf_config = iperf_config[org_id] + iperf_config_param = self._deep_get(iperf_config, conf_key) + if iperf_config_param: + return iperf_config_param + + return self._deep_get(DEFAULT_IPERF_CHECK_CONFIG, default_conf_key) - def _get_iperf_result(self, result, exit_code, device, mode): + def _get_iperf_result(self, result, exit_code, mode): """ Returns iperf test result """ @@ -196,7 +254,9 @@ def _get_iperf_result(self, result, exit_code, device, mode): if mode == 'TCP': if exit_code != 0: - logger.warning(f'Iperf check failed for "{device}", {result["error"]}') + logger.warning( + f'Iperf check failed for "{self.related_object}", {result["error"]}' + ) return { 'iperf_result': 0, 'sent_bps_tcp': 0.0, @@ -219,7 +279,9 @@ def _get_iperf_result(self, result, exit_code, device, mode): elif mode == 'UDP': if exit_code != 0: - logger.warning(f'Iperf check failed for "{device}", {result["error"]}') + logger.warning( + f'Iperf check failed for "{self.related_object}", {result["error"]}' + ) return { 'iperf_result': 0, 'sent_bps_udp': 0.0, diff --git a/openwisp_monitoring/check/tests/test_iperf.py b/openwisp_monitoring/check/tests/test_iperf.py index 1aaeff43f..4e1c6ef22 100644 --- a/openwisp_monitoring/check/tests/test_iperf.py +++ b/openwisp_monitoring/check/tests/test_iperf.py @@ -79,13 +79,13 @@ def _create_iperf_test_env(self): check = Check.objects.get(check_type=self._IPERF) return check, dc - def _set_auth_expected_calls(self, dc, config): - password = config['password']['default'] - username = config['username']['default'] + def _set_auth_expected_calls(self, dc, org_id, config): + password = config[org_id]['password'] + username = config[org_id]['username'] server = 'iperf.openwisptestserver.com' test_prefix = '-----BEGIN PUBLIC KEY-----\n' test_suffix = '\n-----END PUBLIC KEY-----' - key = config['rsa_public_key']['default'] + key = config[org_id]['rsa_public_key'] rsa_key_path = app_settings.IPERF_CHECK_RSA_KEY_PATH self._EXPECTED_COMMAND_CALLS = [ @@ -145,8 +145,8 @@ def test_iperf_check_no_params( self.assertEqual(result['total_packets'], udp_result['packets']) self.assertEqual(mock_warn.call_count, 0) self.assertEqual(mock_exec_command.call_count, 2) + self.assertEqual(mock_get_iperf_servers.call_count, 1) 
mock_exec_command.assert_has_calls(self._EXPECTED_COMMAND_CALLS) - mock_get_iperf_servers.assert_called_once_with(self.device.organization.id) @patch.object(Iperf, '_exec_command') @patch.object( @@ -165,14 +165,16 @@ def test_iperf_check_params( test_suffix = '\n-----END PUBLIC KEY-----' rsa_key_path = app_settings.IPERF_CHECK_RSA_KEY_PATH test_params = { - 'port': 6201, - 'time': 20, 'username': 'openwisp-test-user', 'password': 'openwisp_pass', 'rsa_public_key': TEST_RSA_KEY, + 'client_options': { + 'port': 6201, + 'time': 20, + }, } - time = test_params['time'] - port = test_params['port'] + time = test_params['client_options']['time'] + port = test_params['client_options']['port'] username = test_params['username'] password = test_params['password'] key = test_params['rsa_public_key'] @@ -205,22 +207,17 @@ def test_iperf_check_params( self.assertEqual(result['total_packets'], udp_result['packets']) self.assertEqual(mock_warn.call_count, 0) self.assertEqual(mock_exec_command.call_count, 2) + self.assertEqual(mock_get_iperf_servers.call_count, 1) mock_exec_command.assert_has_calls(self._EXPECTED_COMMAND_CALLS) - mock_get_iperf_servers.assert_called_once_with(self.device.organization.id) @patch.object(Iperf, '_exec_command') @patch.object( Iperf, '_get_iperf_servers', return_value=['iperf.openwisptestserver.com'] ) - @patch.object( - app_settings, - 'IPERF_CHECK_CONFIG', - { - 'port': {'default': 9201}, - 'time': {'default': 120}, - }, - ) - def test_iperf_check_config(self, mock_get_iperf_servers, mock_exec_command, *args): + @patch.object(iperf_logger, 'warning') + def test_iperf_check_config( + self, mock_warn, mock_get_iperf_servers, mock_exec_command + ): mock_exec_command.side_effect = [(RESULT_TCP, 0), (RESULT_UDP, 0)] tcp_result = loads(RESULT_TCP)['end'] udp_result = loads(RESULT_UDP)['end']['sum'] @@ -229,22 +226,33 @@ def test_iperf_check_config(self, mock_get_iperf_servers, mock_exec_command, *ar call(dc, 'iperf3 -c iperf.openwisptestserver.com -p 9201 -t 120 -J'), call(dc, 'iperf3 -c iperf.openwisptestserver.com -p 9201 -t 120 -u -J'), ] - with patch.object(Iperf, 'schema', get_iperf_schema()): - result = check.perform_check(store=False) - for key in self._RESULT_KEYS: - self.assertIn(key, result) - self.assertEqual(result['iperf_result'], 1) - self.assertEqual( - result['sent_bps_tcp'], tcp_result['sum_sent']['bits_per_second'] - ) - self.assertEqual( - result['received_bytes_tcp'], tcp_result['sum_received']['bytes'] - ) - self.assertEqual(result['jitter'], udp_result['jitter_ms']) - self.assertEqual(result['total_packets'], udp_result['packets']) - self.assertEqual(mock_exec_command.call_count, 2) - mock_exec_command.assert_has_calls(self._EXPECTED_COMMAND_CALLS) - mock_get_iperf_servers.assert_called_once_with(self.device.organization.id) + org_id = str(self.device.organization.id) + iperf_config = { + org_id: { + 'client_options': { + 'port': 9201, + 'time': 120, + } + } + } + with patch.object(app_settings, 'IPERF_CHECK_CONFIG', iperf_config): + with patch.object(Iperf, 'schema', get_iperf_schema()): + result = check.perform_check(store=False) + for key in self._RESULT_KEYS: + self.assertIn(key, result) + self.assertEqual(result['iperf_result'], 1) + self.assertEqual( + result['sent_bps_tcp'], tcp_result['sum_sent']['bits_per_second'] + ) + self.assertEqual( + result['received_bytes_tcp'], tcp_result['sum_received']['bytes'] + ) + self.assertEqual(result['jitter'], udp_result['jitter_ms']) + self.assertEqual(result['total_packets'], udp_result['packets']) + 
self.assertEqual(mock_warn.call_count, 0) + self.assertEqual(mock_exec_command.call_count, 2) + self.assertEqual(mock_get_iperf_servers.call_count, 1) + mock_exec_command.assert_has_calls(self._EXPECTED_COMMAND_CALLS) @patch.object(iperf_logger, 'warning') def test_iperf_device_connection(self, mock_warn): @@ -303,37 +311,25 @@ def test_iperf_check_content_object_not_device(self): def test_iperf_check_schema_violation(self): device = self._create_device(organization=self._create_org()) invalid_params = [ - {'port': -1232}, - {'time': 0}, - {'port': 'invalid port'}, - {'time': 'invalid time'}, - {'port': '-12a'}, - {'time': '3test22'}, - {'port': 0}, - {'port': 797979}, - {'time': 36000}, - {'port': ''}, - {'time': ''}, {'username': 121}, {'password': -323}, - {'rsa-public-key-path': 112}, + {'rsa_public_key': 1334}, {'username': ''}, {'password': 0}, - {'rsa-public-key-path': '/only_path.pem'}, + {'rsa_public_key': 0}, { 'username': 'openwisp-test-user', 'password': 'open-pass', - 'rsa-public-key-path': '\dir\wrong_path.pem', + 'rsa_public_key': -1, }, { 'username': 1123, 'password': 'rossi', - 'rsa-public-key-path': 'root/public.pem', + 'rsa_public_key': '', }, { 'username': 'openwisp-test-user', 'password': -214, - 'rsa-public-key-path': 'root/public.pem', }, ] for invalid_param in invalid_params: @@ -369,7 +365,7 @@ def test_iperf_check(self, mock_warn, mock_get_iperf_servers, mock_exec_command) ) self.assertEqual(mock_warn.call_count, 1) self.assertEqual(mock_exec_command.call_count, 1) - mock_get_iperf_servers.assert_called_once_with(self.device.organization.id) + self.assertEqual(mock_get_iperf_servers.call_count, 1) mock_exec_command.reset_mock() mock_get_iperf_servers.reset_mock() mock_warn.reset_mock() @@ -424,8 +420,8 @@ def test_iperf_check(self, mock_warn, mock_get_iperf_servers, mock_exec_command) self.assertEqual(mock_warn.call_count, 0) self.assertEqual(mock_exec_command.call_count, 2) + self.assertEqual(mock_get_iperf_servers.call_count, 1) mock_exec_command.assert_has_calls(self._EXPECTED_COMMAND_CALLS) - mock_get_iperf_servers.assert_called_once_with(self.device.organization.id) mock_exec_command.reset_mock() mock_get_iperf_servers.reset_mock() mock_warn.reset_mock() @@ -438,9 +434,9 @@ def test_iperf_check(self, mock_warn, mock_get_iperf_servers, mock_exec_command) self.assertEqual(Chart.objects.count(), 10) self.assertEqual(Metric.objects.count(), 3) self.assertEqual(mock_exec_command.call_count, 2) + self.assertEqual(mock_get_iperf_servers.call_count, 1) mock_warn.assert_has_calls(self._EXPECTED_WARN_CALLS) mock_exec_command.assert_has_calls(self._EXPECTED_COMMAND_CALLS) - mock_get_iperf_servers.assert_called_once_with(self.device.organization.id) mock_exec_command.reset_mock() mock_get_iperf_servers.reset_mock() mock_warn.reset_mock() @@ -474,9 +470,9 @@ def test_iperf_check(self, mock_warn, mock_get_iperf_servers, mock_exec_command) self.assertEqual(Chart.objects.count(), 10) self.assertEqual(Metric.objects.count(), 3) self.assertEqual(mock_exec_command.call_count, 2) + self.assertEqual(mock_get_iperf_servers.call_count, 1) mock_warn.assert_has_calls(self._EXPECTED_WARN_CALLS[1:]) mock_exec_command.assert_has_calls(self._EXPECTED_COMMAND_CALLS) - mock_get_iperf_servers.assert_called_once_with(self.device.organization.id) mock_exec_command.reset_mock() mock_get_iperf_servers.reset_mock() mock_warn.reset_mock() @@ -501,9 +497,9 @@ def test_iperf_check(self, mock_warn, mock_get_iperf_servers, mock_exec_command) self.assertEqual(Chart.objects.count(), 10) 
self.assertEqual(Metric.objects.count(), 3) self.assertEqual(mock_exec_command.call_count, 2) + self.assertEqual(mock_get_iperf_servers.call_count, 1) mock_warn.assert_has_calls(self._EXPECTED_WARN_CALLS[1:]) mock_exec_command.assert_has_calls(self._EXPECTED_COMMAND_CALLS) - mock_get_iperf_servers.assert_called_once_with(self.device.organization.id) @patch.object(Iperf, '_exec_command') @patch.object( @@ -513,22 +509,30 @@ def test_iperf_check(self, mock_warn, mock_get_iperf_servers, mock_exec_command) def test_iperf_check_auth_config( self, mock_warn, mock_get_iperf_servers, mock_exec_command ): + + check, dc = self._create_iperf_test_env() + org_id = str(self.device.organization.id) iperf_config = { - 'username': {'default': 'test'}, - 'password': {'default': 'testpass'}, - 'rsa_public_key': {'default': TEST_RSA_KEY}, + org_id: { + 'username': 'test', + 'password': 'testpass', + 'rsa_public_key': TEST_RSA_KEY, + } } iperf_conf_wrong_pass = { - 'username': {'default': 'test'}, - 'password': {'default': 'wrongpass'}, - 'rsa_public_key': {'default': TEST_RSA_KEY}, + org_id: { + 'username': 'test', + 'password': 'wrongpass', + 'rsa_public_key': TEST_RSA_KEY, + } } iperf_conf_wrong_user = { - 'username': {'default': 'wronguser'}, - 'password': {'default': 'testpass'}, - 'rsa_public_key': {'default': TEST_RSA_KEY}, + org_id: { + 'username': 'wronguser', + 'password': 'testpass', + 'rsa_public_key': TEST_RSA_KEY, + } } - check, dc = self._create_iperf_test_env() auth_error = "test authorization failed" tcp_result = loads(RESULT_TCP)['end'] udp_result = loads(RESULT_UDP)['end']['sum'] @@ -545,7 +549,7 @@ def test_iperf_check_auth_config( # It is required to mock "Iperf.schema" here so that it # uses the updated configuration from "IPERF_CHECK_CONFIG" setting. 
), patch.object(Iperf, 'schema', get_iperf_schema()): - self._set_auth_expected_calls(dc, iperf_config) + self._set_auth_expected_calls(dc, org_id, iperf_config) mock_exec_command.side_effect = [(RESULT_TCP, 0), (RESULT_UDP, 0)] result = check.perform_check(store=False) @@ -561,10 +565,8 @@ def test_iperf_check_auth_config( self.assertEqual(result['jitter'], udp_result['jitter_ms']) self.assertEqual(result['total_packets'], udp_result['packets']) self.assertEqual(mock_exec_command.call_count, 2) + self.assertEqual(mock_get_iperf_servers.call_count, 1) mock_exec_command.assert_has_calls(self._EXPECTED_COMMAND_CALLS) - mock_get_iperf_servers.assert_called_once_with( - self.device.organization.id - ) mock_exec_command.reset_mock() mock_get_iperf_servers.reset_mock() mock_warn.reset_mock() @@ -573,7 +575,7 @@ def test_iperf_check_auth_config( with patch.object( app_settings, 'IPERF_CHECK_CONFIG', iperf_conf_wrong_pass ), patch.object(Iperf, 'schema', get_iperf_schema()): - self._set_auth_expected_calls(dc, iperf_conf_wrong_pass) + self._set_auth_expected_calls(dc, org_id, iperf_conf_wrong_pass) mock_exec_command.side_effect = [ (RESULT_AUTH_FAIL, 1), (RESULT_AUTH_FAIL, 1), @@ -584,9 +586,7 @@ def test_iperf_check_auth_config( self.assertEqual(mock_exec_command.call_count, 2) mock_warn.assert_has_calls(self._EXPECTED_WARN_CALLS) mock_exec_command.assert_has_calls(self._EXPECTED_COMMAND_CALLS) - mock_get_iperf_servers.assert_called_once_with( - self.device.organization.id - ) + self.assertEqual(mock_get_iperf_servers.call_count, 1) mock_exec_command.reset_mock() mock_get_iperf_servers.reset_mock() mock_warn.reset_mock() @@ -595,7 +595,7 @@ def test_iperf_check_auth_config( with patch.object( app_settings, 'IPERF_CHECK_CONFIG', iperf_conf_wrong_user ), patch.object(Iperf, 'schema', get_iperf_schema()): - self._set_auth_expected_calls(dc, iperf_conf_wrong_user) + self._set_auth_expected_calls(dc, org_id, iperf_conf_wrong_user) mock_exec_command.side_effect = [ (RESULT_AUTH_FAIL, 1), (RESULT_AUTH_FAIL, 1), @@ -606,6 +606,4 @@ def test_iperf_check_auth_config( self.assertEqual(mock_exec_command.call_count, 2) mock_warn.assert_has_calls(self._EXPECTED_WARN_CALLS) mock_exec_command.assert_has_calls(self._EXPECTED_COMMAND_CALLS) - mock_get_iperf_servers.assert_called_once_with( - self.device.organization.id - ) + self.assertEqual(mock_get_iperf_servers.call_count, 1) diff --git a/tests/openwisp2/settings.py b/tests/openwisp2/settings.py index cbedcca60..87f4e706c 100644 --- a/tests/openwisp2/settings.py +++ b/tests/openwisp2/settings.py @@ -291,15 +291,6 @@ # Celery auto detects tasks only from INSTALLED_APPS CELERY_IMPORTS = ('openwisp_monitoring.device.tasks',) -OPENWISP_MONITORING_IPERF_SERVERS = { - # Running on my local - # Some Public Iperf Servers : https://iperf.fr/iperf-servers.php#public-servers - # 'be63c4e5-a68a-4650-bfe8-733837edb8be': ['iperf.biznetnetworks.com'], - # 'a9734710-db30-46b0-a2fc-01f01046fe4f': ['speedtest.uztelecom.uz'], - 'a9734710-db30-46b0-a2fc-01f01046fe4f': ['192.168.5.109'], - # '': [''] -} - # local settings must be imported before test runner otherwise they'll be ignored try: from openwisp2.local_settings import * From c7b6cb1d96f90d3c14b5a3202e11be68816b0860 Mon Sep 17 00:00:00 2001 From: Aryamanz29 Date: Tue, 26 Jul 2022 18:56:33 +0530 Subject: [PATCH 51/64] [feature] Added --bitrate param to iperf check - Added bitrate param to tcp and udp mode. - Improved iperf tests. - Improved alert_on_related_field tests. 
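For reference, a minimal sketch of the per-organization iperf check configuration that the updated tests exercise; the organization UUID and hostname below are illustrative placeholders taken from the test fixtures, and the project-level setting name (OPENWISP_MONITORING_IPERF_CHECK_CONFIG) is an assumption based on how the other check settings are exposed:

    # sketch only: mirrors the structure the tests in this commit patch
    # into app_settings.IPERF_CHECK_CONFIG; values are examples
    OPENWISP_MONITORING_IPERF_CHECK_CONFIG = {
        # organization UUID -> per-organization iperf3 options
        'a9734710-db30-46b0-a2fc-01f01046fe4f': {
            'host': ['iperf.openwisptestserver.com'],
            'client_options': {
                'port': 9201,
                'time': 120,
                # '0' lets iperf3 use an unlimited bitrate in TCP mode
                'tcp': {'bitrate': '0'},
                'udp': {'bitrate': '10M'},
            },
        },
    }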
--- openwisp_monitoring/check/classes/iperf.py | 28 ++++-- openwisp_monitoring/check/tasks.py | 8 +- .../check/tests/iperf_test_utils.py | 88 +++++++++++++++++++ openwisp_monitoring/check/tests/test_iperf.py | 51 +++++------ .../db/backends/influxdb/queries.py | 4 +- openwisp_monitoring/monitoring/base/models.py | 6 +- .../monitoring/configuration.py | 4 +- .../tests/test_monitoring_notifications.py | 32 +++++++ 8 files changed, 172 insertions(+), 49 deletions(-) diff --git a/openwisp_monitoring/check/classes/iperf.py b/openwisp_monitoring/check/classes/iperf.py index 3dfc6fc6e..43b702d16 100644 --- a/openwisp_monitoring/check/classes/iperf.py +++ b/openwisp_monitoring/check/classes/iperf.py @@ -3,7 +3,7 @@ from json import loads from json.decoder import JSONDecodeError -from django.core.exceptions import ValidationError +from django.core.exceptions import ImproperlyConfigured, ValidationError from jsonschema import draft7_format_checker, validate from jsonschema.exceptions import ValidationError as SchemaError from swapper import load_model @@ -53,21 +53,21 @@ # arbitrary chosen to avoid slowing down the queue (30min) 'maximum': 1800, }, - 'udp': { + 'tcp': { 'type': 'object', 'properties': { 'bitrate': { 'type': 'string', - 'default': '10M', + 'default': '0', } }, }, - 'tcp': { + 'udp': { 'type': 'object', 'properties': { 'bitrate': { 'type': 'string', - 'default': '0', + 'default': '10M', } }, }, @@ -119,6 +119,14 @@ def check(self, store=True): time = self._get_param( 'client_options.time', 'client_options.properties.time.default' ) + tcp_bitrate = self._get_param( + 'client_options.tcp.bitrate', + 'client_options.properties.tcp.properties.bitrate.default', + ) + udp_bitrate = self._get_param( + 'client_options.udp.bitrate', + 'client_options.properties.udp.properties.bitrate.default', + ) username = self._get_param('username', 'username.default') device_connection = self._get_device_connection() if not device_connection: @@ -133,8 +141,8 @@ def check(self, store=True): ) return server = self._get_iperf_servers()[0] - command_tcp = f'iperf3 -c {server} -p {port} -t {time} -J' - command_udp = f'iperf3 -c {server} -p {port} -t {time} -u -J' + command_tcp = f'iperf3 -c {server} -p {port} -t {time} -b {tcp_bitrate} -J' + command_udp = f'iperf3 -c {server} -p {port} -t {time} -b {udp_bitrate} -u -J' # All three parameters ie. 
username, password and rsa_public_key is required # for authentication to work, checking only username here @@ -146,10 +154,10 @@ def check(self, store=True): command_tcp = f'echo "{rsa_public_key}" > {rsa_public_key_path} && \ IPERF3_PASSWORD="{password}" iperf3 -c {server} -p {port} -t {time} \ - --username "{username}" --rsa-public-key-path {rsa_public_key_path} -J' + --username "{username}" --rsa-public-key-path {rsa_public_key_path} -b {tcp_bitrate} -J' command_udp = f'IPERF3_PASSWORD="{password}" iperf3 -c {server} -p {port} -t {time} \ - --username "{username}" --rsa-public-key-path {rsa_public_key_path} -u -J' + --username "{username}" --rsa-public-key-path {rsa_public_key_path} -b {udp_bitrate} -u -J' # If IPERF_CHECK_DELETE_RSA_KEY, remove rsa_public_key from the device if app_settings.IPERF_CHECK_DELETE_RSA_KEY: command_udp = f'{command_udp} && rm {rsa_public_key_path}' @@ -200,6 +208,8 @@ def _get_iperf_servers(self): Get iperf test servers """ org_servers = self._get_param('host', 'host.default') + if not org_servers: + raise ImproperlyConfigured(f'Iperf check host cannot be {org_servers}') return org_servers def _exec_command(self, dc, command): diff --git a/openwisp_monitoring/check/tasks.py b/openwisp_monitoring/check/tasks.py index 0a5ed2010..76ce82bb1 100644 --- a/openwisp_monitoring/check/tasks.py +++ b/openwisp_monitoring/check/tasks.py @@ -30,9 +30,13 @@ def run_checks(checks=None): checks = CHECKS_LIST if not isinstance(checks, list): - raise ImproperlyConfigured(f'Check path {checks} should be of type "list"') + raise ImproperlyConfigured( + f'Check path {checks} should be of type "list"' + ) # pragma: no cover if not all(check_path in CHECKS_LIST for check_path in checks): - raise ImproperlyConfigured(f'Check path {checks} should be in {CHECKS_LIST}') + raise ImproperlyConfigured( + f'Check path {checks} should be in {CHECKS_LIST}' + ) # pragma: no cover iterator = ( get_check_model() diff --git a/openwisp_monitoring/check/tests/iperf_test_utils.py b/openwisp_monitoring/check/tests/iperf_test_utils.py index 6bdd735eb..93fb20f53 100644 --- a/openwisp_monitoring/check/tests/iperf_test_utils.py +++ b/openwisp_monitoring/check/tests/iperf_test_utils.py @@ -765,3 +765,91 @@ msapnWnBSxXt7Tbb++A5XbOMdM2mwNYDEtkD5ksC/x3EVBrI9FvENsH9+u/8J9Mf 2oPl4MnlCMY86MQypkeUn7eVWfDnseNky7TyC0/IgCXve/iaydCCFdkjyo1MTAA4 BQIDAQAB""" + +INVALID_PARAMS = [ + {'host': ''}, + {'host': 12}, + {'host': 'test.openwisp.io'}, + {'username': 121}, + {'password': -323}, + {'rsa_public_key': 1334}, + {'username': ''}, + {'password': 0}, + {'rsa_public_key': 0}, + { + 'username': 'openwisp-test-user', + 'password': 'open-pass', + 'rsa_public_key': -1, + }, + { + 'username': 1123, + 'password': 'rossi', + 'rsa_public_key': '', + }, + { + 'username': 'openwisp-test-user', + 'password': -214, + }, + { + 'client_options': { + 'port': 'testport', + 'time': 120, + 'tcp': {'bitrate': '10M'}, + 'udp': {'bitrate': '50M'}, + } + }, + { + 'host': ['test.openwisp.io'], + 'client_options': { + 'port': 'testport', + 'time': 120, + 'tcp': {'bitrate': '10M'}, + 'udp': {'bitrate': '50M'}, + }, + }, + { + 'host': ['test.openwisp.io'], + 'client_options': { + 'port': 70000, + 'time': 120, + 'tcp': {'bitrate': '10M'}, + 'udp': {'bitrate': '50M'}, + }, + }, + { + 'host': ['test.openwisp.io'], + 'client_options': { + 'port': -21, + 'time': 120, + 'tcp': {'bitrate': '10M'}, + 'udp': {'bitrate': '50M'}, + }, + }, + { + 'host': ['test.openwisp.io'], + 'client_options': { + 'port': 5201, + 'time': 1200000, + 'tcp': {'bitrate': 
'10M'}, + 'udp': {'bitrate': '50M'}, + }, + }, + { + 'host': ['test.openwisp.io'], + 'client_options': { + 'port': 5201, + 'time': 20, + 'tcp': {'bitrate': 10}, + 'udp': {'bitrate': '50M'}, + }, + }, + { + 'host': ['test.openwisp.io'], + 'client_options': { + 'port': 5201, + 'time': 120, + 'tcp': {'bitrate': '10M'}, + 'udp': {'bitrate': 50}, + }, + }, +] diff --git a/openwisp_monitoring/check/tests/test_iperf.py b/openwisp_monitoring/check/tests/test_iperf.py index 4e1c6ef22..bf0a378a5 100644 --- a/openwisp_monitoring/check/tests/test_iperf.py +++ b/openwisp_monitoring/check/tests/test_iperf.py @@ -14,6 +14,7 @@ from .. import settings as app_settings from ..classes import Iperf from .iperf_test_utils import ( + INVALID_PARAMS, RESULT_AUTH_FAIL, RESULT_FAIL, RESULT_TCP, @@ -65,8 +66,10 @@ def _create_iperf_test_env(self): dc.connect() self.device = dc.device self._EXPECTED_COMMAND_CALLS = [ - call(dc, 'iperf3 -c iperf.openwisptestserver.com -p 5201 -t 10 -J'), - call(dc, 'iperf3 -c iperf.openwisptestserver.com -p 5201 -t 10 -u -J'), + call(dc, 'iperf3 -c iperf.openwisptestserver.com -p 5201 -t 10 -b 0 -J'), + call( + dc, 'iperf3 -c iperf.openwisptestserver.com -p 5201 -t 10 -b 10M -u -J' + ), ] self._EXPECTED_WARN_CALLS = [ call( @@ -93,12 +96,12 @@ def _set_auth_expected_calls(self, dc, org_id, config): dc, f'echo "{test_prefix}{key}{test_suffix}" > {rsa_key_path} && \ IPERF3_PASSWORD="{password}" iperf3 -c {server} -p 5201 -t 10 \ - --username "{username}" --rsa-public-key-path {rsa_key_path} -J', + --username "{username}" --rsa-public-key-path {rsa_key_path} -b 0 -J', ), call( dc, f'IPERF3_PASSWORD="{password}" iperf3 -c {server} -p 5201 -t 10 \ - --username "{username}" --rsa-public-key-path {rsa_key_path} -u -J && rm {rsa_key_path}', + --username "{username}" --rsa-public-key-path {rsa_key_path} -b 10M -u -J && rm {rsa_key_path}', ), ] @@ -171,10 +174,14 @@ def test_iperf_check_params( 'client_options': { 'port': 6201, 'time': 20, + 'tcp': {'bitrate': '10M'}, + 'udp': {'bitrate': '30M'}, }, } time = test_params['client_options']['time'] port = test_params['client_options']['port'] + tcp_bitrate = test_params['client_options']['tcp']['bitrate'] + udp_bitrate = test_params['client_options']['udp']['bitrate'] username = test_params['username'] password = test_params['password'] key = test_params['rsa_public_key'] @@ -185,12 +192,12 @@ def test_iperf_check_params( dc, f'echo "{test_prefix}{key}{test_suffix}" > {rsa_key_path} && \ IPERF3_PASSWORD="{password}" iperf3 -c {server} -p {port} -t {time} \ - --username "{username}" --rsa-public-key-path {rsa_key_path} -J', + --username "{username}" --rsa-public-key-path {rsa_key_path} -b {tcp_bitrate} -J', ), call( dc, f'IPERF3_PASSWORD="{password}" iperf3 -c {server} -p {port} -t {time} \ - --username "{username}" --rsa-public-key-path {rsa_key_path} -u -J && rm {rsa_key_path}', + --username "{username}" --rsa-public-key-path {rsa_key_path} -b {udp_bitrate} -u -J && rm {rsa_key_path}', # noqa ), ] result = check.perform_check(store=False) @@ -223,8 +230,10 @@ def test_iperf_check_config( udp_result = loads(RESULT_UDP)['end']['sum'] check, dc = self._create_iperf_test_env() self._EXPECTED_COMMAND_CALLS = [ - call(dc, 'iperf3 -c iperf.openwisptestserver.com -p 9201 -t 120 -J'), - call(dc, 'iperf3 -c iperf.openwisptestserver.com -p 9201 -t 120 -u -J'), + call(dc, 'iperf3 -c iperf.openwisptestserver.com -p 9201 -t 120 -b 10M -J'), + call( + dc, 'iperf3 -c iperf.openwisptestserver.com -p 9201 -t 120 -b 50M -u -J' + ), ] org_id = 
str(self.device.organization.id) iperf_config = { @@ -232,6 +241,8 @@ def test_iperf_check_config( 'client_options': { 'port': 9201, 'time': 120, + 'tcp': {'bitrate': '10M'}, + 'udp': {'bitrate': '50M'}, } } } @@ -310,29 +321,7 @@ def test_iperf_check_content_object_not_device(self): def test_iperf_check_schema_violation(self): device = self._create_device(organization=self._create_org()) - invalid_params = [ - {'username': 121}, - {'password': -323}, - {'rsa_public_key': 1334}, - {'username': ''}, - {'password': 0}, - {'rsa_public_key': 0}, - { - 'username': 'openwisp-test-user', - 'password': 'open-pass', - 'rsa_public_key': -1, - }, - { - 'username': 1123, - 'password': 'rossi', - 'rsa_public_key': '', - }, - { - 'username': 'openwisp-test-user', - 'password': -214, - }, - ] - for invalid_param in invalid_params: + for invalid_param in INVALID_PARAMS: check = Check( name='Iperf check', check_type=self._IPERF, diff --git a/openwisp_monitoring/db/backends/influxdb/queries.py b/openwisp_monitoring/db/backends/influxdb/queries.py index 55559f8de..7597642c9 100644 --- a/openwisp_monitoring/db/backends/influxdb/queries.py +++ b/openwisp_monitoring/db/backends/influxdb/queries.py @@ -126,14 +126,14 @@ }, 'bandwidth_udp': { 'influxdb': ( - "SELECT MEAN(sent_bps_udp) / 1000000 AS sent FROM {key} " + "SELECT MEAN(sent_bps_udp) / 1000000000 AS sent FROM {key} " "WHERE time >= '{time}' AND content_type = '{content_type}' " "AND object_id = '{object_id}' GROUP BY time(1d) fill(none)" ) }, 'transfer_udp': { 'influxdb': ( - "SELECT SUM(sent_bytes_udp) / 1000000 AS sent FROM {key} " + "SELECT SUM(sent_bytes_udp) / 1000000000 AS sent FROM {key} " "WHERE time >= '{time}' AND content_type = '{content_type}' AND " "object_id = '{object_id}' GROUP BY time(1d) fill(none)" ) diff --git a/openwisp_monitoring/monitoring/base/models.py b/openwisp_monitoring/monitoring/base/models.py index 6bb707159..1c59171a6 100644 --- a/openwisp_monitoring/monitoring/base/models.py +++ b/openwisp_monitoring/monitoring/base/models.py @@ -764,7 +764,7 @@ def _is_crossed_by(self, current_value, time=None, retention_policy=None): # tolerance is set, we must go back in time # to ensure the threshold is trepassed for enough time # check if alert_on_related_field is present in metric configuration - if 'alert_on_related_field' in self.config_dict: + if 'alert_on_related_field' in self.metric.config_dict: alert_on_related_field = [self.metric.config_dict['alert_on_related_field']] else: alert_on_related_field = [] @@ -775,7 +775,7 @@ def _is_crossed_by(self, current_value, time=None, retention_policy=None): limit=None, order='-time', retention_policy=retention_policy, - extra_values=alert_on_related_field, + extra_fields=alert_on_related_field, ) # store a list with the results results = [value_crossed] @@ -788,7 +788,7 @@ def _is_crossed_by(self, current_value, time=None, retention_policy=None): utc_time = utc.localize(datetime.utcfromtimestamp(point['time'])) # did this point cross the threshold? 
Append to result list # check if alert_on_related_field is present in metric configuration - if 'alert_on_related_field' in self.config_dict: + if 'alert_on_related_field' in self.metric.config_dict: results.append( self._value_crossed( point[self.metric.config_dict['alert_on_related_field']] diff --git a/openwisp_monitoring/monitoring/configuration.py b/openwisp_monitoring/monitoring/configuration.py index df71dd638..6c97936e4 100644 --- a/openwisp_monitoring/monitoring/configuration.py +++ b/openwisp_monitoring/monitoring/configuration.py @@ -619,7 +619,7 @@ def _get_access_tech(): 'title': _('Bandwidth (UDP)'), 'description': _('Bitrate during Iperf3 test in UDP mode.'), 'summary_labels': [_('Sent bitrate')], - 'unit': _(' Mbps'), + 'unit': _(' Gbps'), 'order': 310, 'query': chart_query['bandwidth_udp'], 'colors': [DEFAULT_COLORS[0]], @@ -629,7 +629,7 @@ def _get_access_tech(): 'title': _('Transfer (UDP)'), 'description': _('Transfer during Iperf3 test in UDP mode.'), 'summary_labels': [_('Sent bytes')], - 'unit': _(' MB'), + 'unit': _(' GB'), 'order': 320, 'query': chart_query['transfer_udp'], 'colors': [DEFAULT_COLORS[5]], diff --git a/openwisp_monitoring/monitoring/tests/test_monitoring_notifications.py b/openwisp_monitoring/monitoring/tests/test_monitoring_notifications.py index 5883425fa..f5cba1de8 100644 --- a/openwisp_monitoring/monitoring/tests/test_monitoring_notifications.py +++ b/openwisp_monitoring/monitoring/tests/test_monitoring_notifications.py @@ -482,6 +482,38 @@ def test_alert_on_related_field(self): self.assertEqual(n.action_object, m.alertsettings) self.assertEqual(n.level, 'info') + def test_general_check_threshold_with_alert_on_rf_crossed_deferred(self): + admin = self._create_admin() + m = self._create_general_metric(configuration='test_alert_on_rf') + self._create_alert_settings( + metric=m, custom_operator='>', custom_threshold=30, custom_tolerance=1 + ) + m.write(10, time=ten_minutes_ago, extra_values={'test_related_2': 35}) + m.refresh_from_db() + self.assertEqual(m.is_healthy, False) + self.assertEqual(m.is_healthy_tolerant, False) + self.assertEqual(Notification.objects.count(), 1) + n = notification_queryset.first() + self.assertEqual(n.recipient, admin) + self.assertEqual(n.actor, m) + self.assertEqual(n.action_object, m.alertsettings) + self.assertEqual(n.level, 'warning') + + def test_general_check_threshold_with_alert_on_rf_deferred_not_crossed(self): + self._create_admin() + m = self._create_general_metric(configuration='test_alert_on_rf') + self._create_alert_settings( + metric=m, custom_operator='>', custom_threshold=30, custom_tolerance=1 + ) + m.write(10, extra_values={'test_related_2': 32}) + self.assertEqual(m.is_healthy, True) + self.assertEqual(m.is_healthy_tolerant, True) + self.assertEqual(Notification.objects.count(), 0) + m.write(20, extra_values={'test_related_2': 35}) + self.assertEqual(m.is_healthy, True) + self.assertEqual(m.is_healthy_tolerant, True) + self.assertEqual(Notification.objects.count(), 0) + class TestTransactionMonitoringNotifications(DeviceMonitoringTransactionTestcase): device_model = Device From 46d86f05471de61a7b6e7d67ddc9f2d1e4b17ecc Mon Sep 17 00:00:00 2001 From: Aryamanz29 Date: Tue, 26 Jul 2022 22:29:51 +0530 Subject: [PATCH 52/64] [change] Added validation for settings config and non json error handling --- openwisp_monitoring/check/classes/iperf.py | 43 +++++----- .../check/tests/iperf_test_utils.py | 80 +++++++++++++++++++ openwisp_monitoring/check/tests/test_iperf.py | 26 ++++++ 3 files changed, 128 
insertions(+), 21 deletions(-) diff --git a/openwisp_monitoring/check/classes/iperf.py b/openwisp_monitoring/check/classes/iperf.py index 43b702d16..e14160ef5 100644 --- a/openwisp_monitoring/check/classes/iperf.py +++ b/openwisp_monitoring/check/classes/iperf.py @@ -3,7 +3,7 @@ from json import loads from json.decoder import JSONDecodeError -from django.core.exceptions import ImproperlyConfigured, ValidationError +from django.core.exceptions import ValidationError from jsonschema import draft7_format_checker, validate from jsonschema.exceptions import ValidationError as SchemaError from swapper import load_model @@ -82,10 +82,9 @@ def get_iperf_schema(): 'type': 'object', 'additionalProperties': True, 'dependencies': { - 'client_options': ['host'], - 'username': ['password', 'rsa_public_key', 'host'], - 'password': ['username', 'rsa_public_key', 'host'], - 'rsa_public_key': ['username', 'password', 'host'], + 'username': ['password', 'rsa_public_key'], + 'password': ['username', 'rsa_public_key'], + 'rsa_public_key': ['username', 'password'], }, } schema['properties'] = DEFAULT_IPERF_CHECK_CONFIG @@ -96,14 +95,11 @@ class Iperf(BaseCheck): schema = get_iperf_schema() - def validate_params(self): + def validate_params(self, params=None): try: - params = self.params - org_id = str(self.related_object.organization.id) - iperf_config = app_settings.IPERF_CHECK_CONFIG - if not params and iperf_config: - params = iperf_config[org_id] - validate(self.params, self.schema, format_checker=draft7_format_checker) + if not params: + params = self.params + validate(params, self.schema, format_checker=draft7_format_checker) except SchemaError as e: message = 'Invalid param' path = '/'.join(e.path) @@ -113,6 +109,11 @@ def validate_params(self): raise ValidationError({'params': message}) from e def check(self, store=True): + iperf_config = app_settings.IPERF_CHECK_CONFIG + if iperf_config: + org_id = str(self.related_object.organization.id) + self.validate_params(params=iperf_config[org_id]) + port = self._get_param( 'client_options.port', 'client_options.properties.port.default' ) @@ -175,15 +176,14 @@ def check(self, store=True): # UDP mode result, exit_code = self._exec_command(device_connection, command_udp) result_udp = self._get_iperf_result(result, exit_code, mode='UDP') - - if store: + result = {} + if store and result_tcp and result_udp: # Store iperf_result field 1 if any mode passes, store 0 when both fails iperf_result = result_tcp['iperf_result'] | result_udp['iperf_result'] - self.store_result( - {**result_tcp, **result_udp, 'iperf_result': iperf_result} - ) + result.update({**result_tcp, **result_udp, 'iperf_result': iperf_result}) + self.store_result(result) device_connection.disconnect() - return {**result_tcp, **result_udp, 'iperf_result': iperf_result} + return result def _get_compelete_rsa_key(self, key): pem_prefix = '-----BEGIN PUBLIC KEY-----\n' @@ -208,8 +208,6 @@ def _get_iperf_servers(self): Get iperf test servers """ org_servers = self._get_param('host', 'host.default') - if not org_servers: - raise ImproperlyConfigured(f'Iperf check host cannot be {org_servers}') return org_servers def _exec_command(self, dc, command): @@ -260,7 +258,10 @@ def _get_iperf_result(self, result, exit_code, mode): result = loads(result) except JSONDecodeError: # Errors other than iperf3 test errors - result = {'error': f'error - {result.strip()}'} + logger.warning( + f'Iperf check failed for "{self.related_object}", error - {result.strip()}' + ) + return if mode == 'TCP': if exit_code != 0: diff 
--git a/openwisp_monitoring/check/tests/iperf_test_utils.py b/openwisp_monitoring/check/tests/iperf_test_utils.py index 93fb20f53..6a730f501 100644 --- a/openwisp_monitoring/check/tests/iperf_test_utils.py +++ b/openwisp_monitoring/check/tests/iperf_test_utils.py @@ -757,6 +757,86 @@ "error": "error - test authorization failed" } """ +PARAM_ERROR = """Usage: iperf3 [-s|-c host] [options] + iperf3 [-h|--help] [-v|--version] + +Server or Client: + -p, --port # server port to listen on/connect to + -f, --format [kmgtKMGT] format to report: Kbits, Mbits, Gbits, Tbits + -i, --interval # seconds between periodic throughput reports + -F, --file name xmit/recv the specified file + -A, --affinity n/n,m set CPU affinity + -B, --bind bind to the interface associated with the address + -V, --verbose more detailed output + -J, --json output in JSON format + --logfile f send output to a log file + --forceflush force flushing output at every interval + -d, --debug emit debugging output + -v, --version show version information and quit + -h, --help show this message and quit +Server specific: + -s, --server run in server mode + -D, --daemon run the server as a daemon + -I, --pidfile file write PID file + -1, --one-off handle one client connection then exit + --rsa-private-key-path path to the RSA private key used to decrypt + authentication credentials + --authorized-users-path path to the configuration file containing user + credentials +Client specific: + -c, --client run in client mode, connecting to + -u, --udp use UDP rather than TCP + --connect-timeout # timeout for control connection setup (ms) + -b, --bitrate #[KMG][/#] target bitrate in bits/sec (0 for unlimited) + (default 1 Mbit/sec for UDP, unlimited for TCP) + (optional slash and packet count for burst mode) + --pacing-timer #[KMG] set the timing for pacing, in microseconds (default 1000) + --fq-rate #[KMG] enable fair-queuing based socket pacing in + bits/sec (Linux only) + -t, --time # time in seconds to transmit for (default 10 secs) + -n, --bytes #[KMG] number of bytes to transmit (instead of -t) + -k, --blockcount #[KMG] number of blocks (packets) to transmit (instead of -t or -n) + -l, --length #[KMG] length of buffer to read or write + (default 128 KB for TCP, dynamic or 1460 for UDP) + --cport bind to a specific client port (TCP and UDP, default: ephemeral port) + -P, --parallel # number of parallel client streams to run + -R, --reverse run in reverse mode (server sends, client receives) + --bidir run in bidirectional mode. + Client and server send and receive data. + -w, --window #[KMG] set window size / socket buffer size + -C, --congestion set TCP congestion control algorithm (Linux and FreeBSD only) + -M, --set-mss # set TCP/SCTP maximum segment size (MTU - 40 bytes) + -N, --no-delay set TCP/SCTP no delay, disabling Nagle's Algorithm + -4, --version4 only use IPv4 + -6, --version6 only use IPv6 + -S, --tos N set the IP type of service, 0-255. + The usual prefixes for octal and hex can be used, + i.e. 52, 064 and 0x34 all specify the same value. + --dscp N or --dscp val set the IP dscp value, either 0-63 or symbolic. + Numeric values can be specified in decimal, + octal and hex (see --tos above). 
+ -L, --flowlabel N set the IPv6 flow label (only supported on Linux) + -Z, --zerocopy use a 'zero copy' method of sending data + -O, --omit N omit the first n seconds + -T, --title str prefix every output line with this string + --extra-data str data string to include in client and server JSON + --get-server-output get results from server + --udp-counters-64bit use 64-bit counters in UDP test packets + --repeating-payload use repeating pattern in payload, instead of + randomized payload (like in iperf2) + --username username for authentication + --rsa-public-key-path path to the RSA public key used to encrypt + authentication credentials + +[KMG] indicates options that support a K/M/G suffix for kilo-, mega-, or giga- + +iperf3 homepage at: https://software.es.net/iperf/ +Report bugs to: https://github.com/esnet/iperf +iperf3: parameter error - you must specify username (max 20 chars), password (max 20 chars) and a path to a valid public rsa client to be used + +Keyword arguments: +argument -- description +Return: return_description""" TEST_RSA_KEY = """MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAwuEm+iYrfSWJOupy6X3N dxZvUCxvmoL3uoGAs0O0Y32unUQrwcTIxudy38JSuCccD+k2Rf8S4WuZSiTxaoea diff --git a/openwisp_monitoring/check/tests/test_iperf.py b/openwisp_monitoring/check/tests/test_iperf.py index bf0a378a5..5fbd2fff0 100644 --- a/openwisp_monitoring/check/tests/test_iperf.py +++ b/openwisp_monitoring/check/tests/test_iperf.py @@ -15,6 +15,7 @@ from ..classes import Iperf from .iperf_test_utils import ( INVALID_PARAMS, + PARAM_ERROR, RESULT_AUTH_FAIL, RESULT_FAIL, RESULT_TCP, @@ -359,6 +360,31 @@ def test_iperf_check(self, mock_warn, mock_get_iperf_servers, mock_exec_command) mock_get_iperf_servers.reset_mock() mock_warn.reset_mock() + with self.subTest('Test iperf3 errors not in json format'): + with patch.object( + app_settings, + 'IPERF_CHECK_RSA_KEY_PATH', + '/invalid_path/iperf-rsa-public.pem', + ): + dir_error = "ash: can't create /invalid_path/iperf-rsa-public.pem: nonexistent directory" + mock_exec_command.side_effect = [(dir_error, 1), (PARAM_ERROR, 1)] + EXPECTED_WARN_CALLS = [ + call( + f'Iperf check failed for "{self.device}", error - {dir_error}' + ), + call( + f'Iperf check failed for "{self.device}", error - {PARAM_ERROR}' + ), + ] + check.perform_check(store=False) + self.assertEqual(mock_warn.call_count, 2) + self.assertEqual(mock_exec_command.call_count, 2) + self.assertEqual(mock_get_iperf_servers.call_count, 1) + mock_warn.assert_has_calls(EXPECTED_WARN_CALLS) + mock_exec_command.reset_mock() + mock_get_iperf_servers.reset_mock() + mock_warn.reset_mock() + with self.subTest('Test iperf check passes in both TCP & UDP'): mock_exec_command.side_effect = [(RESULT_TCP, 0), (RESULT_UDP, 0)] self.assertEqual(Chart.objects.count(), 2) From f9a4f9877a0e8ad6d2d55d4c77f2acd818080d1e Mon Sep 17 00:00:00 2001 From: Aryamanz29 Date: Wed, 27 Jul 2022 17:56:55 +0530 Subject: [PATCH 53/64] [change] Added connect_points to iperf charts --- .../db/backends/influxdb/queries.py | 16 ++++++++-------- openwisp_monitoring/monitoring/base/models.py | 5 +++++ openwisp_monitoring/monitoring/configuration.py | 13 +++++++++---- .../monitoring/static/monitoring/js/chart.js | 14 ++++++++++++++ .../monitoring/tests/test_charts.py | 1 + openwisp_monitoring/views.py | 2 ++ 6 files changed, 39 insertions(+), 12 deletions(-) diff --git a/openwisp_monitoring/db/backends/influxdb/queries.py b/openwisp_monitoring/db/backends/influxdb/queries.py index 7597642c9..e54b1a315 100644 --- 
a/openwisp_monitoring/db/backends/influxdb/queries.py +++ b/openwisp_monitoring/db/backends/influxdb/queries.py @@ -105,7 +105,7 @@ "SELECT MEAN(received_bps_tcp) / 1000000000 AS received, " "MEAN(sent_bps_tcp) / 1000000000 AS sent FROM {key} WHERE " "time >= '{time}' AND content_type = '{content_type}' AND " - "object_id = '{object_id}' GROUP BY time(1d) fill(none)" + "object_id = '{object_id}' GROUP BY time(1d)" ) }, 'transfer_tcp': { @@ -114,35 +114,35 @@ "SUM(sent_bytes_tcp) / 1000000000 AS sent," "((SUM(sent_bytes_tcp) + SUM(received_bytes_tcp)) / 1000000000) AS total FROM {key} WHERE " "time >= '{time}' AND content_type = '{content_type}' AND " - "object_id = '{object_id}' GROUP BY time(1d) fill(none)" + "object_id = '{object_id}' GROUP BY time(1d)" ) }, 'retransmits': { 'influxdb': ( "SELECT MEAN(retransmits) AS retransmits FROM {key} " "WHERE time >= '{time}' AND content_type = '{content_type}' " - "AND object_id = '{object_id}' GROUP BY time(1d) fill(none)" + "AND object_id = '{object_id}' GROUP BY time(1d)" ) }, 'bandwidth_udp': { 'influxdb': ( "SELECT MEAN(sent_bps_udp) / 1000000000 AS sent FROM {key} " "WHERE time >= '{time}' AND content_type = '{content_type}' " - "AND object_id = '{object_id}' GROUP BY time(1d) fill(none)" + "AND object_id = '{object_id}' GROUP BY time(1d)" ) }, 'transfer_udp': { 'influxdb': ( "SELECT SUM(sent_bytes_udp) / 1000000000 AS sent FROM {key} " "WHERE time >= '{time}' AND content_type = '{content_type}' AND " - "object_id = '{object_id}' GROUP BY time(1d) fill(none)" + "object_id = '{object_id}' GROUP BY time(1d)" ) }, 'jitter': { 'influxdb': ( "SELECT MEAN(jitter) AS jitter FROM {key} " "WHERE time >= '{time}' AND content_type = '{content_type}' " - "AND object_id = '{object_id}' GROUP BY time(1d) fill(none)" + "AND object_id = '{object_id}' GROUP BY time(1d)" ) }, 'datagram': { @@ -150,14 +150,14 @@ "SELECT MEAN(lost_packets) AS lost_datagram," "MEAN(total_packets) AS total_datagram FROM {key} WHERE " "time >= '{time}' AND content_type = '{content_type}' " - "AND object_id = '{object_id}' GROUP BY time(1d) fill(none)" + "AND object_id = '{object_id}' GROUP BY time(1d)" ) }, 'datagram_loss': { 'influxdb': ( "SELECT MEAN(lost_percent) AS datagram_loss FROM {key} " "WHERE time >= '{time}' AND content_type = '{content_type}' " - "AND object_id = '{object_id}' GROUP BY time(1d) fill(none)" + "AND object_id = '{object_id}' GROUP BY time(1d)" ) }, } diff --git a/openwisp_monitoring/monitoring/base/models.py b/openwisp_monitoring/monitoring/base/models.py index 1c59171a6..384428ba3 100644 --- a/openwisp_monitoring/monitoring/base/models.py +++ b/openwisp_monitoring/monitoring/base/models.py @@ -451,6 +451,10 @@ def trace_type(self): def trace_order(self): return self.config_dict.get('trace_order', []) + @property + def connect_points(self): + return self.config_dict.get('connect_points', False) + @property def description(self): return self.config_dict['description'].format( @@ -636,6 +640,7 @@ def json(self, time=DEFAULT_TIME, **kwargs): 'unit': self.unit, 'trace_type': self.trace_type, 'trace_order': self.trace_order, + 'connect_points': self.connect_points, 'colors': self.colors, } ) diff --git a/openwisp_monitoring/monitoring/configuration.py b/openwisp_monitoring/monitoring/configuration.py index 6c97936e4..531fe8402 100644 --- a/openwisp_monitoring/monitoring/configuration.py +++ b/openwisp_monitoring/monitoring/configuration.py @@ -564,8 +564,9 @@ def _get_access_tech(): 'alert_on_related_field': 'jitter', 'charts': { 'bandwidth_tcp': { - 'type': 
'stackedbar', + 'type': 'scatter', 'fill': 'none', + 'connect_points': True, 'title': _('Bandwidth (TCP)'), 'description': _('Bitrate during Iperf3 test in TCP mode.'), 'summary_labels': [ @@ -579,6 +580,7 @@ def _get_access_tech(): }, 'transfer_tcp': { 'type': 'stackedbar+lines', + 'connect_points': True, 'title': _('Transfer (TCP)'), 'trace_type': { 'received': 'stackedbar', @@ -603,9 +605,7 @@ def _get_access_tech(): }, 'retransmits': { 'type': 'scatter', - 'trace_type': { - 'retransmits': 'lines', - }, + 'connect_points': True, 'title': _('Retransmits'), 'description': _('No. of retransmits during Iperf3 test in TCP mode.'), 'summary_labels': [_('Restransmits')], @@ -616,6 +616,7 @@ def _get_access_tech(): }, 'bandwidth_udp': { 'type': 'scatter', + 'connect_points': True, 'title': _('Bandwidth (UDP)'), 'description': _('Bitrate during Iperf3 test in UDP mode.'), 'summary_labels': [_('Sent bitrate')], @@ -626,6 +627,7 @@ def _get_access_tech(): }, 'transfer_udp': { 'type': 'scatter', + 'connect_points': True, 'title': _('Transfer (UDP)'), 'description': _('Transfer during Iperf3 test in UDP mode.'), 'summary_labels': [_('Sent bytes')], @@ -636,6 +638,7 @@ def _get_access_tech(): }, 'jitter': { 'type': 'scatter', + 'connect_points': True, 'title': _('Jitter'), 'description': _( 'Jitter is a variance in latency measured using Iperf3 utility in UDP mode.' @@ -651,6 +654,7 @@ def _get_access_tech(): 'datagram': { 'type': 'scatter', 'fill': 'none', + 'connect_points': True, 'title': _('Datagram'), 'description': _( 'Lost/Total datagram ratio measured by Iperf3 test in UDP mode.' @@ -666,6 +670,7 @@ def _get_access_tech(): }, 'datagram_loss': { 'type': 'scatter', + 'connect_points': True, 'title': _('Datagram Loss'), 'description': _( 'Indicates datagram loss during Iperf3 test in UDP mode.' diff --git a/openwisp_monitoring/monitoring/static/monitoring/js/chart.js b/openwisp_monitoring/monitoring/static/monitoring/js/chart.js index 0baa83483..5e4d07f8d 100644 --- a/openwisp_monitoring/monitoring/static/monitoring/js/chart.js +++ b/openwisp_monitoring/monitoring/static/monitoring/js/chart.js @@ -178,6 +178,7 @@ // We use the "_key" field to sort the charts // according to the order defined in "data.trace_order" _key: key, + _connectPoints : data.connect_points || false, }, yValuesRaw = data.traces[i][1]; if (type !== 'histogram') { @@ -216,6 +217,11 @@ layout.margin.b = 45; } } + + var xValuesRaw = options.x; + if(options._connectPoints) { + options.x = []; + } // adjust text to be displayed in Y values // differentiate between values with zero and no values at all (N/A) for (var c=0; c