From 3bc571fe6f5eef5b33c796b609cc8ae6881f23c1 Mon Sep 17 00:00:00 2001 From: Yashika Jotwani Date: Wed, 15 Jun 2022 02:12:28 +0530 Subject: [PATCH 001/133] [feature] Device status: make bridge members clickable #352 Fixes #352 --- .../admin/config/device/change_form.html | 9 ++++-- .../device/tests/test_admin.py | 31 ++++++++++++++++++- 2 files changed, 37 insertions(+), 3 deletions(-) diff --git a/openwisp_monitoring/device/templates/admin/config/device/change_form.html b/openwisp_monitoring/device/templates/admin/config/device/change_form.html index f06bceae4..60c99042c 100644 --- a/openwisp_monitoring/device/templates/admin/config/device/change_form.html +++ b/openwisp_monitoring/device/templates/admin/config/device/change_form.html @@ -110,7 +110,7 @@

{% trans 'Storage' %}

{% endif %} {% if device_data.interfaces %} {% for interface in device_data.interfaces %} -
+

{% trans 'Interface status' %}: {{ interface.name }}

{% if interface.mac %}
@@ -334,7 +334,9 @@

{% trans 'Interface status' %}: {{ interface.name }}

- {{ interface.bridge_members|join:", " }}
+ {% for element in interface.bridge_members %}
+ {{ element }}{% if not forloop.last %}, {% endif %}
+ {% endfor %}
{% endif %} @@ -545,6 +547,9 @@

{% trans 'Charts' %}

} }); }); + function scrollToElement(id) { + document.getElementById(id).scrollIntoView(); + } {% endif %} {% endblock %} diff --git a/openwisp_monitoring/device/tests/test_admin.py b/openwisp_monitoring/device/tests/test_admin.py index 28d4d5096..19dae190c 100644 --- a/openwisp_monitoring/device/tests/test_admin.py +++ b/openwisp_monitoring/device/tests/test_admin.py @@ -181,7 +181,36 @@ def test_interface_bridge_admin(self): r1 = self.client.get(url, follow=True) self.assertEqual(r1.status_code, 200) self.assertContains(r1, 'Bridge Members') - self.assertContains(r1, 'tap0, wlan0, wlan1') + self.assertContains( + r1, + """ + + tap0 + + """, + html=True, + ) + self.assertContains( + r1, + """ + + wlan0 + + """, + html=True, + ) + self.assertContains( + r1, + """ + + wlan1 + + """, + html=True, + ) self.assertContains(r1, 'Spanning Tree Protocol') def test_interface_mobile_admin(self): From cf67548393162a00e3773ecca8e99605cba185fd Mon Sep 17 00:00:00 2001 From: Federico Capoano Date: Fri, 17 Jun 2022 15:13:21 -0400 Subject: [PATCH 002/133] [fix] Load image using static() in DeviceAdmin.health_checks --- openwisp_monitoring/device/admin.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openwisp_monitoring/device/admin.py b/openwisp_monitoring/device/admin.py index 493f0ce1c..e415e4732 100644 --- a/openwisp_monitoring/device/admin.py +++ b/openwisp_monitoring/device/admin.py @@ -166,9 +166,9 @@ def health_checks(self, obj): metric_rows = [] for metric in DeviceData(pk=obj.pk).metrics.filter(alertsettings__isnull=False): health = 'yes' if metric.is_healthy else 'no' + icon_url = static(f'admin/img/icon-{health}.svg') metric_rows.append( - f'
  • {metric.name}
  • ' + f'
  • {metric.name}
  • ' ) return format_html( mark_safe(f'
      {"".join(metric_rows)}
    ') From bb728e246c9cd4586fdd6dd53d9cce1b051f9b53 Mon Sep 17 00:00:00 2001 From: Gagan Deep Date: Wed, 22 Jun 2022 01:16:02 +0530 Subject: [PATCH 003/133] [fix] Fixed static files path in device status page --- .../admin/config/device/change_form.html | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/openwisp_monitoring/device/templates/admin/config/device/change_form.html b/openwisp_monitoring/device/templates/admin/config/device/change_form.html index 60c99042c..b8c8d451f 100644 --- a/openwisp_monitoring/device/templates/admin/config/device/change_form.html +++ b/openwisp_monitoring/device/templates/admin/config/device/change_form.html @@ -344,7 +344,7 @@

    {% trans 'Interface status' %}: {{ interface.name }}

    - +
    {% endif %} @@ -352,7 +352,7 @@

    {% trans 'Interface status' %}: {{ interface.name }}

    - +
    {% endif %} @@ -360,7 +360,7 @@

    {% trans 'Interface status' %}: {{ interface.name }}

    - +
    {% endif %} @@ -419,19 +419,19 @@

    {% trans 'Interface status' %}: {{ interface.name }}

    {% endif %} - + - + - + - + - + {% endfor %} From 24b5bc6a8a745ec1f0f11090ccbfb08362d86c82 Mon Sep 17 00:00:00 2001 From: Gagan Deep Date: Wed, 22 Jun 2022 19:49:37 +0530 Subject: [PATCH 004/133] [fix] Fixed migrate_timeseries stalling due to retention policy #401 When writing data to the InfluxDB if the measurement points are older than the retention policy, the InfluxDB returns a HTTP 400 response. Retrying this operation will again result in HTTP 400, hence this error is assumed as success. Fixes #401 --- .../migrations/influxdb/influxdb_alter_structure_0006.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/openwisp_monitoring/monitoring/migrations/influxdb/influxdb_alter_structure_0006.py b/openwisp_monitoring/monitoring/migrations/influxdb/influxdb_alter_structure_0006.py index 623ffafeb..406bc4ea2 100644 --- a/openwisp_monitoring/monitoring/migrations/influxdb/influxdb_alter_structure_0006.py +++ b/openwisp_monitoring/monitoring/migrations/influxdb/influxdb_alter_structure_0006.py @@ -66,6 +66,13 @@ def retry_until_success(func, *args, **kwargs): InfluxDBServerError, timeseries_db.client_error, ) as error: + if 'points beyond retention policy dropped' in str(error): + # When writing data to the InfluxDB, if the measurement + # points are older than the retention policy the + # InfluxDB returns a HTTP 400 response. Retrying this + # operation will again result in HTTP 400, hence + # this error is assumed as success. + return True sleep_time *= 2 time.sleep(sleep_time) logger.warning( From 6119ac0dfa7221e7e06d88fdcd628ec0d629a3a7 Mon Sep 17 00:00:00 2001 From: Gagan Deep Date: Thu, 23 Jun 2022 01:15:23 +0530 Subject: [PATCH 005/133] [fix] Fixed static files path in device status page #400 Related to #400 --- .../admin/config/device/change_form.html | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/openwisp_monitoring/device/templates/admin/config/device/change_form.html b/openwisp_monitoring/device/templates/admin/config/device/change_form.html index b8c8d451f..f97c3b20f 100644 --- a/openwisp_monitoring/device/templates/admin/config/device/change_form.html +++ b/openwisp_monitoring/device/templates/admin/config/device/change_form.html @@ -344,7 +344,7 @@

    {% trans 'Interface status' %}: {{ interface.name }}

    - +
    {% endif %} @@ -352,7 +352,7 @@

    {% trans 'Interface status' %}: {{ interface.name }}

    - +
    {% endif %} @@ -360,7 +360,7 @@

    {% trans 'Interface status' %}: {{ interface.name }}

    - +
    {% endif %} @@ -419,19 +419,19 @@

    {% trans 'Interface status' %}: {{ interface.name }}

    {% endif %} - + - + - + - + - + {% endfor %} From efbc45c70c1b7b92cd12e084e2c9154da89a38b4 Mon Sep 17 00:00:00 2001 From: Gagan Deep Date: Sat, 2 Jul 2022 23:52:13 +0530 Subject: [PATCH 006/133] [fix] Fixed monitoring test failing due to openwisp-notification 1.0.2 Bug: In openwisp-notification 1.0.2, the operation for creating notification settings when an organization is created is wrapped in transaction.on_commit. TestMonitoringNotifications.test_cpu_metric_threshold_crossed was failing because of this. Fix: Moved this test to a TransactionTestCase class. --- .../tests/test_monitoring_notifications.py | 46 +++++++++---------- 1 file changed, 23 insertions(+), 23 deletions(-) diff --git a/openwisp_monitoring/monitoring/tests/test_monitoring_notifications.py b/openwisp_monitoring/monitoring/tests/test_monitoring_notifications.py index 3817b5a95..81ffac53e 100644 --- a/openwisp_monitoring/monitoring/tests/test_monitoring_notifications.py +++ b/openwisp_monitoring/monitoring/tests/test_monitoring_notifications.py @@ -106,29 +106,6 @@ def test_resources_metric_threshold_deferred_not_crossed(self): self.create_test_data() self.assertEqual(Notification.objects.count(), 0) - def test_cpu_metric_threshold_crossed(self): - admin = self._create_admin() - org = self._create_org() - device = self._create_device(organization=org) - # creates metric and alert settings - data = self._data() - data['resources']['load'] = [0.99, 0.99, 0.99] - response = self._post_data(device.id, device.key, data) - self.assertEqual(response.status_code, 200) - # retrieve created metric - metric = Metric.objects.get(name='CPU usage') - # simplify test by setting tolerance to 0 - metric.alertsettings.custom_tolerance = 0 - metric.alertsettings.save() - # trigger alert - metric.write(99.0) - self.assertEqual(Notification.objects.count(), 1) - n = Notification.objects.first() - self.assertEqual(n.recipient, admin) - self.assertEqual(n.actor, metric) - self.assertEqual(n.action_object, metric.alertsettings) - self.assertEqual(n.level, 'warning') - def test_general_check_threshold_crossed_for_long_time(self): """ this is going to be the most realistic scenario: @@ -438,6 +415,29 @@ def _check_notification_parameters(self, notification, recepient, metric, target self.assertEqual(notification.level, 'warning') self.assertEqual(notification.verb, 'is not reachable') + def test_cpu_metric_threshold_crossed(self): + admin = self._create_admin() + org = self._create_org() + device = self._create_device(organization=org) + # creates metric and alert settings + data = self._data() + data['resources']['load'] = [0.99, 0.99, 0.99] + response = self._post_data(device.id, device.key, data) + self.assertEqual(response.status_code, 200) + # retrieve created metric + metric = Metric.objects.get(name='CPU usage') + # simplify test by setting tolerance to 0 + metric.alertsettings.custom_tolerance = 0 + metric.alertsettings.save() + # trigger alert + metric.write(99.0) + self.assertEqual(Notification.objects.count(), 1) + n = Notification.objects.first() + self.assertEqual(n.recipient, admin) + self.assertEqual(n.actor, metric) + self.assertEqual(n.action_object, metric.alertsettings) + self.assertEqual(n.level, 'warning') + def test_multiple_notifications(self): testorg = self._create_org() admin = self._create_admin() From b288ba22a9af255983f830c410751ffdc82b8327 Mon Sep 17 00:00:00 2001 From: Gagan Deep Date: Mon, 4 Jul 2022 22:13:27 +0530 Subject: [PATCH 007/133] [docs] Added changelog for 1.0.1 release (cherry picked from commit 
d49f0768a10eccb971f2362b7035ddb7a85b31f0) [skip ci] --- CHANGES.rst | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/CHANGES.rst b/CHANGES.rst index acfeb5149..674a305c3 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -6,6 +6,19 @@ Version 1.1.0 [unreleased] WIP +Version 1.0.1 [2022-07-01] +-------------------------- + +Bugfixes +~~~~~~~~ + +- Removed hardcoded static URLs which created + issues when static files are served using an + external service (e.g. S3 storage buckets) +- Fixed `"migrate_timeseries" command stalling + when measurements exceeds retention policy + `_ + Version 1.0.0 [2022-05-05] -------------------------- From acdc855ced84bdb40c9cf0445e59b10f6334659f Mon Sep 17 00:00:00 2001 From: Federico Capoano Date: Thu, 14 Jul 2022 15:35:45 -0400 Subject: [PATCH 008/133] [change] Added admin_auto_filters to INSTALLED_APPS This feature was added to OpenWISP Controller. --- README.rst | 1 + tests/openwisp2/settings.py | 1 + 2 files changed, 2 insertions(+) diff --git a/README.rst b/README.rst index 5c162920f..e1be061f3 100644 --- a/README.rst +++ b/README.rst @@ -310,6 +310,7 @@ Follow the setup instructions of `openwisp-controller 'openwisp_notifications', # openwisp2 admin theme (must be loaded here) 'openwisp_utils.admin_theme', + 'admin_auto_filters', # admin 'django.contrib.admin', 'django.forms', diff --git a/tests/openwisp2/settings.py b/tests/openwisp2/settings.py index 372eeabc6..e3559e13e 100644 --- a/tests/openwisp2/settings.py +++ b/tests/openwisp2/settings.py @@ -62,6 +62,7 @@ # openwisp2 admin theme # (must be loaded here) 'openwisp_utils.admin_theme', + 'admin_auto_filters', 'django.contrib.admin', 'django.forms', # other dependencies From dc3b3ab5e297f4b78bb2369efe8424bebc644118 Mon Sep 17 00:00:00 2001 From: Federico Capoano Date: Fri, 15 Jul 2022 18:13:43 -0400 Subject: [PATCH 009/133] [docs] Fixed docs/1.1/dashboard-charts.png image [skip ci] --- README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.rst b/README.rst index e1be061f3..a7e9530db 100644 --- a/README.rst +++ b/README.rst @@ -806,7 +806,7 @@ Mobile Access Technology in use Dashboard Monitoring Charts --------------------------- -.. figure:: https://raw.githubusercontent.com/openwisp/openwisp-controller/docs/docs/1.1/dashboard-charts.png +.. figure:: https://github.com/openwisp/openwisp-monitoring/blob/docs/docs/1.1/dashboard-charts.png :align: center OpenWISP Monitoring adds two timeseries charts to the admin dashboard: From 76942a31e0d655a246eb846c94434fc53e14795c Mon Sep 17 00:00:00 2001 From: Yashika Jotwani Date: Tue, 19 Jul 2022 23:45:55 +0530 Subject: [PATCH 010/133] [ux] Show size in (KB, MB or GB) adaptively in charts #87 Fixes #87 Co-authored-by: Federico Capoano Co-authored-by: Sankalp --- README.rst | 36 ++++++--- .../monitoring/configuration.py | 9 +-- .../monitoring/static/monitoring/js/chart.js | 79 +++++++++++++++++++ .../monitoring/tests/test_api.py | 4 +- 4 files changed, 108 insertions(+), 20 deletions(-) diff --git a/README.rst b/README.rst index a7e9530db..2792d49ee 100644 --- a/README.rst +++ b/README.rst @@ -819,6 +819,26 @@ You can configure the interfaces included in the **General traffic chart** using the `"OPENWISP_MONITORING_DASHBOARD_TRAFFIC_CHART" <#openwisp_monitoring_dashboard_traffic_chart>`_ setting. +Adaptive byte charts +-------------------- + +.. 
figure:: https://github.com/openwisp/openwisp-monitoring/raw/docs/docs/1.1/adaptive-chart.png + :align: center + +When configuring charts, it is possible to flag their unit +as ``adaptive_bytes``, this allows to make the charts more readable because +the units are shown in either `B`, `KB`, `MB`, `GB` and `TB` depending on +the size of each point, the summary values and Y axis are also resized. + +Example taken from the default configuration of the traffic chart: + +.. code-block:: python + + 'traffic': { + # other configurations for this chart + 'unit': 'adaptive_bytes', + }, + Monitoring WiFi Sessions ------------------------ @@ -1359,26 +1379,16 @@ This setting allows to define additional charts or to override the default chart configuration defined in ``openwisp_monitoring.monitoring.configuration.DEFAULT_CHARTS``. -For example, if you want to change the traffic chart to show -MB (megabytes) instead of GB (Gigabytes) you can use: +In the following example, we modify the description of the traffic chart: .. code-block:: python OPENWISP_MONITORING_CHARTS = { 'traffic': { - 'unit': ' MB', 'description': ( 'Network traffic, download and upload, measured on ' - 'the interface "{metric.key}", measured in MB.' + 'the interface "{metric.key}", custom message here.' ), - 'query': { - 'influxdb': ( - "SELECT SUM(tx_bytes) / 1000000 AS upload, " - "SUM(rx_bytes) / 1000000 AS download FROM {key} " - "WHERE time >= '{time}' AND content_type = '{content_type}' " - "AND object_id = '{object_id}' GROUP BY time(1d)" - ) - }, } } @@ -1413,7 +1423,7 @@ In case you just want to change the colors used in a chart here's how to do it: OPENWISP_MONITORING_CHARTS = { 'traffic': { - 'colors': ['#000000', '#cccccc'] + 'colors': ['#000000', '#cccccc', '#111111'] } } diff --git a/openwisp_monitoring/monitoring/configuration.py b/openwisp_monitoring/monitoring/configuration.py index 0106508c4..9c097e01c 100644 --- a/openwisp_monitoring/monitoring/configuration.py +++ b/openwisp_monitoring/monitoring/configuration.py @@ -199,14 +199,14 @@ def _get_access_tech(): 'label': _('Traffic'), 'description': _( 'Network traffic (total, download and upload) ' - 'of the interface "{ifname}", measured in GB.' + 'of the interface "{ifname}".' ), 'summary_labels': [ _('Total traffic'), _('Total download traffic'), _('Total upload traffic'), ], - 'unit': _(' GB'), + 'unit': 'adaptive_bytes', 'order': 240, 'query': chart_query['traffic'], 'colors': [ @@ -235,15 +235,14 @@ def _get_access_tech(): 'title': _('General Traffic'), 'label': _('General Traffic'), 'description': _( - 'Network traffic of the whole network' - ' (total, download, upload) measured in GB.' + 'Network traffic of the whole network (total, download, upload).' 
), 'summary_labels': [ _('Total traffic'), _('Total download traffic'), _('Total upload traffic'), ], - 'unit': _(' GB'), + 'unit': 'adaptive_bytes', 'order': 240, 'query': chart_query['general_traffic'], 'query_default_param': { diff --git a/openwisp_monitoring/monitoring/static/monitoring/js/chart.js b/openwisp_monitoring/monitoring/static/monitoring/js/chart.js index c90b65eed..ee6933116 100644 --- a/openwisp_monitoring/monitoring/static/monitoring/js/chart.js +++ b/openwisp_monitoring/monitoring/static/monitoring/js/chart.js @@ -16,6 +16,72 @@ } return newArr; } + + function getAdaptiveScale(value, multiplier, unit) { + if (value == 0) { + multiplier = 1; + unit = 'B'; + } else if (value < 0.001) { + multiplier = 1000000; + unit = 'KB'; + } else if (value < 1) { + multiplier = 1000; + unit = 'MB'; + } else if (value < 1000) { + multiplier = 1; + unit = 'GB'; + } else if (value >= 1000) { + multiplier = 0.001; + unit = 'TB'; + } + return { + multiplier: multiplier, + unit: unit + }; + } + + function getAdaptiveBytes(value, multiplier) { + return Math.round((value * multiplier) * 100) / 100; + } + + function adaptiveFilterPoints(charts, layout, yRawVal) { + var y = charts[0].y, sum = 0, count = 0, shownVal, average; + for (var i=0; i < y.length; i++) { + sum += y[i]; + if (y[i]) { + count++; + } + } + average = sum / count; + var scales = getAdaptiveScale(average, 1, ''); + var multiplier = scales.multiplier, + unit = scales.unit; + for (i=0; i < y.length; i++) { + for (var j=0; j < charts.length; j++) { + if (yRawVal[i] == null) { + charts[j].hovertemplate[i] = 'N/A' + ''; + continue; + } + shownVal = charts[j].y[i]; + charts[j].y[i] = getAdaptiveBytes(charts[j].y[i], multiplier); + var hoverScales = getAdaptiveScale(shownVal, 1, ''); + var hoverMultiplier = hoverScales.multiplier, + hoverUnit = hoverScales.unit; + shownVal = getAdaptiveBytes(shownVal, hoverMultiplier); + charts[j].hovertemplate[i] = shownVal + ' ' + hoverUnit; + } + } + layout.yaxis.title = unit; + } + + function adaptiveFilterSummary(i, percircles, value) { + var scales = getAdaptiveScale(value, 1, ''), + multiplier = scales.multiplier, + unit = scales.unit; + value = getAdaptiveBytes(value, multiplier); + percircles[i].text = value + ' ' + unit; + } + window.createChart = function (data, x, id, title, type, quickLink) { if (data === false) { alert(gettext('error while receiving data from server')); @@ -193,6 +259,15 @@ charts.push(options); } charts = sortByTraceOrder(data.trace_order, charts, '_key'); + + if (unit == 'adaptive_bytes') { + var yRawVal; + for (i=0; i < charts.length; i++) { + yRawVal = data.traces[i][1]; + } + adaptiveFilterPoints(charts, layout, yRawVal); + } + if (fixedY) { layout.yaxis = {range: [0, fixedYMax]}; } Plotly.newPlot(plotlyContainer, charts, layout, {responsive: true}); @@ -265,6 +340,10 @@ percircleOptions.progressBarColor = data.colors[data.trace_order.indexOf(key)]; } percircles.push(percircleOptions); + + if (unit == 'adaptive_bytes') { + adaptiveFilterSummary(i, percircles, value); + } } percircles = sortByTraceOrder(data.trace_order, percircles, '_key'); for (i=0; i Date: Thu, 4 Aug 2022 15:59:21 +0200 Subject: [PATCH 011/133] [fix] Fix creation of checks for existing devices #348 Fixes #348 --- .../check/migrations/0003_create_ping.py | 24 +------ .../migrations/0005_create_config_applied.py | 34 +--------- .../check/migrations/0007_create_checks.py | 62 +++++++++++++++++++ 3 files changed, 64 insertions(+), 56 deletions(-) create mode 100644 
openwisp_monitoring/check/migrations/0007_create_checks.py diff --git a/openwisp_monitoring/check/migrations/0003_create_ping.py b/openwisp_monitoring/check/migrations/0003_create_ping.py index b1a2ef4a6..76e5d417b 100644 --- a/openwisp_monitoring/check/migrations/0003_create_ping.py +++ b/openwisp_monitoring/check/migrations/0003_create_ping.py @@ -1,32 +1,10 @@ -import swapper from django.db import migrations -from openwisp_monitoring.check.settings import AUTO_PING -from openwisp_monitoring.check.tasks import auto_create_ping - - -def create_device_ping(apps, schema_editor): - if AUTO_PING: - ContentType = apps.get_model('contenttypes', 'ContentType') - Check = apps.get_model('check', 'Check') - Device = apps.get_model('config', 'Device') - for device in Device.objects.all(): - auto_create_ping( - model=Device.__name__.lower(), - app_label=Device._meta.app_label, - object_id=str(device.pk), - check_model=Check, - content_type_model=ContentType, - ) - class Migration(migrations.Migration): dependencies = [ ('check', '0001_initial_squashed_0002_check_unique_together'), - swapper.dependency('monitoring', 'Metric'), ] - operations = [ - migrations.RunPython(create_device_ping, reverse_code=migrations.RunPython.noop) - ] + operations = [] diff --git a/openwisp_monitoring/check/migrations/0005_create_config_applied.py b/openwisp_monitoring/check/migrations/0005_create_config_applied.py index 2c64c40fa..8f6486c45 100644 --- a/openwisp_monitoring/check/migrations/0005_create_config_applied.py +++ b/openwisp_monitoring/check/migrations/0005_create_config_applied.py @@ -1,33 +1,5 @@ from django.db import migrations -from openwisp_monitoring.check.settings import AUTO_CONFIG_CHECK -from openwisp_monitoring.check.tasks import auto_create_config_check - - -def add_config_applied_checks(apps, schema_editor): - if not AUTO_CONFIG_CHECK: - return - ContentType = apps.get_model('contenttypes', 'ContentType') - Check = apps.get_model('check', 'Check') - Device = apps.get_model('config', 'Device') - for device in Device.objects.all(): - auto_create_config_check( - model=Device.__name__.lower(), - app_label=Device._meta.app_label, - object_id=str(device.pk), - check_model=Check, - content_type_model=ContentType, - ) - - -def remove_config_applied_checks(apps, schema_editor): - Check = apps.get_model('check', 'Check') - Metric = apps.get_model('monitoring', 'Metric') - Check.objects.filter( - check='openwisp_monitoring.check.classes.ConfigApplied' - ).delete() - Metric.objects.filter(configuration='config_applied').delete() - class Migration(migrations.Migration): @@ -35,8 +7,4 @@ class Migration(migrations.Migration): ('check', '0004_rename_active_to_is_active'), ] - operations = [ - migrations.RunPython( - add_config_applied_checks, reverse_code=remove_config_applied_checks - ) - ] + operations = [] diff --git a/openwisp_monitoring/check/migrations/0007_create_checks.py b/openwisp_monitoring/check/migrations/0007_create_checks.py new file mode 100644 index 000000000..b9cde2631 --- /dev/null +++ b/openwisp_monitoring/check/migrations/0007_create_checks.py @@ -0,0 +1,62 @@ +import swapper +from django.db import migrations + +from openwisp_monitoring.check.settings import AUTO_CONFIG_CHECK, AUTO_PING +from openwisp_monitoring.check.tasks import auto_create_config_check, auto_create_ping + + +def create_ping_checks(apps, schema_editor): + if AUTO_PING: + ContentType = apps.get_model('contenttypes', 'ContentType') + Check = apps.get_model('check', 'Check') + Device = apps.get_model('config', 'Device') + for 
device in Device.objects.all(): + auto_create_ping( + model=Device.__name__.lower(), + app_label=Device._meta.app_label, + object_id=str(device.pk), + check_model=Check, + content_type_model=ContentType, + ) + + +def create_config_applied_checks(apps, schema_editor): + if not AUTO_CONFIG_CHECK: + return + ContentType = apps.get_model('contenttypes', 'ContentType') + Check = apps.get_model('check', 'Check') + Device = apps.get_model('config', 'Device') + for device in Device.objects.all(): + auto_create_config_check( + model=Device.__name__.lower(), + app_label=Device._meta.app_label, + object_id=str(device.pk), + check_model=Check, + content_type_model=ContentType, + ) + + +def remove_config_applied_checks(apps, schema_editor): + Check = apps.get_model('check', 'Check') + Metric = apps.get_model('monitoring', 'Metric') + Check.objects.filter( + check='openwisp_monitoring.check.classes.ConfigApplied' + ).delete() + Metric.objects.filter(configuration='config_applied').delete() + + +class Migration(migrations.Migration): + + dependencies = [ + ('check', '0006_rename_check_check_check_type'), + swapper.dependency('monitoring', 'Metric'), + ] + + operations = [ + migrations.RunPython( + create_ping_checks, reverse_code=migrations.RunPython.noop + ), + migrations.RunPython( + create_config_applied_checks, reverse_code=remove_config_applied_checks + ), + ] From e563520c7765d4e638f8e65b8b8e4095fa2fc4cc Mon Sep 17 00:00:00 2001 From: Gagan Deep Date: Mon, 8 Aug 2022 15:23:15 +0530 Subject: [PATCH 012/133] [feature] Added setting to configure default retention policy Added setting to configure default retention policy for the timeseries database. --- README.rst | 11 +++++++++++ .../db/backends/influxdb/tests.py | 19 +++++++++++++++---- openwisp_monitoring/device/apps.py | 7 ++++++- openwisp_monitoring/device/settings.py | 1 + openwisp_monitoring/device/utils.py | 9 +++++++++ 5 files changed, 42 insertions(+), 5 deletions(-) diff --git a/README.rst b/README.rst index 2792d49ee..cdb642dbf 100644 --- a/README.rst +++ b/README.rst @@ -942,6 +942,17 @@ if there's anything that is not working as intended. Settings -------- +``OPENWISP_MONITORING_DEFAULT_RETENTION_POLICY`` +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + ++--------------+--------------------------+ +| **type**: | ``str`` | ++--------------+--------------------------+ +| **default**: | ``26280h0m0s`` (3 years) | ++--------------+--------------------------+ + +The default retention policy that applies to the timeseries data. 
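For example, a deployment that only needs to keep timeseries data for roughly one year could override this in its Django ``settings.py``; the value below is just an illustrative sketch using the same InfluxDB duration format as the default shown above.

.. code-block:: python

    # settings.py: shorten the default retention policy to ~1 year
    # (duration string in the same format as the default '26280h0m0s')
    OPENWISP_MONITORING_DEFAULT_RETENTION_POLICY = '8760h0m0s'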
+ ``OPENWISP_MONITORING_SHORT_RETENTION_POLICY`` ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/openwisp_monitoring/db/backends/influxdb/tests.py b/openwisp_monitoring/db/backends/influxdb/tests.py index 847cc39e1..4ca7ad752 100644 --- a/openwisp_monitoring/db/backends/influxdb/tests.py +++ b/openwisp_monitoring/db/backends/influxdb/tests.py @@ -11,8 +11,16 @@ from pytz import timezone as tz from swapper import load_model -from openwisp_monitoring.device.settings import SHORT_RETENTION_POLICY -from openwisp_monitoring.device.utils import SHORT_RP, manage_short_retention_policy +from openwisp_monitoring.device.settings import ( + DEFAULT_RETENTION_POLICY, + SHORT_RETENTION_POLICY, +) +from openwisp_monitoring.device.utils import ( + DEFAULT_RP, + SHORT_RP, + manage_default_retention_policy, + manage_short_retention_policy, +) from openwisp_monitoring.monitoring.tests import TestMonitoringMixin from openwisp_monitoring.settings import MONITORING_TIMESERIES_RETRY_OPTIONS from openwisp_utils.tests import capture_stderr @@ -183,12 +191,15 @@ def test_get_query_30d(self): def test_retention_policy(self): manage_short_retention_policy() + manage_default_retention_policy() rp = timeseries_db.get_list_retention_policies() self.assertEqual(len(rp), 2) + self.assertEqual(rp[0]['name'], DEFAULT_RP) + self.assertEqual(rp[0]['default'], True) + self.assertEqual(rp[0]['duration'], DEFAULT_RETENTION_POLICY) self.assertEqual(rp[1]['name'], SHORT_RP) self.assertEqual(rp[1]['default'], False) - duration = SHORT_RETENTION_POLICY - self.assertEqual(rp[1]['duration'], duration) + self.assertEqual(rp[1]['duration'], SHORT_RETENTION_POLICY) def test_query_set(self): c = self._create_chart(configuration='histogram') diff --git a/openwisp_monitoring/device/apps.py b/openwisp_monitoring/device/apps.py index 510629de1..d7370942a 100644 --- a/openwisp_monitoring/device/apps.py +++ b/openwisp_monitoring/device/apps.py @@ -24,7 +24,11 @@ from ..utils import transaction_on_commit from . import settings as app_settings from .signals import device_metrics_received, health_status_changed -from .utils import get_device_cache_key, manage_short_retention_policy +from .utils import ( + get_device_cache_key, + manage_default_retention_policy, + manage_short_retention_policy, +) class DeviceMonitoringConfig(AppConfig): @@ -33,6 +37,7 @@ class DeviceMonitoringConfig(AppConfig): verbose_name = _('Device Monitoring') def ready(self): + manage_default_retention_policy() manage_short_retention_policy() self.connect_is_working_changed() self.connect_device_signals() diff --git a/openwisp_monitoring/device/settings.py b/openwisp_monitoring/device/settings.py index 8a8250552..d239e3eac 100644 --- a/openwisp_monitoring/device/settings.py +++ b/openwisp_monitoring/device/settings.py @@ -43,6 +43,7 @@ def get_health_status_labels(): SHORT_RETENTION_POLICY = get_settings_value('SHORT_RETENTION_POLICY', '24h0m0s') +DEFAULT_RETENTION_POLICY = get_settings_value('DEFAULT_RETENTION_POLICY', '26280h0m0s') CRITICAL_DEVICE_METRICS = get_critical_device_metrics() HEALTH_STATUS_LABELS = get_health_status_labels() AUTO_CLEAR_MANAGEMENT_IP = get_settings_value('AUTO_CLEAR_MANAGEMENT_IP', True) diff --git a/openwisp_monitoring/device/utils.py b/openwisp_monitoring/device/utils.py index 3814d97ff..151b62609 100644 --- a/openwisp_monitoring/device/utils.py +++ b/openwisp_monitoring/device/utils.py @@ -2,6 +2,7 @@ from . 
import settings as app_settings SHORT_RP = 'short' +DEFAULT_RP = 'autogen' def get_device_cache_key(device, context='react-to-updates'): @@ -14,3 +15,11 @@ def manage_short_retention_policy(): """ duration = app_settings.SHORT_RETENTION_POLICY timeseries_db.create_or_alter_retention_policy(SHORT_RP, duration) + + +def manage_default_retention_policy(): + """ + creates or updates the "default" retention policy + """ + duration = app_settings.DEFAULT_RETENTION_POLICY + timeseries_db.create_or_alter_retention_policy(DEFAULT_RP, duration) From 52cf287a31026874fe73948badbbe174adfd250d Mon Sep 17 00:00:00 2001 From: Gagan Deep Date: Thu, 18 Aug 2022 20:19:13 +0530 Subject: [PATCH 013/133] [change] Timeseries data is deleted when device is deleted #419 Closes #419 --- .../device/tests/test_models.py | 23 +++++++++++++++++++ openwisp_monitoring/monitoring/apps.py | 12 +++++++++- openwisp_monitoring/monitoring/base/models.py | 6 ++++- openwisp_monitoring/monitoring/tasks.py | 5 ++++ .../monitoring/tests/test_models.py | 13 +++++++++++ 5 files changed, 57 insertions(+), 2 deletions(-) diff --git a/openwisp_monitoring/device/tests/test_models.py b/openwisp_monitoring/device/tests/test_models.py index 8b917bcea..b385e3477 100644 --- a/openwisp_monitoring/device/tests/test_models.py +++ b/openwisp_monitoring/device/tests/test_models.py @@ -649,6 +649,29 @@ def test_unknown_critical(self): dm.refresh_from_db() self.assertEqual(dm.status, 'critical') + def test_deleting_device_deletes_tsdb(self): + dm1, ping1, _, _ = self._create_env() + device2 = self._create_device( + name='default.test.device2', + mac_address='22:33:44:55:66:77', + organization=dm1.device.organization, + ) + dm2 = device2.monitoring + dm2.status = 'ok' + dm2.save() + ping2 = self._create_object_metric( + name='ping', key='ping', field_name='reachable', content_object=device2 + ) + ping1.write(0) + ping2.write(0) + self.assertNotEqual(ping1.read(), []) + self.assertNotEqual(ping2.read(), []) + dm1.device.delete() + # Only the metric related to the deleted device + # is deleted + self.assertEqual(ping1.read(), []) + self.assertNotEqual(ping2.read(), []) + class TestWifiClientSession(TestWifiClientSessionMixin, TestCase): wifi_client_model = WifiClient diff --git a/openwisp_monitoring/monitoring/apps.py b/openwisp_monitoring/monitoring/apps.py index 1200b833d..b66d6596f 100644 --- a/openwisp_monitoring/monitoring/apps.py +++ b/openwisp_monitoring/monitoring/apps.py @@ -1,7 +1,8 @@ from django.apps import AppConfig from django.conf import settings +from django.db.models.signals import post_delete from django.utils.translation import gettext_lazy as _ -from swapper import get_model_name +from swapper import get_model_name, load_model from openwisp_utils.admin_theme.menu import register_menu_group @@ -21,6 +22,7 @@ def ready(self): for metric_name, metric_config in metrics.items(): register_metric_notifications(metric_name, metric_config) self.register_menu_groups() + self.connect_metric_signals() def register_menu_groups(self): register_menu_group( @@ -44,3 +46,11 @@ def register_menu_groups(self): 'icon': 'ow-monitoring', }, ) + + def connect_metric_signals(self): + Metric = load_model('monitoring', 'Metric') + post_delete.connect( + Metric.post_delete_receiver, + sender=Metric, + dispatch_uid='metric_post_delete_receiver', + ) diff --git a/openwisp_monitoring/monitoring/base/models.py b/openwisp_monitoring/monitoring/base/models.py index 863e46a3f..7f723f6fc 100644 --- a/openwisp_monitoring/monitoring/base/models.py +++ 
b/openwisp_monitoring/monitoring/base/models.py @@ -33,7 +33,7 @@ ) from ..exceptions import InvalidChartConfigException, InvalidMetricConfigException from ..signals import pre_metric_write, threshold_crossed -from ..tasks import timeseries_write +from ..tasks import delete_timeseries, timeseries_write User = get_user_model() logger = logging.getLogger(__name__) @@ -115,6 +115,10 @@ def full_clean(self, *args, **kwargs): self.key = self._makekey(self.key) return super().full_clean(*args, **kwargs) + @classmethod + def post_delete_receiver(cls, instance, *args, **kwargs): + delete_timeseries.delay(instance.key, instance.tags) + @classmethod def _get_or_create(cls, **kwargs): """ diff --git a/openwisp_monitoring/monitoring/tasks.py b/openwisp_monitoring/monitoring/tasks.py index 477d99c7c..1071eaae1 100644 --- a/openwisp_monitoring/monitoring/tasks.py +++ b/openwisp_monitoring/monitoring/tasks.py @@ -36,6 +36,11 @@ def timeseries_write( post_metric_write.send(**signal_kwargs) +@shared_task +def delete_timeseries(key, tags): + timeseries_db.delete_series(key=key, tags=tags) + + @shared_task def migrate_timeseries_database(): """ diff --git a/openwisp_monitoring/monitoring/tests/test_models.py b/openwisp_monitoring/monitoring/tests/test_models.py index 6cc731f09..30cdb8d4e 100644 --- a/openwisp_monitoring/monitoring/tests/test_models.py +++ b/openwisp_monitoring/monitoring/tests/test_models.py @@ -349,3 +349,16 @@ def test_get_time_str(self): m = self._create_general_metric(name='load') now = timezone.now() self.assertEqual(m._get_time(now.isoformat()), now) + + def test_deleting_metric_deletes_timeseries(self): + metric1 = self._create_general_metric(name='load') + metric2 = self._create_general_metric(name='traffic') + metric1.write(99) + metric2.write(5000) + self.assertNotEqual(metric1.read(), []) + self.assertNotEqual(metric2.read(), []) + metric1.delete() + self.assertEqual(metric1.read(), []) + # Only the timeseries data related to the deleted metric + # should be deleted + self.assertNotEqual(metric2.read(), []) From 4ef6fba9b54a2f041dc5efb4b2a738c66564980e Mon Sep 17 00:00:00 2001 From: Gagan Deep Date: Wed, 31 Aug 2022 20:35:06 +0530 Subject: [PATCH 014/133] [fix] Fixed traffic charts showing wrong traffic total #415 The traffic charts were showing the wrong total because of the approximation (round-off) done in the backend to decrease the the precision of values to two decimal places. After attempting to fix this in the backend we concluded that it would have been best fixed with JS code in the frontend. The total amount of traffic is now calculated entirely at client side level. 
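To make the rounding discrepancy concrete, a minimal illustration with invented numbers follows: a total computed from full-precision values can differ from a total computed from per-trace values that were already rounded to two decimal places, which is why the total trace is now derived client side from the same values the chart displays.

.. code-block:: python

    # invented example values, in GB
    download, upload = 1.226, 2.226
    # total computed from the full-precision values
    exact_total = round(download + upload, 2)                 # 3.45
    # total computed from values already rounded to two decimals
    displayed_total = round(download, 2) + round(upload, 2)   # 1.23 + 2.23 = 3.46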
Closes #415 --- openwisp_monitoring/db/backends/influxdb/queries.py | 6 ++---- openwisp_monitoring/device/tests/test_api.py | 12 ++---------- openwisp_monitoring/monitoring/base/models.py | 5 +++++ openwisp_monitoring/monitoring/configuration.py | 2 ++ .../monitoring/static/monitoring/js/chart.js | 12 ++++++++++++ openwisp_monitoring/monitoring/tests/test_api.py | 5 ++--- openwisp_monitoring/monitoring/tests/test_charts.py | 1 + openwisp_monitoring/views.py | 2 ++ 8 files changed, 28 insertions(+), 17 deletions(-) diff --git a/openwisp_monitoring/db/backends/influxdb/queries.py b/openwisp_monitoring/db/backends/influxdb/queries.py index 11f048096..23a4b8ee9 100644 --- a/openwisp_monitoring/db/backends/influxdb/queries.py +++ b/openwisp_monitoring/db/backends/influxdb/queries.py @@ -39,8 +39,7 @@ 'traffic': { 'influxdb': ( "SELECT SUM(tx_bytes) / 1000000000 AS upload, " - "SUM(rx_bytes) / 1000000000 AS download, " - "((SUM(tx_bytes) + SUM(rx_bytes)) / 1000000000) AS total FROM {key} " + "SUM(rx_bytes) / 1000000000 AS download FROM {key} " "WHERE time >= '{time}' AND content_type = '{content_type}' " "AND object_id = '{object_id}' AND ifname = '{ifname}' " "GROUP BY time(1d)" @@ -49,8 +48,7 @@ 'general_traffic': { 'influxdb': ( "SELECT SUM(tx_bytes) / 1000000000 AS upload, " - "SUM(rx_bytes) / 1000000000 AS download, " - "((SUM(tx_bytes) + SUM(rx_bytes)) / 1000000000) AS total FROM {key} " + "SUM(rx_bytes) / 1000000000 AS download FROM {key} " "WHERE time >= '{time}' {organization_id} {location_id} " "{floorplan_id} {ifname} " "GROUP BY time(1d)" diff --git a/openwisp_monitoring/device/tests/test_api.py b/openwisp_monitoring/device/tests/test_api.py index 20729a72d..95eaf4eac 100644 --- a/openwisp_monitoring/device/tests/test_api.py +++ b/openwisp_monitoring/device/tests/test_api.py @@ -198,10 +198,8 @@ def test_200_multiple_measurements(self): data = c.read() # expected download wlan0 self.assertEqual(data['traces'][0][1][-1], 1.2) - # expected total wlan0 - self.assertEqual(data['traces'][1][1][-1], 1.8) # expected upload wlan0 - self.assertEqual(data['traces'][2][1][-1], 0.6) + self.assertEqual(data['traces'][1][1][-1], 0.6) # wlan1 traffic m = self.metric_queryset.get(name='wlan1 traffic', object_id=dd.pk) points = m.read(limit=10, order='-time', extra_fields=['tx_bytes']) @@ -216,10 +214,8 @@ def test_200_multiple_measurements(self): data = c.read() # expected download wlan1 self.assertEqual(data['traces'][0][1][-1], 3.0) - # expected total wlan1 - self.assertEqual(data['traces'][1][1][-1], 4.5) # expected upload wlan1 - self.assertEqual(data['traces'][2][1][-1], 1.5) + self.assertEqual(data['traces'][1][1][-1], 1.5) def test_200_no_date_supplied(self): o = self._create_org() @@ -339,10 +335,8 @@ def test_get_device_metrics_csv(self): 'wifi_clients - WiFi clients: wlan0', 'wifi_clients - WiFi clients: wlan1', 'download - Traffic: wlan0', - 'total - Traffic: wlan0', 'upload - Traffic: wlan0', 'download - Traffic: wlan1', - 'total - Traffic: wlan1', 'upload - Traffic: wlan1', 'memory_usage - Memory Usage', 'CPU_load - CPU Load', @@ -357,10 +351,8 @@ def test_get_device_metrics_csv(self): '1', '2', '0.4', - '0.5', '0.1', '2.0', - '3.0', '1.0', '9.73', '0.0', diff --git a/openwisp_monitoring/monitoring/base/models.py b/openwisp_monitoring/monitoring/base/models.py index 7f723f6fc..ab1df2074 100644 --- a/openwisp_monitoring/monitoring/base/models.py +++ b/openwisp_monitoring/monitoring/base/models.py @@ -440,6 +440,10 @@ def trace_type(self): def trace_order(self): return 
self.config_dict.get('trace_order', []) + @property + def calculate_total(self): + return self.config_dict.get('calculate_total', False) + @property def description(self): return self.config_dict['description'].format( @@ -625,6 +629,7 @@ def json(self, time=DEFAULT_TIME, **kwargs): 'unit': self.unit, 'trace_type': self.trace_type, 'trace_order': self.trace_order, + 'calculate_total': self.calculate_total, 'colors': self.colors, } ) diff --git a/openwisp_monitoring/monitoring/configuration.py b/openwisp_monitoring/monitoring/configuration.py index 9c097e01c..36ab9d9dc 100644 --- a/openwisp_monitoring/monitoring/configuration.py +++ b/openwisp_monitoring/monitoring/configuration.py @@ -189,6 +189,7 @@ def _get_access_tech(): 'charts': { 'traffic': { 'type': 'stackedbar+lines', + 'calculate_total': True, 'trace_type': { 'download': 'stackedbar', 'upload': 'stackedbar', @@ -226,6 +227,7 @@ def _get_access_tech(): 'charts': { 'general_traffic': { 'type': 'stackedbar+lines', + 'calculate_total': True, 'trace_type': { 'download': 'stackedbar', 'upload': 'stackedbar', diff --git a/openwisp_monitoring/monitoring/static/monitoring/js/chart.js b/openwisp_monitoring/monitoring/static/monitoring/js/chart.js index ee6933116..8a11ab9e8 100644 --- a/openwisp_monitoring/monitoring/static/monitoring/js/chart.js +++ b/openwisp_monitoring/monitoring/static/monitoring/js/chart.js @@ -155,6 +155,18 @@ } return {color: color, desc: desc}; } + if (data.calculate_total === true) { + var total = data.traces[0][1].slice(); + for (i = 1; i < data.traces.length; ++i) { + for (var j = 0; j < data.traces[i][1].length; ++j) { + total[j] += data.traces[i][1][j]; + } + } + data.traces.push(["total", total]); + data.summary.total = Object.values(data.summary).reduce(function (a, b) { + return a + b; + }, 0); + } // loop over traces to put them on the chart for (var i=0; i Date: Tue, 13 Sep 2022 13:59:29 +0200 Subject: [PATCH 015/133] [fix] Fixed mobile signal missing case In some cases, the mobile signal info may be missing, therefore we need the code to be resilient and not crash in this case (simply do not try to write mobile signal data). 
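Circling back to the ``calculate_total`` flag added to the chart configuration above: assuming the ``OPENWISP_MONITORING_CHARTS`` override mechanism shown earlier in this series, a custom deployment could opt a chart into the client-side total with a sketch like the following (not part of the patch itself).

.. code-block:: python

    OPENWISP_MONITORING_CHARTS = {
        'traffic': {
            # ask the frontend to compute and display the "total" trace
            # by summing the chart's other traces client side
            'calculate_total': True,
        }
    }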
--- openwisp_monitoring/device/api/views.py | 4 +++ openwisp_monitoring/device/tests/test_api.py | 36 ++++++++++++++++++++ 2 files changed, 40 insertions(+) diff --git a/openwisp_monitoring/device/api/views.py b/openwisp_monitoring/device/api/views.py index 04bac7890..e8d1055ab 100644 --- a/openwisp_monitoring/device/api/views.py +++ b/openwisp_monitoring/device/api/views.py @@ -207,6 +207,8 @@ def _get_extra_tags(self, device): return tags def _get_mobile_signal_type(self, signal): + if not signal: + return # if only one access technology in use, return that sections = list(signal.keys()) if len(sections) == 1: @@ -221,6 +223,8 @@ def _get_mobile_signal_type(self, signal): def _write_mobile_signal(self, interface, ifname, ct, pk, current=False, time=None): access_type = self._get_mobile_signal_type(interface['mobile']['signal']) + if not access_type: + return data = interface['mobile']['signal'][access_type] signal_power = signal_strength = None extra_values = {} diff --git a/openwisp_monitoring/device/tests/test_api.py b/openwisp_monitoring/device/tests/test_api.py index 95eaf4eac..8fb0c3549 100644 --- a/openwisp_monitoring/device/tests/test_api.py +++ b/openwisp_monitoring/device/tests/test_api.py @@ -906,6 +906,42 @@ def test_gsm_charts(self): self.assertEqual(charts[0]['summary']['signal_power'], None) self.assertEqual(charts[0]['summary']['signal_strength'], -70.0) + def test_mobile_signal_missing(self): + org = self._create_org() + device = self._create_device(organization=org) + data = { + 'type': 'DeviceMonitoring', + 'interfaces': [ + { + 'name': 'mobile0', + 'mac': '00:00:00:00:00:00', + 'mtu': 1900, + 'multicast': True, + 'txqueuelen': 1000, + 'type': 'modem-manager', + 'up': True, + 'mobile': { + 'connection_status': 'connected', + 'imei': '865847055230161', + 'manufacturer': 'QUALCOMM INCORPORATED', + 'model': 'QUECTEL Mobile Broadband Module', + 'operator_code': '22250', + 'operator_name': 'Iliad', + 'power_status': 'on', + 'signal': {}, + }, + } + ], + } + self._post_data(device.id, device.key, data) + response = self.client.get(self._url(device.pk.hex, device.key)) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.data['charts'], []) + dd = DeviceData(name=device.name, pk=device.pk) + self.assertEqual( + dd.data['interfaces'][0]['mobile'], data['interfaces'][0]['mobile'] + ) + def test_pre_metric_write_signal(self): d = self._create_device(organization=self._create_org()) data = {'type': 'DeviceMonitoring', 'resources': {'cpus': 1, 'load': [0, 0, 0]}} From f75d62e577cb4ca4eac0377078edc09101b308af Mon Sep 17 00:00:00 2001 From: Federico Capoano Date: Thu, 15 Sep 2022 20:28:37 +0200 Subject: [PATCH 016/133] [docs] Improved explanation of time parameter in REST API --- README.rst | 48 ++++++++++++++++++++++++++++++++++++------------ 1 file changed, 36 insertions(+), 12 deletions(-) diff --git a/README.rst b/README.rst index cdb642dbf..202660ae7 100644 --- a/README.rst +++ b/README.rst @@ -1793,46 +1793,70 @@ Retrieve general monitoring charts GET /api/v1/monitoring/dashboard/ This API endpoint is used to show dashboard monitoring charts. It supports -multi-tenancy and allows filtering monitoring data with ``organization_slug``, +multi-tenancy and allows filtering monitoring data by ``organization_slug``, ``location_id`` and ``floorplan_id`` e.g.: .. 
code-block:: text GET /api/v1/monitoring/dashboard/?organization_slug=,&location_id=,&floorplan_id=, +Another parameter that can be used is ``time``, which allows to specify the +time frame, the allowed values are: + +- ``1d``: returns data of the last day +- ``3d``: returns data of the last 3 days +- ``7d``: returns data of the last 7 days +- ``30d``: returns data of the last 30 days +- ``365d``: returns data of the last 365 days + Retrieve device charts and device status data ############################################# .. code-block:: text - GET /api/v1/monitoring/device/{pk}/?key={key}&status=true + GET /api/v1/monitoring/device/{pk}/?key={key}&status=true&time={timeframe} + +The format used for Device Status is inspired by +`NetJSON DeviceMonitoring `_. + +**Notes**: -The format used for Device Status is inspired by `NetJSON DeviceMonitoring `_. +- If the request is made without ``?status=true`` then only device charts data would be returned. +- When retrieving data, the ``time`` parameter allows to specify the time frame, the allowed values are: -**Note**: If the request is made without ``?status=true`` then only device charts -data would be returned. + - ``1d``: returns data of the last day + - ``3d``: returns data of the last 3 days + - ``7d``: returns data of the last 7 days + - ``30d``: returns data of the last 30 days + - ``365d``: returns data of the last 365 days Collect device metrics and status ################################# .. code-block:: text - POST /api/v1/monitoring/device/{pk}/?key={key}&time={time} + POST /api/v1/monitoring/device/{pk}/?key={key}&time={datetime} If data is latest then an additional parameter current can also be passed. For e.g.: .. code-block:: text - POST /api/v1/monitoring/device/{pk}/?key={key}&time={time}¤t=true + POST /api/v1/monitoring/device/{pk}/?key={key}&time={datetime}¤t=true -The format used for Device Status is inspired by `NetJSON DeviceMonitoring `_. +The format used for Device Status is inspired by +`NetJSON DeviceMonitoring `_. -**Note**: Device data will be saved with in timeseries database with the specified ``time``, -this should be in the format ``%d-%m-%Y_%H:%M:%S.%f``, otherwise 400 Bad Response will be returned. +**Note**: the device data will be saved in the timeseries database using +the date time specified ``time``, this should be in the format +``%d-%m-%Y_%H:%M:%S.%f``, otherwise 400 Bad Response will be returned. -If the request is made without passing the ``time`` argument, the server local time will be used. +If the request is made without passing the ``time`` argument, +the server local time will be used. -The ``time`` parameter was added to support `resilient collection and sending of data by the OpenWISP Monitoring Agent `_. +The ``time`` parameter was added to support `resilient collection +and sending of data by the OpenWISP Monitoring Agent +`_, +this feature allows sending data collected while the device is offline. Signals ------- From 0a8bdab80de07cdea0df39bca22b34deb4b2caa1 Mon Sep 17 00:00:00 2001 From: Gagan Deep Date: Mon, 26 Sep 2022 21:17:50 +0530 Subject: [PATCH 017/133] [fix] Fixed WifiSession dashboard pie chart The WifiSession pie chart on the dashboard was not filtering the queryset by the related device's organization. 
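As a practical complement to the REST API endpoints documented in the previous patch, a hypothetical client based on ``requests`` could push collected metrics with an explicit ``time`` parameter as sketched below; the host, device UUID and key are placeholders, and the payload is the minimal example used in the test suite.

.. code-block:: python

    import requests
    from datetime import datetime

    # placeholders: replace with the real host, device UUID and device key
    url = 'https://openwisp.example.com/api/v1/monitoring/device/<device-uuid>/'
    params = {
        'key': '<device-key>',
        # datetime format documented above
        'time': datetime.utcnow().strftime('%d-%m-%Y_%H:%M:%S.%f'),
    }
    payload = {'type': 'DeviceMonitoring', 'resources': {'cpus': 1, 'load': [0, 0, 0]}}
    response = requests.post(url, params=params, json=payload)
    response.raise_for_status()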
See also https://github.com/openwisp/openwisp-utils/pull/313 --- openwisp_monitoring/device/apps.py | 1 + openwisp_monitoring/device/tests/test_admin.py | 14 ++++++++++++++ requirements-test.txt | 2 +- requirements.txt | 2 +- 4 files changed, 17 insertions(+), 2 deletions(-) diff --git a/openwisp_monitoring/device/apps.py b/openwisp_monitoring/device/apps.py index d7370942a..f29452b21 100644 --- a/openwisp_monitoring/device/apps.py +++ b/openwisp_monitoring/device/apps.py @@ -324,6 +324,7 @@ def register_dashboard_items(self): 'aggregate': { 'active__sum': Sum('active'), }, + 'organization_field': 'device__organization_id', }, 'filters': { 'key': 'stop_time__isnull', diff --git a/openwisp_monitoring/device/tests/test_admin.py b/openwisp_monitoring/device/tests/test_admin.py index 19dae190c..c2bdf1b9a 100644 --- a/openwisp_monitoring/device/tests/test_admin.py +++ b/openwisp_monitoring/device/tests/test_admin.py @@ -528,3 +528,17 @@ def test_deleting_device_with_wifisessions(self): response = self.client.post(path, {'post': 'yes'}, follow=True) self.assertEqual(response.status_code, 200) self.assertEqual(Device.objects.count(), 0) + + def test_dashboard_wifi_session_chart(self): + org1 = self._create_org(name='org1', slug='org1') + org1_device = self._create_device(organization=org1) + self._create_wifi_session(device=org1_device) + org2 = self._create_org(name='org2', slug='org2') + administrator = self._create_administrator([org2]) + self.client.force_login(administrator) + response = self.client.get(reverse('admin:index')) + self.assertEqual(response.status_code, 200) + self.assertDictEqual( + response.context['dashboard_charts'][13]['query_params'], + {'labels': [], 'values': []}, + ) diff --git a/requirements-test.txt b/requirements-test.txt index 686c61121..23c826532 100644 --- a/requirements-test.txt +++ b/requirements-test.txt @@ -1,4 +1,4 @@ -openwisp-utils[qa] @ https://github.com/openwisp/openwisp-utils/tarball/master +openwisp-utils[qa] @ https://github.com/openwisp/openwisp-utils/tarball/5a8f1dea187586d85c77b46e6e3fd725f557e3c6 redis~=3.5.3 django-redis~=4.12.1 mock-ssh-server~=0.9.0 diff --git a/requirements.txt b/requirements.txt index 10a2d9e97..b4aeedc58 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,4 +4,4 @@ django-cache-memoize~=0.1.0 django-nested-admin~=3.4.0 netaddr~=0.8 python-dateutil>=2.7,<3.0 -openwisp-utils[rest] @ https://github.com/openwisp/openwisp-utils/tarball/master +openwisp-utils[rest] @ https://github.com/openwisp/openwisp-utils/tarball/5a8f1dea187586d85c77b46e6e3fd725f557e3c6 From 17cedfbb030111a92e3102ec18df0540e73438d8 Mon Sep 17 00:00:00 2001 From: Federico Capoano Date: Mon, 26 Sep 2022 19:20:57 +0200 Subject: [PATCH 018/133] [deps] Pin down openwisp-controller dev version --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index b4aeedc58..41bea508b 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,4 @@ -openwisp-controller @ https://github.com/openwisp/openwisp-controller/tarball/master +openwisp-controller @ https://github.com/openwisp/openwisp-controller/tarball/61ab5c0e96e6ed0a5d789538a92d13f310250b41 influxdb~=5.3.1 django-cache-memoize~=0.1.0 django-nested-admin~=3.4.0 From b7e96fb60e3878ecfd5f5b4df592042c5ee15302 Mon Sep 17 00:00:00 2001 From: Federico Capoano Date: Tue, 11 Oct 2022 11:30:10 +0200 Subject: [PATCH 019/133] [docs] Added link to demo [skip ci] --- README.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/README.rst b/README.rst 
index 202660ae7..71ad255dd 100644 --- a/README.rst +++ b/README.rst @@ -35,6 +35,8 @@ openwisp-monitoring ------------ +**Need a quick overview?** `Try the OpenWISP Demo `_. + OpenWISP Monitoring is a network monitoring system written in Python and Django, designed to be **extensible**, **programmable**, **scalable** and easy to use by end users: once the system is configured, monitoring checks, alerts and metric collection From 6b4621a88493311f2249f7f24c3db9a4fb6dcd3e Mon Sep 17 00:00:00 2001 From: Federico Capoano Date: Mon, 17 Oct 2022 12:20:56 -0300 Subject: [PATCH 020/133] [deps] Switch back to openwisp-utils master to fix python 3.7 issue --- requirements-test.txt | 2 +- requirements.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements-test.txt b/requirements-test.txt index 23c826532..686c61121 100644 --- a/requirements-test.txt +++ b/requirements-test.txt @@ -1,4 +1,4 @@ -openwisp-utils[qa] @ https://github.com/openwisp/openwisp-utils/tarball/5a8f1dea187586d85c77b46e6e3fd725f557e3c6 +openwisp-utils[qa] @ https://github.com/openwisp/openwisp-utils/tarball/master redis~=3.5.3 django-redis~=4.12.1 mock-ssh-server~=0.9.0 diff --git a/requirements.txt b/requirements.txt index 41bea508b..d3067bd26 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,4 +4,4 @@ django-cache-memoize~=0.1.0 django-nested-admin~=3.4.0 netaddr~=0.8 python-dateutil>=2.7,<3.0 -openwisp-utils[rest] @ https://github.com/openwisp/openwisp-utils/tarball/5a8f1dea187586d85c77b46e6e3fd725f557e3c6 +openwisp-utils[rest] @ https://github.com/openwisp/openwisp-utils/tarball/master From 11b31ffea2e32b93085e04b5fcb806682cbecc58 Mon Sep 17 00:00:00 2001 From: Federico Capoano Date: Thu, 20 Oct 2022 10:07:29 -0300 Subject: [PATCH 021/133] [feature] Iperf3 #385 #390 #398 #399 #406 #405 #412 #414 #417 #418 #422 #424 #426 #406 #435 #416 #391 #443 Added Iperf3 check to perform active measurements of the maximum achievable bandwidth Closes #385 Closes #390 Closes #398 Closes #399 Closes #406 Closes #405 Closes #412 Closes #414 Closes #417 Closes #418 Closes #422 Closes #424 Closes #426 Closes #406 Closes #435 Closes #416 Closes #391 Closes #443 Signed-off-by: Aryaman Co-authored-by: Aryaman Co-authored-by: Federico Capoano Co-authored-by: Gagan Deep --- README.rst | 552 ++++++++- openwisp_monitoring/check/apps.py | 8 + openwisp_monitoring/check/base/models.py | 44 +- openwisp_monitoring/check/classes/__init__.py | 1 + openwisp_monitoring/check/classes/iperf3.py | 548 +++++++++ .../migrations/0008_alter_check_options.py | 24 + .../0009_add_check_inline_permissions.py | 19 + .../check/migrations/__init__.py | 34 + openwisp_monitoring/check/settings.py | 8 + openwisp_monitoring/check/tasks.py | 48 +- .../check/tests/iperf3_test_utils.py | 1078 +++++++++++++++++ .../check/tests/test_iperf3.py | 783 ++++++++++++ .../check/tests/test_models.py | 50 +- openwisp_monitoring/check/tests/test_ping.py | 2 +- .../db/backends/influxdb/queries.py | 45 + openwisp_monitoring/device/admin.py | 133 +- .../device/tests/test_admin.py | 290 ++++- openwisp_monitoring/monitoring/base/models.py | 77 +- .../monitoring/configuration.py | 124 +- .../0009_alter_alertsettings_options.py | 26 + ...10_add_alertsettings_inline_permissions.py | 19 + .../0011_alter_metric_field_name.py | 23 + .../monitoring/migrations/__init__.py | 31 + .../monitoring/static/monitoring/js/chart.js | 59 +- .../monitoring/tests/__init__.py | 9 + .../monitoring/tests/test_api.py | 2 +- .../monitoring/tests/test_charts.py | 1 + 
.../monitoring/tests/test_models.py | 25 + .../tests/test_monitoring_notifications.py | 126 ++ openwisp_monitoring/tests/test_selenium.py | 2 +- openwisp_monitoring/views.py | 2 + requirements.txt | 2 +- .../sample_check/migrations/0001_initial.py | 6 + .../0003_add_check_inline_permissions.py | 21 + .../migrations/0001_initial.py | 6 + ...03_add_alertsettings_inline_permissions.py | 21 + .../0004_alter_metric_field_name.py | 23 + tests/openwisp2/settings.py | 26 +- 38 files changed, 4218 insertions(+), 80 deletions(-) create mode 100644 openwisp_monitoring/check/classes/iperf3.py create mode 100644 openwisp_monitoring/check/migrations/0008_alter_check_options.py create mode 100644 openwisp_monitoring/check/migrations/0009_add_check_inline_permissions.py create mode 100644 openwisp_monitoring/check/tests/iperf3_test_utils.py create mode 100644 openwisp_monitoring/check/tests/test_iperf3.py create mode 100644 openwisp_monitoring/monitoring/migrations/0009_alter_alertsettings_options.py create mode 100644 openwisp_monitoring/monitoring/migrations/0010_add_alertsettings_inline_permissions.py create mode 100644 openwisp_monitoring/monitoring/migrations/0011_alter_metric_field_name.py create mode 100644 tests/openwisp2/sample_check/migrations/0003_add_check_inline_permissions.py create mode 100644 tests/openwisp2/sample_monitoring/migrations/0003_add_alertsettings_inline_permissions.py create mode 100644 tests/openwisp2/sample_monitoring/migrations/0004_alter_metric_field_name.py diff --git a/README.rst b/README.rst index 71ad255dd..e3fabb6e6 100644 --- a/README.rst +++ b/README.rst @@ -89,7 +89,9 @@ Available Features `RAM usage <#memory-usage>`_, `CPU load <#cpu-load>`_, `flash/disk usage <#disk-usage>`_, mobile signal (LTE/UMTS/GSM `signal strength <#mobile-signal-strength>`_, `signal quality <#mobile-signal-quality>`_, - `access technology in use <#mobile-access-technology-in-use>`_) + `access technology in use <#mobile-access-technology-in-use>`_), `bandwidth <#iperf3>`_, + `transferred data <#iperf3>`_, `restransmits <#iperf3>`_, `jitter <#iperf3>`_, + `datagram <#iperf3>`_, `datagram loss <#iperf3>`_ * Maintains a record of `WiFi sessions <#monitoring-wifi-sessions>`_ with clients' MAC address and vendor, session start and stop time and connected device along with other information @@ -107,6 +109,8 @@ Available Features * Extensible metrics and charts: it's possible to define new metrics and new charts * API to retrieve the chart metrics and status information of each device based on `NetJSON DeviceMonitoring `_ +* `Iperf3 check <#iperf3-1>`_ that provides network performance measurements such as maximum + achievable bandwidth, jitter, datagram loss etc of the openwrt device using `iperf3 utility `_ ------------ @@ -378,7 +382,15 @@ Configure celery (you may use a different broker if you want): CELERY_BEAT_SCHEDULE = { 'run_checks': { 'task': 'openwisp_monitoring.check.tasks.run_checks', + # Executes only ping & config check every 5 min 'schedule': timedelta(minutes=5), + 'args': ( + [ # Checks path + 'openwisp_monitoring.check.classes.Ping', + 'openwisp_monitoring.check.classes.ConfigApplied', + ], + ), + 'relative': True, }, # Delete old WifiSession 'delete_wifi_clients_and_sessions': { @@ -805,6 +817,59 @@ Mobile Access Technology in use .. 
figure:: https://github.com/openwisp/openwisp-monitoring/raw/docs/docs/access-technology.png :align: center +Iperf3 +~~~~~~ + ++--------------------+---------------------------------------------------------------------------------------------------------------------------+ +| **measurement**: | ``iperf3`` | ++--------------------+---------------------------------------------------------------------------------------------------------------------------+ +| **types**: | | ``int`` (iperf3_result, sent_bytes_tcp, received_bytes_tcp, retransmits, sent_bytes_udp, total_packets, lost_packets), | +| | | ``float`` (sent_bps_tcp, received_bps_tcp, sent_bps_udp, jitter, lost_percent) | ++--------------------+---------------------------------------------------------------------------------------------------------------------------+ +| **fields**: | | ``iperf3_result``, ``sent_bps_tcp``, ``received_bps_tcp``, ``sent_bytes_tcp``, ``received_bytes_tcp``, ``retransmits``, | +| | | ``sent_bps_udp``, ``sent_bytes_udp``, ``jitter``, ``total_packets``, ``lost_packets``, ``lost_percent`` | ++--------------------+---------------------------------------------------------------------------------------------------------------------------+ +| **configuration**: | ``iperf3`` | ++--------------------+---------------------------------------------------------------------------------------------------------------------------+ +| **charts**: | ``bandwidth``, ``transfer``, ``retransmits``, ``jitter``, ``datagram``, ``datagram_loss`` | ++--------------------+---------------------------------------------------------------------------------------------------------------------------+ + +**Bandwidth**: + +.. figure:: https://github.com/openwisp/openwisp-monitoring/raw/docs/docs/1.1/bandwidth.png + :align: center + +**Transferred Data**: + +.. figure:: https://github.com/openwisp/openwisp-monitoring/raw/docs/docs/1.1/transferred-data.png + :align: center + +**Retransmits**: + +.. figure:: https://github.com/openwisp/openwisp-monitoring/raw/docs/docs/1.1/retransmits.png + :align: center + +**Jitter**: + +.. figure:: https://github.com/openwisp/openwisp-monitoring/raw/docs/docs/1.1/jitter.png + :align: center + +**Datagram**: + +.. figure:: https://github.com/openwisp/openwisp-monitoring/raw/docs/docs/1.1/datagram.png + :align: center + +**Datagram loss**: + +.. figure:: https://github.com/openwisp/openwisp-monitoring/raw/docs/docs/1.1/datagram-loss.png + :align: center + +For more info on how to configure and use Iperf3, please refer to +`iperf3 check usage instructions <#iperf3-check-usage-instructions>`_. + +**Note:** Iperf3 charts uses ``connect_points=True`` in +`default chart configuration <#openwisp_monitoring_charts>`_ that joins it's individual chart data points. + Dashboard Monitoring Charts --------------------------- @@ -821,15 +886,15 @@ You can configure the interfaces included in the **General traffic chart** using the `"OPENWISP_MONITORING_DASHBOARD_TRAFFIC_CHART" <#openwisp_monitoring_dashboard_traffic_chart>`_ setting. -Adaptive byte charts +Adaptive size charts -------------------- .. 
figure:: https://github.com/openwisp/openwisp-monitoring/raw/docs/docs/1.1/adaptive-chart.png :align: center When configuring charts, it is possible to flag their unit -as ``adaptive_bytes``, this allows to make the charts more readable because -the units are shown in either `B`, `KB`, `MB`, `GB` and `TB` depending on +as ``adaptive_prefix``, this allows to make the charts more readable because +the units are shown in either `K`, `M`, `G` and `T` depending on the size of each point, the summary values and Y axis are also resized. Example taken from the default configuration of the traffic chart: @@ -838,7 +903,17 @@ Example taken from the default configuration of the traffic chart: 'traffic': { # other configurations for this chart - 'unit': 'adaptive_bytes', + + # traffic measured in 'B' (bytes) + # unit B, KB, MB, GB, TB + 'unit': 'adaptive_prefix+B', + }, + + 'bandwidth': { + # adaptive unit for bandwidth related charts + # bandwidth measured in 'bps'(bits/sec) + # unit bps, Kbps, Mbps, Gbps, Tbps + 'unit': 'adaptive_prefix+bps', }, Monitoring WiFi Sessions @@ -941,6 +1016,384 @@ configuration status of a device changes, this ensures the check reacts quickly to events happening in the network and informs the user promptly if there's anything that is not working as intended. +Iperf3 +~~~~~~ + +This check provides network performance measurements such as maximum achievable bandwidth, +jitter, datagram loss etc of the device using `iperf3 utility `_. + +This check is **disabled by default**. You can enable auto creation of this check by setting the +`OPENWISP_MONITORING_AUTO_IPERF3 <#OPENWISP_MONITORING_AUTO_IPERF3>`_ to ``True``. + +You can also `add the iperf3 check +<#add-checks-and-alert-settings-from-the-device-page>`_ directly from the device page. + +It also supports tuning of various parameters. + +You can also change the parameters used for iperf3 checks (e.g. timing, port, username, +password, rsa_publc_key etc) using the `OPENWISP_MONITORING_IPERF3_CHECK_CONFIG +<#OPENWISP_MONITORING_IPERF3_CHECK_CONFIG>`_ setting. + +**Note:** When setting `OPENWISP_MONITORING_AUTO_IPERF3 <#OPENWISP_MONITORING_AUTO_IPERF3>`_ to ``True``, +you may need to update the `metric configuration <#add-checks-and-alert-settings-from-the-device-page>`_ +to enable alerts for the iperf3 check. + +Iperf3 Check Usage Instructions +------------------------------- + +4. Make sure iperf3 is installed on the device +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Register your device to OpenWISP and make sure the `iperf3 openwrt package +`_ is installed on the device, +eg: + +.. code-block:: shell + + opkg install iperf3 # if using without authentication + opkg install iperf3-ssl # if using with authentication (read below for more info) + +2. Ensure SSH access from OpenWISP is enabled on your devices +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Follow the steps in `"How to configure push updates" section of the +OpenWISP documentation +`_ +to allow SSH access to you device from OpenWISP. + +**Note:** Make sure device connection is enabled +& working with right update strategy i.e. ``OpenWRT SSH``. + +.. image:: https://github.com/openwisp/openwisp-monitoring/raw/docs/docs/1.1/enable-openwrt-ssh.png + :alt: Enable ssh access from openwisp to device + :align: center + +3. 
Set up and configure Iperf3 server settings +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +After having deployed your Iperf3 servers, you need to +configure the iperf3 settings on the django side of OpenWISP, +see the `test project settings for reference +`_. + +The host can be specified by hostname, IPv4 literal, or IPv6 literal. +Example: + +.. code-block:: python + + OPENWISP_MONITORING_IPERF3_CHECK_CONFIG = { + # 'org_pk' : {'host' : [], 'client_options' : {}} + 'a9734710-db30-46b0-a2fc-01f01046fe4f': { + # Some public iperf3 servers + # https://iperf.fr/iperf-servers.php#public-servers + 'host': ['iperf3.openwisp.io', '2001:db8::1', '192.168.5.2'], + 'client_options': { + 'port': 5209, + 'udp': {'bitrate': '30M'}, + 'tcp': {'bitrate': '0'}, + }, + }, + # another org + 'b9734710-db30-46b0-a2fc-01f01046fe4f': { + # available iperf3 servers + 'host': ['iperf3.openwisp2.io', '192.168.5.3'], + 'client_options': { + 'port': 5207, + 'udp': {'bitrate': '50M'}, + 'tcp': {'bitrate': '20M'}, + }, + }, + } + +**Note:** If an organization has more than one iperf3 server configured, then it enables +the iperf3 checks to run concurrently on different devices. If all of the available servers +are busy, then it will add the check back in the queue. + +The celery-beat configuration for the iperf3 check needs to be added too: + +.. code-block:: python + + from celery.schedules import crontab + + # Celery TIME_ZONE should be equal to django TIME_ZONE + # In order to schedule run_iperf3_checks on the correct time intervals + CELERY_TIMEZONE = TIME_ZONE + CELERY_BEAT_SCHEDULE = { + # Other celery beat configurations + # Celery beat configuration for iperf3 check + 'run_iperf3_checks': { + 'task': 'openwisp_monitoring.check.tasks.run_checks', + # https://docs.celeryq.dev/en/latest/userguide/periodic-tasks.html#crontab-schedules + # Executes check every 5 mins from 00:00 AM to 6:00 AM (night) + 'schedule': crontab(minute='*/5', hour='0-6'), + # Iperf3 check path + 'args': (['openwisp_monitoring.check.classes.Iperf3'],), + 'relative': True, + } + } + +Once the changes are saved, you will need to restart all the processes. + +**Note:** We recommended to configure this check to run in non peak +traffic times to not interfere with standard traffic. + +4. Run the check +~~~~~~~~~~~~~~~~ + +This should happen automatically if you have celery-beat correctly +configured and running in the background. +For testing purposes, you can run this check manually using the +`run_checks <#run_checks>`_ command. + +After that, you should see the iperf3 network measurements charts. + +.. 
image:: https://github.com/openwisp/openwisp-monitoring/raw/docs/docs/1.1/iperf3-charts.png + :alt: Iperf3 network measurement charts + +Iperf3 check parameters +~~~~~~~~~~~~~~~~~~~~~~~ + +Currently, iperf3 check supports the following parameters: + ++-----------------------+----------+--------------------------------------------------------------------+ +| **Parameter** | **Type** | **Default Value** | ++-----------------------+----------+--------------------------------------------------------------------+ +|``host`` | ``list`` | ``[]`` | ++-----------------------+----------+--------------------------------------------------------------------+ +|``username`` | ``str`` | ``''`` | ++-----------------------+----------+--------------------------------------------------------------------+ +|``password`` | ``str`` | ``''`` | ++-----------------------+----------+--------------------------------------------------------------------+ +|``rsa_public_key`` | ``str`` | ``''`` | ++-----------------------+----------+--------------------------------------------------------------------+ +|``client_options`` | +---------------------+----------+------------------------------------------+ | +| | | **Parameters** | **Type** | **Default Value** | | +| | +---------------------+----------+------------------------------------------+ | +| | | ``port`` | ``int`` | ``5201`` | | +| | +---------------------+----------+------------------------------------------+ | +| | | ``time`` | ``int`` | ``10`` | | +| | +---------------------+----------+------------------------------------------+ | +| | | ``bytes`` | ``str`` | ``''`` | | +| | +---------------------+----------+------------------------------------------+ | +| | | ``blockcount`` | ``str`` | ``''`` | | +| | +---------------------+----------+------------------------------------------+ | +| | | ``window`` | ``str`` | ``0`` | | +| | +---------------------+----------+------------------------------------------+ | +| | | ``parallel`` | ``int`` | ``1`` | | +| | +---------------------+----------+------------------------------------------+ | +| | | ``reverse`` | ``bool`` | ``False`` | | +| | +---------------------+----------+------------------------------------------+ | +| | | ``bidirectional`` | ``bool`` | ``False`` | | +| | +---------------------+----------+------------------------------------------+ | +| | | ``connect_timeout`` | ``int`` | ``1`` | | +| | +---------------------+----------+------------------------------------------+ | +| | | ``tcp`` | +----------------+----------+---------------------+ | | +| | | | | **Parameters** | **Type** | **Default Value** | | | +| | | | +----------------+----------+---------------------+ | | +| | | | |``bitrate`` | ``str`` | ``0`` | | | +| | | | +----------------+----------+---------------------+ | | +| | | | |``length`` | ``str`` | ``128K`` | | | +| | | | +----------------+----------+---------------------+ | | +| | +---------------------+-----------------------------------------------------+ | +| | | ``udp`` | +----------------+----------+---------------------+ | | +| | | | | **Parameters** | **Type** | **Default Value** | | | +| | | | +----------------+----------+---------------------+ | | +| | | | |``bitrate`` | ``str`` | ``30M`` | | | +| | | | +----------------+----------+---------------------+ | | +| | | | |``length`` | ``str`` | ``0`` | | | +| | | | +----------------+----------+---------------------+ | | +| | +---------------------+-----------------------------------------------------+ | 
++-----------------------+-------------------------------------------------------------------------------+ + +To learn how to use these parameters, please see the +`iperf3 check configuration example <#OPENWISP_MONITORING_IPERF3_CHECK_CONFIG>`_. + +Visit the `official documentation `_ +to learn more about the iperf3 parameters. + +Iperf3 authentication +~~~~~~~~~~~~~~~~~~~~~ + +By default iperf3 check runs without any kind of **authentication**, +in this section we will explain how to configure **RSA authentication** +between the **client** and the **server** to restrict connections +to authenticated clients. + +Server side +########### + +1. Generate RSA keypair +^^^^^^^^^^^^^^^^^^^^^^^ + +.. code-block:: shell + + openssl genrsa -des3 -out private.pem 2048 + openssl rsa -in private.pem -outform PEM -pubout -out public_key.pem + openssl rsa -in private.pem -out private_key.pem -outform PEM + +After running the commands mentioned above, the public key will be stored in +``public_key.pem`` which will be used in **rsa_public_key** parameter +in `OPENWISP_MONITORING_IPERF3_CHECK_CONFIG +<#OPENWISP_MONITORING_IPERF3_CHECK_CONFIG>`_ +and the private key will be contained in the file ``private_key.pem`` +which will be used with **--rsa-private-key-path** command option when +starting the iperf3 server. + +2. Create user credentials +^^^^^^^^^^^^^^^^^^^^^^^^^^ + +.. code-block:: shell + + USER=iperfuser PASSWD=iperfpass + echo -n "{$USER}$PASSWD" | sha256sum | awk '{ print $1 }' + ---- + ee17a7f98cc87a6424fb52682396b2b6c058e9ab70e946188faa0714905771d7 #This is the hash of "iperfuser" + +Add the above hash with username in ``credentials.csv`` + +.. code-block:: shell + + # file format: username,sha256 + iperfuser,ee17a7f98cc87a6424fb52682396b2b6c058e9ab70e946188faa0714905771d7 + +3. Now start the iperf3 server with auth options +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +.. code-block:: shell + + iperf3 -s --rsa-private-key-path ./private_key.pem --authorized-users-path ./credentials.csv + +Client side (OpenWrt device) +############################ + +1. Install iperf3-ssl +^^^^^^^^^^^^^^^^^^^^^ + +Install the `iperf3-ssl openwrt package +`_ +instead of the normal +`iperf3 openwrt package `_ +because the latter comes without support for authentication. + +You may also check your installed **iperf3 openwrt package** features: + +.. code-block:: shell + + root@vm-openwrt:~ iperf3 -v + iperf 3.7 (cJSON 1.5.2) + Linux vm-openwrt 4.14.171 #0 SMP Thu Feb 27 21:05:12 2020 x86_64 + Optional features available: CPU affinity setting, IPv6 flow label, TCP congestion algorithm setting, + sendfile / zerocopy, socket pacing, authentication # contains 'authentication' + +2. Configure iperf3 check auth parameters +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Now, add the following iperf3 authentication parameters +to `OPENWISP_MONITORING_IPERF3_CHECK_CONFIG +<#OPENWISP_MONITORING_IPERF3_CHECK_CONFIG>`_ +in the settings: + +.. code-block:: python + + OPENWISP_MONITORING_IPERF3_CHECK_CONFIG = { + 'a9734710-db30-46b0-a2fc-01f01046fe4f': { + 'host': ['iperf1.openwisp.io', 'iperf2.openwisp.io', '192.168.5.2'], + # All three parameters (username, password, rsa_publc_key) + # are required for iperf3 authentication + 'username': 'iperfuser', + 'password': 'iperfpass', + # Add RSA public key without any headers + # ie. 
-----BEGIN PUBLIC KEY-----, -----BEGIN END KEY----- + 'rsa_public_key': ( + """ + MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAwuEm+iYrfSWJOupy6X3N + dxZvUCxvmoL3uoGAs0O0Y32unUQrwcTIxudy38JSuCccD+k2Rf8S4WuZSiTxaoea + 6Du99YQGVZeY67uJ21SWFqWU+w6ONUj3TrNNWoICN7BXGLE2BbSBz9YaXefE3aqw + GhEjQz364Itwm425vHn2MntSp0weWb4hUCjQUyyooRXPrFUGBOuY+VvAvMyAG4Uk + msapnWnBSxXt7Tbb++A5XbOMdM2mwNYDEtkD5ksC/x3EVBrI9FvENsH9+u/8J9Mf + 2oPl4MnlCMY86MQypkeUn7eVWfDnseNky7TyC0/IgCXve/iaydCCFdkjyo1MTAA4 + BQIDAQAB + """ + ), + 'client_options': { + 'port': 5209, + 'udp': {'bitrate': '20M'}, + 'tcp': {'bitrate': '0'}, + }, + } + } + +Adding Checks and Alert settings from the device page +----------------------------------------------------- + +We can add checks and define alert settings directly from the **device page**. + +To add a check, you just need to select an available **check type** as shown below: + +.. figure:: https://github.com/openwisp/openwisp-monitoring/raw/docs/docs/1.1/device-inline-check.png + :align: center + +The following example shows how to use the +`OPENWISP_MONITORING_METRICS setting <#openwisp_monitoring_metrics>`_ +to reconfigure the system for `iperf3 check <#iperf3-1>`_ to send an alert if +the measured **TCP bandwidth** has been less than **10 Mbit/s** for more than **2 days**. + +1. By default, `Iperf3 checks <#iperf3-1>`_ come with default alert settings, +but it is easy to customize alert settings through the device page as shown below: + +.. figure:: https://github.com/openwisp/openwisp-monitoring/raw/docs/docs/1.1/device-inline-alertsettings.png + :align: center + +2. Now, add the following notification configuration to send an alert for **TCP bandwidth**: + +.. code-block:: python + + # Main project settings.py + from django.utils.translation import gettext_lazy as _ + + OPENWISP_MONITORING_METRICS = { + 'iperf3': { + 'notification': { + 'problem': { + 'verbose_name': 'Iperf3 PROBLEM', + 'verb': _('Iperf3 bandwidth is less than normal value'), + 'level': 'warning', + 'email_subject': _( + '[{site.name}] PROBLEM: {notification.target} {notification.verb}' + ), + 'message': _( + 'The device [{notification.target}]({notification.target_link}) ' + '{notification.verb}.' + ), + }, + 'recovery': { + 'verbose_name': 'Iperf3 RECOVERY', + 'verb': _('Iperf3 bandwidth now back to normal'), + 'level': 'info', + 'email_subject': _( + '[{site.name}] RECOVERY: {notification.target} {notification.verb}' + ), + 'message': _( + 'The device [{notification.target}]({notification.target_link}) ' + '{notification.verb}.' + ), + }, + }, + }, + } + +.. figure:: https://github.com/openwisp/openwisp-monitoring/raw/docs/docs/1.1/alert_field_warn.png + :align: center + +.. figure:: https://github.com/openwisp/openwisp-monitoring/raw/docs/docs/1.1/alert_field_info.png + :align: center + +**Note:** To access the features described above, the user must have permissions for ``Check`` and ``AlertSetting`` inlines, +these permissions are included by default in the "Administrator" and "Operator" groups and are shown in the screenshot below. + +.. figure:: https://github.com/openwisp/openwisp-monitoring/raw/docs/docs/1.1/inline-permissions.png + :align: center + Settings -------- @@ -1035,6 +1488,91 @@ validating custom parameters of a ``Check`` object. This setting allows you to choose whether `config_applied <#configuration-applied>`_ checks should be created automatically for newly registered devices. It's enabled by default. 
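+For example, a minimal sketch of disabling this behaviour in the Django
+project settings (the full setting name is an assumption inferred from the
+``get_settings_value('AUTO_DEVICE_CONFIG_CHECK', True)`` call in
+``openwisp_monitoring/check/settings.py``, which applies the
+``OPENWISP_MONITORING_`` prefix):
+
+.. code-block:: python
+
+    # Django project settings.py (sketch)
+    # skip automatic creation of the "configuration applied" check
+    # for newly registered devices
+    OPENWISP_MONITORING_AUTO_DEVICE_CONFIG_CHECK = False
+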
+``OPENWISP_MONITORING_AUTO_IPERF3`` +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + ++--------------+-------------+ +| **type**: | ``bool`` | ++--------------+-------------+ +| **default**: | ``False`` | ++--------------+-------------+ + +This setting allows you to choose whether `iperf3 <#iperf3-1>`_ checks should be +created automatically for newly registered devices. It's disabled by default. + +``OPENWISP_MONITORING_IPERF3_CHECK_CONFIG`` +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + ++--------------+-------------+ +| **type**: | ``dict`` | ++--------------+-------------+ +| **default**: | ``{}`` | ++--------------+-------------+ + +This setting allows to override the default iperf3 check configuration defined in +``openwisp_monitoring.check.classes.iperf3.DEFAULT_IPERF3_CHECK_CONFIG``. + +For example, you can change the values of `supported iperf3 check parameters <#iperf3-check-parameters>`_. + +.. code-block:: python + + OPENWISP_MONITORING_IPERF3_CHECK_CONFIG = { + # 'org_pk' : {'host' : [], 'client_options' : {}} + 'a9734710-db30-46b0-a2fc-01f01046fe4f': { + # Some public iperf3 servers + # https://iperf.fr/iperf-servers.php#public-servers + 'host': ['iperf3.openwisp.io', '2001:db8::1', '192.168.5.2'], + 'client_options': { + 'port': 6209, + # Number of parallel client streams to run + # note that iperf3 is single threaded + # so if you are CPU bound this will not + # yield higher throughput + 'parallel': 5, + # Set the connect_timeout (in milliseconds) for establishing + # the initial control connection to the server, the lower the value + # the faster the down iperf3 server will be detected + 'connect_timeout': 1, + # Window size / socket buffer size + 'window': '300K', + # Only one reverse condition can be chosen, + # reverse or bidirectional + 'reverse': True, + # Only one test end condition can be chosen, + # time, bytes or blockcount + 'blockcount': '1K', + 'udp': {'bitrate': '50M', 'length': '1460K'}, + 'tcp': {'bitrate': '20M', 'length': '256K'}, + }, + } + } + +``OPENWISP_MONITORING_IPERF3_CHECK_DELETE_RSA_KEY`` +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + ++--------------+-------------------------------+ +| **type**: | ``bool`` | ++--------------+-------------------------------+ +| **default**: | ``True`` | ++--------------+-------------------------------+ + +This setting allows you to set whether +`iperf3 check RSA public key <#configure-iperf3-check-for-authentication>`_ +will be deleted after successful completion of the check or not. + +``OPENWISP_MONITORING_IPERF3_CHECK_LOCK_EXPIRE`` +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + ++--------------+-------------------------------+ +| **type**: | ``int`` | ++--------------+-------------------------------+ +| **default**: | ``600`` | ++--------------+-------------------------------+ + +This setting allows you to set a cache lock expiration time for the iperf3 check when +running on multiple servers. Make sure it is always greater than the total iperf3 check +time, i.e. greater than the TCP + UDP test time. By default, it is set to **600 seconds (10 mins)**. + ``OPENWISP_MONITORING_AUTO_CHARTS`` ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -1616,6 +2154,10 @@ The ``AlertSettings`` of ``ping`` metric will by default use ``threshold`` and ` defined in the ``alert_settings`` key. You can always override them and define your own custom values via the *admin*. 
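+As a minimal sketch, these defaults can also be overridden globally through the
+`OPENWISP_MONITORING_METRICS <#openwisp_monitoring_metrics>`_ setting shown
+above (the ``ping`` values below are illustrative assumptions, not taken from
+this document):
+
+.. code-block:: python
+
+    OPENWISP_MONITORING_METRICS = {
+        'ping': {
+            'alert_settings': {
+                # alert when the metric crosses this threshold...
+                'threshold': 1,
+                # ...but only if the condition persists for 5 minutes
+                'tolerance': 5,
+            },
+        },
+    }
+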
+You can also use the ``alert_field`` key in metric configuration +which allows ``AlertSettings`` to check the ``threshold`` on +``alert_field`` instead of the default ``field_name`` key. + **Note**: It will raise ``ImproperlyConfigured`` exception if a metric configuration is already registered with same name (not to be confused with verbose_name). diff --git a/openwisp_monitoring/check/apps.py b/openwisp_monitoring/check/apps.py index 15c9832f0..e0a45998a 100644 --- a/openwisp_monitoring/check/apps.py +++ b/openwisp_monitoring/check/apps.py @@ -32,3 +32,11 @@ def _connect_signals(self): sender=load_model('config', 'Device'), dispatch_uid='auto_config_check', ) + if app_settings.AUTO_IPERF3: + from .base.models import auto_iperf3_check_receiver + + post_save.connect( + auto_iperf3_check_receiver, + sender=load_model('config', 'Device'), + dispatch_uid='auto_iperf3_check', + ) diff --git a/openwisp_monitoring/check/base/models.py b/openwisp_monitoring/check/base/models.py index a0bab9e66..0d040a7b1 100644 --- a/openwisp_monitoring/check/base/models.py +++ b/openwisp_monitoring/check/base/models.py @@ -9,7 +9,11 @@ from jsonfield import JSONField from openwisp_monitoring.check import settings as app_settings -from openwisp_monitoring.check.tasks import auto_create_config_check, auto_create_ping +from openwisp_monitoring.check.tasks import ( + auto_create_config_check, + auto_create_iperf3_check, + auto_create_ping, +) from openwisp_utils.base import TimeStampedEditableModel from ...utils import transaction_on_commit @@ -50,6 +54,13 @@ class Meta: abstract = True unique_together = ('name', 'object_id', 'content_type') + permissions = ( + ('add_check_inline', 'Can add check inline'), + ('change_check_inline', 'Can change check inline'), + ('delete_check_inline', 'Can delete check inline'), + ('view_check_inline', 'Can view check inline'), + ) + def __str__(self): if not self.object_id or not self.content_type: return self.name @@ -60,6 +71,14 @@ def __str__(self): def clean(self): self.check_instance.validate() + def full_clean(self, *args, **kwargs): + # The name of the check will be the same as the + # 'check_type' chosen by the user when the + # name field is empty (useful for CheckInline) + if not self.name: + self.name = self.get_check_type_display() + return super().full_clean(*args, **kwargs) + @cached_property def check_class(self): """ @@ -81,6 +100,11 @@ def perform_check(self, store=True): """ return self.check_instance.check(store=True) + def perform_check_delayed(self, duration=0): + from ..tasks import perform_check + + perform_check.apply_async(args=[self.id], countdown=duration) + def auto_ping_receiver(sender, instance, created, **kwargs): """ @@ -116,3 +140,21 @@ def auto_config_check_receiver(sender, instance, created, **kwargs): object_id=str(instance.pk), ) ) + + +def auto_iperf3_check_receiver(sender, instance, created, **kwargs): + """ + Implements OPENWISP_MONITORING_AUTO_IPERF3 + The creation step is executed in the background + """ + # we need to skip this otherwise this task will be executed + # every time the configuration is requested via checksum + if not created: + return + transaction_on_commit( + lambda: auto_create_iperf3_check.delay( + model=sender.__name__.lower(), + app_label=sender._meta.app_label, + object_id=str(instance.pk), + ) + ) diff --git a/openwisp_monitoring/check/classes/__init__.py b/openwisp_monitoring/check/classes/__init__.py index 33bf8293c..a7d9fde29 100644 --- a/openwisp_monitoring/check/classes/__init__.py +++ 
b/openwisp_monitoring/check/classes/__init__.py @@ -1,2 +1,3 @@ from .config_applied import ConfigApplied # noqa +from .iperf3 import Iperf3 # noqa from .ping import Ping # noqa diff --git a/openwisp_monitoring/check/classes/iperf3.py b/openwisp_monitoring/check/classes/iperf3.py new file mode 100644 index 000000000..e3f56748a --- /dev/null +++ b/openwisp_monitoring/check/classes/iperf3.py @@ -0,0 +1,548 @@ +import logging +from functools import reduce +from json import loads +from json.decoder import JSONDecodeError + +from django.core.cache import cache +from django.core.exceptions import ValidationError +from jsonschema import draft7_format_checker, validate +from jsonschema.exceptions import ValidationError as SchemaError +from swapper import load_model + +from openwisp_controller.connection.settings import UPDATE_STRATEGIES + +from .. import settings as app_settings +from .base import BaseCheck + +logger = logging.getLogger(__name__) + +Chart = load_model('monitoring', 'Chart') +Metric = load_model('monitoring', 'Metric') +AlertSettings = load_model('monitoring', 'AlertSettings') +DeviceConnection = load_model('connection', 'DeviceConnection') + +DEFAULT_IPERF3_CHECK_CONFIG = { + 'host': { + 'type': 'array', + 'items': { + 'type': 'string', + }, + 'default': [], + }, + # username, password max_length chosen from iperf3 docs to avoid iperf3 param errors + 'username': {'type': 'string', 'default': '', 'minLength': 1, 'maxLength': 20}, + 'password': {'type': 'string', 'default': '', 'minLength': 1, 'maxLength': 20}, + 'rsa_public_key': { + 'type': 'string', + 'default': '', + }, + 'client_options': { + 'type': 'object', + 'properties': { + 'port': { + 'type': 'integer', + 'default': 5201, + # max, min port chosen from iperf3 docs + 'minimum': 1, + 'maximum': 65535, + }, + 'time': { + # sets the interval time in seconds + # between periodic bandwidth, jitter, and loss reports + 'type': 'integer', + 'default': 10, + 'minimum': 1, + # arbitrary chosen to avoid slowing down the queue (30min) + 'maximum': 1800, + }, + 'bytes': { + # number of bytes to transmit (instead of 'time') + 'type': 'string', + # default to '' since we're using time for + # the test end condition instead of bytes + 'default': '', + }, + 'blockcount': { + # number of blocks (packets) to transmit + # instead of 'time' or 'bytes' + 'type': 'string', + # default to '' since we're using time for + # the test end condition instead of blockcount + 'default': '', + }, + 'window': { + # window size / socket buffer size + # this gets sent to the server and used on that side too + 'type': 'string', + 'default': '0', + }, + 'parallel': { + # number of parallel client streams to run + # note that iperf3 is single threaded + # so if you are CPU bound this will not yield higher throughput + 'type': 'integer', + 'default': 1, + # max, min parallel streams chosen from iperf3 docs + 'minimum': 1, + 'maximum': 128, + }, + 'reverse': { + # reverse the direction of a test + # the server sends data to the client + 'type': 'boolean', + 'default': False, + }, + 'bidirectional': { + # test in both directions (normal and reverse) + # with both the client and server sending + # and receiving data simultaneously + 'type': 'boolean', + 'default': False, + }, + 'connect_timeout': { + # set timeout for establishing the initial + # control connection to the server, in milliseconds (ms) + # providing a shorter value (ex. 
1 ms) may + # speed up detection of a down iperf3 server + 'type': 'integer', + 'default': 1, + 'minimum': 1, + # arbitrary chosen to avoid slowing down the queue (1000 sec) + 'maximum': 1000000, + }, + 'tcp': { + 'type': 'object', + 'properties': { + 'bitrate': { + # set target bitrate to n bits/sec + 'type': 'string', + 'default': '0', + }, + 'length': { + # length of buffer to read or write + 'type': 'string', + # for TCP tests, the default value is 128KB + 'default': '128K', + }, + }, + }, + 'udp': { + 'type': 'object', + 'properties': { + 'bitrate': { + 'type': 'string', + # set target bitrate to n bits/sec + # 30 Mbps + 'default': '30M', + }, + 'length': { + # iperf3 tries to dynamically determine a + # reasonable sending size based on the path MTU + # if that cannot be determined it uses 1460 bytes + 'type': 'string', + 'default': '0', + }, + }, + }, + }, + }, +} + + +def get_iperf3_schema(): + schema = { + '$schema': 'http://json-schema.org/draft-07/schema#', + 'type': 'object', + 'additionalProperties': True, + 'dependencies': { + 'username': ['password', 'rsa_public_key'], + 'password': ['username', 'rsa_public_key'], + 'rsa_public_key': ['username', 'password'], + }, + } + schema['properties'] = DEFAULT_IPERF3_CHECK_CONFIG + return schema + + +class Iperf3(BaseCheck): + + schema = get_iperf3_schema() + + def validate_params(self, params=None): + try: + if not params: + params = self.params + validate(params, self.schema, format_checker=draft7_format_checker) + except SchemaError as e: + message = 'Invalid param' + path = '/'.join(e.path) + if path: + message = '{0} in "{1}"'.format(message, path) + message = '{0}: {1}'.format(message, e.message) + raise ValidationError({'params': message}) from e + + def _validate_iperf3_config(self, org): + # if iperf3 config is present and validate it's params + if app_settings.IPERF3_CHECK_CONFIG: + self.validate_params( + params=app_settings.IPERF3_CHECK_CONFIG.get(str(org.id)) + ) + + def check(self, store=True): + lock_acquired = False + org = self.related_object.organization + self._validate_iperf3_config(org) + available_iperf3_servers = self._get_param('host', 'host.default') + if not available_iperf3_servers: + logger.warning( + ( + f'Iperf3 servers for organization "{org}" ' + f'is not configured properly, iperf3 check skipped!' + ) + ) + return + time = self._get_param( + 'client_options.time', 'client_options.properties.time.default' + ) + # Try to acquire a lock, or put task back on queue + for server in available_iperf3_servers: + server_lock_key = f'ow_monitoring_{org}_iperf3_check_{server}' + # Set available_iperf3_server to the org device + lock_acquired = cache.add( + server_lock_key, + str(self.related_object), + timeout=app_settings.IPERF3_CHECK_LOCK_EXPIRE, + ) + if lock_acquired: + break + else: + logger.info( + ( + f'At the moment, all available iperf3 servers of organization "{org}" ' + f'are busy running checks, putting "{self.check_instance}" back in the queue..' 
+ ) + ) + # Return the iperf3_check task to the queue, + # it will executed after 2 * iperf3_check_time (TCP+UDP) + self.check_instance.perform_check_delayed(duration=2 * time) + return + try: + # Execute the iperf3 check with current available server + result = self._run_iperf3_check(store, server, time) + finally: + # Release the lock after completion of the check + cache.delete(server_lock_key) + return result + + def _run_iperf3_check(self, store, server, time): + device_connection = self._get_device_connection() + if not device_connection: + logger.warning( + f'Failed to get a working DeviceConnection for "{self.related_object}", iperf3 check skipped!' + ) + return + # The DeviceConnection could fail if the management tunnel is down. + if not device_connection.connect(): + logger.warning( + f'DeviceConnection for "{self.related_object}" is not working, iperf3 check skipped!' + ) + return + command_tcp, command_udp = self._get_check_commands(server) + + # TCP mode + result, exit_code = device_connection.connector_instance.exec_command( + command_tcp, raise_unexpected_exit=False + ) + # Exit code 127 : command doesn't exist + if exit_code == 127: + logger.warning( + f'Iperf3 is not installed on the "{self.related_object}", error - {result.strip()}' + ) + return + + result_tcp = self._get_iperf3_result(result, exit_code, mode='TCP') + # UDP mode + result, exit_code = device_connection.connector_instance.exec_command( + command_udp, raise_unexpected_exit=False + ) + result_udp = self._get_iperf3_result(result, exit_code, mode='UDP') + result = {} + if store and result_tcp and result_udp: + # Store iperf3_result field 1 if any mode passes, store 0 when both fails + iperf3_result = result_tcp['iperf3_result'] | result_udp['iperf3_result'] + result.update({**result_tcp, **result_udp, 'iperf3_result': iperf3_result}) + self.store_result(result) + device_connection.disconnect() + return result + + def _get_check_commands(self, server): + """ + Returns tcp & udp commands for iperf3 check + """ + username = self._get_param('username', 'username.default') + port = self._get_param( + 'client_options.port', 'client_options.properties.port.default' + ) + window = self._get_param( + 'client_options.window', 'client_options.properties.window.default' + ) + parallel = self._get_param( + 'client_options.parallel', 'client_options.properties.parallel.default' + ) + ct = self._get_param( + 'client_options.connect_timeout', + 'client_options.properties.connect_timeout.default', + ) + tcp_bitrate = self._get_param( + 'client_options.tcp.bitrate', + 'client_options.properties.tcp.properties.bitrate.default', + ) + tcp_length = self._get_param( + 'client_options.tcp.length', + 'client_options.properties.tcp.properties.length.default', + ) + udp_bitrate = self._get_param( + 'client_options.udp.bitrate', + 'client_options.properties.udp.properties.bitrate.default', + ) + udp_length = self._get_param( + 'client_options.udp.length', + 'client_options.properties.udp.properties.length.default', + ) + + rev_or_bidir, test_end_condition = self._get_iperf3_test_conditions() + logger.info(f'«« Iperf3 server : {server}, Device : {self.related_object} »»') + command_tcp = ( + f'iperf3 -c {server} -p {port} {test_end_condition} --connect-timeout {ct} ' + f'-b {tcp_bitrate} -l {tcp_length} -w {window} -P {parallel} {rev_or_bidir} -J' + ) + command_udp = ( + f'iperf3 -c {server} -p {port} {test_end_condition} --connect-timeout {ct} ' + f'-b {udp_bitrate} -l {udp_length} -w {window} -P {parallel} {rev_or_bidir} -u -J' + ) + 
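+        # Note: '-J' makes iperf3 print its report as JSON, which is what
+        # _get_iperf3_result() parses with loads() further below.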
+ # All three parameters ie. username, password and rsa_public_key is required + # for authentication to work, checking only username here + if username: + password = self._get_param('password', 'password.default') + key = self._get_param('rsa_public_key', 'rsa_public_key.default') + rsa_public_key = self._get_compelete_rsa_key(key) + rsa_public_key_path = '/tmp/iperf3-public-key.pem' + + command_tcp = ( + f'echo "{rsa_public_key}" > {rsa_public_key_path} && ' + f'IPERF3_PASSWORD="{password}" iperf3 -c {server} -p {port} {test_end_condition} ' + f'--username "{username}" --rsa-public-key-path {rsa_public_key_path} --connect-timeout {ct} ' + f'-b {tcp_bitrate} -l {tcp_length} -w {window} -P {parallel} {rev_or_bidir} -J' + ) + + command_udp = ( + f'IPERF3_PASSWORD="{password}" iperf3 -c {server} -p {port} {test_end_condition} ' + f'--username "{username}" --rsa-public-key-path {rsa_public_key_path} --connect-timeout {ct} ' + f'-b {udp_bitrate} -l {udp_length} -w {window} -P {parallel} {rev_or_bidir} -u -J' + ) + + # If IPERF3_CHECK_DELETE_RSA_KEY, remove rsa_public_key from the device + if app_settings.IPERF3_CHECK_DELETE_RSA_KEY: + command_udp = f'{command_udp} && rm -f {rsa_public_key_path}' + return command_tcp, command_udp + + def _get_iperf3_test_conditions(self): + """ + Returns iperf3 check test conditions (rev_or_bidir, end_condition) + """ + time = self._get_param( + 'client_options.time', 'client_options.properties.time.default' + ) + bytes = self._get_param( + 'client_options.bytes', 'client_options.properties.bytes.default' + ) + blockcount = self._get_param( + 'client_options.blockcount', 'client_options.properties.blockcount.default' + ) + reverse = self._get_param( + 'client_options.reverse', 'client_options.properties.reverse.default' + ) + bidirectional = self._get_param( + 'client_options.bidirectional', + 'client_options.properties.bidirectional.default', + ) + # by default we use 'time' param + # for the iperf3 test end condition + test_end_condition = f'-t {time}' + # if 'bytes' present in config + # use it instead of 'time' + if bytes: + test_end_condition = f'-n {bytes}' + # if 'blockcount' present in config + # use it instead of 'time' or 'bytes' + if blockcount: + test_end_condition = f'-k {blockcount}' + # only one reverse condition can be use + # reverse or bidirectional not both + rev_or_bidir = '' + if reverse: + rev_or_bidir = '--reverse' + if bidirectional: + rev_or_bidir = '--bidir' + return rev_or_bidir, test_end_condition + + def _get_compelete_rsa_key(self, key): + """ + Returns RSA key with proper format + """ + pem_prefix = '-----BEGIN PUBLIC KEY-----\n' + pem_suffix = '\n-----END PUBLIC KEY-----' + key = key.strip() + return f'{pem_prefix}{key}{pem_suffix}' + + def _get_device_connection(self): + """ + Returns an active SSH DeviceConnection for a device + """ + openwrt_ssh = UPDATE_STRATEGIES[0][0] + device_connection = DeviceConnection.objects.filter( + device_id=self.related_object.id, + update_strategy=openwrt_ssh, + enabled=True, + ).first() + return device_connection + + def _deep_get(self, dictionary, keys, default=None): + """ + Returns dict key value using dict & + it's dot_key string ie. 
key1.key2_nested.key3_nested + if found otherwise returns default + """ + return reduce( + lambda d, key: d.get(key, default) if isinstance(d, dict) else default, + keys.split("."), + dictionary, + ) + + def _get_param(self, conf_key, default_conf_key): + """ + Returns specified param or its default value according to the schema + """ + org_id = str(self.related_object.organization.id) + iperf3_config = app_settings.IPERF3_CHECK_CONFIG + + if self.params: + check_params = self._deep_get(self.params, conf_key) + if check_params: + return check_params + + if iperf3_config: + iperf3_config = iperf3_config.get(org_id) + iperf3_config_param = self._deep_get(iperf3_config, conf_key) + if iperf3_config_param: + return iperf3_config_param + + return self._deep_get(DEFAULT_IPERF3_CHECK_CONFIG, default_conf_key) + + def _get_iperf3_result(self, result, exit_code, mode): + """ + Returns iperf3 test result + """ + try: + result = loads(result) + except JSONDecodeError: + # Errors other than iperf3 test errors + logger.warning( + f'Iperf3 check failed for "{self.related_object}", error - {result.strip()}' + ) + return + + if mode == 'TCP': + if exit_code != 0: + logger.warning( + f'Iperf3 check failed for "{self.related_object}", {result["error"]}' + ) + return { + 'iperf3_result': 0, + 'sent_bps_tcp': 0.0, + 'received_bps_tcp': 0.0, + 'sent_bytes_tcp': 0, + 'received_bytes_tcp': 0, + 'retransmits': 0, + } + else: + sent = result['end']['sum_sent'] + received = result['end']['sum_received'] + return { + 'iperf3_result': 1, + 'sent_bps_tcp': float(sent['bits_per_second']), + 'received_bps_tcp': float(received['bits_per_second']), + 'sent_bytes_tcp': sent['bytes'], + 'received_bytes_tcp': received['bytes'], + 'retransmits': sent['retransmits'], + } + + elif mode == 'UDP': + if exit_code != 0: + logger.warning( + f'Iperf3 check failed for "{self.related_object}", {result["error"]}' + ) + return { + 'iperf3_result': 0, + 'sent_bps_udp': 0.0, + 'sent_bytes_udp': 0, + 'jitter': 0.0, + 'total_packets': 0, + 'lost_packets': 0, + 'lost_percent': 0.0, + } + else: + return { + 'iperf3_result': 1, + 'sent_bps_udp': float(result['end']['sum']['bits_per_second']), + 'sent_bytes_udp': result['end']['sum']['bytes'], + 'jitter': float(result['end']['sum']['jitter_ms']), + 'total_packets': result['end']['sum']['packets'], + 'lost_packets': result['end']['sum']['lost_packets'], + 'lost_percent': float(result['end']['sum']['lost_percent']), + } + + def store_result(self, result): + """ + Store result in the DB + """ + metric = self._get_metric() + copied = result.copy() + iperf3_result = copied.pop('iperf3_result') + metric.write(iperf3_result, extra_values=copied) + + def _get_metric(self): + """ + Gets or creates metric + """ + metric, created = self._get_or_create_metric() + if created: + self._create_alert_settings(metric) + self._create_charts(metric) + return metric + + def _create_alert_settings(self, metric): + """ + Creates default iperf3 alert settings with is_active=False + """ + alert_settings = AlertSettings(metric=metric, is_active=False) + alert_settings.full_clean() + alert_settings.save() + + def _create_charts(self, metric): + """ + Creates iperf3 related charts + """ + charts = [ + 'bandwidth', + 'transfer', + 'retransmits', + 'jitter', + 'datagram', + 'datagram_loss', + ] + for chart in charts: + chart = Chart(metric=metric, configuration=chart) + chart.full_clean() + chart.save() diff --git a/openwisp_monitoring/check/migrations/0008_alter_check_options.py 
b/openwisp_monitoring/check/migrations/0008_alter_check_options.py new file mode 100644 index 000000000..021ea044c --- /dev/null +++ b/openwisp_monitoring/check/migrations/0008_alter_check_options.py @@ -0,0 +1,24 @@ +# Generated by Django 3.2.14 on 2022-08-12 07:50 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('check', '0007_create_checks'), + ] + + operations = [ + migrations.AlterModelOptions( + name='check', + options={ + 'permissions': ( + ('add_check_inline', 'Can add check inline'), + ('change_check_inline', 'Can change check inline'), + ('delete_check_inline', 'Can delete check inline'), + ('view_check_inline', 'Can view check inline'), + ) + }, + ), + ] diff --git a/openwisp_monitoring/check/migrations/0009_add_check_inline_permissions.py b/openwisp_monitoring/check/migrations/0009_add_check_inline_permissions.py new file mode 100644 index 000000000..3dfdfe345 --- /dev/null +++ b/openwisp_monitoring/check/migrations/0009_add_check_inline_permissions.py @@ -0,0 +1,19 @@ +# Generated by Django 4.0.4 on 2022-08-19 11:28 + +from django.db import migrations + +from . import assign_check_inline_permissions_to_groups + + +class Migration(migrations.Migration): + + dependencies = [ + ('check', '0008_alter_check_options'), + ] + + operations = [ + migrations.RunPython( + assign_check_inline_permissions_to_groups, + reverse_code=migrations.RunPython.noop, + ), + ] diff --git a/openwisp_monitoring/check/migrations/__init__.py b/openwisp_monitoring/check/migrations/__init__.py index e69de29bb..afe4dbbc8 100644 --- a/openwisp_monitoring/check/migrations/__init__.py +++ b/openwisp_monitoring/check/migrations/__init__.py @@ -0,0 +1,34 @@ +from django.contrib.auth.models import Permission + +from openwisp_controller.migrations import create_default_permissions, get_swapped_model + + +def assign_check_inline_permissions_to_groups(apps, schema_editor): + create_default_permissions(apps, schema_editor) + operators_read_only_admins_manage = [ + 'check', + ] + manage_operations = ['add', 'view', 'change', 'delete'] + Group = get_swapped_model(apps, 'openwisp_users', 'Group') + + try: + admin = Group.objects.get(name='Administrator') + operator = Group.objects.get(name='Operator') + # consider failures custom cases + # that do not have to be dealt with + except Group.DoesNotExist: + return + + for model_name in operators_read_only_admins_manage: + try: + permission = Permission.objects.get( + codename='view_{}_inline'.format(model_name) + ) + operator.permissions.add(permission.pk) + except Permission.DoesNotExist: + pass + for operation in manage_operations: + permission = Permission.objects.get( + codename='{}_{}_inline'.format(operation, model_name) + ) + admin.permissions.add(permission.pk) diff --git a/openwisp_monitoring/check/settings.py b/openwisp_monitoring/check/settings.py index 4575c8eca..54b439bab 100644 --- a/openwisp_monitoring/check/settings.py +++ b/openwisp_monitoring/check/settings.py @@ -5,9 +5,17 @@ ( ('openwisp_monitoring.check.classes.Ping', 'Ping'), ('openwisp_monitoring.check.classes.ConfigApplied', 'Configuration Applied'), + ('openwisp_monitoring.check.classes.Iperf3', 'Iperf3'), ), ) AUTO_PING = get_settings_value('AUTO_PING', True) AUTO_CONFIG_CHECK = get_settings_value('AUTO_DEVICE_CONFIG_CHECK', True) MANAGEMENT_IP_ONLY = get_settings_value('MANAGEMENT_IP_ONLY', True) PING_CHECK_CONFIG = get_settings_value('PING_CHECK_CONFIG', {}) +AUTO_IPERF3 = get_settings_value('AUTO_IPERF3', False) +IPERF3_CHECK_CONFIG = 
get_settings_value('IPERF3_CHECK_CONFIG', {}) +IPERF3_CHECK_LOCK_EXPIRE = get_settings_value( + 'IPERF3_CHECK_LOCK_EXPIRE', 10 * 60 +) # 10 minutes arbitrarily chosen (must be longer than TCP + UDP test time) +IPERF3_CHECK_DELETE_RSA_KEY = get_settings_value('IPERF3_CHECK_DELETE_RSA_KEY', True) +CHECKS_LIST = get_settings_value('CHECK_LIST', list(dict(CHECK_CLASSES).keys())) diff --git a/openwisp_monitoring/check/tasks.py b/openwisp_monitoring/check/tasks.py index 2ae62bc0d..6a643db48 100644 --- a/openwisp_monitoring/check/tasks.py +++ b/openwisp_monitoring/check/tasks.py @@ -4,9 +4,11 @@ from celery import shared_task from django.conf import settings from django.contrib.contenttypes.models import ContentType -from django.core.exceptions import ObjectDoesNotExist +from django.core.exceptions import ImproperlyConfigured, ObjectDoesNotExist from swapper import load_model +from .settings import CHECKS_LIST + logger = logging.getLogger(__name__) @@ -15,7 +17,7 @@ def get_check_model(): @shared_task -def run_checks(): +def run_checks(checks=None): """ Retrieves the id of all active checks in chunks of 2000 items and calls the ``perform_check`` task (defined below) for each of them. @@ -23,9 +25,22 @@ def run_checks(): This allows to enqueue all the checks that need to be performed and execute them in parallel with multiple workers if needed. """ + # If checks is None, We should execute all the checks + if checks is None: + checks = CHECKS_LIST + + if not isinstance(checks, list): + raise ImproperlyConfigured( + f'Check path {checks} should be of type "list"' + ) # pragma: no cover + if not all(check_path in CHECKS_LIST for check_path in checks): + raise ImproperlyConfigured( + f'Check path {checks} should be in {CHECKS_LIST}' + ) # pragma: no cover + iterator = ( get_check_model() - .objects.filter(is_active=True) + .objects.filter(is_active=True, check_type__in=checks) .only('id') .values('id') .iterator() @@ -100,3 +115,30 @@ def auto_create_config_check( ) check.full_clean() check.save() + + +@shared_task +def auto_create_iperf3_check( + model, app_label, object_id, check_model=None, content_type_model=None +): + """ + Called by openwisp_monitoring.check.models.auto_iperf3_check_receiver + """ + Check = check_model or get_check_model() + iperf3_check_path = 'openwisp_monitoring.check.classes.Iperf3' + has_check = Check.objects.filter( + object_id=object_id, content_type__model='device', check_type=iperf3_check_path + ).exists() + # create new check only if necessary + if has_check: + return + content_type_model = content_type_model or ContentType + ct = content_type_model.objects.get(app_label=app_label, model=model) + check = Check( + name='Iperf3', + check_type=iperf3_check_path, + content_type=ct, + object_id=object_id, + ) + check.full_clean() + check.save() diff --git a/openwisp_monitoring/check/tests/iperf3_test_utils.py b/openwisp_monitoring/check/tests/iperf3_test_utils.py new file mode 100644 index 000000000..37ce9b164 --- /dev/null +++ b/openwisp_monitoring/check/tests/iperf3_test_utils.py @@ -0,0 +1,1078 @@ +# flake8: noqa + +RESULT_TCP = """ +{ + "start": { + "connected": [ + { + "socket": 5, + "local_host": "127.0.0.1", + "local_port": 54966, + "remote_host": "127.0.0.1", + "remote_port": 5201 + } + ], + "version": "iperf 3.9", + "system_info": "Linux openwisp-desktop 5.11.2-51-generic #58~20.04.1-Ubuntu SMP Tue Jun 14 11:29:12 UTC 2022 x86_64", + "timestamp": { + "time": "Thu, 30 Jun 2022 21:39:55 GMT", + "timesecs": 1656625195 + }, + "connecting_to": { + "host": "localhost", + 
"port": 5201 + }, + "cookie": "npx4ad65t3j4wginxr4a7mqedmkhhspx3sob", + "tcp_mss_default": 32768, + "sock_bufsize": 0, + "sndbuf_actual": 16384, + "rcvbuf_actual": 131072, + "test_start": { + "protocol": "TCP", + "num_streams": 1, + "blksize": 131072, + "omit": 0, + "duration": 10, + "bytes": 0, + "blocks": 0, + "reverse": 0, + "tos": 0 + } + }, + "intervals": [ + { + "streams": [ + { + "socket": 5, + "start": 0, + "end": 1.000048, + "seconds": 1.000048041343689, + "bytes": 5790760960, + "bits_per_second": 46323862219.414116, + "retransmits": 0, + "snd_cwnd": 1506109, + "rtt": 22, + "rttvar": 3, + "pmtu": 65535, + "omitted": false, + "sender": true + } + ], + "sum": { + "start": 0, + "end": 1.000048, + "seconds": 1.000048041343689, + "bytes": 5790760960, + "bits_per_second": 46323862219.414116, + "retransmits": 0, + "omitted": false, + "sender": true + } + }, + { + "streams": [ + { + "socket": 5, + "start": 1.000048, + "end": 2.000185, + "seconds": 1.0001369714736938, + "bytes": 5463080960, + "bits_per_second": 43698662209.83867, + "retransmits": 0, + "snd_cwnd": 2160939, + "rtt": 22, + "rttvar": 3, + "pmtu": 65535, + "omitted": false, + "sender": true + } + ], + "sum": { + "start": 1.000048, + "end": 2.000185, + "seconds": 1.0001369714736938, + "bytes": 5463080960, + "bits_per_second": 43698662209.83867, + "retransmits": 0, + "omitted": false, + "sender": true + } + }, + { + "streams": [ + { + "socket": 5, + "start": 2.000185, + "end": 3.00019, + "seconds": 1.0000050067901611, + "bytes": 5679349760, + "bits_per_second": 45434570598.638954, + "retransmits": 0, + "snd_cwnd": 2553837, + "rtt": 21, + "rttvar": 1, + "pmtu": 65535, + "omitted": false, + "sender": true + } + ], + "sum": { + "start": 2.000185, + "end": 3.00019, + "seconds": 1.0000050067901611, + "bytes": 5679349760, + "bits_per_second": 45434570598.638954, + "retransmits": 0, + "omitted": false, + "sender": true + } + }, + { + "streams": [ + { + "socket": 5, + "start": 3.00019, + "end": 4.000232, + "seconds": 1.0000419616699219, + "bytes": 5710807040, + "bits_per_second": 45684539320.4405, + "retransmits": 0, + "snd_cwnd": 2553837, + "rtt": 24, + "rttvar": 5, + "pmtu": 65535, + "omitted": false, + "sender": true + } + ], + "sum": { + "start": 3.00019, + "end": 4.000232, + "seconds": 1.0000419616699219, + "bytes": 5710807040, + "bits_per_second": 45684539320.4405, + "retransmits": 0, + "omitted": false, + "sender": true + } + }, + { + "streams": [ + { + "socket": 5, + "start": 4.000232, + "end": 5.000158, + "seconds": 0.999925971031189, + "bytes": 5307105280, + "bits_per_second": 42459985508.942955, + "retransmits": 0, + "snd_cwnd": 3208667, + "rtt": 27, + "rttvar": 4, + "pmtu": 65535, + "omitted": false, + "sender": true + } + ], + "sum": { + "start": 4.000232, + "end": 5.000158, + "seconds": 0.999925971031189, + "bytes": 5307105280, + "bits_per_second": 42459985508.942955, + "retransmits": 0, + "omitted": false, + "sender": true + } + }, + { + "streams": [ + { + "socket": 5, + "start": 5.000158, + "end": 6.000229, + "seconds": 1.0000710487365723, + "bytes": 5308416000, + "bits_per_second": 42464310964.35657, + "retransmits": 0, + "snd_cwnd": 3208667, + "rtt": 28, + "rttvar": 1, + "pmtu": 65535, + "omitted": false, + "sender": true + } + ], + "sum": { + "start": 5.000158, + "end": 6.000229, + "seconds": 1.0000710487365723, + "bytes": 5308416000, + "bits_per_second": 42464310964.35657, + "retransmits": 0, + "omitted": false, + "sender": true + } + }, + { + "streams": [ + { + "socket": 5, + "start": 6.000229, + "end": 7.000056, + 
"seconds": 0.9998270273208618, + "bytes": 5241569280, + "bits_per_second": 41939808681.0701, + "retransmits": 0, + "snd_cwnd": 3208667, + "rtt": 23, + "rttvar": 4, + "pmtu": 65535, + "omitted": false, + "sender": true + } + ], + "sum": { + "start": 6.000229, + "end": 7.000056, + "seconds": 0.9998270273208618, + "bytes": 5241569280, + "bits_per_second": 41939808681.0701, + "retransmits": 0, + "omitted": false, + "sender": true + } + }, + { + "streams": [ + { + "socket": 5, + "start": 7.000056, + "end": 8.000202, + "seconds": 1.0001460313797, + "bytes": 5734400000, + "bits_per_second": 45868501759.40331, + "retransmits": 0, + "snd_cwnd": 3208667, + "rtt": 22, + "rttvar": 1, + "pmtu": 65535, + "omitted": false, + "sender": true + } + ], + "sum": { + "start": 7.000056, + "end": 8.000202, + "seconds": 1.0001460313797, + "bytes": 5734400000, + "bits_per_second": 45868501759.40331, + "retransmits": 0, + "omitted": false, + "sender": true + } + }, + { + "streams": [ + { + "socket": 5, + "start": 8.000202, + "end": 9.0003, + "seconds": 1.0000979900360107, + "bytes": 5415895040, + "bits_per_second": 43322915105.98867, + "retransmits": 0, + "snd_cwnd": 3208667, + "rtt": 35, + "rttvar": 12, + "pmtu": 65535, + "omitted": false, + "sender": true + } + ], + "sum": { + "start": 8.000202, + "end": 9.0003, + "seconds": 1.0000979900360107, + "bytes": 5415895040, + "bits_per_second": 43322915105.98867, + "retransmits": 0, + "omitted": false, + "sender": true + } + }, + { + "streams": [ + { + "socket": 5, + "start": 9.0003, + "end": 10.000218, + "seconds": 0.999917984008789, + "bytes": 5402787840, + "bits_per_second": 43225847930.76398, + "retransmits": 0, + "snd_cwnd": 3208667, + "rtt": 26, + "rttvar": 17, + "pmtu": 65535, + "omitted": false, + "sender": true + } + ], + "sum": { + "start": 9.0003, + "end": 10.000218, + "seconds": 0.999917984008789, + "bytes": 5402787840, + "bits_per_second": 43225847930.76398, + "retransmits": 0, + "omitted": false, + "sender": true + } + } + ], + "end": { + "streams": [ + { + "sender": { + "socket": 5, + "start": 0, + "end": 10.000218, + "seconds": 10.000218, + "bytes": 55054172160, + "bits_per_second": 44042377604.16823, + "retransmits": 0, + "max_snd_cwnd": 3208667, + "max_rtt": 35, + "min_rtt": 21, + "mean_rtt": 25, + "sender": true + }, + "receiver": { + "socket": 5, + "start": 0, + "end": 10.000272, + "seconds": 10.000218, + "bytes": 55054172160, + "bits_per_second": 44042139781.797935, + "sender": true + } + } + ], + "sum_sent": { + "start": 0, + "end": 10.000218, + "seconds": 10.000218, + "bytes": 55054172160, + "bits_per_second": 44042377604.16823, + "retransmits": 0, + "sender": true + }, + "sum_received": { + "start": 0, + "end": 10.000272, + "seconds": 10.000272, + "bytes": 55054172160, + "bits_per_second": 44042139781.797935, + "sender": true + }, + "cpu_utilization_percent": { + "host_total": 99.49882081069975, + "host_user": 0.6620490539150914, + "host_system": 98.83676176238454, + "remote_total": 0.377797593572381, + "remote_user": 0.02174276147834767, + "remote_system": 0.35605477540538377 + }, + "sender_tcp_congestion": "cubic", + "receiver_tcp_congestion": "cubic" + } +} +""" + +RESULT_UDP = """ +{ + "start": { + "connected": [ + { + "socket": 5, + "local_host": "127.0.0.1", + "local_port": 54477, + "remote_host": "127.0.0.1", + "remote_port": 5201 + } + ], + "version": "iperf 3.9", + "system_info": "openwisp-desktop 5.11.2-51-generic #58~20.04.1-Ubuntu SMP Tue Jun 14 11:29:12 UTC 2022 x86_64", + "timestamp": { + "time": "Thu, 30 Jun 2022 21:10:31 GMT", + 
"timesecs": 1656623431 + }, + "connecting_to": { + "host": "localhost", + "port": 5201 + }, + "cookie": "kvuxkz3ncutquvpl2evufmdkn726molzocot", + "sock_bufsize": 0, + "sndbuf_actual": 212992, + "rcvbuf_actual": 212992, + "test_start": { + "protocol": "UDP", + "num_streams": 1, + "blksize": 32768, + "omit": 0, + "duration": 10, + "bytes": 0, + "blocks": 0, + "reverse": 0, + "tos": 0 + } + }, + "intervals": [ + { + "streams": [ + { + "socket": 5, + "start": 0, + "end": 1.000057, + "seconds": 1.0000569820404053, + "bytes": 131072, + "bits_per_second": 1048516.253404483, + "packets": 4, + "omitted": false, + "sender": true + } + ], + "sum": { + "start": 0, + "end": 1.000057, + "seconds": 1.0000569820404053, + "bytes": 131072, + "bits_per_second": 1048516.253404483, + "packets": 4, + "omitted": false, + "sender": true + } + }, + { + "streams": [ + { + "socket": 5, + "start": 1.000057, + "end": 2.000079, + "seconds": 1.000022053718567, + "bytes": 131072, + "bits_per_second": 1048552.875509981, + "packets": 4, + "omitted": false, + "sender": true + } + ], + "sum": { + "start": 1.000057, + "end": 2.000079, + "seconds": 1.000022053718567, + "bytes": 131072, + "bits_per_second": 1048552.875509981, + "packets": 4, + "omitted": false, + "sender": true + } + }, + { + "streams": [ + { + "socket": 5, + "start": 2.000079, + "end": 3.000079, + "seconds": 1, + "bytes": 131072, + "bits_per_second": 1048576, + "packets": 4, + "omitted": false, + "sender": true + } + ], + "sum": { + "start": 2.000079, + "end": 3.000079, + "seconds": 1, + "bytes": 131072, + "bits_per_second": 1048576, + "packets": 4, + "omitted": false, + "sender": true + } + }, + { + "streams": [ + { + "socket": 5, + "start": 3.000079, + "end": 4.000079, + "seconds": 1, + "bytes": 131072, + "bits_per_second": 1048576, + "packets": 4, + "omitted": false, + "sender": true + } + ], + "sum": { + "start": 3.000079, + "end": 4.000079, + "seconds": 1, + "bytes": 131072, + "bits_per_second": 1048576, + "packets": 4, + "omitted": false, + "sender": true + } + }, + { + "streams": [ + { + "socket": 5, + "start": 4.000079, + "end": 5.000182, + "seconds": 1.0001029968261719, + "bytes": 131072, + "bits_per_second": 1048468.0111225117, + "packets": 4, + "omitted": false, + "sender": true + } + ], + "sum": { + "start": 4.000079, + "end": 5.000182, + "seconds": 1.0001029968261719, + "bytes": 131072, + "bits_per_second": 1048468.0111225117, + "packets": 4, + "omitted": false, + "sender": true + } + }, + { + "streams": [ + { + "socket": 5, + "start": 5.000182, + "end": 6.000056, + "seconds": 0.9998739957809448, + "bytes": 131072, + "bits_per_second": 1048708.1416504055, + "packets": 4, + "omitted": false, + "sender": true + } + ], + "sum": { + "start": 5.000182, + "end": 6.000056, + "seconds": 0.9998739957809448, + "bytes": 131072, + "bits_per_second": 1048708.1416504055, + "packets": 4, + "omitted": false, + "sender": true + } + }, + { + "streams": [ + { + "socket": 5, + "start": 6.000056, + "end": 7.000056, + "seconds": 1, + "bytes": 131072, + "bits_per_second": 1048576, + "packets": 4, + "omitted": false, + "sender": true + } + ], + "sum": { + "start": 6.000056, + "end": 7.000056, + "seconds": 1, + "bytes": 131072, + "bits_per_second": 1048576, + "packets": 4, + "omitted": false, + "sender": true + } + }, + { + "streams": [ + { + "socket": 5, + "start": 7.000056, + "end": 8.000056, + "seconds": 1, + "bytes": 131072, + "bits_per_second": 1048576, + "packets": 4, + "omitted": false, + "sender": true + } + ], + "sum": { + "start": 7.000056, + "end": 8.000056, + 
"seconds": 1, + "bytes": 131072, + "bits_per_second": 1048576, + "packets": 4, + "omitted": false, + "sender": true + } + }, + { + "streams": [ + { + "socket": 5, + "start": 8.000056, + "end": 9.000057, + "seconds": 1.0000009536743164, + "bytes": 131072, + "bits_per_second": 1048575.0000009537, + "packets": 4, + "omitted": false, + "sender": true + } + ], + "sum": { + "start": 8.000056, + "end": 9.000057, + "seconds": 1.0000009536743164, + "bytes": 131072, + "bits_per_second": 1048575.0000009537, + "packets": 4, + "omitted": false, + "sender": true + } + }, + { + "streams": [ + { + "socket": 5, + "start": 9.000057, + "end": 10.00006, + "seconds": 1.0000029802322388, + "bytes": 131072, + "bits_per_second": 1048572.8750093132, + "packets": 4, + "omitted": false, + "sender": true + } + ], + "sum": { + "start": 9.000057, + "end": 10.00006, + "seconds": 1.0000029802322388, + "bytes": 131072, + "bits_per_second": 1048572.8750093132, + "packets": 4, + "omitted": false, + "sender": true + } + } + ], + "end": { + "streams": [ + { + "udp": { + "socket": 5, + "start": 0, + "end": 10.00006, + "seconds": 10.00006, + "bytes": 1310720, + "bits_per_second": 1048569.7085817485, + "jitter_ms": 0.011259258240784126, + "lost_packets": 0, + "packets": 40, + "lost_percent": 0, + "out_of_order": 0, + "sender": true + } + } + ], + "sum": { + "start": 0, + "end": 10.000115, + "seconds": 10.000115, + "bytes": 1310720, + "bits_per_second": 1048569.7085817485, + "jitter_ms": 0.011259258240784126, + "lost_packets": 0, + "packets": 40, + "lost_percent": 0, + "sender": true + }, + "cpu_utilization_percent": { + "host_total": 0.6057128493969417, + "host_user": 0, + "host_system": 0.6057128493969417, + "remote_total": 0.016163250220207454, + "remote_user": 0.01616789349806445, + "remote_system": 0 + } + } +} +""" + +RESULT_FAIL = """ +{ + "start": { + "connected": [], + "version": "iperf 3.7", + "system_info": "Linux vm-openwrt 4.14.171 #0 SMP Thu Feb 27 21:05:12 2020 x86_64" + }, + "intervals": [], + "end": {}, + "error": "error - unable to connect to server: Connection refused" +} +""" +RESULT_AUTH_FAIL = """ +{ + "start": { + "connected": [], + "version": "iperf 3.7", + "system_info": "Linux vm-openwrt 4.14.171 #0 SMP Thu Feb 27 21:05:12 2020 x86_64", + "timestamp": { + "time": "Tue, 19 Jul 2022 12:23:38 UTC", + "timesecs": 1658233418 + }, + "connecting_to": { + "host": "192.168.5.109", + "port": 5201 + }, + "cookie": "llz5f6akwyonbtcj3fx4phvfaflohdlvxr4z", + "tcp_mss_default": 1460 + }, + "intervals": [], + "end": {}, + "error": "error - test authorization failed" +} +""" +PARAM_ERROR = """Usage: iperf3 [-s|-c host] [options] + iperf3 [-h|--help] [-v|--version] + +Server or Client: + -p, --port # server port to listen on/connect to + -f, --format [kmgtKMGT] format to report: Kbits, Mbits, Gbits, Tbits + -i, --interval # seconds between periodic throughput reports + -F, --file name xmit/recv the specified file + -A, --affinity n/n,m set CPU affinity + -B, --bind bind to the interface associated with the address + -V, --verbose more detailed output + -J, --json output in JSON format + --logfile f send output to a log file + --forceflush force flushing output at every interval + -d, --debug emit debugging output + -v, --version show version information and quit + -h, --help show this message and quit +Server specific: + -s, --server run in server mode + -D, --daemon run the server as a daemon + -I, --pidfile file write PID file + -1, --one-off handle one client connection then exit + --rsa-private-key-path path to the 
RSA private key used to decrypt + authentication credentials + --authorized-users-path path to the configuration file containing user + credentials +Client specific: + -c, --client run in client mode, connecting to + -u, --udp use UDP rather than TCP + --connect-timeout # timeout for control connection setup (ms) + -b, --bitrate #[KMG][/#] target bitrate in bits/sec (0 for unlimited) + (default 1 Mbit/sec for UDP, unlimited for TCP) + (optional slash and packet count for burst mode) + --pacing-timer #[KMG] set the timing for pacing, in microseconds (default 1000) + --fq-rate #[KMG] enable fair-queuing based socket pacing in + bits/sec (Linux only) + -t, --time # time in seconds to transmit for (default 10 secs) + -n, --bytes #[KMG] number of bytes to transmit (instead of -t) + -k, --blockcount #[KMG] number of blocks (packets) to transmit (instead of -t or -n) + -l, --length #[KMG] length of buffer to read or write + (default 128 KB for TCP, dynamic or 1460 for UDP) + --cport bind to a specific client port (TCP and UDP, default: ephemeral port) + -P, --parallel # number of parallel client streams to run + -R, --reverse run in reverse mode (server sends, client receives) + --bidir run in bidirectional mode. + Client and server send and receive data. + -w, --window #[KMG] set window size / socket buffer size + -C, --congestion set TCP congestion control algorithm (Linux and FreeBSD only) + -M, --set-mss # set TCP/SCTP maximum segment size (MTU - 40 bytes) + -N, --no-delay set TCP/SCTP no delay, disabling Nagle's Algorithm + -4, --version4 only use IPv4 + -6, --version6 only use IPv6 + -S, --tos N set the IP type of service, 0-255. + The usual prefixes for octal and hex can be used, + i.e. 52, 064 and 0x34 all specify the same value. + --dscp N or --dscp val set the IP dscp value, either 0-63 or symbolic. + Numeric values can be specified in decimal, + octal and hex (see --tos above). 
+ -L, --flowlabel N set the IPv6 flow label (only supported on Linux) + -Z, --zerocopy use a 'zero copy' method of sending data + -O, --omit N omit the first n seconds + -T, --title str prefix every output line with this string + --extra-data str data string to include in client and server JSON + --get-server-output get results from server + --udp-counters-64bit use 64-bit counters in UDP test packets + --repeating-payload use repeating pattern in payload, instead of + randomized payload (like in iperf2) + --username username for authentication + --rsa-public-key-path path to the RSA public key used to encrypt + authentication credentials + +[KMG] indicates options that support a K/M/G suffix for kilo-, mega-, or giga- + +iperf3 homepage at: https://software.es.net/iperf/ +Report bugs to: https://github.com/esnet/iperf +iperf3: parameter error - you must specify username (max 20 chars), password (max 20 chars) and a path to a valid public rsa client to be used""" + +TEST_RSA_KEY = """MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAwuEm+iYrfSWJOupy6X3N +dxZvUCxvmoL3uoGAs0O0Y32unUQrwcTIxudy38JSuCccD+k2Rf8S4WuZSiTxaoea +6Du99YQGVZeY67uJ21SWFqWU+w6ONUj3TrNNWoICN7BXGLE2BbSBz9YaXefE3aqw +GhEjQz364Itwm425vHn2MntSp0weWb4hUCjQUyyooRXPrFUGBOuY+VvAvMyAG4Uk +msapnWnBSxXt7Tbb++A5XbOMdM2mwNYDEtkD5ksC/x3EVBrI9FvENsH9+u/8J9Mf +2oPl4MnlCMY86MQypkeUn7eVWfDnseNky7TyC0/IgCXve/iaydCCFdkjyo1MTAA4 +BQIDAQAB""" + +INVALID_PARAMS = [ + {'host': ''}, + {'host': 12}, + {'host': 'test.openwisp.io'}, + {'username': 121}, + {'password': -323}, + {'rsa_public_key': 1334}, + {'username': ''}, + {'password': 0}, + {'rsa_public_key': 0}, + { + 'username': 'openwisp-test-user', + 'password': 'open-pass', + 'rsa_public_key': -1, + }, + { + 'username': 1123, + 'password': 'rossi', + 'rsa_public_key': '', + }, + { + 'username': 'openwisp-test-user', + 'password': -214, + }, + { + 'client_options': { + 'port': 'testport', + 'time': 120, + 'tcp': {'bitrate': '10M'}, + 'udp': {'bitrate': '50M'}, + } + }, + { + 'host': ['test.openwisp.io'], + 'client_options': { + 'port': 'testport', + 'time': 120, + 'tcp': {'bitrate': '10M'}, + 'udp': {'bitrate': '50M'}, + }, + }, + { + 'host': ['test.openwisp.io'], + 'client_options': { + 'port': 70000, + 'time': 120, + 'tcp': {'bitrate': '10M'}, + 'udp': {'bitrate': '50M'}, + }, + }, + { + 'host': ['test.openwisp.io'], + 'client_options': { + 'port': -21, + 'time': 120, + 'tcp': {'bitrate': '10M'}, + 'udp': {'bitrate': '50M'}, + }, + }, + { + 'host': ['test.openwisp.io'], + 'client_options': { + 'port': 5201, + 'time': 1200000, + 'tcp': {'bitrate': '10M'}, + 'udp': {'bitrate': '50M'}, + }, + }, + { + 'host': ['test.openwisp.io'], + 'client_options': { + 'port': 5201, + 'time': 20, + 'tcp': {'bitrate': 10}, + 'udp': {'bitrate': '50M'}, + }, + }, + { + 'host': ['test.openwisp.io'], + 'client_options': { + 'port': 5201, + 'time': 120, + 'tcp': {'bitrate': '10M'}, + 'udp': {'bitrate': 50}, + }, + }, + { + 'host': ['test.openwisp.io'], + 'client_options': { + 'port': 5201, + 'bytes': 20, + 'tcp': {'bitrate': '10M'}, + 'udp': {'bitrate': 50}, + }, + }, + { + 'host': ['test.openwisp.io'], + 'client_options': { + 'port': 5201, + 'bytes': '', + 'tcp': {'bitrate': '10M'}, + 'udp': {'bitrate': 50}, + }, + }, + { + 'host': ['test.openwisp.io'], + 'client_options': { + 'port': 5201, + 'bytes': -1, + 'tcp': {'bitrate': '10M'}, + 'udp': {'bitrate': 50}, + }, + }, + { + 'host': ['test.openwisp.io'], + 'client_options': { + 'port': 5201, + 'connect_timeout': -1, + 'tcp': {'bitrate': '10M'}, + 'udp': {'bitrate': 
50}, + }, + }, + { + 'host': ['test.openwisp.io'], + 'client_options': { + 'port': 5201, + 'connect_timeout': '11000', + 'tcp': {'bitrate': '10M'}, + 'udp': {'bitrate': 50}, + }, + }, + { + 'host': ['test.openwisp.io'], + 'client_options': { + 'port': 5201, + 'blockcount': -13, + 'tcp': {'bitrate': '10M'}, + 'udp': {'bitrate': 50}, + }, + }, + { + 'host': ['test.openwisp.io'], + 'client_options': { + 'port': 5201, + 'blockcount': '', + 'tcp': {'bitrate': '10M'}, + 'udp': {'bitrate': 50}, + }, + }, + { + 'host': ['test.openwisp.io'], + 'client_options': { + 'port': 5201, + 'tcp': {'bitrate': '10M', 'length': 112}, + 'udp': {'bitrate': 50}, + }, + }, + { + 'host': ['test.openwisp.io'], + 'client_options': { + 'port': 5201, + 'connect_timeout': 2000000, + 'blockcount': '100K', + 'tcp': {'bitrate': '10M'}, + 'udp': {'bitrate': 50, 'length': 9595}, + }, + }, + { + 'host': ['test.openwisp.io'], + 'client_options': { + 'port': 5201, + 'parallel': '12', + 'connect_timeout': 2000000, + 'blockcount': '100K', + 'tcp': {'bitrate': '10M'}, + 'udp': {'bitrate': 50, 'length': 9595}, + }, + }, + { + 'host': ['test.openwisp.io'], + 'client_options': { + 'port': 5201, + 'parallel': 0, + 'connect_timeout': 2000000, + 'blockcount': '100K', + 'tcp': {'bitrate': '10M'}, + 'udp': {'bitrate': 50, 'length': 9595}, + }, + }, + { + 'host': ['test.openwisp.io'], + 'client_options': { + 'port': 5201, + 'parallel': 250, + 'connect_timeout': 2000000, + 'blockcount': '100K', + 'tcp': {'bitrate': '10M'}, + 'udp': {'bitrate': 50, 'length': 9595}, + }, + }, + { + 'host': ['test.openwisp.io'], + 'client_options': { + 'port': 5201, + 'bidirectional': True, + 'connect_timeout': 2000000, + 'blockcount': '100K', + 'tcp': {'bitrate': '10M'}, + 'udp': {'bitrate': 50, 'length': 9595}, + }, + }, + { + 'host': ['test.openwisp.io'], + 'client_options': { + 'port': 5201, + 'reverse': False, + 'connect_timeout': 2000000, + 'blockcount': '100K', + 'tcp': {'bitrate': '10M'}, + 'udp': {'bitrate': 50, 'length': 9595}, + }, + }, + { + 'host': ['test.openwisp.io'], + 'client_options': { + 'port': 5201, + 'window': 0, + 'connect_timeout': 2000000, + 'blockcount': '100K', + 'tcp': {'bitrate': '10M'}, + 'udp': {'bitrate': 50, 'length': 9595}, + }, + }, +] diff --git a/openwisp_monitoring/check/tests/test_iperf3.py b/openwisp_monitoring/check/tests/test_iperf3.py new file mode 100644 index 000000000..dd560aa51 --- /dev/null +++ b/openwisp_monitoring/check/tests/test_iperf3.py @@ -0,0 +1,783 @@ +from json import loads +from unittest.mock import call, patch + +from django.core.cache import cache +from django.core.exceptions import ValidationError +from django.test import TransactionTestCase +from swapper import load_model + +from openwisp_controller.connection.connectors.ssh import Ssh +from openwisp_controller.connection.models import DeviceConnection as device_connection +from openwisp_controller.connection.settings import UPDATE_STRATEGIES +from openwisp_controller.connection.tests.utils import CreateConnectionsMixin, SshServer +from openwisp_monitoring.check.classes.iperf3 import get_iperf3_schema +from openwisp_monitoring.check.classes.iperf3 import logger as iperf3_logger + +from ...device.tests import TestDeviceMonitoringMixin +from .. 
import settings as app_settings +from ..classes import Iperf3 +from .iperf3_test_utils import ( + INVALID_PARAMS, + PARAM_ERROR, + RESULT_AUTH_FAIL, + RESULT_FAIL, + RESULT_TCP, + RESULT_UDP, + TEST_RSA_KEY, +) + +Chart = load_model('monitoring', 'Chart') +AlertSettings = load_model('monitoring', 'AlertSettings') +Metric = load_model('monitoring', 'Metric') +Check = load_model('check', 'Check') + + +class TestIperf3( + CreateConnectionsMixin, TestDeviceMonitoringMixin, TransactionTestCase +): + + _IPERF3 = app_settings.CHECK_CLASSES[2][0] + _RESULT_KEYS = [ + 'iperf3_result', + 'sent_bps_tcp', + 'received_bps_tcp', + 'sent_bytes_tcp', + 'received_bytes_tcp', + 'retransmits', + 'sent_bps_udp', + 'sent_bytes_udp', + 'jitter', + 'total_packets', + 'lost_packets', + 'lost_percent', + ] + _IPERF3_TEST_SERVER = ['iperf3.openwisptestserver.com'] + _IPERF3_TEST_MULTIPLE_SERVERS = [ + 'iperf3.openwisptestserver1.com', + 'iperf3.openwisptestserver2.com', + ] + + @classmethod + def setUpClass(cls): + super().setUpClass() + cls.mock_ssh_server = SshServer( + {'root': cls._TEST_RSA_PRIVATE_KEY_PATH} + ).__enter__() + cls.ssh_server.port = cls.mock_ssh_server.port + + @classmethod + def tearDownClass(cls): + super().tearDownClass() + cls.mock_ssh_server.__exit__() + app_settings.IPERF3_CHECK_CONFIG = {} + + def setUp(self): + ckey = self._create_credentials_with_key(port=self.ssh_server.port) + self.dc = self._create_device_connection(credentials=ckey) + self.device = self.dc.device + self.org_id = str(self.device.organization.id) + self.dc.connect() + app_settings.IPERF3_CHECK_CONFIG = { + self.org_id: {'host': self._IPERF3_TEST_SERVER} + } + self._EXPECTED_COMMAND_CALLS = [ + call( + ( + 'iperf3 -c iperf3.openwisptestserver.com -p 5201 -t 10 --connect-timeout 1 ' + '-b 0 -l 128K -w 0 -P 1 -J' + ), + raise_unexpected_exit=False, + ), + call( + ( + 'iperf3 -c iperf3.openwisptestserver.com -p 5201 -t 10 --connect-timeout 1 ' + '-b 30M -l 0 -w 0 -P 1 -u -J' + ), + raise_unexpected_exit=False, + ), + ] + self._EXPECTED_WARN_CALLS = [ + call( + ( + f'Iperf3 check failed for "{self.device}", ' + 'error - unable to connect to server: Connection refused' + ) + ), + call( + ( + f'Iperf3 check failed for "{self.device}", ' + 'error - unable to connect to server: Connection refused' + ) + ), + ] + + def _perform_iperf3_check(self): + check = Check.objects.get(check_type=self._IPERF3) + return check.perform_check(store=False) + + def _set_auth_expected_calls(self, config): + password = config[self.org_id]['password'] + username = config[self.org_id]['username'] + server = 'iperf3.openwisptestserver.com' + test_prefix = '-----BEGIN PUBLIC KEY-----\n' + test_suffix = '\n-----END PUBLIC KEY-----' + key = config[self.org_id]['rsa_public_key'] + rsa_key_path = '/tmp/iperf3-public-key.pem' + + self._EXPECTED_COMMAND_CALLS = [ + call( + ( + f'echo "{test_prefix}{key}{test_suffix}" > {rsa_key_path} && ' + f'IPERF3_PASSWORD="{password}" iperf3 -c {server} -p 5201 -t 10 ' + f'--username "{username}" --rsa-public-key-path {rsa_key_path} --connect-timeout 1 ' + f'-b 0 -l 128K -w 0 -P 1 -J' + ), + raise_unexpected_exit=False, + ), + call( + ( + f'IPERF3_PASSWORD="{password}" iperf3 -c {server} -p 5201 -t 10 ' + f'--username "{username}" --rsa-public-key-path {rsa_key_path} --connect-timeout 1 ' + f'-b 30M -l 0 -w 0 -P 1 -u -J ' + f'&& rm -f {rsa_key_path}' + ), + raise_unexpected_exit=False, + ), + ] + + def _assert_iperf3_fail_result(self, result): + for key in self._RESULT_KEYS: + self.assertIn(key, result) + 
self.assertEqual(result['iperf3_result'], 0) + self.assertEqual(result['sent_bps_tcp'], 0.0) + self.assertEqual(result['received_bps_tcp'], 0.0) + self.assertEqual(result['sent_bytes_tcp'], 0) + self.assertEqual(result['received_bytes_tcp'], 0) + self.assertEqual(result['retransmits'], 0) + self.assertEqual(result['sent_bps_udp'], 0.0) + self.assertEqual(result['sent_bytes_udp'], 0) + self.assertEqual(result['jitter'], 0.0) + self.assertEqual(result['total_packets'], 0) + self.assertEqual(result['lost_percent'], 0.0) + + @patch.object(Ssh, 'exec_command') + @patch.object(iperf3_logger, 'warning') + def test_iperf3_check_no_params(self, mock_warn, mock_exec_command): + mock_exec_command.side_effect = [(RESULT_TCP, 0), (RESULT_UDP, 0)] + # By default check params {} + tcp_result = loads(RESULT_TCP)['end'] + udp_result = loads(RESULT_UDP)['end']['sum'] + result = self._perform_iperf3_check() + for key in self._RESULT_KEYS: + self.assertIn(key, result) + self.assertEqual(result['iperf3_result'], 1) + self.assertEqual( + result['sent_bps_tcp'], tcp_result['sum_sent']['bits_per_second'] + ) + self.assertEqual( + result['received_bytes_tcp'], tcp_result['sum_received']['bytes'] + ) + self.assertEqual(result['jitter'], udp_result['jitter_ms']) + self.assertEqual(result['total_packets'], udp_result['packets']) + self.assertEqual(mock_warn.call_count, 0) + self.assertEqual(mock_exec_command.call_count, 2) + mock_exec_command.assert_has_calls(self._EXPECTED_COMMAND_CALLS) + + @patch.object(Ssh, 'exec_command') + @patch.object(iperf3_logger, 'warning') + def test_iperf3_check_params(self, mock_warn, mock_exec_command): + mock_exec_command.side_effect = [(RESULT_TCP, 0), (RESULT_UDP, 0)] + check = Check.objects.get(check_type=self._IPERF3) + tcp_result = loads(RESULT_TCP)['end'] + udp_result = loads(RESULT_UDP)['end']['sum'] + server = self._IPERF3_TEST_SERVER[0] + test_prefix = '-----BEGIN PUBLIC KEY-----\n' + test_suffix = '\n-----END PUBLIC KEY-----' + rsa_key_path = '/tmp/test-rsa.pem' + test_params = { + 'username': 'openwisp-test-user', + 'password': 'openwisp_pass', + 'rsa_public_key': TEST_RSA_KEY, + 'client_options': { + 'port': 6201, + 'time': 20, + 'window': '300K', + 'parallel': 5, + 'reverse': True, + 'connect_timeout': 1000, + 'tcp': {'bitrate': '10M', 'length': '128K'}, + 'udp': {'bitrate': '50M', 'length': '400K'}, + }, + } + time = test_params['client_options']['time'] + port = test_params['client_options']['port'] + window = test_params['client_options']['window'] + parallel = test_params['client_options']['parallel'] + tcp_bitrate = test_params['client_options']['tcp']['bitrate'] + tcp_len = test_params['client_options']['tcp']['length'] + udp_bitrate = test_params['client_options']['udp']['bitrate'] + udp_len = test_params['client_options']['udp']['length'] + username = test_params['username'] + password = test_params['password'] + key = test_params['rsa_public_key'] + rsa_key_path = '/tmp/iperf3-public-key.pem' + check.params = test_params + check.save() + self._EXPECTED_COMMAND_CALLS = [ + call( + ( + f'echo "{test_prefix}{key}{test_suffix}" > {rsa_key_path} && ' + f'IPERF3_PASSWORD="{password}" iperf3 -c {server} -p {port} -t {time} ' + f'--username "{username}" --rsa-public-key-path {rsa_key_path} --connect-timeout 1000 ' + f'-b {tcp_bitrate} -l {tcp_len} -w {window} -P {parallel} --reverse -J' + ), + raise_unexpected_exit=False, + ), + call( + ( + f'IPERF3_PASSWORD="{password}" iperf3 -c {server} -p {port} -t {time} ' + f'--username "{username}" --rsa-public-key-path 
{rsa_key_path} --connect-timeout 1000 ' + f'-b {udp_bitrate} -l {udp_len} -w {window} -P {parallel} --reverse -u -J ' + f'&& rm -f {rsa_key_path}' + ), + raise_unexpected_exit=False, + ), + ] + result = self._perform_iperf3_check() + for key in self._RESULT_KEYS: + self.assertIn(key, result) + self.assertEqual(result['iperf3_result'], 1) + self.assertEqual( + result['sent_bps_tcp'], tcp_result['sum_sent']['bits_per_second'] + ) + self.assertEqual( + result['received_bytes_tcp'], tcp_result['sum_received']['bytes'] + ) + self.assertEqual(result['jitter'], udp_result['jitter_ms']) + self.assertEqual(result['total_packets'], udp_result['packets']) + self.assertEqual(mock_warn.call_count, 0) + self.assertEqual(mock_exec_command.call_count, 2) + mock_exec_command.assert_has_calls(self._EXPECTED_COMMAND_CALLS) + + @patch.object(Ssh, 'exec_command') + @patch.object(iperf3_logger, 'warning') + def test_iperf3_check_config(self, mock_warn, mock_exec_command): + mock_exec_command.side_effect = [(RESULT_TCP, 0), (RESULT_UDP, 0)] + tcp_result = loads(RESULT_TCP)['end'] + udp_result = loads(RESULT_UDP)['end']['sum'] + self._EXPECTED_COMMAND_CALLS = [ + call( + ( + 'iperf3 -c iperf3.openwisptestserver.com -p 9201 -k 1M --connect-timeout 2000 ' + '-b 10M -l 512K -w 0 -P 1 --bidir -J' + ), + raise_unexpected_exit=False, + ), + call( + ( + 'iperf3 -c iperf3.openwisptestserver.com -p 9201 -k 1M --connect-timeout 2000 ' + '-b 50M -l 256K -w 0 -P 1 --bidir -u -J' + ), + raise_unexpected_exit=False, + ), + ] + iperf3_config = { + self.org_id: { + 'host': ['iperf3.openwisptestserver.com'], + 'client_options': { + 'port': 9201, + 'time': 120, + 'connect_timeout': 2000, + 'bytes': '20M', + 'blockcount': '1M', + 'bidirectional': True, + 'tcp': {'bitrate': '10M', 'length': '512K'}, + 'udp': {'bitrate': '50M', 'length': '256K'}, + }, + } + } + with patch.object(app_settings, 'IPERF3_CHECK_CONFIG', iperf3_config): + with patch.object(Iperf3, 'schema', get_iperf3_schema()): + result = self._perform_iperf3_check() + for key in self._RESULT_KEYS: + self.assertIn(key, result) + self.assertEqual(result['iperf3_result'], 1) + self.assertEqual( + result['sent_bps_tcp'], tcp_result['sum_sent']['bits_per_second'] + ) + self.assertEqual( + result['received_bytes_tcp'], tcp_result['sum_received']['bytes'] + ) + self.assertEqual(result['jitter'], udp_result['jitter_ms']) + self.assertEqual(result['total_packets'], udp_result['packets']) + self.assertEqual(mock_warn.call_count, 0) + self.assertEqual(mock_exec_command.call_count, 2) + mock_exec_command.assert_has_calls(self._EXPECTED_COMMAND_CALLS) + + @patch.object(iperf3_logger, 'warning') + def test_iperf3_device_connection(self, mock_warn): + dc = self.dc + with self.subTest('Test active device connection when management tunnel down'): + with patch.object( + device_connection, 'connect', return_value=False + ) as mocked_connect: + self._perform_iperf3_check() + mock_warn.assert_called_with( + f'DeviceConnection for "{self.device}" is not working, iperf3 check skipped!' + ) + self.assertEqual(mocked_connect.call_count, 1) + + with self.subTest('Test device connection is not enabled'): + dc.enabled = False + dc.save() + self._perform_iperf3_check() + mock_warn.assert_called_with( + f'Failed to get a working DeviceConnection for "{self.device}", iperf3 check skipped!' 
+ ) + + with self.subTest('Test device connection is not with right update strategy'): + dc.update_strategy = UPDATE_STRATEGIES[1][0] + dc.is_working = True + dc.enabled = True + dc.save() + self._perform_iperf3_check() + mock_warn.assert_called_with( + f'Failed to get a working DeviceConnection for "{self.device}", iperf3 check skipped!' + ) + + def test_iperf3_check_content_object_none(self): + check = Check(name='Iperf3 check', check_type=self._IPERF3, params={}) + try: + check.check_instance.validate() + except ValidationError as e: + self.assertIn('device', str(e)) + else: + self.fail('ValidationError not raised') + + def test_iperf3_check_content_object_not_device(self): + check = Check( + name='Iperf3 check', + check_type=self._IPERF3, + content_object=self._create_user(), + params={}, + ) + try: + check.check_instance.validate() + except ValidationError as e: + self.assertIn('device', str(e)) + else: + self.fail('ValidationError not raised') + + def test_iperf3_check_schema_violation(self): + for invalid_param in INVALID_PARAMS: + check = Check( + name='Iperf3 check', + check_type=self._IPERF3, + content_object=self.device, + params=invalid_param, + ) + try: + check.check_instance.validate() + except ValidationError as e: + self.assertIn('Invalid param', str(e)) + else: + self.fail('ValidationError not raised') + + @patch.object(Ssh, 'exec_command') + @patch.object(iperf3_logger, 'warning') + def test_iperf3_check(self, mock_warn, mock_exec_command): + error = "ash: iperf3: not found" + tcp_result = loads(RESULT_TCP)['end'] + udp_result = loads(RESULT_UDP)['end']['sum'] + iperf3_json_error_config = { + self.org_id: { + 'host': ['iperf3.openwisptestserver.com'], + 'username': 'test', + 'password': 'testpass', + 'rsa_public_key': 'INVALID_RSA_KEY', + } + } + with patch.object( + app_settings, 'IPERF3_CHECK_CONFIG', iperf3_json_error_config + ): + with self.subTest('Test iperf3 errors not in json format'): + mock_exec_command.side_effect = [(PARAM_ERROR, 1), (PARAM_ERROR, 1)] + EXPECTED_WARN_CALLS = [ + call( + f'Iperf3 check failed for "{self.device}", error - {PARAM_ERROR}' + ), + call( + f'Iperf3 check failed for "{self.device}", error - {PARAM_ERROR}' + ), + ] + self._perform_iperf3_check() + self.assertEqual(mock_warn.call_count, 2) + self.assertEqual(mock_exec_command.call_count, 2) + mock_warn.assert_has_calls(EXPECTED_WARN_CALLS) + mock_warn.reset_mock() + mock_exec_command.reset_mock() + + with self.subTest('Test iperf3 is not installed on the device'): + mock_exec_command.side_effect = [(error, 127)] + self._perform_iperf3_check() + mock_warn.assert_called_with( + f'Iperf3 is not installed on the "{self.device}", error - {error}' + ) + self.assertEqual(mock_warn.call_count, 1) + self.assertEqual(mock_exec_command.call_count, 1) + mock_warn.reset_mock() + mock_exec_command.reset_mock() + + with self.subTest('Test iperf3 check passes in both TCP & UDP'): + mock_exec_command.side_effect = [(RESULT_TCP, 0), (RESULT_UDP, 0)] + self.assertEqual(Chart.objects.count(), 2) + self.assertEqual(Metric.objects.count(), 2) + result = self._perform_iperf3_check() + for key in self._RESULT_KEYS: + self.assertIn(key, result) + self.assertEqual(result['iperf3_result'], 1) + self.assertEqual( + result['sent_bps_tcp'], tcp_result['sum_sent']['bits_per_second'] + ) + self.assertEqual( + result['received_bps_tcp'], + tcp_result['sum_received']['bits_per_second'], + ) + self.assertEqual(result['sent_bytes_tcp'], tcp_result['sum_sent']['bytes']) + self.assertEqual( + result['received_bytes_tcp'], 
tcp_result['sum_received']['bytes'] + ) + self.assertEqual( + result['retransmits'], tcp_result['sum_sent']['retransmits'] + ) + self.assertEqual(result['sent_bps_udp'], udp_result['bits_per_second']) + self.assertEqual(result['sent_bytes_udp'], udp_result['bytes']) + self.assertEqual(result['jitter'], udp_result['jitter_ms']) + self.assertEqual(result['total_packets'], udp_result['packets']) + self.assertEqual(result['lost_percent'], udp_result['lost_percent']) + self.assertEqual(Chart.objects.count(), 8) + self.assertEqual(Check.objects.count(), 3) + iperf3_metric = Metric.objects.get(key='iperf3') + self.assertEqual(Metric.objects.count(), 3) + self.assertEqual(iperf3_metric.content_object, self.device) + points = iperf3_metric.read(limit=None, extra_fields=list(result.keys())) + self.assertEqual(len(points), 1) + self.assertEqual(points[0]['iperf3_result'], result['iperf3_result']) + self.assertEqual(points[0]['sent_bps_tcp'], result['sent_bps_tcp']) + self.assertEqual( + points[0]['received_bytes_tcp'], result['received_bytes_tcp'] + ) + self.assertEqual(points[0]['retransmits'], result['retransmits']) + self.assertEqual(points[0]['sent_bps_udp'], result['sent_bps_udp']) + self.assertEqual(points[0]['sent_bytes_udp'], result['sent_bytes_udp']) + self.assertEqual(points[0]['jitter'], result['jitter']) + self.assertEqual(points[0]['total_packets'], result['total_packets']) + self.assertEqual(points[0]['lost_packets'], result['lost_packets']) + self.assertEqual(points[0]['lost_percent'], result['lost_percent']) + self.assertEqual(mock_warn.call_count, 0) + self.assertEqual(mock_exec_command.call_count, 2) + mock_exec_command.assert_has_calls(self._EXPECTED_COMMAND_CALLS) + mock_warn.reset_mock() + mock_exec_command.reset_mock() + + with self.subTest('Test iperf3 check fails in both TCP & UDP'): + mock_exec_command.side_effect = [(RESULT_FAIL, 1), (RESULT_FAIL, 1)] + result = self._perform_iperf3_check() + self._assert_iperf3_fail_result(result) + self.assertEqual(Chart.objects.count(), 8) + self.assertEqual(Metric.objects.count(), 3) + self.assertEqual(mock_warn.call_count, 2) + self.assertEqual(mock_exec_command.call_count, 2) + mock_warn.assert_has_calls(self._EXPECTED_WARN_CALLS) + mock_exec_command.assert_has_calls(self._EXPECTED_COMMAND_CALLS) + mock_warn.reset_mock() + mock_exec_command.reset_mock() + + with self.subTest('Test iperf3 check TCP pass UDP fail'): + mock_exec_command.side_effect = [(RESULT_TCP, 0), (RESULT_FAIL, 1)] + result = self._perform_iperf3_check() + for key in self._RESULT_KEYS: + self.assertIn(key, result) + self.assertEqual(result['iperf3_result'], 1) + self.assertEqual( + result['sent_bps_tcp'], tcp_result['sum_sent']['bits_per_second'] + ) + self.assertEqual( + result['received_bps_tcp'], + tcp_result['sum_received']['bits_per_second'], + ) + self.assertEqual(result['sent_bytes_tcp'], tcp_result['sum_sent']['bytes']) + self.assertEqual( + result['received_bytes_tcp'], tcp_result['sum_received']['bytes'] + ) + self.assertEqual( + result['retransmits'], tcp_result['sum_sent']['retransmits'] + ) + self.assertEqual(result['sent_bps_udp'], 0.0) + self.assertEqual(result['sent_bytes_udp'], 0) + self.assertEqual(result['jitter'], 0.0) + self.assertEqual(result['total_packets'], 0) + self.assertEqual(result['lost_percent'], 0.0) + self.assertEqual(Chart.objects.count(), 8) + self.assertEqual(Metric.objects.count(), 3) + self.assertEqual(mock_warn.call_count, 1) + self.assertEqual(mock_exec_command.call_count, 2) + 
mock_warn.assert_has_calls(self._EXPECTED_WARN_CALLS[1:]) + mock_exec_command.assert_has_calls(self._EXPECTED_COMMAND_CALLS) + mock_warn.reset_mock() + mock_exec_command.reset_mock() + + with self.subTest('Test iperf3 check TCP fail UDP pass'): + mock_exec_command.side_effect = [(RESULT_FAIL, 1), (RESULT_UDP, 0)] + result = self._perform_iperf3_check() + for key in self._RESULT_KEYS: + self.assertIn(key, result) + self.assertEqual(result['iperf3_result'], 1) + self.assertEqual(result['sent_bps_tcp'], 0.0) + self.assertEqual(result['received_bps_tcp'], 0.0) + self.assertEqual(result['sent_bytes_tcp'], 0) + self.assertEqual(result['received_bytes_tcp'], 0) + self.assertEqual(result['retransmits'], 0) + self.assertEqual(result['sent_bps_udp'], udp_result['bits_per_second']) + self.assertEqual(result['sent_bytes_udp'], udp_result['bytes']) + self.assertEqual(result['jitter'], udp_result['jitter_ms']) + self.assertEqual(result['total_packets'], udp_result['packets']) + self.assertEqual(result['lost_percent'], udp_result['lost_percent']) + self.assertEqual(Chart.objects.count(), 8) + self.assertEqual(Metric.objects.count(), 3) + self.assertEqual(mock_warn.call_count, 1) + self.assertEqual(mock_exec_command.call_count, 2) + mock_warn.assert_has_calls(self._EXPECTED_WARN_CALLS[1:]) + mock_exec_command.assert_has_calls(self._EXPECTED_COMMAND_CALLS) + + @patch.object(Ssh, 'exec_command') + @patch.object(iperf3_logger, 'warning') + def test_iperf3_check_auth_config(self, mock_warn, mock_exec_command): + iperf3_config = { + self.org_id: { + 'host': self._IPERF3_TEST_SERVER, + 'username': 'test', + 'password': 'testpass', + 'rsa_public_key': TEST_RSA_KEY, + } + } + iperf3_conf_wrong_pass = { + self.org_id: { + 'host': self._IPERF3_TEST_SERVER, + 'username': 'test', + 'password': 'wrongpass', + 'rsa_public_key': TEST_RSA_KEY, + } + } + iperf3_conf_wrong_user = { + self.org_id: { + 'host': self._IPERF3_TEST_SERVER, + 'username': 'wronguser', + 'password': 'testpass', + 'rsa_public_key': TEST_RSA_KEY, + } + } + auth_error = "test authorization failed" + tcp_result = loads(RESULT_TCP)['end'] + udp_result = loads(RESULT_UDP)['end']['sum'] + + self._EXPECTED_WARN_CALLS = [ + call(f'Iperf3 check failed for "{self.device}", error - {auth_error}'), + call(f'Iperf3 check failed for "{self.device}", error - {auth_error}'), + ] + with self.subTest('Test iperf3 check with right config'): + with patch.object( + app_settings, + 'IPERF3_CHECK_CONFIG', + iperf3_config + # It is required to mock "Iperf3.schema" here so that it + # uses the updated configuration from "IPERF3_CHECK_CONFIG" setting. 
+ ), patch.object(Iperf3, 'schema', get_iperf3_schema()): + self._set_auth_expected_calls(iperf3_config) + mock_exec_command.side_effect = [(RESULT_TCP, 0), (RESULT_UDP, 0)] + result = self._perform_iperf3_check() + for key in self._RESULT_KEYS: + self.assertIn(key, result) + self.assertEqual(result['iperf3_result'], 1) + self.assertEqual( + result['sent_bps_tcp'], tcp_result['sum_sent']['bits_per_second'] + ) + self.assertEqual( + result['received_bytes_tcp'], tcp_result['sum_received']['bytes'] + ) + self.assertEqual(result['jitter'], udp_result['jitter_ms']) + self.assertEqual(result['total_packets'], udp_result['packets']) + self.assertEqual(mock_warn.call_count, 0) + self.assertEqual(mock_exec_command.call_count, 2) + mock_exec_command.assert_has_calls(self._EXPECTED_COMMAND_CALLS) + mock_warn.reset_mock() + mock_exec_command.reset_mock() + + with self.subTest('Test iperf3 check with wrong password'): + with patch.object( + app_settings, 'IPERF3_CHECK_CONFIG', iperf3_conf_wrong_pass + ), patch.object(Iperf3, 'schema', get_iperf3_schema()): + self._set_auth_expected_calls(iperf3_conf_wrong_pass) + mock_exec_command.side_effect = [ + (RESULT_AUTH_FAIL, 1), + (RESULT_AUTH_FAIL, 1), + ] + + result = self._perform_iperf3_check() + self._assert_iperf3_fail_result(result) + self.assertEqual(mock_warn.call_count, 2) + self.assertEqual(mock_exec_command.call_count, 2) + mock_warn.assert_has_calls(self._EXPECTED_WARN_CALLS) + mock_exec_command.assert_has_calls(self._EXPECTED_COMMAND_CALLS) + mock_warn.reset_mock() + mock_exec_command.reset_mock() + + with self.subTest('Test iperf3 check with wrong username'): + with patch.object( + app_settings, 'IPERF3_CHECK_CONFIG', iperf3_conf_wrong_user + ), patch.object(Iperf3, 'schema', get_iperf3_schema()): + self._set_auth_expected_calls(iperf3_conf_wrong_user) + mock_exec_command.side_effect = [ + (RESULT_AUTH_FAIL, 1), + (RESULT_AUTH_FAIL, 1), + ] + + result = self._perform_iperf3_check() + self._assert_iperf3_fail_result(result) + self.assertEqual(mock_warn.call_count, 2) + self.assertEqual(mock_exec_command.call_count, 2) + mock_warn.assert_has_calls(self._EXPECTED_WARN_CALLS) + mock_exec_command.assert_has_calls(self._EXPECTED_COMMAND_CALLS) + + @patch.object(Ssh, 'exec_command') + @patch.object(iperf3_logger, 'warning') + @patch.object(iperf3_logger, 'info') + @patch.object(cache, 'add') + def test_iperf3_check_task_with_multiple_server_config(self, *args): + mock_add = args[0] + mock_info = args[1] + mock_warn = args[2] + mock_exec_command = args[3] + org = self.device.organization + iperf3_multiple_server_config = { + self.org_id: {'host': self._IPERF3_TEST_MULTIPLE_SERVERS} + } + check = Check.objects.get(check_type=self._IPERF3) + + self._EXPECTED_COMMAND_CALLS_SERVER_1 = [ + call( + ( + f'iperf3 -c {self._IPERF3_TEST_MULTIPLE_SERVERS[0]} -p 5201 -t 10 --connect-timeout 1 ' + '-b 0 -l 128K -w 0 -P 1 -J' + ), + raise_unexpected_exit=False, + ), + call( + ( + f'iperf3 -c {self._IPERF3_TEST_MULTIPLE_SERVERS[0]} -p 5201 -t 10 --connect-timeout 1 ' + '-b 30M -l 0 -w 0 -P 1 -u -J' + ), + raise_unexpected_exit=False, + ), + ] + self._EXPECTED_COMMAND_CALLS_SERVER_2 = [ + call( + ( + f'iperf3 -c {self._IPERF3_TEST_MULTIPLE_SERVERS[1]} -p 5201 -t 10 --connect-timeout 1 ' + '-b 0 -l 128K -w 0 -P 1 -J' + ), + raise_unexpected_exit=False, + ), + call( + ( + f'iperf3 -c {self._IPERF3_TEST_MULTIPLE_SERVERS[1]} -p 5201 -t 10 --connect-timeout 1 ' + '-b 30M -l 0 -w 0 -P 1 -u -J' + ), + raise_unexpected_exit=False, + ), + ] + + with patch.object(app_settings, 
'IPERF3_CHECK_CONFIG', {}): + with self.subTest('Test iperf3 check without config'): + self._perform_iperf3_check() + mock_warn.assert_called_with( + ( + f'Iperf3 servers for organization "{org}" ' + f'is not configured properly, iperf3 check skipped!' + ) + ) + self.assertEqual(mock_warn.call_count, 1) + mock_warn.reset_mock() + + with patch.object( + app_settings, + 'IPERF3_CHECK_CONFIG', + {'invalid_org_uuid': {'host': self._IPERF3_TEST_SERVER, 'time': 10}}, + ): + with self.subTest('Test iperf3 check with invalid config'): + self._perform_iperf3_check() + mock_warn.assert_called_with( + ( + f'Iperf3 servers for organization "{org}" ' + f'is not configured properly, iperf3 check skipped!' + ) + ) + self.assertEqual(mock_warn.call_count, 1) + mock_warn.reset_mock() + + with patch.object( + app_settings, 'IPERF3_CHECK_CONFIG', iperf3_multiple_server_config + ): + with self.subTest( + 'Test iperf3 check when all iperf3 servers are available' + ): + mock_add.return_value = True + mock_exec_command.side_effect = [(RESULT_TCP, 0), (RESULT_UDP, 0)] + self._perform_iperf3_check() + self.assertEqual(mock_warn.call_count, 0) + self.assertEqual(mock_add.call_count, 1) + self.assertEqual(mock_exec_command.call_count, 2) + mock_exec_command.assert_has_calls( + self._EXPECTED_COMMAND_CALLS_SERVER_1 + ) + mock_add.reset_mock() + mock_warn.reset_mock() + mock_exec_command.reset_mock() + + with self.subTest( + 'Test iperf3 check when single iperf3 server are available' + ): + mock_add.side_effect = [False, True] + mock_exec_command.side_effect = [(RESULT_TCP, 0), (RESULT_UDP, 0)] + self._perform_iperf3_check() + self.assertEqual(mock_warn.call_count, 0) + self.assertEqual(mock_add.call_count, 2) + self.assertEqual(mock_exec_command.call_count, 2) + mock_exec_command.assert_has_calls( + self._EXPECTED_COMMAND_CALLS_SERVER_2 + ) + mock_add.reset_mock() + mock_warn.reset_mock() + mock_exec_command.reset_mock() + + with self.subTest( + 'Test iperf3 check when all iperf3 servers are occupied initially' + ): + # If all available iperf3 servers are occupied initially, + # then push the task back in the queue and acquire the iperf3 + # server only after completion of previous running checks + mock_add.side_effect = [False, False, True] + mock_exec_command.side_effect = [(RESULT_TCP, 0), (RESULT_UDP, 0)] + self._perform_iperf3_check() + mock_info.has_called_with( + ( + f'At the moment, all available iperf3 servers of organization "{org}" ' + f'are busy running checks, putting "{check}" back in the queue..' + ) + ) + self.assertEqual(mock_info.call_count, 4) + self.assertEqual(mock_add.call_count, 3) + self.assertEqual(mock_exec_command.call_count, 2) + mock_exec_command.assert_has_calls( + self._EXPECTED_COMMAND_CALLS_SERVER_1 + ) diff --git a/openwisp_monitoring/check/tests/test_models.py b/openwisp_monitoring/check/tests/test_models.py index abbf8ed13..93ae7a98a 100644 --- a/openwisp_monitoring/check/tests/test_models.py +++ b/openwisp_monitoring/check/tests/test_models.py @@ -9,8 +9,8 @@ from ...device.tests import TestDeviceMonitoringMixin from .. 
import settings as app_settings -from ..classes import ConfigApplied, Ping -from ..tasks import auto_create_config_check, auto_create_ping +from ..classes import ConfigApplied, Iperf3, Ping +from ..tasks import auto_create_config_check, auto_create_iperf3_check, auto_create_ping Check = load_model('check', 'Check') Metric = load_model('monitoring', 'Metric') @@ -22,6 +22,7 @@ class TestModels(TestDeviceMonitoringMixin, TransactionTestCase): _PING = app_settings.CHECK_CLASSES[0][0] _CONFIG_APPLIED = app_settings.CHECK_CLASSES[1][0] + _IPERF3 = app_settings.CHECK_CLASSES[2][0] def test_check_str(self): c = Check(name='Test check') @@ -48,6 +49,12 @@ def test_check_class(self): check_type=self._CONFIG_APPLIED, ) self.assertEqual(c.check_class, ConfigApplied) + with self.subTest('Test Iperf3 check Class'): + c = Check( + name='Iperf3 class check', + check_type=self._IPERF3, + ) + self.assertEqual(c.check_class, Iperf3) def test_base_check_class(self): path = 'openwisp_monitoring.check.classes.base.BaseCheck' @@ -82,6 +89,18 @@ def test_check_instance(self): self.assertEqual(i.related_object, obj) self.assertEqual(i.params, c.params) + with self.subTest('Test Iperf3 check instance'): + c = Check( + name='Iperf3 class check', + check_type=self._IPERF3, + content_object=obj, + params={}, + ) + i = c.check_instance + self.assertIsInstance(i, Iperf3) + self.assertEqual(i.related_object, obj) + self.assertEqual(i.params, c.params) + def test_validation(self): with self.subTest('Test Ping check validation'): check = Check(name='Ping check', check_type=self._PING, params={}) @@ -105,7 +124,7 @@ def test_validation(self): def test_auto_check_creation(self): self.assertEqual(Check.objects.count(), 0) d = self._create_device(organization=self._create_org()) - self.assertEqual(Check.objects.count(), 2) + self.assertEqual(Check.objects.count(), 3) with self.subTest('Test AUTO_PING'): c1 = Check.objects.filter(check_type=self._PING).first() self.assertEqual(c1.content_object, d) @@ -114,11 +133,15 @@ def test_auto_check_creation(self): c2 = Check.objects.filter(check_type=self._CONFIG_APPLIED).first() self.assertEqual(c2.content_object, d) self.assertEqual(self._CONFIG_APPLIED, c2.check_type) + with self.subTest('Test AUTO_IPERF3'): + c3 = Check.objects.filter(check_type=self._IPERF3).first() + self.assertEqual(c3.content_object, d) + self.assertEqual(self._IPERF3, c3.check_type) def test_device_deleted(self): self.assertEqual(Check.objects.count(), 0) d = self._create_device(organization=self._create_org()) - self.assertEqual(Check.objects.count(), 2) + self.assertEqual(Check.objects.count(), 3) d.delete() self.assertEqual(Check.objects.count(), 0) @@ -129,7 +152,7 @@ def test_config_modified_device_problem(self): self._create_config(status='modified', organization=self._create_org()) d = Device.objects.first() d.monitoring.update_status('ok') - self.assertEqual(Check.objects.count(), 2) + self.assertEqual(Check.objects.count(), 3) self.assertEqual(Metric.objects.count(), 0) self.assertEqual(AlertSettings.objects.count(), 0) check = Check.objects.filter(check_type=self._CONFIG_APPLIED).first() @@ -159,7 +182,7 @@ def test_config_error(self): self._create_config(status='error', organization=self._create_org()) dm = Device.objects.first().monitoring dm.update_status('ok') - self.assertEqual(Check.objects.count(), 2) + self.assertEqual(Check.objects.count(), 3) self.assertEqual(Metric.objects.count(), 0) self.assertEqual(AlertSettings.objects.count(), 0) check = 
Check.objects.filter(check_type=self._CONFIG_APPLIED).first() @@ -192,7 +215,7 @@ def test_config_error(self): @patch('openwisp_monitoring.check.settings.AUTO_PING', False) def test_config_check_critical_metric(self): self._create_config(status='modified', organization=self._create_org()) - self.assertEqual(Check.objects.count(), 2) + self.assertEqual(Check.objects.count(), 3) d = Device.objects.first() dm = d.monitoring dm.update_status('ok') @@ -211,7 +234,7 @@ def test_config_check_critical_metric(self): def test_no_duplicate_check_created(self): self._create_config(organization=self._create_org()) - self.assertEqual(Check.objects.count(), 2) + self.assertEqual(Check.objects.count(), 3) d = Device.objects.first() auto_create_config_check.delay( model=Device.__name__.lower(), @@ -223,13 +246,18 @@ def test_no_duplicate_check_created(self): app_label=Device._meta.app_label, object_id=str(d.pk), ) - self.assertEqual(Check.objects.count(), 2) + auto_create_iperf3_check.delay( + model=Device.__name__.lower(), + app_label=Device._meta.app_label, + object_id=str(d.pk), + ) + self.assertEqual(Check.objects.count(), 3) def test_device_unreachable_no_config_check(self): self._create_config(status='modified', organization=self._create_org()) d = self.device_model.objects.first() d.monitoring.update_status('critical') - self.assertEqual(Check.objects.count(), 2) + self.assertEqual(Check.objects.count(), 3) c2 = Check.objects.filter(check_type=self._CONFIG_APPLIED).first() c2.perform_check() self.assertEqual(Metric.objects.count(), 0) @@ -240,7 +268,7 @@ def test_device_unknown_no_config_check(self): self._create_config(status='modified', organization=self._create_org()) d = self.device_model.objects.first() d.monitoring.update_status('unknown') - self.assertEqual(Check.objects.count(), 2) + self.assertEqual(Check.objects.count(), 3) c2 = Check.objects.filter(check_type=self._CONFIG_APPLIED).first() c2.perform_check() self.assertEqual(Metric.objects.count(), 0) diff --git a/openwisp_monitoring/check/tests/test_ping.py b/openwisp_monitoring/check/tests/test_ping.py index 11b9ee47b..23b94652d 100644 --- a/openwisp_monitoring/check/tests/test_ping.py +++ b/openwisp_monitoring/check/tests/test_ping.py @@ -239,7 +239,7 @@ def test_store_result(self, mocked_method): device.management_ip = '10.40.0.1' device.save() # check created automatically by autoping - self.assertEqual(Check.objects.count(), 2) + self.assertEqual(Check.objects.count(), 3) self.assertEqual(Metric.objects.count(), 0) self.assertEqual(Chart.objects.count(), 0) self.assertEqual(AlertSettings.objects.count(), 0) diff --git a/openwisp_monitoring/db/backends/influxdb/queries.py b/openwisp_monitoring/db/backends/influxdb/queries.py index 23a4b8ee9..965200c76 100644 --- a/openwisp_monitoring/db/backends/influxdb/queries.py +++ b/openwisp_monitoring/db/backends/influxdb/queries.py @@ -98,6 +98,51 @@ "object_id = '{object_id}' GROUP BY time(1d)" ) }, + 'bandwidth': { + 'influxdb': ( + "SELECT MEAN(sent_bps_tcp) / 1000000000 AS TCP, " + "MEAN(sent_bps_udp) / 1000000000 AS UDP FROM {key} WHERE " + "time >= '{time}' AND content_type = '{content_type}' AND " + "object_id = '{object_id}' GROUP BY time(1d)" + ) + }, + 'transfer': { + 'influxdb': ( + "SELECT SUM(sent_bytes_tcp) / 1000000000 AS TCP," + "SUM(sent_bytes_udp) / 1000000000 AS UDP FROM {key} WHERE " + "time >= '{time}' AND content_type = '{content_type}' AND " + "object_id = '{object_id}' GROUP BY time(1d)" + ) + }, + 'retransmits': { + 'influxdb': ( + "SELECT MEAN(retransmits) AS 
retransmits FROM {key} " + "WHERE time >= '{time}' AND content_type = '{content_type}' " + "AND object_id = '{object_id}' GROUP BY time(1d)" + ) + }, + 'jitter': { + 'influxdb': ( + "SELECT MEAN(jitter) AS jitter FROM {key} " + "WHERE time >= '{time}' AND content_type = '{content_type}' " + "AND object_id = '{object_id}' GROUP BY time(1d)" + ) + }, + 'datagram': { + 'influxdb': ( + "SELECT MEAN(lost_packets) AS lost_datagram," + "MEAN(total_packets) AS total_datagram FROM {key} WHERE " + "time >= '{time}' AND content_type = '{content_type}' " + "AND object_id = '{object_id}' GROUP BY time(1d)" + ) + }, + 'datagram_loss': { + 'influxdb': ( + "SELECT MEAN(lost_percent) AS datagram_loss FROM {key} " + "WHERE time >= '{time}' AND content_type = '{content_type}' " + "AND object_id = '{object_id}' GROUP BY time(1d)" + ) + }, } default_chart_query = [ diff --git a/openwisp_monitoring/device/admin.py b/openwisp_monitoring/device/admin.py index e415e4732..cc92eea4c 100644 --- a/openwisp_monitoring/device/admin.py +++ b/openwisp_monitoring/device/admin.py @@ -48,26 +48,61 @@ def full_clean(self): obj = form.instance if not obj.content_type or not obj.object_id: setattr( - form.instance, + obj, self.ct_field.get_attname(), ContentType.objects.get_for_model(self.instance).pk, ) - setattr(form.instance, self.ct_fk_field.get_attname(), self.instance.pk) + setattr(obj, self.ct_fk_field.get_attname(), self.instance.pk) super().full_clean() -class CheckInline(GenericStackedInline): +class InlinePermissionMixin: + def has_add_permission(self, request, obj=None): + # User will be able to add objects from inline even + # if it only has permission to add a model object + return super().has_add_permission(request, obj) or request.user.has_perm( + f'{self.model._meta.app_label}.add_{self.inline_permission_suffix}' + ) + + def has_change_permission(self, request, obj=None): + return super().has_change_permission(request, obj) or request.user.has_perm( + f'{self.model._meta.app_label}.change_{self.inline_permission_suffix}' + ) + + def has_view_permission(self, request, obj=None): + return super().has_view_permission(request, obj) or request.user.has_perm( + f'{self.model._meta.app_label}.view_{self.inline_permission_suffix}' + ) + + def has_delete_permission(self, request, obj=None): + return super().has_delete_permission(request, obj) or request.user.has_perm( + f'{self.model._meta.app_label}.delete_{self.inline_permission_suffix}' + ) + + +class CheckInline(InlinePermissionMixin, GenericStackedInline): model = Check extra = 0 formset = CheckInlineFormSet - fields = ['check_type', 'is_active'] - readonly_fields = ['check_type'] + fields = [ + 'is_active', + 'check_type', + ] + inline_permission_suffix = 'check_inline' - def has_add_permission(self, request, obj=None): - return False + def get_fields(self, request, obj=None): + if not self.has_change_permission(request, obj) or not self.has_view_permission( + request, obj + ): + return ['check_type', 'is_active'] + return super().get_fields(request, obj) - def has_delete_permission(self, request, obj=None): - return False + def get_readonly_fields(self, request, obj=None): + if not self.has_change_permission(request, obj) or not self.has_view_permission( + request, obj + ): + return ['check_type'] + return super().get_readonly_fields(request, obj) class AlertSettingsForm(ModelForm): @@ -81,43 +116,92 @@ def __init__(self, *args, **kwargs): } super().__init__(*args, **kwargs) - -class AlertSettingsInline(NestedStackedInline): + def _post_clean(self): + 
self.instance._delete_instance = False + if all( + self.cleaned_data[field] is None + for field in [ + 'custom_operator', + 'custom_threshold', + 'custom_tolerance', + ] + ): + # "_delete_instance" flag signifies that + # the fields have been set to None by the + # user. Hence, the object should be deleted. + self.instance._delete_instance = True + super()._post_clean() + + def save(self, commit=True): + if self.instance._delete_instance: + self.instance.delete() + return self.instance + return super().save(commit) + + +class AlertSettingsInline(InlinePermissionMixin, NestedStackedInline): model = AlertSettings - extra = 0 - max_num = 0 + extra = 1 + max_num = 1 exclude = ['created', 'modified'] form = AlertSettingsForm + inline_permission_suffix = 'alertsettings_inline' def get_queryset(self, request): return super().get_queryset(request).order_by('created') - def has_add_permission(self, request, obj=None): - return False - - def has_delete_permission(self, request, obj=None): - return False - -class MetricInline(NestedGenericStackedInline): +class MetricInline(InlinePermissionMixin, NestedGenericStackedInline): model = Metric extra = 0 inlines = [AlertSettingsInline] + fieldsets = [ + ( + None, + { + 'fields': ( + 'name', + 'is_healthy', + ) + }, + ), + ( + _('Advanced options'), + {'classes': ('collapse',), 'fields': ('field_name',)}, + ), + ] + readonly_fields = ['name', 'is_healthy'] - fields = ['name', 'is_healthy'] # Explicitly changed name from Metrics to Alert Settings verbose_name = _('Alert Settings') verbose_name_plural = verbose_name + inline_permission_suffix = 'alertsettings_inline' + # Ordering queryset by metric name + ordering = ('name',) + + def get_fieldsets(self, request, obj=None): + if not self.has_change_permission(request, obj) or not self.has_view_permission( + request, obj + ): + return [ + (None, {'fields': ('is_healthy',)}), + ] + return super().get_fieldsets(request, obj) + + def get_queryset(self, request): + # Only show 'Metrics' that have 'AlertSettings' objects + return super().get_queryset(request).filter(alertsettings__isnull=False) def has_add_permission(self, request, obj=None): + # We need to restrict the users from adding the 'metrics' since + # they're created by the system automatically with default 'alertsettings' return False def has_delete_permission(self, request, obj=None): + # We need to restrict the users from deleting the 'metrics' since + # they're created by the system automatically with default 'alertsettings' return False - def get_queryset(self, request): - return super().get_queryset(request).filter(alertsettings__isnull=False) - class DeviceAdmin(BaseDeviceAdmin, NestedModelAdmin): change_form_template = 'admin/config/device/change_form.html' @@ -220,6 +304,7 @@ def get_inlines(self, request, obj=None): if not hasattr(inline, 'sortable_options'): inline.sortable_options = {'disabled': True} if not obj or obj._state.adding: + inlines.remove(CheckInline) inlines.remove(MetricInline) return inlines diff --git a/openwisp_monitoring/device/tests/test_admin.py b/openwisp_monitoring/device/tests/test_admin.py index c2bdf1b9a..586df4610 100644 --- a/openwisp_monitoring/device/tests/test_admin.py +++ b/openwisp_monitoring/device/tests/test_admin.py @@ -1,6 +1,7 @@ from copy import deepcopy from django.contrib.auth import get_user_model +from django.contrib.auth.models import Permission from django.contrib.contenttypes.forms import generic_inlineformset_factory from django.test import TestCase from django.urls import reverse @@ -19,6 +20,7 @@ 
Chart = load_model('monitoring', 'Chart') Metric = load_model('monitoring', 'Metric') +AlertSettings = load_model('monitoring', 'AlertSettings') DeviceData = load_model('device_monitoring', 'DeviceData') WifiClient = load_model('device_monitoring', 'WifiClient') WifiSession = load_model('device_monitoring', 'WifiSession') @@ -28,6 +30,9 @@ Device = load_model('config', 'Device') DeviceLocation = load_model('geo', 'DeviceLocation') Location = load_model('geo', 'Location') +# model_name changes if swapped +check_model_name = get_model_name('check', 'Check').lower().replace('.', '-') +metric_model_name = get_model_name('monitoring', 'Metric').lower().replace('.', '-') class TestAdmin( @@ -40,6 +45,38 @@ class TestAdmin( resources_fields = TestImportExportMixin.resource_fields resources_fields.append('monitoring__status') app_label = 'config' + _device_params = { + 'group': '', + 'management_ip': '', + 'model': '', + 'os': '', + 'system': '', + 'notes': '', + 'config-TOTAL_FORMS': '0', + 'config-INITIAL_FORMS': '0', + 'config-MIN_NUM_FORMS': '0', + 'config-MAX_NUM_FORMS': '1', + # devicelocation + 'devicelocation-TOTAL_FORMS': '0', + 'devicelocation-INITIAL_FORMS': '0', + 'devicelocation-MIN_NUM_FORMS': '0', + 'devicelocation-MAX_NUM_FORMS': '1', + # deviceconnection + 'deviceconnection_set-TOTAL_FORMS': '0', + 'deviceconnection_set-INITIAL_FORMS': '0', + 'deviceconnection_set-MIN_NUM_FORMS': '0', + 'deviceconnection_set-MAX_NUM_FORMS': '1000', + # command + 'command_set-TOTAL_FORMS': '0', + 'command_set-INITIAL_FORMS': '0', + 'command_set-MIN_NUM_FORMS': '0', + 'command_set-MAX_NUM_FORMS': '1000', + # check + f'{check_model_name}-content_type-object_id-TOTAL_FORMS': '0', + f'{check_model_name}-content_type-object_id-INITIAL_FORMS': '0', + f'{check_model_name}-content_type-object_id-MIN_NUM_FORMS': '0', + f'{check_model_name}-content_type-object_id-MAX_NUM_FORMS': '1000', + } def setUp(self): self._login_admin() @@ -126,7 +163,6 @@ def test_device_add_view(self): self.assertContains(r, '

<h2>Configuration</h2>') self.assertContains(r, '<h2>Map</h2>') self.assertContains(r, '<h2>Credentials</h2>') - self.assertContains(r, '<h2>Checks</h2>
    ') def test_remove_invalid_interface(self): d = self._create_device(organization=self._create_org()) @@ -265,9 +301,7 @@ def test_check_inline_formset(self): check_inline_formset = generic_inlineformset_factory( model=Check, form=CheckInline.form, formset=CheckInlineFormSet ) - # model_name changes if swapped - model_name = get_model_name('check', 'Check').lower().replace('.', '-') - ct = f'{model_name}-content_type-object_id' + ct = f'{check_model_name}-content_type-object_id' data = { f'{ct}-TOTAL_FORMS': '1', f'{ct}-INITIAL_FORMS': '0', @@ -345,6 +379,254 @@ def test_wifisession_inline(self): self.assertNotContains(response, '

<h2>WiFi Sessions</h2>
    ') self.assertNotContains(response, 'monitoring-wifisession-changelist-url') + def test_check_alertsetting_inline(self): + test_user = self._create_user( + username='test', email='test@inline.com', is_staff=True + ) + self._create_org_user(is_admin=True, user=test_user) + device = self._create_device() + ping_check = Check( + check_type=CHECK_CLASSES[0][0], content_object=device, params={} + ) + ping_check.full_clean() + ping_check.save() + url = reverse('admin:config_device_change', args=[device.pk]) + metric = self._create_general_metric( + name='', content_object=device, configuration='ping' + ) + self._create_alert_settings(metric=metric) + self.client.force_login(test_user) + + def _add_device_permissions(user): + test_user.user_permissions.clear() + self.assertEqual(user.user_permissions.count(), 0) + device_permissions = Permission.objects.filter(codename__endswith='device') + # Permissions required to access device page + test_user.user_permissions.add(*device_permissions), + self.assertEqual(user.user_permissions.count(), 4) + + def _add_user_permissions(user, permission_query, expected_perm_count): + user.user_permissions.add(*Permission.objects.filter(**permission_query)) + self.assertEqual(user.user_permissions.count(), expected_perm_count) + + def _assert_check_inline_in_response(response): + self.assertContains(response, '

<h2>Checks</h2>
    ', html=True) + self.assertContains(response, 'check-content_type-object_id-0-is_active') + self.assertContains(response, 'check-content_type-object_id-0-check_type') + self.assertContains(response, 'check-content_type-object_id-0-DELETE') + + def _assert_alertsettings_inline_in_response(response): + self.assertContains(response, '

<h2>Alert Settings</h2>
    ', html=True) + self.assertContains(response, 'form-row field-name') + self.assertContains( + response, + 'True', + html=True, + ) + self.assertContains(response, '

<h2>Advanced options</h2>
    ', html=True) + self.assertContains( + response, + 'metric-content_type-object_id-0-alertsettings-0-is_active', + ) + self.assertContains( + response, '' + ) + self.assertContains( + response, + 'metric-content_type-object_id-0-alertsettings-0-custom_threshold" value="1"', + ) + self.assertContains( + response, + 'metric-content_type-object_id-0-alertsettings-0-custom_tolerance" value="0"', + ) + + with self.subTest( + 'Test when a user does not have permission to access models or inline' + ): + _add_device_permissions(test_user) + response = self.client.get(url) + self.assertEqual(response.status_code, 200) + self.assertNotContains(response, '

<h2>Checks</h2>
    ', html=True) + self.assertNotContains(response, '

<h2>Alert Settings</h2>
    ', html=True) + + with self.subTest('Test check & alert settings with model permissions'): + _add_device_permissions(test_user) + response = self.client.get(url) + self.assertEqual(response.status_code, 200) + _add_user_permissions(test_user, {'codename__endswith': 'check'}, 8) + _add_user_permissions(test_user, {'codename__endswith': 'metric'}, 12) + _add_user_permissions( + test_user, {'codename__endswith': 'alertsettings'}, 16 + ) + response = self.client.get(url) + self.assertEqual(response.status_code, 200) + _assert_check_inline_in_response(response) + _assert_alertsettings_inline_in_response(response) + + with self.subTest('Test all inline permissions'): + _add_device_permissions(test_user) + response = self.client.get(url) + self.assertEqual(response.status_code, 200) + _add_user_permissions(test_user, {'codename__endswith': 'inline'}, 12) + response = self.client.get(url) + self.assertEqual(response.status_code, 200) + _assert_check_inline_in_response(response) + _assert_alertsettings_inline_in_response(response) + + with self.subTest('Test view inline permissions'): + _add_device_permissions(test_user) + response = self.client.get(url) + self.assertEqual(response.status_code, 200) + _add_user_permissions( + test_user, {'codename__endswith': 'view_check_inline'}, 5 + ) + _add_user_permissions( + test_user, {'codename__endswith': 'view_alertsettings_inline'}, 6 + ) + response = self.client.get(url) + self.assertEqual(response.status_code, 200) + self.assertContains(response, '

<h2>Checks</h2>
    ', html=True) + self.assertContains(response, 'form-row field-check_type') + self.assertContains(response, 'form-row field-is_active') + self.assertContains(response, '

<h2>Alert Settings</h2>
    ', html=True) + self.assertContains(response, 'form-row field-is_healthy djn-form-row-last') + self.assertContains( + response, + 'True', + html=True, + ) + self.assertContains( + response, + 'form-row field-is_active', + ) + self.assertContains(response, 'form-row field-custom_operator') + self.assertContains( + response, + 'form-row field-custom_threshold', + ) + self.assertContains( + response, + 'form-row field-custom_tolerance', + ) + + def test_alert_settings_inline_post(self): + device = self._create_device() + metric = self._create_general_metric( + name='', content_object=device, configuration='iperf3' + ) + url = reverse('admin:config_device_change', args=[device.pk]) + alertsettings = self._create_alert_settings(metric=metric) + test_inline_params = { + 'name': device.name, + 'organization': str(device.organization.id), + 'mac_address': device.mac_address, + 'key': device.key, + # metric & alertsettings + f'{metric_model_name}-content_type-object_id-TOTAL_FORMS': '1', + f'{metric_model_name}-content_type-object_id-INITIAL_FORMS': '1', + f'{metric_model_name}-content_type-object_id-MIN_NUM_FORMS': '0', + f'{metric_model_name}-content_type-object_id-MAX_NUM_FORMS': '1000', + f'{metric_model_name}-content_type-object_id-0-field_name': 'iperf3_result', + f'{metric_model_name}-content_type-object_id-0-id': str(metric.id), + f'{metric_model_name}-content_type-object_id-0-alertsettings-TOTAL_FORMS': '1', + f'{metric_model_name}-content_type-object_id-0-alertsettings-INITIAL_FORMS': '0', + f'{metric_model_name}-content_type-object_id-0-alertsettings-MIN_NUM_FORMS': '0', + f'{metric_model_name}-content_type-object_id-0-alertsettings-MAX_NUM_FORMS': '1', + f'{metric_model_name}-content_type-object_id-0-alertsettings-0-is_active': 'on', + f'{metric_model_name}-content_type-object_id-0-alertsettings-0-custom_operator': '<', + f'{metric_model_name}-content_type-object_id-0-alertsettings-0-custom_threshold': '9', + f'{metric_model_name}-content_type-object_id-0-alertsettings-0-custom_tolerance': '1800', + f'{metric_model_name}-content_type-object_id-0-alertsettings-0-id': '', + f'{metric_model_name}-content_type-object_id-0-alertsettings-0-metric': '', + } + # General metrics (clients & traffic) & Iperf3 are present + self.assertEqual(Metric.objects.count(), 3) + self.assertEqual(AlertSettings.objects.count(), 1) + + def _reset_alertsettings_inline(): + AlertSettings.objects.all().delete() + + # Delete AlertSettings objects before any subTests + _reset_alertsettings_inline() + # Delete all Metrics other than 'iperf3' before any subTests + Metric.objects.exclude(configuration='iperf3').delete() + + def _assert_alertsettings_inline(response, operator, threshold, tolerance): + self.assertEqual(response.status_code, 302) + self.assertEqual(Metric.objects.count(), 1) + self.assertEqual(AlertSettings.objects.count(), 1) + alertsettings = AlertSettings.objects.first() + self.assertEqual(alertsettings.operator, operator) + self.assertEqual(alertsettings.threshold, threshold) + self.assertEqual(alertsettings.tolerance, tolerance) + + with self.subTest('Test alert settings inline when all fields are provided'): + self._device_params.update(test_inline_params) + response = self.client.post(url, self._device_params) + _assert_alertsettings_inline(response, '<', 9, 1800) + _reset_alertsettings_inline() + + with self.subTest( + 'Test alert settings inline when partial fields are provided' + ): + test_inline_default_1 = { + f'{metric_model_name}-content_type-object_id-0-alertsettings-0-custom_operator': 
'>', + f'{metric_model_name}-content_type-object_id-0-alertsettings-0-custom_threshold': '', + f'{metric_model_name}-content_type-object_id-0-alertsettings-0-custom_tolerance': '', + } + self._device_params.update(test_inline_default_1) + response = self.client.post(url, self._device_params) + # 'threshold' and 'tolerance' are set to their default values + _assert_alertsettings_inline(response, '>', 1, 0) + _reset_alertsettings_inline() + + test_inline_default_2 = { + f'{metric_model_name}-content_type-object_id-0-alertsettings-0-custom_operator': '', + f'{metric_model_name}-content_type-object_id-0-alertsettings-0-custom_threshold': '18', + f'{metric_model_name}-content_type-object_id-0-alertsettings-0-custom_tolerance': '99', + } + self._device_params.update(test_inline_default_2) + response = self.client.post(url, self._device_params) + # 'operator' are set to their default values + _assert_alertsettings_inline(response, '<', 18, 99) + _reset_alertsettings_inline() + + with self.subTest('Test alert settings inline when all fields are absent'): + test_inline_params_present = { + f'{metric_model_name}-content_type-object_id-0-alertsettings-0-custom_operator': '<', + f'{metric_model_name}-content_type-object_id-0-alertsettings-0-custom_threshold': '99', + f'{metric_model_name}-content_type-object_id-0-alertsettings-0-custom_tolerance': '1880', + } + self._device_params.update(test_inline_params_present) + response = self.client.post(url, self._device_params) + _assert_alertsettings_inline(response, '<', 99, 1880) + + alertsettings = AlertSettings.objects.first() + metric = Metric.objects.first() + + test_inline_params_absent = { + f'{metric_model_name}-content_type-object_id-INITIAL_FORMS': '1', + f'{metric_model_name}-content_type-object_id-0-id': str(metric.id), + f'{metric_model_name}-content_type-object_id-0-field_name': 'iperf3_result', + f'{metric_model_name}-content_type-object_id-0-alertsettings-INITIAL_FORMS': '1', + f'{metric_model_name}-content_type-object_id-0-alertsettings-0-id': str( + alertsettings.id + ), + f'{metric_model_name}-content_type-object_id-0-alertsettings-0-metric': str( + metric.id + ), + f'{metric_model_name}-content_type-object_id-0-alertsettings-0-custom_operator': '', + f'{metric_model_name}-content_type-object_id-0-alertsettings-0-custom_threshold': '', + f'{metric_model_name}-content_type-object_id-0-alertsettings-0-custom_tolerance': '', + } + self._device_params.update(test_inline_params_absent) + response = self.client.post(url, self._device_params) + # If all the fields are empty, then it deletes the AlertSettings object + # to prevent the default value from being used as a fallback + self.assertEqual(response.status_code, 302) + self.assertEqual(Metric.objects.count(), 1) + self.assertEqual(AlertSettings.objects.count(), 0) + class TestAdminDashboard(TestGeoMixin, DeviceMonitoringTestCase): location_model = Location diff --git a/openwisp_monitoring/monitoring/base/models.py b/openwisp_monitoring/monitoring/base/models.py index ab1df2074..bde1c8957 100644 --- a/openwisp_monitoring/monitoring/base/models.py +++ b/openwisp_monitoring/monitoring/base/models.py @@ -44,7 +44,12 @@ class AbstractMetric(TimeStampedEditableModel): key = models.SlugField( max_length=64, blank=True, help_text=_('leave blank to determine automatically') ) - field_name = models.CharField(max_length=16, default='value') + field_name = models.CharField( + max_length=16, + default='value', + blank=True, + help_text=_('leave blank to determine automatically'), + ) configuration = 
models.CharField( max_length=16, null=True, choices=METRIC_CONFIGURATION_CHOICES ) @@ -96,11 +101,6 @@ def __setattr__(self, attrname, value): return super().__setattr__(attrname, value) def clean(self): - if ( - self.field_name == 'value' - and self.config_dict['field_name'] != '{field_name}' - ): - self.field_name = self.config_dict['field_name'] if self.key: return elif self.config_dict['key'] != '{key}': @@ -108,11 +108,30 @@ def clean(self): else: self.key = self.codename + def validate_alert_fields(self): + # When field_name is not provided while creating a metric + # then use config_dict['field_name] as metric field_name + if self.config_dict['field_name'] != '{field_name}': + if self.field_name in ['', 'value']: + self.field_name = self.config_dict['field_name'] + return + # field_name must be one of the metric fields + alert_fields = [self.config_dict['field_name']] + self.related_fields + if self.field_name not in alert_fields: + raise ValidationError( + f'"{self.field_name}" must be one of the following metric fields ie. {alert_fields}' + ) + def full_clean(self, *args, **kwargs): + # The name of the metric will be the same as the + # configuration chosen by the user only when the + # name field is empty (useful for AlertSettingsInline) if not self.name: - self.name = self.config_dict['name'] + self.name = self.get_configuration_display() # clean up key before field validation self.key = self._makekey(self.key) + # validate metric field_name for alerts + self.validate_alert_fields() return super().full_clean(*args, **kwargs) @classmethod @@ -202,6 +221,16 @@ def content_type_key(self): except AttributeError: return None + @property + def alert_field(self): + if self.field_name != self.config_dict['field_name']: + return self.field_name + return self.config_dict.get('alert_field', self.field_name) + + @property + def alert_on_related_field(self): + return self.alert_field in self.related_fields + def _get_time(self, time): """ If time is a string, convert it to a datetime @@ -359,6 +388,21 @@ def write( 'send_alert': send_alert, } options['metric_pk'] = self.pk + + # if alert_on_related_field then check threshold + # on the related_field instead of field_name + if self.alert_on_related_field: + if not extra_values: + raise ValueError( + 'write() missing keyword argument: "extra_values" required for alert on related field' + ) + if self.alert_field not in extra_values.keys(): + raise ValueError( + f'"{key}" is not defined for alert_field in metric configuration' + ) + options['check_threshold_kwargs'].update( + {'value': extra_values[self.alert_field]} + ) timeseries_write.delay(name=self.key, values=values, **options) def read(self, **kwargs): @@ -444,6 +488,10 @@ def trace_order(self): def calculate_total(self): return self.config_dict.get('calculate_total', False) + @property + def connect_points(self): + return self.config_dict.get('connect_points', False) + @property def description(self): return self.config_dict['description'].format( @@ -630,6 +678,7 @@ def json(self, time=DEFAULT_TIME, **kwargs): 'trace_type': self.trace_type, 'trace_order': self.trace_order, 'calculate_total': self.calculate_total, + 'connect_points': self.connect_points, 'colors': self.colors, } ) @@ -688,6 +737,12 @@ class Meta: abstract = True verbose_name = _('Alert settings') verbose_name_plural = verbose_name + permissions = ( + ('add_alertsettings_inline', 'Can add Alert settings inline'), + ('change_alertsettings_inline', 'Can change Alert settings inline'), + ('delete_alertsettings_inline', 'Can 
delete Alert settings inline'), + ('view_alertsettings_inline', 'Can view Alert settings inline'), + ) def full_clean(self, *args, **kwargs): if self.custom_threshold == self.config_dict['threshold']: @@ -757,6 +812,11 @@ def _is_crossed_by(self, current_value, time=None, retention_policy=None): return value_crossed # tolerance is set, we must go back in time # to ensure the threshold is trepassed for enough time + # if alert field is supplied, retrieve such field when reading + # so that we can let the system calculate the threshold on it + extra_fields = [] + if self.metric.alert_on_related_field: + extra_fields = [self.metric.alert_field] if time is None: # retrieves latest measurements, ordered by most recent first points = self.metric.read( @@ -764,6 +824,7 @@ def _is_crossed_by(self, current_value, time=None, retention_policy=None): limit=None, order='-time', retention_policy=retention_policy, + extra_fields=extra_fields, ) # store a list with the results results = [value_crossed] @@ -775,7 +836,7 @@ def _is_crossed_by(self, current_value, time=None, retention_policy=None): continue utc_time = utc.localize(datetime.utcfromtimestamp(point['time'])) # did this point cross the threshold? Append to result list - results.append(self._value_crossed(point[self.metric.field_name])) + results.append(self._value_crossed(point[self.metric.alert_field])) # tolerance is trepassed if self._time_crossed(utc_time): # if the latest results are consistent, the metric being diff --git a/openwisp_monitoring/monitoring/configuration.py b/openwisp_monitoring/monitoring/configuration.py index 36ab9d9dc..448771222 100644 --- a/openwisp_monitoring/monitoring/configuration.py +++ b/openwisp_monitoring/monitoring/configuration.py @@ -207,7 +207,7 @@ def _get_access_tech(): _('Total download traffic'), _('Total upload traffic'), ], - 'unit': 'adaptive_bytes', + 'unit': 'adaptive_prefix+B', 'order': 240, 'query': chart_query['traffic'], 'colors': [ @@ -228,6 +228,7 @@ def _get_access_tech(): 'general_traffic': { 'type': 'stackedbar+lines', 'calculate_total': True, + 'fill': 'none', 'trace_type': { 'download': 'stackedbar', 'upload': 'stackedbar', @@ -244,7 +245,7 @@ def _get_access_tech(): _('Total download traffic'), _('Total upload traffic'), ], - 'unit': 'adaptive_bytes', + 'unit': 'adaptive_prefix+B', 'order': 240, 'query': chart_query['general_traffic'], 'query_default_param': { @@ -544,6 +545,121 @@ def _get_access_tech(): } }, }, + 'iperf3': { + 'label': _('Iperf3'), + 'name': 'Iperf3', + 'key': 'iperf3', + 'field_name': 'iperf3_result', + 'related_fields': [ + 'sent_bps_tcp', + 'received_bps_tcp', + 'sent_bytes_tcp', + 'received_bytes_tcp', + 'retransmits', + 'sent_bytes_udp', + 'sent_bps_udp', + 'jitter', + 'total_packets', + 'lost_packets', + 'lost_percent', + ], + 'charts': { + 'bandwidth': { + 'type': 'scatter', + 'connect_points': True, + 'title': _('Bandwidth'), + 'fill': 'none', + 'description': _('Bitrate during Iperf3 test.'), + 'summary_labels': [ + _('TCP bitrate'), + _('UDP bitrate'), + ], + 'unit': 'adaptive_prefix+bps', + 'order': 280, + 'query': chart_query['bandwidth'], + 'colors': [ + DEFAULT_COLORS[0], + DEFAULT_COLORS[3], + ], + }, + 'transfer': { + 'type': 'scatter', + 'connect_points': True, + 'fill': 'none', + 'title': _('Transferred Data'), + 'description': _('Transferred Data during Iperf3 test.'), + 'summary_labels': [ + _('TCP transferred data'), + _('UDP transferred data'), + ], + 'unit': 'adaptive_prefix+B', + 'order': 290, + 'query': chart_query['transfer'], + 'colors': [ + 
DEFAULT_COLORS[0], + DEFAULT_COLORS[3], + ], + }, + 'retransmits': { + 'type': 'scatter', + 'connect_points': True, + 'title': _('Retransmits'), + 'description': _('Retransmits during Iperf3 test in TCP mode.'), + 'summary_labels': [_('Restransmits')], + 'unit': '', + 'order': 300, + 'query': chart_query['retransmits'], + 'colors': [DEFAULT_COLORS[-3]], + }, + 'jitter': { + 'type': 'scatter', + 'connect_points': True, + 'title': _('Jitter'), + 'description': _( + 'Jitter is a variance in latency measured using Iperf3 utility in UDP mode.' + ), + 'summary_labels': [ + _('Jitter'), + ], + 'unit': _(' ms'), + 'order': 330, + 'query': chart_query['jitter'], + 'colors': [DEFAULT_COLORS[4]], + }, + 'datagram': { + 'type': 'scatter', + 'fill': 'none', + 'connect_points': True, + 'title': _('Datagram'), + 'description': _( + '(Lost / Total) datagrams measured by Iperf3 test in UDP mode.' + ), + 'summary_labels': [ + _('Lost datagram'), + _('Total datagram'), + ], + 'unit': '', + 'order': 340, + 'query': chart_query['datagram'], + 'colors': [DEFAULT_COLORS[3], DEFAULT_COLORS[2]], + }, + 'datagram_loss': { + 'type': 'scatter', + 'connect_points': True, + 'title': _('Datagram Loss'), + 'description': _( + 'Indicates datagram loss % during Iperf3 test in UDP mode.' + ), + 'summary_labels': [ + _('Datagram loss'), + ], + 'unit': '%', + 'order': 350, + 'query': chart_query['datagram_loss'], + 'colors': [DEFAULT_COLORS[3]], + }, + }, + }, } DEFAULT_CHARTS = {} @@ -556,6 +672,10 @@ def _validate_metric_configuration(metric_config): assert 'name' in metric_config assert 'key' in metric_config assert 'field_name' in metric_config + if 'alert_field' in metric_config: + # ensure only valid alert_field is present + alert_fields = [metric_config['field_name']] + metric_config['related_fields'] + assert metric_config['alert_field'] in alert_fields def _validate_chart_configuration(chart_config): diff --git a/openwisp_monitoring/monitoring/migrations/0009_alter_alertsettings_options.py b/openwisp_monitoring/monitoring/migrations/0009_alter_alertsettings_options.py new file mode 100644 index 000000000..02be22f99 --- /dev/null +++ b/openwisp_monitoring/monitoring/migrations/0009_alter_alertsettings_options.py @@ -0,0 +1,26 @@ +# Generated by Django 3.2.14 on 2022-08-12 11:04 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('monitoring', '0008_create_general_metrics'), + ] + + operations = [ + migrations.AlterModelOptions( + name='alertsettings', + options={ + 'permissions': ( + ('add_alertsettings_inline', 'Can add Alert settings inline'), + ('change_alertsettings_inline', 'Can change Alert settings inline'), + ('delete_alertsettings_inline', 'Can delete Alert settings inline'), + ('view_alertsettings_inline', 'Can view Alert settings inline'), + ), + 'verbose_name': 'Alert settings', + 'verbose_name_plural': 'Alert settings', + }, + ), + ] diff --git a/openwisp_monitoring/monitoring/migrations/0010_add_alertsettings_inline_permissions.py b/openwisp_monitoring/monitoring/migrations/0010_add_alertsettings_inline_permissions.py new file mode 100644 index 000000000..9485f47a7 --- /dev/null +++ b/openwisp_monitoring/monitoring/migrations/0010_add_alertsettings_inline_permissions.py @@ -0,0 +1,19 @@ +# Generated by Django 4.0.4 on 2022-08-19 11:28 + +from django.db import migrations + +from . 
import assign_alertsettings_inline_permissions_to_groups + + +class Migration(migrations.Migration): + + dependencies = [ + ('monitoring', '0009_alter_alertsettings_options'), + ] + + operations = [ + migrations.RunPython( + assign_alertsettings_inline_permissions_to_groups, + reverse_code=migrations.RunPython.noop, + ), + ] diff --git a/openwisp_monitoring/monitoring/migrations/0011_alter_metric_field_name.py b/openwisp_monitoring/monitoring/migrations/0011_alter_metric_field_name.py new file mode 100644 index 000000000..1c072d3b5 --- /dev/null +++ b/openwisp_monitoring/monitoring/migrations/0011_alter_metric_field_name.py @@ -0,0 +1,23 @@ +# Generated by Django 3.2.15 on 2022-09-08 11:11 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('monitoring', '0010_add_alertsettings_inline_permissions'), + ] + + operations = [ + migrations.AlterField( + model_name='metric', + name='field_name', + field=models.CharField( + blank=True, + default='value', + help_text='leave blank to determine automatically', + max_length=16, + ), + ), + ] diff --git a/openwisp_monitoring/monitoring/migrations/__init__.py b/openwisp_monitoring/monitoring/migrations/__init__.py index 29f5158c1..f519cb6c5 100644 --- a/openwisp_monitoring/monitoring/migrations/__init__.py +++ b/openwisp_monitoring/monitoring/migrations/__init__.py @@ -37,6 +37,37 @@ def assign_permissions_to_groups(apps, schema_editor): ) +def assign_alertsettings_inline_permissions_to_groups(apps, schema_editor): + create_default_permissions(apps, schema_editor) + operators_read_only_admins_manage = [ + 'alertsettings', + ] + manage_operations = ['add', 'view', 'change', 'delete'] + Group = get_swapped_model(apps, 'openwisp_users', 'Group') + + try: + admin = Group.objects.get(name='Administrator') + operator = Group.objects.get(name='Operator') + # consider failures custom cases + # that do not have to be dealt with + except Group.DoesNotExist: + return + + for model_name in operators_read_only_admins_manage: + try: + permission = Permission.objects.get( + codename='view_{}_inline'.format(model_name) + ) + operator.permissions.add(permission.pk) + except Permission.DoesNotExist: + pass + for operation in manage_operations: + permission = Permission.objects.get( + codename='{}_{}_inline'.format(operation, model_name) + ) + admin.permissions.add(permission.pk) + + def create_general_metrics(apps, schema_editor): Chart = swapper.load_model('monitoring', 'Chart') Metric = swapper.load_model('monitoring', 'Metric') diff --git a/openwisp_monitoring/monitoring/static/monitoring/js/chart.js b/openwisp_monitoring/monitoring/static/monitoring/js/chart.js index 8a11ab9e8..bfad3e564 100644 --- a/openwisp_monitoring/monitoring/static/monitoring/js/chart.js +++ b/openwisp_monitoring/monitoring/static/monitoring/js/chart.js @@ -20,19 +20,19 @@ function getAdaptiveScale(value, multiplier, unit) { if (value == 0) { multiplier = 1; - unit = 'B'; + unit = unit; } else if (value < 0.001) { multiplier = 1000000; - unit = 'KB'; + unit = 'K' + unit; } else if (value < 1) { multiplier = 1000; - unit = 'MB'; + unit = 'M' + unit; } else if (value < 1000) { multiplier = 1; - unit = 'GB'; + unit = 'G' + unit; } else if (value >= 1000) { multiplier = 0.001; - unit = 'TB'; + unit = 'T' + unit; } return { multiplier: multiplier, @@ -44,7 +44,7 @@ return Math.round((value * multiplier) * 100) / 100; } - function adaptiveFilterPoints(charts, layout, yRawVal) { + function adaptiveFilterPoints(charts, layout, yRawVal, 
chartUnit = '') { var y = charts[0].y, sum = 0, count = 0, shownVal, average; for (var i=0; i < y.length; i++) { sum += y[i]; @@ -53,7 +53,7 @@ } } average = sum / count; - var scales = getAdaptiveScale(average, 1, ''); + var scales = getAdaptiveScale(average, 1, chartUnit); var multiplier = scales.multiplier, unit = scales.unit; for (i=0; i < y.length; i++) { @@ -64,7 +64,7 @@ } shownVal = charts[j].y[i]; charts[j].y[i] = getAdaptiveBytes(charts[j].y[i], multiplier); - var hoverScales = getAdaptiveScale(shownVal, 1, ''); + var hoverScales = getAdaptiveScale(shownVal, 1, chartUnit); var hoverMultiplier = hoverScales.multiplier, hoverUnit = hoverScales.unit; shownVal = getAdaptiveBytes(shownVal, hoverMultiplier); @@ -74,8 +74,8 @@ layout.yaxis.title = unit; } - function adaptiveFilterSummary(i, percircles, value) { - var scales = getAdaptiveScale(value, 1, ''), + function adaptiveFilterSummary(i, percircles, value, chartUnit = '') { + var scales = getAdaptiveScale(value, 1, chartUnit), multiplier = scales.multiplier, unit = scales.unit; value = getAdaptiveBytes(value, multiplier); @@ -138,7 +138,7 @@ if (type === 'histogram') { layout.hovermode = 'closest'; } - var map, mapped, label, fixedValue, key; + var map, mapped, label, fixedValue, key, chartUnit, yValues; // given a value, returns its color and description // according to the color map configuration of this chart function findInColorMap(value) { @@ -190,6 +190,7 @@ // We use the "_key" field to sort the charts // according to the order defined in "data.trace_order" _key: key, + _connectPoints : data.connect_points || false, }, yValuesRaw = data.traces[i][1]; if (type !== 'histogram') { @@ -208,7 +209,10 @@ options.type = 'scatter'; options.mode = 'lines+markers'; options.line = {shape: 'hvh'}; - options.fill = "none"; + options.fill = data.fill; + } + if (options._connectPoints) { + options.mode = 'lines'; } } } @@ -228,6 +232,11 @@ layout.margin.b = 45; } } + + var xValuesRaw = options.x; + if (options._connectPoints) { + options.x = []; + } // adjust text to be displayed in Y values // differentiate between values with zero and no values at all (N/A) for (var c=0; c
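
Side note on the chart.js change above: the old byte-only scaling is generalised so the same helpers serve the traffic charts ('adaptive_prefix+B') and the new Iperf3 charts ('adaptive_prefix+bps'). What follows is a minimal, self-contained sketch (not part of the patch) that mirrors that logic to show how a K/M/G/T prefix is now prepended to whatever base unit the chart configuration declares; it assumes, as the original byte-based code does, that incoming values are expressed in Giga-units. The console.log usage lines are illustrative only.

    // standalone sketch of the generalised scaling helper used in chart.js;
    // "unit" is the base unit taken from the chart configuration
    // (e.g. 'B' for 'adaptive_prefix+B', 'bps' for 'adaptive_prefix+bps')
    function getAdaptiveScale(value, multiplier, unit) {
        if (value === 0) {
            multiplier = 1;        // keep the base unit unchanged
        } else if (value < 0.001) {
            multiplier = 1000000;
            unit = 'K' + unit;
        } else if (value < 1) {
            multiplier = 1000;
            unit = 'M' + unit;
        } else if (value < 1000) {
            multiplier = 1;
            unit = 'G' + unit;
        } else {
            multiplier = 0.001;
            unit = 'T' + unit;
        }
        return {multiplier: multiplier, unit: unit};
    }
    // e.g. a value of 0.25 on a bandwidth chart is shown as 250 Mbps,
    // while the same value on a traffic chart is shown as 250 MB
    console.log(getAdaptiveScale(0.25, 1, 'bps')); // {multiplier: 1000, unit: 'Mbps'}
    console.log(getAdaptiveScale(3, 1, 'B'));      // {multiplier: 1, unit: 'GB'}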