From 63c81714d72c974fbadb10639f284b2428777c27 Mon Sep 17 00:00:00 2001 From: Anton Date: Thu, 7 Mar 2024 00:22:45 +0500 Subject: [PATCH] Extraction and polling (#72) * xml and textfsm support * requests and scrpli_netconf support * fill poller main creds via js * bugfixes * js select error handling * serialization fixes * serializer parameters * dynamic rendering of serializer template fields * bugfixes * mkarr, subform validation * http poller fixes * introduce sync_in_migration * subform api validation * tests * docs * mknum * tests for jq --- README.md | 8 +- docs/entities/commands.md | 51 +++- docs/entities/pollers.md | 46 +++- docs/entities/serializers.md | 249 +++++++++++++----- requirements/base.txt | 11 +- validity/api/helpers.py | 21 ++ validity/api/serializers.py | 7 +- validity/choices.py | 17 +- validity/compliance/eval/default_nameset.py | 11 +- validity/compliance/serialization/__init__.py | 10 +- validity/compliance/serialization/backend.py | 18 +- validity/compliance/serialization/common.py | 14 + validity/compliance/serialization/routeros.py | 5 +- validity/compliance/serialization/textfsm.py | 15 ++ validity/compliance/serialization/ttp.py | 13 +- validity/compliance/serialization/xml.py | 21 ++ validity/compliance/serialization/yaml.py | 4 +- validity/forms/general.py | 33 ++- validity/forms/helpers.py | 65 +++-- validity/migrations/0001_initial.py | 2 +- validity/migrations/0004_netbox35_scripts.py | 7 +- validity/migrations/0008_script_change.py | 7 +- .../migrations/0009_serializer_parameters.py | 18 ++ validity/models/base.py | 24 +- validity/models/data.py | 30 ++- validity/models/polling.py | 22 +- validity/models/serializer.py | 44 +++- validity/pollers/default_credentials.py | 51 ++++ validity/pollers/factory.py | 12 +- validity/pollers/http.py | 58 ++++ validity/pollers/netconf.py | 19 ++ .../static/validity/connection-type-select.js | 19 ++ validity/subforms.py | 88 ++++++- validity/tables.py | 7 +- validity/templates/validity/command.html | 20 +- .../templates/validity/inc/parameters.html | 20 ++ validity/templates/validity/poller_edit.html | 14 + validity/templates/validity/serializer.html | 3 + validity/templatetags/validity.py | 6 - validity/tests/test_api.py | 23 ++ validity/tests/test_compliance/test_eval.py | 9 +- .../test_compliance/test_serialization.py | 46 +++- validity/tests/test_pollers.py | 28 +- validity/tests/test_utils/test_json.py | 48 ++++ validity/utils/json.py | 78 ++++++ validity/views/poller.py | 5 + validity/views/serializer.py | 4 +- 47 files changed, 1103 insertions(+), 228 deletions(-) create mode 100644 validity/compliance/serialization/common.py create mode 100644 validity/compliance/serialization/textfsm.py create mode 100644 validity/compliance/serialization/xml.py create mode 100644 validity/migrations/0009_serializer_parameters.py create mode 100644 validity/pollers/default_credentials.py create mode 100644 validity/pollers/http.py create mode 100644 validity/pollers/netconf.py create mode 100644 validity/static/validity/connection-type-select.js create mode 100644 validity/templates/validity/inc/parameters.html create mode 100644 validity/templates/validity/poller_edit.html create mode 100644 validity/tests/test_utils/test_json.py create mode 100644 validity/utils/json.py diff --git a/README.md b/README.md index c8d1483..fb0d19f 100644 --- a/README.md +++ b/README.md @@ -25,7 +25,7 @@ Validity is the [NetBox](https://netbox.dev) plugin to write "auto tests" for yo 3. Write compliance test as a Python expression, e.g.
`device.config["ntp-servers"] == ["1.2.3.4", "5.6.7.8"]`<br>
or<br>
-`not device.state.show_stp['enabled']` +`'10.0.0.0/8' in {entry['prefix'] for entry in device.state.show_route}` 4. Apply created test to specific devices and get the results per device (passed or failed). @@ -33,13 +33,13 @@ or
## Why? Validity helps you to concentrate on what really matters - defining the criteria of healthy and valid network and following these criteria. -Validity completely separates compliance test code from all other things like data collection, serialization and storage. This one encourages you to write short, clean and understandable compliance tests together with the mandatory description. +Validity completely separates compliance test code from all the other things like data collection, parsing and storage. It encourages you to write short, clean and understandable compliance tests together with the mandatory description. ## Key Features -* Truly vendor-agnostic. You can easily integrate any vendor config format using [TTP](https://github.com/dmulyalin/ttp) +* Truly vendor-agnostic. You can easily integrate any vendor config format using [TTP](https://github.com/dmulyalin/ttp) or a bunch of other [serialization options](https://validity.readthedocs.io/en/latest/entities/serializers/) * Writing compliance tests using Python expressions and [JQ](https://stedolan.github.io/jq/manual/) -* Direct polling of the devices via SSH or Telnet. More than 100 different platforms are available through [netmiko](https://github.com/ktbyers/netmiko) library. +* Gathering configuration or state info directly from the devices via **SSH**, **Telnet**, **Netconf** or **REST API**. * Flexible selector system to apply the tests only to a specific subset of devices * Concept of **dynamic pairs**. With dynamic pair you can compare 2 different devices between each other (e.g. compare the configuration of 2 MC-LAG members). * **Test result explanation**. When some test fails, you can get the **explanation** of the calculation process step by step. It helps to identify the cause of the failure. diff --git a/docs/entities/commands.md b/docs/entities/commands.md index 867cd51..1a47502 100644 --- a/docs/entities/commands.md +++ b/docs/entities/commands.md @@ -18,13 +18,20 @@ Label value is used to access serialized command output in Compliance Test. E.g. test expression `device.state.sh_version` implies there is a Command with label `sh_version`. #### Type -Type of the command. It defines other parameters that must be filled for this command. Right now only **CLI** type is available, but more types will appear in next releases. +Type of the command. It defines other parameters that must be filled for this command. Command of one specific type can be bound only to the Poller with the matching Connection Type. + +| Command Type | Matching Poller Type | +|--------------|----------------------| +| CLI | netmiko | +| NETCONF | scrapli_netconf | +| JSON_API | requests | + #### Retrieves configuration Defines either this command is supposed to retrieve device configuration or no. For each poller there can be **at most one** command which retrieves configuration. !!! note - Serialized state for command which retrieves configuration is always available through "config" key. Let's suppose we have a command with label `show_run` which has `retrieves_config=True`, then inside Compliance Test the serialized output of this command will be available through both `device.state.show_run` and `device.state.config`. + Serialized state for command which retrieves configuration is always available through "config" key. 
Let's suppose we have a command with label `show_run` which has `retrieves_config=True`, then inside Compliance Test the serialized output of this command will be available through both `device.state.show_run` and `device.config`. #### Serializer This field defines [Serializer](serializers.md) for Command output. @@ -34,4 +41,42 @@ This block contains type-specific parameters. ### Type:CLI #### CLI Command -This field contains text string which is going to be sent to device when polling occurs. \ No newline at end of file +This field must contain text string which is going to be sent to device when polling occurs. + +### Type:NETCONF +#### RPC +This field must contain an XML RPC which is going to be sent to device via Netconf. + +Example: + +``` + + + + + +``` + +### TYPE: JSON API +This option supports both REST API and various JSON-based APIs which do not follow REST + +#### Method +HTTP method used for polling. `Get` by default. + +#### URL Path +Path part of the URL. Will be appended (via Jinja2 expression) to hostname part defined in Poller credentials +Example: `/rest/ip/address/` + +#### Body + +Request body is optional. It may be useful for various JSON-based APIs which do not follow REST and may use POST or other queries for information retrieving. +You can use Jinja2 expressions as values in body dictionary. Available context variables are `device` and `command`. +Example: +```json +{ + "data": { + "commamnd": "get-config", + "device": "{{ device.name }}" + } +} +``` diff --git a/docs/entities/pollers.md b/docs/entities/pollers.md index be409b2..bdc8cca 100644 --- a/docs/entities/pollers.md +++ b/docs/entities/pollers.md @@ -11,27 +11,45 @@ Name of the Poller. Must be unique. Set of [Commands](commands.md) which are going to be sent to devices. #### Connection Type -This field defines the polling backend which will be used for this Poller. +This field defines the library used to interact with devices (polling backend). At the moment there are 3 options available: + +* [netmiko](https://github.com/ktbyers/netmiko) for polling via SSH or Telnet +* [scrapli_netconf](https://github.com/scrapli/scrapli_netconf) for polling via Netconf +* [requests](https://github.com/psf/requests) for polling via REST or JSON API #### Public credentials, Private credentials -These two fields must contain any credentials which will be passed to polling backend on its instantiation -All the values of private credentials will be encrypted after submitting. +These two fields must contain any credentials which will be passed to polling backend. +All the values of private credentials will be encrypted after submitting. These values are stored encrypted in the DB, decryption occurs only to pass the value to Polling backend. -!!! info - Let's consider an example to better understand how it works. - Let's suppose we have a Poller with: - * connection type: `netmiko` - * public credentials: `{"device_type": "cisco_ios", "username": "admin"}` - * private credentials: `{"password": "admin123"}` - When polling occurs, public and private credentials are merged (device primary IP will also be added there) and passed to **netmiko.ConnectHandler** - So, it means that in case of public/private credentials for **netmiko** you can define any keyword arguments [ConnectHandler](https://github.com/ktbyers/netmiko#getting-started-1) is ready to accept. +!!! warning + DJANGO_SECRET_KEY is used as an encryption key. Consider it in case of data migrations. 
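+
+As a quick illustration (the key names mirror the defaults suggested in the UI and the values are placeholders, not recommendations), a Poller with the `scrapli_netconf` connection type could use the following **public credentials**, which are handed over to `scrapli_netconf.driver.NetconfDriver`:
+
+```json
+{
+  "auth_username": "admin",
+  "auth_strict_key": false,
+  "port": 830
+}
+```
+
+with `{"auth_password": "admin123"}` as **private credentials**.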
-Private credentials are stored encrypted in the DB, decryption occurs only to pass the value to Polling backend. +## Credentials and polling backend -!!! warning - DJANGO_SECRET_KEY is used as an encryption key. Consider it in case of data migrations. +Let's consider an example to better understand how credentials are passed to selected Conenction Type. +Let's suppose we have a Poller with: +* connection type: `netmiko` +* public credentials: `{"device_type": "cisco_ios", "username": "admin"}` +* private credentials: `{"password": "admin123"}` +When polling occurs, public and private credentials are merged (device primary IP will also be added there) and passed to **netmiko.ConnectHandler** +So, it means that in case of public/private credentials for **netmiko** you can define any keyword arguments [ConnectHandler](https://github.com/ktbyers/netmiko#getting-started-1) is ready to accept. + + +The table below points out the entities which accept merged credentials from poller: + +| Connection Type | Entity that accepts credentials | +|-----------------|--------------------------------------| +| netmiko | netmiko.ConnectHandler | +| scrapli_netconf | scrapli_netconf.driver.NetconfDriver | +| requests | requests.request | + +For **requests** case there is some extra logic here: +1. `url` credential accepts Jinja2 expression, `device` and `command` are available as context variables. Default URL value:
+`https://{{device.primary_ip.address.ip}}/{{command.parameters.url_path.lstrip('/')}}` +2. Pass something like `{"auth": ["admin_user", "admin_password"]}` to use basic auth. +3. SSL verification is turned off by default. You can turn it back on by specifying `{"verify": true}` ## Binding Pollers to Devices diff --git a/docs/entities/serializers.md b/docs/entities/serializers.md index 2e8e114..f88a34e 100644 --- a/docs/entities/serializers.md +++ b/docs/entities/serializers.md @@ -1,87 +1,103 @@ # Serializers -Serializer is used to translate/parse device configuration (or other state info) from vendor specific format into JSON. +Serializer is used to translate/parse device configuration (or other state data) from vendor specific format into JSON-like structure. -The main approach used in Validity is [Template Text Parser (TTP)](https://ttp.readthedocs.io/en/latest/Overview.html). This library allows you to define text template and then parse the data according to that template. Template language is very simple and looks like Jinja2 in the reverse way. +Validity has a bunch of different approaches (extraction methods) to accomplish this task. -There is another one way: you can somehow get already serialized data and tell Validity read it as already prepared JSON or YAML. This can be useful for some network vendors which have their own tools for getting JSON-formatted config (e.g. `| display json` on Junos). +## Extraction Methods -## Fields - -#### Name - -The name of the Serializer. Must be unique. - -#### Config Extraction Method - -The field with the following choices: **TTP**, **YAML**, **ROUTEROS** - -!!! note - Remember that YAML is the superset of JSON. So, YAML serializer can be used to read any JSON file. - -This field defines the way of getting serialized config from the text. - -**TTP** choice requires defining a template (see other fields below). - -**YAML** serializer has no additional properties and may be used to read already prepared JSON or YAML file. - -**ROUTEROS** serializer allows to parse MikroTik RouterOS configuration files. No additional configuration is needed. See below [MikroTik parsing](#mikrotik-parsing) - - - -#### TTP Template - -Inside this field at the Serializer page you can view your template defined either via DB or via Data Source. - -At the add/edit form this field is used to store TTP Template inside the DB. -This option fits well when you have small templates or just need to quickly test some setup. - - -#### Data Source and Data File - -!!! info - You can use only one option per one serializer instance: you either define your template via DB (**Template** field) or via Git (**Data Source** and **Data File** fields). You can't use both approaches at the same time. - -This pair of fields allows you to store Serializer template as a file in a Data Source (likely pointing to a git repository). - -This is the best option if you have plenty of complex Serializers and want to get all the benefits from storing them under version control. +### TTP +[Template Text Parser (TTP)](https://ttp.readthedocs.io/en/latest/Overview.html) is the preferred approach for parsing vendor-specific configuration data. TTP allows you to define a template and then parse text data according to that template. Template language is very simple and looks like Jinja2 in the reverse way. -## Bind Serializers to Devices - -Binding Serializer to Device is required to be able to serialize device configuration found by **device_config_path**. - -!!! 
note - You don't need to bind Serializer to Devices if you use direct polling. In this case Serializers are bound to [Commands](commands.md). +**Input data:** +```plain +interface Loopback0 + ip address 10.0.0.1 255.255.255.255 +! +interface Vlan100 + ip address 10.100.0.254 255.255.255.0 +! +``` +**Template:** +```plain + +interface {{ interface }} + ip address {{ address }} {{ mask }} + +``` -There are 3 ways to bind a Serializer to Device: +**Result:** +```json +{ + "interfaces": [ + { + "interface": "Loopback0", + "address": "10.0.0.1", + "mask": "255.255.255.255" + }, + { + "interface": "Vlan100", + "address": "10.100.0.254", + "mask": "255.255.255.0" + } + ] +} +``` -* Set the serializer at **Manufacturer** level. Go to Manufacturer page at set the serializer via custom fields. This action applies this serializer to all the devices with this Manufacturer. +### TEXTFSM -* Set the serializer at **Device Type** level. Go to Device Type page at set the serializer via custom fields. This action applies this serializer to all the devices with this Device Type and overwrites the value from Manufacturer. +[TextFSM](https://github.com/google/textfsm) is more suitable for `show`-commands output parsing. Unlike vanilla TextFSM, this extraction method outputs list of dicts. -* Set the serializer at the individual **Device** level. Go to Device page at set the serializer via custom fields. This action applies this serializer to one specific device and overwrites the values from Device Type and Manufacturer. +**Input data:** +```plain +Interface IP-Address OK? Method Status Protocol +FastEthernet0/0 15.0.15.1 YES manual up up +Loopback0 10.1.1.1 YES manual up up +``` +**Template:** +```plain +Value INTF (\S+) +Value ADDR (\S+) +Value STATUS (up|down|) +Value PROTO (up|down) -When device has bound Serializer and Data Source you can find out how serialized config looks like at the Device page (**Serialized State** tab) or by using API handle
-`/api/plugins/validity/devices//serialized_state/?name=config` +Start + ^${INTF}\s+${ADDR}\s+\w+\s+\w+\s+${STATUS}\s+${PROTO} -> Record +``` +**Result:** +```json +[ + { + "INTF": "FastEthernet0/0", + "ADDR": "15.0.15.1", + "STATUS": "up", + "PROTO": "up" + }, + { + "INTF": "Loopback0", + "ADDR": "10.1.1.1", + "STATUS": "up", + "PROTO": "up" + } +] +``` -## MikroTik parsing +### ROUTEROS -Validity has an option to parse MikroTik RouterOS config files without TTP. You just need `ROUTEROS` method in serializer settings to do it. Why MikroTik instead of other vendors? There are 2 reasons: +Validity has an option to parse MikroTik RouterOS config files. You just need `ROUTEROS` method in serializer settings to do it. Why MikroTik instead of other vendors? There are 2 reasons: -* MikroTik configuration is really difficult to parse with TTP. You have to take into account all possible configurations of each line with/without each of the parameters. +* MikroTik configuration is really difficult to parse with TTP. * At the same time, MikroTik configuration has the same structure as JSON may have. So, it's very easy to translate it using simple Python tools. !!! warning - Parser works only if the configuration structure strictly follows the `/export` command format. - - Things like `/ip address add address=1.2.3.4/24` won't be parsed - -Here is the example configuration: + Parser works only with **configuration** which structure strictly follows the `/export` command format. + If you want to work with operational state (`print`-commands), the easiest way would be to leverage MikroTik REST API and [YAML](#YAML) serializer. +**Input data:** ``` /interface ethernet set [ find default-name=ether1 ] comment="some comment" @@ -94,8 +110,7 @@ set www-ssl certificate=some_cert disabled=no set disable-ipv6=yes max-neighbor-entries=8192 ``` -And here is the parsing result in YAML: - +**Result (as YAML):** ```yaml interface: ethernet: @@ -121,4 +136,108 @@ ipv6: properties: disable-ipv6: true max-neighbor-entries: 8192 -``` \ No newline at end of file +``` + +### XML +This method translates input XML-formatted text into Python dict using [xmltodict](https://github.com/martinblech/xmltodict) library. It is mainly used together with [Netconf commands](./commands.md#typenetconf). + +**Input data:** +```xml + + one + two + +``` + +**Result:** +```json +{"a": {"b": ["one", "two"]}} +``` +#### mkarr and mknum + +XML extraction method has a few drawbacks: + +* all the integers and floats in the original XML will be turned into strings inside JSON +* List of values with one single member will be translated into a plain value with no list at all. + +Consider the example above with `` and ``. Let's remove the second ``:
For `<a><b>one</b></a>` XML the result will be `{"a": {"b": "one"}}` instead of
`{"a": {"b": ["one"]}}`. + +These issues can be handled with **JQ Expression** field. Validity introduces two custom JQ functions: +* **mkarr(path)** wraps expression at *path* into a list if it's not already a list. +* **mknum** or **mknum(path)** tries to convert all number-like strings *at path or lower* to numbers. Unlike **mkarr()**, this functions works recursively. So, `. | mknum` which is equivalent of `. | mknum(.)` will be applied to the entire document and will try to convert all number-like strings to numbers. + +Let's suppose that you got the following result of XML to JSON converting: +```json +{"a": {"b": "one"}, "c": "10.2"} +``` +After applying this JQ expression +```plain +. | mkarr(.a.b) | mknum +``` +you'll get +```json +{"a": {"b": ["one"]}, "c": 10.2} +``` + +### YAML +This method is used to work with already-prepared YAML or JSON data (don't forget that JSON is a subset of YAML). It suits well if you poll your devices via REST API or your vendor has its own tools to get JSON-formatted config (e.g. `| display json` on Junos). + + +## Fields + +#### Name + +The name of the Serializer. Must be unique. + +#### Extraction Method + +This field defines the way of data parsing, possible choices are described [above](#extraction-methods). After you fill out this field, NetBox UI will display other fields which are specific to selected extraction method. + +#### Template + +Inside this field at the Serializer page you can view your template defined either via DB or via Data Source. + +At the add/edit form this field is used to store TTP or TextFSM Template inside the DB. +This option fits well when you have small templates or just need to quickly test some setup. + + +#### Data Source and Data File + +!!! info + You can use only one option per one serializer instance: you either define your template via DB (**Template** field) or via link (**Data Source** and **Data File** fields). You can't use both approaches at the same time. + +This pair of fields allows you to store Serializer template as a file in a Data Source (likely pointing to a Git repository). + +This is the best option if you have plenty of complex Serializers and want to get all the benefits from storing them under version control. + +#### JQ Expression + +This optional field allows to post-process the parsing result by specifying [JQ](https://jqlang.github.io/jq/) expression. + +This feature may be convenient when you poll devices via Netconf or REST. The answer may contain a lot of information, and likely not all of it is useful. + +#### Drop XML Attributes + +This is the field specific to the XML extraction method. It allows to drop all XML attributes (they start with `@` sign after converting to JSON) from the result. It may be useful when dealing with netconf. + + +## Bind Serializers to Devices + +Binding Serializer to Device is required to be able to serialize device configuration found by **device_config_path**. + +!!! note + You don't need to bind Serializer to Devices if you use direct polling. In this case Serializers are bound to [Commands](commands.md). + + +There are 3 ways to bind a Serializer to Device: + +* Set the serializer at **Manufacturer** level. Go to Manufacturer page at set the serializer via custom fields. This action applies this serializer to all the devices with this Manufacturer. + +* Set the serializer at **Device Type** level. Go to Device Type page at set the serializer via custom fields. 
This action applies this serializer to all the devices with this Device Type and overwrites the value from Manufacturer.

* Set the serializer at the individual **Device** level. Go to the Device page and set the serializer via custom fields. This action applies this serializer to one specific device and overwrites the values from Device Type and Manufacturer.


When a device has a bound Serializer and Data Source, you can find out what the serialized config looks like at the Device page (**Serialized State** tab) or by using the API handle
+`/dcim/devices//serialized_state/?name=config` diff --git a/requirements/base.txt b/requirements/base.txt index 8177644..1d54725 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -1,9 +1,12 @@ django-bootstrap-v5==1.0.* -pydantic >=2.0.0,<3 +pydantic>=2.0.0,<3 ttp==0.9.* -jq==1.4.* -deepdiff==6.2.* +jq>=1.4.0,<2 +deepdiff>=6.2.0,<7 simpleeval==0.9.* -netmiko >=4.0.0,<5 +netmiko>=4.0.0,<5 +scrapli_netconf==2024.1.30 +textfsm>=1.1.3,<2 +xmltodict<1 dulwich # Core NetBox "optional" requirement diff --git a/validity/api/helpers.py b/validity/api/helpers.py index 044f10d..01f7e15 100644 --- a/validity/api/helpers.py +++ b/validity/api/helpers.py @@ -1,6 +1,8 @@ from itertools import chain from typing import Sequence +from django.core.exceptions import ValidationError +from django.db.models import ManyToManyField from netbox.api.serializers import WritableNestedSerializer from rest_framework.serializers import JSONField, ModelSerializer @@ -63,3 +65,22 @@ def to_representation(self, instance): field_name: field for field_name, field in self.fields.items() if field_name in set(query_fields) } return super().to_representation(instance) + + +class SubformValidationMixin: + """ + Serializer Mixin. Validates JSON field according to a subform + """ + + def validate(self, attrs): + instance = self.instance or self.Meta.model() + for field, field_value in attrs.items(): + if not isinstance(instance._meta.get_field(field), ManyToManyField): + setattr(instance, field, field_value) + subform = instance.subform_cls(instance.subform_json) + if not subform.is_valid(): + errors = [ + ": ".join((field, err[0])) if field != "__all__" else err for field, err in subform.errors.items() + ] + raise ValidationError({instance.subform_json_field: errors}) + return attrs diff --git a/validity/api/serializers.py b/validity/api/serializers.py index 2bdc371..89515c6 100644 --- a/validity/api/serializers.py +++ b/validity/api/serializers.py @@ -18,7 +18,7 @@ from tenancy.models import Tenant from validity import models -from .helpers import EncryptedDictField, FieldsMixin, ListQPMixin, nested_factory +from .helpers import EncryptedDictField, FieldsMixin, ListQPMixin, SubformValidationMixin, nested_factory class ComplianceSelectorSerializer(NetBoxModelSerializer): @@ -188,7 +188,7 @@ class Meta: ) -class SerializerSerializer(NetBoxModelSerializer): +class SerializerSerializer(SubformValidationMixin, NetBoxModelSerializer): url = serializers.HyperlinkedIdentityField(view_name="plugins-api:validity-api:serializer-detail") template = serializers.CharField(write_only=True, required=False) effective_template = serializers.ReadOnlyField() @@ -207,6 +207,7 @@ class Meta: "template", "data_source", "data_file", + "parameters", "tags", "custom_fields", "created", @@ -275,7 +276,7 @@ class Meta(NestedDeviceSerializer.Meta): ] -class CommandSerializer(NetBoxModelSerializer): +class CommandSerializer(SubformValidationMixin, NetBoxModelSerializer): serializer = NestedSerializerSerializer(required=False) url = serializers.HyperlinkedIdentityField(view_name="plugins-api:validity-api:command-detail") diff --git a/validity/choices.py b/validity/choices.py index 505d3f6..d12291a 100644 --- a/validity/choices.py +++ b/validity/choices.py @@ -79,7 +79,9 @@ def ge(cls, severity: "SeverityChoices") -> list[str]: class ExtractionMethodChoices(TextChoices, metaclass=ColoredChoiceMeta): TTP = "TTP", "TTP", "purple" + TEXTFSM = "TEXTFSM", "TEXTFSM", "blue" YAML = "YAML", "YAML", "info" + XML = "XML", "XML", "orange" ROUTEROS = 
"ROUTEROS", "ROUTEROS", "green" @@ -110,9 +112,11 @@ def pk_field(self): class ConnectionTypeChoices(TextChoices, metaclass=ColoredChoiceMeta): - netmiko = "netmiko", "blue" + netmiko = "netmiko", "netmiko", "blue" + requests = "requests", "requests", "info" + scrapli_netconf = "scrapli_netconf", "scrapli_netconf", "orange" - __command_types__ = {"netmiko": "CLI"} + __command_types__ = {"netmiko": "CLI", "scrapli_netconf": "netconf", "requests": "json_api"} @property def acceptable_command_type(self) -> "CommandTypeChoices": @@ -121,9 +125,18 @@ def acceptable_command_type(self) -> "CommandTypeChoices": class CommandTypeChoices(TextChoices, metaclass=ColoredChoiceMeta): CLI = "CLI", "CLI", "blue" + netconf = "netconf", "orange" + json_api = "json_api", "JSON API", "info" class ExplanationVerbosityChoices(IntegerChoices): disabled = 0, _("0 - Disabled") medium = 1, _("1 - Medium") maximum = 2, _("2 - Maximum") + + +class JSONAPIMethodChoices(TextChoices): + GET = "GET" + POST = "POST" + PATCH = "PATCH" + PUT = "PUT" diff --git a/validity/compliance/eval/default_nameset.py b/validity/compliance/eval/default_nameset.py index 2a52c72..c116a0d 100644 --- a/validity/compliance/eval/default_nameset.py +++ b/validity/compliance/eval/default_nameset.py @@ -1,8 +1,7 @@ from builtins import * # noqa -import jq as pyjq - from validity.models import VDevice +from validity.utils.json import jq # noqa builtins = [ @@ -55,14 +54,6 @@ __all__ = ["jq", "config", "state"] + builtins -class jq: - first = staticmethod(pyjq.first) - all = staticmethod(pyjq.all) - - def __init__(self, *args, **kwargs) -> None: - raise TypeError("jq is not callable") - - def state(device): # state() implies presence of "_data_source" and "_poller" global variables # which are gonna be set by RunTests script diff --git a/validity/compliance/serialization/__init__.py b/validity/compliance/serialization/__init__.py index 0a24443..4e10f15 100644 --- a/validity/compliance/serialization/__init__.py +++ b/validity/compliance/serialization/__init__.py @@ -1,10 +1,18 @@ from .backend import SerializationBackend from .routeros import serialize_ros from .serializable import Serializable +from .textfsm import serialize_textfsm from .ttp import serialize_ttp +from .xml import serialize_xml from .yaml import serialize_yaml serialize = SerializationBackend( - extraction_methods={"YAML": serialize_yaml, "ROUTEROS": serialize_ros, "TTP": serialize_ttp} + extraction_methods={ + "YAML": serialize_yaml, + "ROUTEROS": serialize_ros, + "TTP": serialize_ttp, + "TEXTFSM": serialize_textfsm, + "XML": serialize_xml, + } ) diff --git a/validity/compliance/serialization/backend.py b/validity/compliance/serialization/backend.py index 8a728e8..568b04b 100644 --- a/validity/compliance/serialization/backend.py +++ b/validity/compliance/serialization/backend.py @@ -1,10 +1,18 @@ -from typing import Callable +from typing import TYPE_CHECKING, Callable + +from validity.utils.misc import reraise +from ..exceptions import SerializationError + + +if TYPE_CHECKING: + from validity.models import Serializer class SerializationBackend: - def __init__(self, extraction_methods: dict[str, Callable[[str, str], dict]]) -> None: + def __init__(self, extraction_methods: dict[str, Callable[[str, str, dict], dict | list]]) -> None: self.extraction_methods = extraction_methods - def __call__(self, extraction_method: str, plain_data: str, template: str): - extraction_function = self.extraction_methods[extraction_method] - return extraction_function(plain_data, template) + def 
__call__(self, serializer: "Serializer", plain_data: str): + extraction_function = self.extraction_methods[serializer.extraction_method] + with reraise(Exception, SerializationError): + return extraction_function(plain_data, serializer.effective_template, serializer.parameters) diff --git a/validity/compliance/serialization/common.py b/validity/compliance/serialization/common.py new file mode 100644 index 0000000..830512f --- /dev/null +++ b/validity/compliance/serialization/common.py @@ -0,0 +1,14 @@ +from functools import wraps + +from validity.utils.json import jq + + +def postprocess_jq(func): + @wraps(func) + def inner(plain_data: str, template: str, parameters: dict): + result = func(plain_data, template, parameters) + if jq_expression := parameters.get("jq_expression"): + result = jq.first(jq_expression, result) + return result + + return inner diff --git a/validity/compliance/serialization/routeros.py b/validity/compliance/serialization/routeros.py index f50c064..720a124 100644 --- a/validity/compliance/serialization/routeros.py +++ b/validity/compliance/serialization/routeros.py @@ -146,6 +146,5 @@ def parse_config(plain_config: str) -> dict: return result -def serialize_ros(plain_data: str, template: str = ""): - with reraise(Exception, SerializationError): - return parse_config(plain_data) +def serialize_ros(plain_data: str, template: str, parameters: dict): + return parse_config(plain_data) diff --git a/validity/compliance/serialization/textfsm.py b/validity/compliance/serialization/textfsm.py new file mode 100644 index 0000000..814cd3b --- /dev/null +++ b/validity/compliance/serialization/textfsm.py @@ -0,0 +1,15 @@ +import io + +import textfsm + +from .common import postprocess_jq + + +@postprocess_jq +def serialize_textfsm(plain_data: str, template: str, parameters: dict) -> list[dict]: + dict_results = [] + template_file = io.StringIO(template) + fsm = textfsm.TextFSM(template_file) + for fsm_result in fsm.ParseText(plain_data): + dict_results.append({k: v for k, v in zip(fsm.header, fsm_result)}) + return dict_results diff --git a/validity/compliance/serialization/ttp.py b/validity/compliance/serialization/ttp.py index 1651aed..0018a5a 100644 --- a/validity/compliance/serialization/ttp.py +++ b/validity/compliance/serialization/ttp.py @@ -1,11 +1,10 @@ from ttp import ttp -from validity.utils.misc import reraise -from ..exceptions import SerializationError +from .common import postprocess_jq -def serialize_ttp(plain_data: str, template: str): - with reraise(Exception, SerializationError): - parser = ttp(data=plain_data, template=template) - parser.parse() - return parser.result()[0][0] +@postprocess_jq +def serialize_ttp(plain_data: str, template: str, parameters: dict): + parser = ttp(data=plain_data, template=template) + parser.parse() + return parser.result()[0][0] diff --git a/validity/compliance/serialization/xml.py b/validity/compliance/serialization/xml.py new file mode 100644 index 0000000..f1eff8c --- /dev/null +++ b/validity/compliance/serialization/xml.py @@ -0,0 +1,21 @@ +from xml.parsers.expat import ExpatError + +import xmltodict + +from validity.utils.json import transform_json +from validity.utils.misc import reraise +from ..exceptions import SerializationError +from .common import postprocess_jq + + +@postprocess_jq +def serialize_xml(plain_data: str, template: str, parameters: dict): + with reraise(ExpatError, SerializationError, "Got invalid XML"): + result = xmltodict.parse(plain_data) + if parameters.get("drop_attributes"): + result = transform_json( + 
result, + match_fn=lambda key, _: isinstance(key, str) and key.startswith("@"), + transform_fn=lambda key, value: None, + ) + return result diff --git a/validity/compliance/serialization/yaml.py b/validity/compliance/serialization/yaml.py index ee46227..817aacd 100644 --- a/validity/compliance/serialization/yaml.py +++ b/validity/compliance/serialization/yaml.py @@ -2,8 +2,10 @@ from validity.utils.misc import reraise from ..exceptions import SerializationError +from .common import postprocess_jq -def serialize_yaml(plain_data: str, template: str = "") -> dict: +@postprocess_jq +def serialize_yaml(plain_data: str, template: str, parameters: dict) -> dict: with reraise(yaml.YAMLError, SerializationError, "Got invalid JSON/YAML"): return yaml.safe_load(plain_data) diff --git a/validity/forms/general.py b/validity/forms/general.py index 33406b9..82d469a 100644 --- a/validity/forms/general.py +++ b/validity/forms/general.py @@ -1,16 +1,17 @@ from core.forms.mixins import SyncedDataMixin from dcim.models import DeviceType, Location, Manufacturer, Platform, Site -from django.forms import CharField, Textarea, ValidationError +from django.forms import CharField, ChoiceField, Select, Textarea, ValidationError from django.utils.translation import gettext_lazy as _ from extras.models import Tag from netbox.forms import NetBoxModelForm from tenancy.models import Tenant -from utilities.forms import get_field_value +from utilities.forms import add_blank_choice, get_field_value from utilities.forms.fields import DynamicModelChoiceField, DynamicModelMultipleChoiceField from utilities.forms.widgets import HTMXSelect from validity import models -from .helpers import SubformMixin +from validity.choices import ConnectionTypeChoices +from .helpers import PrettyJSONWidget, SubformMixin class ComplianceTestForm(SyncedDataMixin, NetBoxModelForm): @@ -85,18 +86,33 @@ def clean(self): return result -class SerializerForm(SyncedDataMixin, NetBoxModelForm): +class SerializerForm(SyncedDataMixin, SubformMixin, NetBoxModelForm): template = CharField(required=False, widget=Textarea(attrs={"style": "font-family:monospace"})) - fieldsets = ( + main_fieldsets = ( (_("Serializer"), ("name", "extraction_method", "tags")), + "__subform__", (_("Template from Data Source"), ("data_source", "data_file")), (_("Template from DB"), ("template",)), ) + @property + def fieldsets(self): + fs = super().fieldsets + if not self.subform or not self.subform.requires_template: + fs = fs[:-2] # drop "Template from..." 
fieldsets + return fs + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + if not self.subform or not self.subform.requires_template: + for field in ["template", "data_source", "data_file"]: + del self.fields[field] + class Meta: model = models.Serializer fields = ("name", "extraction_method", "template", "data_source", "data_file", "tags") + widgets = {"extraction_method": HTMXSelect()} class NameSetForm(NetBoxModelForm): @@ -115,14 +131,17 @@ class Meta: class PollerForm(NetBoxModelForm): + connection_type = ChoiceField( + choices=add_blank_choice(ConnectionTypeChoices.choices), widget=Select(attrs={"id": "connection_type_select"}) + ) commands = DynamicModelMultipleChoiceField(queryset=models.Command.objects.all()) class Meta: model = models.Poller fields = ("name", "commands", "connection_type", "public_credentials", "private_credentials", "tags") widgets = { - "public_credentials": Textarea(attrs={"style": "font-family:monospace"}), - "private_credentials": Textarea(attrs={"style": "font-family:monospace"}), + "public_credentials": PrettyJSONWidget(), + "private_credentials": PrettyJSONWidget(), } def clean(self): diff --git a/validity/forms/helpers.py b/validity/forms/helpers.py index 84c7a68..ac7701f 100644 --- a/validity/forms/helpers.py +++ b/validity/forms/helpers.py @@ -1,12 +1,25 @@ import json -from typing import Any, Sequence +from contextlib import suppress +from typing import Any, Literal, Sequence -from django.forms import ChoiceField, JSONField, Select +from django.forms import ChoiceField, JSONField, Select, Textarea from utilities.forms import get_field_value from validity.fields import EncryptedDict +class PrettyJSONWidget(Textarea): + def __init__(self, attrs=None, indent=2) -> None: + super().__init__(attrs) + self.attrs.setdefault("style", "font-family:monospace") + self.indent = indent + + def format_value(self, value: Any) -> str | None: + with suppress(Exception): + return json.dumps(json.loads(value), indent=self.indent) + return super().format_value(value) + + class IntegerChoiceField(ChoiceField): def to_python(self, value: Any | None) -> Any | None: if value is not None: @@ -49,25 +62,19 @@ def __init__(self, *args, exclude: Sequence[str] = (), **kwargs) -> None: class SubformMixin: - main_fieldsets: Sequence[tuple[str, Sequence]] - - @property - def type_field_name(self): - return self.instance.subform_type_field + main_fieldsets: Sequence[tuple[str, Sequence] | Literal["__subform__"]] @property - def json_field_name(self): + def json_field_name(self) -> str: return self.instance.subform_json_field @property - def json_field_value(self): - if self.json_field_name in self.initial: - return json.loads(self.initial[self.json_field_name]) - return getattr(self.instance, self.json_field_name) - - @json_field_value.setter - def json_field_value(self, value): - setattr(self.instance, self.json_field_name, value) + def json_field_value(self) -> dict: + if self.data: + return {k: v for k, v in self.data.items() if k in self.instance.subform_cls.base_fields} + if value := self.initial.get(self.json_field_name): + return json.loads(value) + return self.instance.subform_json @property def fieldset_title(self): @@ -75,19 +82,24 @@ def fieldset_title(self): @property def fieldsets(self): + if not self.subform or not self.subform.fields: + return [fs for fs in self.main_fieldsets if fs != "__subform__"] field_sets = list(self.main_fieldsets) - if self.subform: - field_sets.append((self.fieldset_title, self.subform.fields.keys())) + try: + 
subforms_idx = field_sets.index("__subform__") + except ValueError: + field_sets.append(None) + subforms_idx = -1 + field_sets[subforms_idx] = (self.fieldset_title, self.subform.fields.keys()) return field_sets def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.subform = None - type_field_value = get_field_value(self, self.type_field_name) + type_field_value = get_field_value(self, self.instance.subform_type_field) if type_field_value: - setattr(self.instance, self.type_field_name, type_field_value) - subform_cls = getattr(self.instance, self.json_field_name + "_form") - self.subform = subform_cls(self.json_field_value) + self.instance.subform_type = type_field_value + self.subform = self.instance.subform_cls(self.json_field_value) self.fields |= self.subform.fields self.initial |= self.subform.data @@ -97,5 +109,12 @@ def save(self, commit=True): for name in self.fields: if name in self.subform.fields: json_field[name] = self.cleaned_data[name] - self.json_field_value = json_field + self.instance.subform_json = json_field return super().save(commit) + + def clean(self): + cleaned_data = super().clean() + if self.subform: + for field, error in self.subform.errors.items(): + self.add_error(field, error) + return cleaned_data diff --git a/validity/migrations/0001_initial.py b/validity/migrations/0001_initial.py index 09740fd..0f73935 100644 --- a/validity/migrations/0001_initial.py +++ b/validity/migrations/0001_initial.py @@ -185,7 +185,7 @@ class Migration(migrations.Migration): models.CharField(blank=True, max_length=255, validators=[]), ), ("name", models.CharField(max_length=255, unique=True)), - ("extraction_method", models.CharField(default="TTP", max_length=10)), + ("extraction_method", models.CharField(max_length=10)), ("ttp_template", models.TextField(blank=True)), ( "repo", diff --git a/validity/migrations/0004_netbox35_scripts.py b/validity/migrations/0004_netbox35_scripts.py index 32450cd..bab1f28 100644 --- a/validity/migrations/0004_netbox35_scripts.py +++ b/validity/migrations/0004_netbox35_scripts.py @@ -15,14 +15,15 @@ def forward_func(apps, schema_editor): if config.netbox_version < "3.5.0": return - from core.models import DataSource + from validity.models import VDataSource from extras.models import ScriptModule db_alias = schema_editor.connection.alias - data_source = DataSource.objects.using(db_alias).create( + data_source = VDataSource.objects.using(db_alias).create( name=DATASOURCE_NAME, type="local", source_url="file://" + SCRIPTS_FOLDER, description=_("Required by Validity") ) - data_source.sync() + DataFile = apps.get_model("core", "DataFile") + data_source.sync_in_migration(DataFile) for data_file in data_source.datafiles.using(db_alias).all(): if data_file.path.endswith("__init__.py") or data_file.path.endswith(".pyc"): continue diff --git a/validity/migrations/0008_script_change.py b/validity/migrations/0008_script_change.py index 3a88371..a99c4ac 100644 --- a/validity/migrations/0008_script_change.py +++ b/validity/migrations/0008_script_change.py @@ -11,10 +11,11 @@ def forward_func(apps, schema_editor): - from core.models import DataSource + from validity.models import VDataSource from extras.models import ScriptModule - datasource, _ = DataSource.objects.get_or_create( + DataFile = apps.get_model("core", "DataFile") + datasource, _ = VDataSource.objects.get_or_create( name=DATASOURCE_NAME, type="local", defaults={"source_url": f"file://{SCRIPTS_INSTALL_FOLDER.parent}", "description": __("Required by Validity")}, @@ -23,7 +24,7 @@ 
def forward_func(apps, schema_editor): ScriptModule.objects.filter(data_source=datasource).delete() datasource.source_url = f"file://{SCRIPTS_INSTALL_FOLDER}" datasource.save() - datasource.sync() + datasource.sync_in_migration(DataFile) module = ScriptModule( data_source=datasource, data_file=datasource.datafiles.get(path=SCRIPT_NAME), diff --git a/validity/migrations/0009_serializer_parameters.py b/validity/migrations/0009_serializer_parameters.py new file mode 100644 index 0000000..71fb7af --- /dev/null +++ b/validity/migrations/0009_serializer_parameters.py @@ -0,0 +1,18 @@ +# Generated by Django 4.2.9 on 2024-02-19 19:24 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("validity", "0008_script_change"), + ] + + operations = [ + migrations.AddField( + model_name="serializer", + name="parameters", + field=models.JSONField(default=dict, blank=True), + ), + ] diff --git a/validity/models/base.py b/validity/models/base.py index 55d761c..dd43527 100644 --- a/validity/models/base.py +++ b/validity/models/base.py @@ -99,8 +99,22 @@ class SubformMixin: subform_type_field: str subforms: dict - def __getattr__(self, attr): - if attr == f"{self.subform_json_field}_form": - type_value = getattr(self, self.subform_type_field) - return self.subforms[type_value] - raise AttributeError(attr) + @property + def subform_type(self): + return getattr(self, self.subform_type_field) + + @subform_type.setter + def subform_type(self, value): + setattr(self, self.subform_type_field, value) + + @property + def subform_cls(self): + return self.subforms[self.subform_type] + + @property + def subform_json(self): + return getattr(self, self.subform_json_field) + + @subform_json.setter + def subform_json(self, value): + setattr(self, self.subform_json_field, value) diff --git a/validity/models/data.py b/validity/models/data.py index 9396a94..af97eb2 100644 --- a/validity/models/data.py +++ b/validity/models/data.py @@ -72,12 +72,12 @@ def _sync_status(self): DataSource.objects.filter(pk=self.pk).update(status=self.status, last_synced=self.last_synced) post_sync.send(sender=self.__class__, instance=self) - def partial_sync(self, device_filter: Q, batch_size: int = 1000) -> None: + def partial_sync(self, device_filter: Q, batch_size: int = 1000) -> set[str]: def update_batch(batch): for datafile in self.datafiles.filter(path__in=batch).iterator(): if datafile.refresh_from_disk(local_path): yield datafile - paths.discard(datafile.path) + updated_paths.add(datafile.path) def new_data_file(path): df = DataFile(source=self, path=path) @@ -85,22 +85,30 @@ def new_data_file(path): df.full_clean() return df - if self.type != "device_polling": - raise SyncError("Partial sync is available only for Data Source with type Device Polling") backend = self.get_backend() - with backend.fetch(device_filter) as local_path, self._sync_status(): - paths = self._walk(local_path) + fetch = backend.fetch(device_filter) if self.type == "device_polling" else backend.fetch() + with fetch as local_path, self._sync_status(): + all_new_paths = self._walk(local_path) + updated_paths = set() datafiles_to_update = chain.from_iterable( - update_batch(path_batch) for path_batch in batched(paths, batch_size) + update_batch(path_batch) for path_batch in batched(all_new_paths, batch_size) ) updated = DataFile.objects.bulk_update( datafiles_to_update, batch_size=batch_size, fields=("last_updated", "size", "hash", "data") ) - new_datafiles = (new_data_file(path) for path in paths) + 
new_datafiles = (new_data_file(path) for path in all_new_paths - updated_paths) created = len(DataFile.objects.bulk_create(new_datafiles, batch_size=batch_size)) logger.debug("%s new files were created and %s existing files were updated during sync", created, updated) + return all_new_paths def sync(self, device_filter: Q | None = None): - if device_filter is not None and self.type == "device_polling": - return self.partial_sync(device_filter) - return super().sync() + if device_filter is None or self.type != "device_polling": + return super().sync() + self.partial_sync(device_filter) + + def sync_in_migration(self, datafile_model: type): + """ + This method performs sync and avoids problems with historical models which have reference to DataFile + """ + new_paths = self.partial_sync(Q()) + datafile_model.objects.exclude(path__in=new_paths).delete() diff --git a/validity/models/polling.py b/validity/models/polling.py index 7c9061b..2761dd9 100644 --- a/validity/models/polling.py +++ b/validity/models/polling.py @@ -11,7 +11,7 @@ from validity.fields import EncryptedDictField from validity.managers import CommandQS, PollerQS from validity.pollers import get_poller -from validity.subforms import CLICommandForm +from validity.subforms import CLICommandForm, JSONAPICommandForm, NetconfCommandForm from .base import BaseModel, SubformMixin from .serializer import Serializer @@ -51,7 +51,7 @@ class Command(SubformMixin, BaseModel): subform_type_field = "type" subform_json_field = "parameters" - subforms = {"CLI": CLICommandForm} + subforms = {"CLI": CLICommandForm, "json_api": JSONAPICommandForm, "netconf": NetconfCommandForm} class Meta: ordering = ("name",) @@ -66,8 +66,20 @@ def get_type_color(self): class Poller(BaseModel): name = models.CharField(_("Name"), max_length=255, unique=True) connection_type = models.CharField(_("Connection Type"), max_length=50, choices=ConnectionTypeChoices.choices) - public_credentials = models.JSONField(_("Public Credentials"), default=dict, blank=True) - private_credentials = EncryptedDictField(_("Private Credentials"), blank=True) + public_credentials = models.JSONField( + _("Public Credentials"), + default=dict, + blank=True, + help_text=_("Enter non-private parameters of the connection type in JSON format."), + ) + private_credentials = EncryptedDictField( + _("Private Credentials"), + blank=True, + help_text=_( + "Enter private parameters of the connection type in JSON format. " + "All the values are going to be encrypted." 
+ ), + ) commands = models.ManyToManyField(Command, verbose_name=_("Commands"), related_name="pollers") objects = PollerQS.as_manager() @@ -79,7 +91,7 @@ def __str__(self) -> str: return self.name @property - def credentials(self): + def credentials(self) -> dict: return self.public_credentials | self.private_credentials.decrypted def get_connection_type_color(self): diff --git a/validity/models/serializer.py b/validity/models/serializer.py index d6349c0..6c5235a 100644 --- a/validity/models/serializer.py +++ b/validity/models/serializer.py @@ -6,19 +6,35 @@ from validity.choices import ExtractionMethodChoices from validity.compliance.serialization import serialize from validity.netbox_changes import DEVICE_ROLE_RELATION -from .base import BaseModel, DataSourceMixin +from validity.subforms import ( + RouterOSSerializerForm, + TEXTFSMSerializerForm, + TTPSerializerForm, + XMLSerializerForm, + YAMLSerializerForm, +) +from .base import BaseModel, DataSourceMixin, SubformMixin -class Serializer(DataSourceMixin, BaseModel): +class Serializer(SubformMixin, DataSourceMixin, BaseModel): name = models.CharField(_("Name"), max_length=255, unique=True) - extraction_method = models.CharField( - _("Extraction Method"), max_length=10, choices=ExtractionMethodChoices.choices, default="TTP" - ) + extraction_method = models.CharField(_("Extraction Method"), max_length=10, choices=ExtractionMethodChoices.choices) template = models.TextField(_("Template"), blank=True) + parameters = models.JSONField(_("Parameters"), default=dict, blank=True) clone_fields = ("template", "extraction_method", "data_source", "data_file") text_db_field_name = "template" + requires_template = {"TTP", "TEXTFSM"} _serialize = serialize + subform_json_field = "parameters" + subform_type_field = "extraction_method" + subforms = { + "ROUTEROS": RouterOSSerializerForm, + "XML": XMLSerializerForm, + "TTP": TTPSerializerForm, + "TEXTFSM": TEXTFSMSerializerForm, + "YAML": YAMLSerializerForm, + } class Meta: ordering = ("name",) @@ -32,16 +48,18 @@ def get_extraction_method_color(self): @property def _validate_db_or_git_filled(self) -> bool: - return self.extraction_method == "TTP" + return self.extraction_method in self.requires_template def clean(self) -> None: super().clean() - if self.extraction_method != "TTP" and self.template: - raise ValidationError({"template": _("Template must be empty if extraction method is not TTP")}) - if self.extraction_method != "TTP" and (self.data_source or self.data_file): - raise ValidationError(_("Git properties may be set only if extraction method is TTP")) - if self.extraction_method == "TTP" and not (self.template or self.data_source): - raise ValidationError(_("Template must be defined if extraction method is TTP")) + if self.extraction_method not in self.requires_template and self.template: + raise ValidationError({"template": _("Template must be empty for selected extraction method")}) + if self.extraction_method not in self.requires_template and (self.data_source or self.data_file): + raise ValidationError(_("Data Source/File properties cannot be set for selected extraction method")) + if self.extraction_method in self.requires_template and not ( + self.template or self.data_source and self.data_file + ): + raise ValidationError(_("Template must be defined for selected extraction method")) @property def bound_devices(self) -> models.QuerySet[Device]: @@ -58,4 +76,4 @@ def effective_template(self) -> str: return self.effective_text_field() def serialize(self, data: str) -> dict: - return 
self._serialize(self.extraction_method, data, self.effective_template) + return self._serialize(self, data) diff --git a/validity/pollers/default_credentials.py b/validity/pollers/default_credentials.py new file mode 100644 index 0000000..974ca00 --- /dev/null +++ b/validity/pollers/default_credentials.py @@ -0,0 +1,51 @@ +""" +Models from this module are used by js script to render default credentials for a new poller in UI +""" +from typing import Any + +from pydantic import BaseModel + + +class EmptyCredentials(BaseModel): + pass + + +class NetmikoPublicCreds(BaseModel): + device_type: str = "" + username: str = "" + + +class NetmikoPrivateCreds(BaseModel): + password: str = "" + + +class ScrapliNeconfPublicCreds(BaseModel): + auth_username: str = "" + auth_strict_key: bool = False + port: int = 830 + + +class ScrapliNeconfPrivateCreds(BaseModel): + auth_password: str = "" + + +class RequestsPublicCreds(BaseModel): + url: str = "https://{{device.primary_ip.address.ip}}/{{command.parameters.url_path.lstrip('/')}}" + + +class ConnectionTypeCredentials(BaseModel): + public: Any + private: Any + + +class AllCredentials(BaseModel): + netmiko: ConnectionTypeCredentials + scrapli_netconf: ConnectionTypeCredentials + requests: ConnectionTypeCredentials + + +all_credentials = AllCredentials( + netmiko=ConnectionTypeCredentials(public=NetmikoPublicCreds(), private=NetmikoPrivateCreds()), + scrapli_netconf=ConnectionTypeCredentials(public=ScrapliNeconfPublicCreds(), private=ScrapliNeconfPrivateCreds()), + requests=ConnectionTypeCredentials(public=RequestsPublicCreds(), private=EmptyCredentials()), +) diff --git a/validity/pollers/factory.py b/validity/pollers/factory.py index 96bc9c8..366b9c0 100644 --- a/validity/pollers/factory.py +++ b/validity/pollers/factory.py @@ -3,6 +3,8 @@ from validity.choices import ConnectionTypeChoices from .base import DevicePoller from .cli import NetmikoPoller +from .http import RequestsPoller +from .netconf import ScrapliNetconfPoller if TYPE_CHECKING: @@ -16,7 +18,13 @@ def __init__(self, poller_map: dict) -> None: def __call__(self, connection_type: str, credentials: dict, commands: Sequence["Command"]) -> DevicePoller: if poller_cls := self.poller_map.get(connection_type): return poller_cls(credentials=credentials, commands=commands) - raise KeyError("No poller exist for this connection type", connection_type) + raise KeyError("No poller exists for this connection type", connection_type) -get_poller = PollerFactory(poller_map={ConnectionTypeChoices.netmiko: NetmikoPoller}) +get_poller = PollerFactory( + poller_map={ + ConnectionTypeChoices.netmiko: NetmikoPoller, + ConnectionTypeChoices.requests: RequestsPoller, + ConnectionTypeChoices.scrapli_netconf: ScrapliNetconfPoller, + } +) diff --git a/validity/pollers/http.py b/validity/pollers/http.py new file mode 100644 index 0000000..0dfc6ba --- /dev/null +++ b/validity/pollers/http.py @@ -0,0 +1,58 @@ +from typing import TYPE_CHECKING + +import requests +from dcim.models import Device +from pydantic import BaseModel, Field + +from validity.j2_env import Environment +from validity.utils.json import transform_json +from .base import ConsecutivePoller + + +if TYPE_CHECKING: + from validity.models import Command, VDevice + + +class RequestParams(BaseModel, extra="allow"): + url: str = Field( + "https://{{device.primary_ip.address.ip}}/{{command.parameters.url_path.lstrip('/')}}", exclude=True + ) + verify: bool | str = False + auth: tuple[str, ...] 
| None = None + + def rendered_url(self, device: "Device", command: "Command") -> str: + return Environment().from_string(self.url).render(device=device, command=command) + + +class HttpDriver: + def __init__(self, device: Device, **poller_credentials) -> None: + self.device = device + self.request_params = RequestParams.model_validate(poller_credentials) + + def render_body(self, orig_body: dict, command: "Command"): + return transform_json( + orig_body, + match_fn=lambda _, value: isinstance(value, str), + transform_fn=lambda key, value: ( + key, + Environment().from_string(value).render(device=self.device, command=command), + ), + ) + + def request(self, command: "Command", *, requests=requests) -> str: + request_kwargs = self.request_params.model_dump() + request_kwargs["url"] = self.request_params.rendered_url(self.device, command) + request_kwargs["method"] = command.parameters["method"] + if body := self.render_body(command.parameters["body"], command): + request_kwargs["json"] = body + return requests.request(**request_kwargs).content.decode() + + +class RequestsPoller(ConsecutivePoller): + driver_cls = HttpDriver + + def get_credentials(self, device: "VDevice"): + return self.credentials | {"device": device} + + def poll_one_command(self, driver: HttpDriver, command: "Command") -> str: + return driver.request(command) diff --git a/validity/pollers/netconf.py b/validity/pollers/netconf.py new file mode 100644 index 0000000..b264781 --- /dev/null +++ b/validity/pollers/netconf.py @@ -0,0 +1,19 @@ +from typing import TYPE_CHECKING + +from scrapli_netconf.driver import NetconfDriver + +from .base import ConsecutivePoller + + +if TYPE_CHECKING: + from validity.models import Command + + +class ScrapliNetconfPoller(ConsecutivePoller): + driver_cls = NetconfDriver + host_param_name = "host" + + def poll_one_command(self, driver: NetconfDriver, command: "Command") -> str: + with driver: + response = driver.rpc(command.parameters["rpc"]) + return response.result diff --git a/validity/static/validity/connection-type-select.js b/validity/static/validity/connection-type-select.js new file mode 100644 index 0000000..969effd --- /dev/null +++ b/validity/static/validity/connection-type-select.js @@ -0,0 +1,19 @@ +function fillTextArea(public_creds, private_creds) { + document.getElementById('id_public_credentials').value = JSON.stringify(public_creds, null, 2); + document.getElementById('id_private_credentials').value = JSON.stringify(private_creds, null, 2); +} + +function fillCredentials(connectionTypeInfo) { + try { + const connectionType = connectionTypeInfo.value; + if (connectionType == "") + return; + const defaultCredentials = JSON.parse(document.getElementById('default_credentials').textContent)[connectionType]; + fillTextArea(defaultCredentials.public, defaultCredentials.private); + } catch(e) { + console.log(e.name, e.message) + } + +} + +window.onload = () => {document.getElementById('connection_type_select').slim.onChange = fillCredentials} diff --git a/validity/subforms.py b/validity/subforms.py index aedf13e..1eb94cb 100644 --- a/validity/subforms.py +++ b/validity/subforms.py @@ -3,10 +3,96 @@ 1. Render part of the main form for JSON Field 2. 
Validate JSON Field """ +import textwrap +import xml.etree.ElementTree as ET + from django import forms from django.utils.translation import gettext_lazy as _ from utilities.forms import BootstrapMixin +from validity.choices import JSONAPIMethodChoices +from validity.utils.json import jq +from validity.utils.misc import reraise + + +class BaseSubform(BootstrapMixin, forms.Form): + def clean(self): + if self.data.keys() - self.base_fields.keys(): + allowed_fields = ", ".join(self.base_fields.keys()) + raise forms.ValidationError(_("Only these keys are allowed: %(fields)s"), params={"fields": allowed_fields}) + return self.cleaned_data + -class CLICommandForm(BootstrapMixin, forms.Form): +# Command Subforms + + +class CLICommandForm(BaseSubform): cli_command = forms.CharField(label=_("CLI Command")) + + +class JSONAPICommandForm(BaseSubform): + method = forms.ChoiceField(label=_("Method"), initial="GET", choices=JSONAPIMethodChoices.choices) + url_path = forms.CharField(label=_("URL Path")) + body = forms.JSONField( + label=_("Body"), + required=False, + help_text=_("Enter data in JSON format. You can use Jinja2 expressions as values."), + ) + + +class NetconfCommandForm(BaseSubform): + get_config = textwrap.dedent( + """ + + + + + + """ + ).lstrip("\n") + rpc = forms.CharField(label=_("RPC"), widget=forms.Textarea(attrs={"placeholder": get_config})) + + def clean_rpc(self): + rpc = self.cleaned_data["rpc"] + with reraise(Exception, forms.ValidationError, {"rpc": "Invalid XML"}): + ET.fromstring(rpc) + return rpc + + +# Serializer Subforms + + +class SerializerBaseForm(BaseSubform): + jq_expression = forms.CharField( + label=_("JQ Expression"), + required=False, + help_text=_("Post-process parsing result with this JQ expression"), + widget=forms.TextInput(attrs={"style": "font-family:monospace"}), + ) + + def clean_jq_expression(self): + if jq_expression := self.cleaned_data.get("jq_expression"): + with reraise(Exception, forms.ValidationError, "Invalid JQ Expression"): + jq.compile(jq_expression) + return jq_expression + + +class XMLSerializerForm(SerializerBaseForm): + drop_attributes = forms.BooleanField(label=_("Drop XML Attributes"), initial=False, required=False) + requires_template = False + + +class TTPSerializerForm(SerializerBaseForm): + requires_template = True + + +class TEXTFSMSerializerForm(SerializerBaseForm): + requires_template = True + + +class RouterOSSerializerForm(BaseSubform): + requires_template = False + + +class YAMLSerializerForm(SerializerBaseForm): + requires_template = False diff --git a/validity/tables.py b/validity/tables.py index ab6ad64..c1bedab 100644 --- a/validity/tables.py +++ b/validity/tables.py @@ -11,7 +11,7 @@ from django_tables2 import Column, RequestConfig, Table, TemplateColumn from netbox.tables import BooleanColumn as BooleanColumn from netbox.tables import ChoiceFieldColumn, ManyToManyColumn, NetBoxTable -from netbox.tables.columns import ActionsColumn +from netbox.tables.columns import ActionsColumn, LinkedCountColumn from utilities.paginator import EnhancedPaginator from validity import models @@ -91,12 +91,15 @@ def render_total_devices(self, record): class SerializerTable(TotalDevicesMixin, NetBoxTable): name = Column(linkify=True) extraction_method = ChoiceFieldColumn() + command_count = LinkedCountColumn( + verbose_name=_("Commands"), viewname="plugins:validity:command_list", url_params={"serializer_id": "pk"} + ) count_per = "serializer" class Meta(NetBoxTable.Meta): model = models.Serializer - fields = ("name", "extraction_method", 
"total_devices") + fields = ("name", "extraction_method", "total_devices", "command_count") default_columns = fields diff --git a/validity/templates/validity/command.html b/validity/templates/validity/command.html index 678ab99..9524398 100644 --- a/validity/templates/validity/command.html +++ b/validity/templates/validity/command.html @@ -39,25 +39,7 @@
Command
{% include 'inc/panels/tags.html' %}
-
-
Parameters
-
- - {% for field_name, field in object.parameters_form.base_fields.items %} - - - - - {% empty %} - - - - {% endfor %} -
{{ field.label }}{{ object.parameters | get_key:field_name | placeholder }}
- No parameters defined -
-
-
+ {% include 'validity/inc/parameters.html' with title='Parameters' parameters=object.parameters form=object.subform_cls only %}
{% endblock content %} diff --git a/validity/templates/validity/inc/parameters.html b/validity/templates/validity/inc/parameters.html new file mode 100644 index 0000000..bb2b11e --- /dev/null +++ b/validity/templates/validity/inc/parameters.html @@ -0,0 +1,20 @@ +{% load helpers %} +
+
{{ title | capfirst }}
+
+ + {% for field_name, field in form.base_fields.items %} + + + + + {% empty %} + + + + {% endfor %} +
{{ field.label }}{{ parameters | get_key:field_name | placeholder }}
+ No {{ title }} defined +
+
+
\ No newline at end of file diff --git a/validity/templates/validity/poller_edit.html b/validity/templates/validity/poller_edit.html new file mode 100644 index 0000000..acf1493 --- /dev/null +++ b/validity/templates/validity/poller_edit.html @@ -0,0 +1,14 @@ +{% extends 'generic/object_edit.html' %} +{% load static %} +{% block javascript %} +{{ block.super }} + + +{% endblock %} diff --git a/validity/templates/validity/serializer.html b/validity/templates/validity/serializer.html index 88f25b2..920a039 100644 --- a/validity/templates/validity/serializer.html +++ b/validity/templates/validity/serializer.html @@ -22,6 +22,9 @@
Serializer
+
+ {% include 'validity/inc/parameters.html' with title='Parameters' parameters=object.parameters form=object.subform_cls only %} +
{% include 'inc/panels/related_objects.html' %}
diff --git a/validity/templatetags/validity.py b/validity/templatetags/validity.py index 8c452cb..e0df745 100644 --- a/validity/templatetags/validity.py +++ b/validity/templatetags/validity.py @@ -39,12 +39,6 @@ def data_source(model) -> str: return _("Data Source") if model.data_source else _("DB") -@register.filter -def add_query_param(url: str, param: str) -> str: - delimeter = "&" if "?" in url else "?" - return f"{url}{delimeter}{param}" - - @register.filter def colorful_percentage(percent): return _colorful_percentage(percent) diff --git a/validity/tests/test_api.py b/validity/tests/test_api.py index 2b20def..e664785 100644 --- a/validity/tests/test_api.py +++ b/validity/tests/test_api.py @@ -87,6 +87,29 @@ class TestDSSerializer(ApiPostGetTest): } +class TestSerializerParams(ApiPostGetTest): + entity = "serializers" + parameters = {"jq_expression": ".interface"} + post_body = { + "name": "serializer-1", + "extraction_method": "TTP", + "template": "interface {{interface}}", + "parameters": parameters, + } + + @pytest.mark.parametrize("params", [{"jq_expression": "(("}, {"unknown_param": 123}]) + def test_wrong_params(self, admin_client, params): + body = self.post_body | {"parameters": params} + resp = admin_client.post(self.url(), body, content_type="application/json") + assert resp.status_code == HTTPStatus.BAD_REQUEST, resp.data + + +class TestSerializerWrongParams(ApiPostGetTest): + entity = "serializers" + parameters = {"jq_expression": ".interface"} + post_body = {"name": "serializer-1", "extraction_method": "TTP", "template": "interface {{interface}}"} + + class TestDBTest(ApiPostGetTest): entity = "tests" post_body = { diff --git a/validity/tests/test_compliance/test_eval.py b/validity/tests/test_compliance/test_eval.py index aad52e8..99fab70 100644 --- a/validity/tests/test_compliance/test_eval.py +++ b/validity/tests/test_compliance/test_eval.py @@ -22,6 +22,12 @@ ("Deepdiff for previous comparison [#2]", {"values_changed": {"root[1]": {"new_value": 12, "old_value": 11}}}), ] +JQ_EXPR = "jq.first('. | mkarr(.a)', {'a': 1}) == {'a': [1]}" +JQ_EXPLANATION = [ + ("jq.first('. | mkarr(.a)', {'a': 1})", {"a": [1]}), + ("jq.first('. 
| mkarr(.a)', {'a': 1}) == {'a': [1]}", True), +] + @pytest.mark.parametrize( "expression, explanation, error", @@ -31,10 +37,11 @@ pytest.param(EXPR_2, EXPLANATION_2, None, id="EXPR_2"), pytest.param("some invalid syntax", [], EvalError, id="invalid syntax"), pytest.param("def f(): pass", [], EvalError, id="invalid expression"), + pytest.param(JQ_EXPR, JQ_EXPLANATION, None, id="jq_expr"), ], ) def test_explanation(expression, explanation, error): - evaluator = ExplanationalEval() + evaluator = ExplanationalEval(load_defaults=True) context = nullcontext() if error is None else pytest.raises(error) with context: evaluator.eval(expression) diff --git a/validity/tests/test_compliance/test_serialization.py b/validity/tests/test_compliance/test_serialization.py index 144a630..0e8fa48 100644 --- a/validity/tests/test_compliance/test_serialization.py +++ b/validity/tests/test_compliance/test_serialization.py @@ -1,9 +1,11 @@ import json +from unittest.mock import Mock import pytest import yaml from validity.compliance.serialization import serialize +from validity.compliance.serialization.common import postprocess_jq JSON_CONFIG = """ @@ -97,6 +99,31 @@ } +TEXTFSM_TEMPLATE = r"""Value INTF (\S+) +Value ADDR (\S+) +Value STATUS (up|down) +Value PROTO (up|down) + +Start + ^${INTF}\s+${ADDR}\s+\w+\s+\w+\s+${STATUS}\s+${PROTO} -> Record +""" + +TEXTFSM_STATE = """ +Interface IP-Address OK? Method Status Protocol +FastEthernet0/0 15.0.15.1 YES manual up up +FastEthernet0/1 10.0.12.1 YES manual up up +FastEthernet0/2 unassigned YES manual up up +Loopback100 100.0.0.1 YES manual up up +""" + +TEXTFSM_SERIALIZED = [ + {"ADDR": "15.0.15.1", "INTF": "FastEthernet0/0", "PROTO": "up", "STATUS": "up"}, + {"ADDR": "10.0.12.1", "INTF": "FastEthernet0/1", "PROTO": "up", "STATUS": "up"}, + {"ADDR": "unassigned", "INTF": "FastEthernet0/2", "PROTO": "up", "STATUS": "up"}, + {"ADDR": "100.0.0.1", "INTF": "Loopback100", "PROTO": "up", "STATUS": "up"}, +] + + @pytest.mark.parametrize( "extraction_method, contents, template, serialized", [ @@ -104,9 +131,26 @@ pytest.param("YAML", YAML_CONFIG, "", yaml.safe_load(YAML_CONFIG), id="YAML"), pytest.param("TTP", TTP_CONFIG, TTP_TEMPLATE, TTP_SERIALIZED, id="TTP"), pytest.param("ROUTEROS", ROUTEROS_CONFIG, "", ROUTEROS_SERIALIZED, id="ROUTEROS"), + pytest.param("XML", "text", "", {"a": {"b": "text"}}, id="XML"), + pytest.param("TEXTFSM", TEXTFSM_STATE, TEXTFSM_TEMPLATE, TEXTFSM_SERIALIZED, id="TEXTFSM"), ], ) @pytest.mark.django_db def test_serialization(extraction_method, contents, template, serialized): - serialize_result = serialize(extraction_method, contents, template) + serializer = Mock(extraction_method=extraction_method, effective_template=template, parameters={}) + serialize_result = serialize(serializer, contents) assert serialize_result == serialized + + +@pytest.mark.parametrize( + "serialized_data, jq_expression, expected_result", + [({"a": {"b": [1, 2]}}, ".a.b", [1, 2]), ({"a": {"b": "c"}}, ". 
| mkarr(.a.b)", {"a": {"b": ["c"]}})], +) +def test_postprocess_jq(serialized_data, jq_expression, expected_result): + @postprocess_jq + def serialization_method(plain_data, template, parameters): + return json.loads(plain_data) + + json_data = json.dumps(serialized_data) + result = serialization_method(json_data, "", {"jq_expression": jq_expression}) + assert result == expected_result diff --git a/validity/tests/test_pollers.py b/validity/tests/test_pollers.py index cdf72bb..6411e7b 100644 --- a/validity/tests/test_pollers.py +++ b/validity/tests/test_pollers.py @@ -1,9 +1,10 @@ import time -from unittest.mock import Mock +from unittest.mock import MagicMock, Mock import pytest from validity.pollers import NetmikoPoller +from validity.pollers.http import HttpDriver class TestNetmikoPoller: @@ -58,3 +59,28 @@ def poll(arg): assert all(res.error.message.startswith("OSError") for res in results) else: assert all(res.result in {"a", "b"} for res in results) + + +def test_http_driver(): + device = Mock(**{"primary_ip.address.ip": "1.1.1.1"}) + device.name = "d1" + command = Mock( + parameters={"url_path": "/some/path/", "method": "post", "body": {"a": "b", "device_name": "{{device.name}}"}} + ) + creds = { + "url": "https://{{device.primary_ip.address.ip}}{{command.parameters.url_path}}", + "verify": True, + "qwe": "rty", + } + driver = HttpDriver(device, **creds) + requests = MagicMock() + result = driver.request(command, requests=requests) + requests.request.assert_called_once_with( + url="https://1.1.1.1/some/path/", + verify=True, + qwe="rty", + method="post", + json={"a": "b", "device_name": "d1"}, + auth=None, + ) + assert result == requests.request.return_value.content.decode.return_value diff --git a/validity/tests/test_utils/test_json.py b/validity/tests/test_utils/test_json.py new file mode 100644 index 0000000..9fa7766 --- /dev/null +++ b/validity/tests/test_utils/test_json.py @@ -0,0 +1,48 @@ +import pytest + +from validity.utils.json import jq, transform_json + + +class TestTransformJson: + JSON = { + "users": [{"name": "John", "age": 30}, {"name": "Jack", "age": 20}], + "groups": { + "admin": { + "members": ["John", "Anna", "Jack"], + }, + }, + } + + def test_change_value(self): + result = transform_json( + self.JSON, + match_fn=lambda key, val: isinstance(val, dict) and "name" in val, + transform_fn=lambda key, val: (key, val | {"nickname": val["name"] + str(val["age"])}), + ) + assert result["users"][0]["nickname"] == "John30" + assert result["users"][1]["nickname"] == "Jack20" + assert result["groups"] == self.JSON["groups"] + + def test_delete(self): + result = transform_json(self.JSON, match_fn=lambda key, val: val == "Anna", transform_fn=lambda key, val: None) + assert result["groups"]["admin"]["members"] == ["John", "Jack"] + + def test_change_key(self): + result = transform_json( + self.JSON, match_fn=lambda key, val: key == "admin", transform_fn=lambda key, val: ("admin2", val) + ) + assert result["groups"]["admin2"] == self.JSON["groups"]["admin"] + assert "admin" not in result["groups"] + + +@pytest.mark.parametrize( + "data, expression, result", + [ + ({"a": {"b": "one", "c": "two"}}, ". | mkarr(.a.b)", {"a": {"b": ["one"], "c": "two"}}), + ({"a": {"b": ["one"], "c": "two"}}, ". | mkarr(.a.b)", {"a": {"b": ["one"], "c": "two"}}), + ({"a": "10.2", "b": {"c": "20"}}, ". | mknum(.b)", {"a": "10.2", "b": {"c": 20}}), + ({"a": "10.2", "b": {"c": "20"}}, ". 
| mknum", {"a": 10.2, "b": {"c": 20}}), + ], +) +def test_jq(data, expression, result): + assert jq.first(expression, data) == result diff --git a/validity/utils/json.py b/validity/utils/json.py new file mode 100644 index 0000000..81489b8 --- /dev/null +++ b/validity/utils/json.py @@ -0,0 +1,78 @@ +import copy +from typing import Callable, Collection, Protocol + +import jq as pyjq + + +Json = dict[str, "Json"] | list["Json"] | int | float | str | None + + +class TransformFn(Protocol): + def __call__(self, key: int | str, value: Json, /) -> tuple[int | str, Json] | None: + ... + + +def transform_json(data: Json, match_fn: Callable[[int | str, Json], bool], transform_fn: TransformFn) -> Json: + """ + Traverse JSON-like struct recursively and apply "tranform_fn" to keys and values matched by "match_fn" + """ + + def transform(data_item: Json) -> None: + if isinstance(data_item, str) or not isinstance(data_item, Collection): + return + iterator = data_item.items() if isinstance(data_item, dict) else enumerate(data_item) + delete_keys = [] + new_values = {} + for key, value in iterator: + if match_fn(key, value): + result = transform_fn(key, value) + if result is None: + delete_keys.append(key) + continue + new_key, new_value = result + if new_key == key: + data_item[key] = new_value + else: + delete_keys.append(key) + new_values[new_key] = new_value + elif isinstance(value, Collection): + transform(value) + for del_key in delete_keys: + del data_item[del_key] + for new_key, new_value in new_values.items(): + data_item[new_key] = new_value + + data_copy = copy.deepcopy(data) + transform(data_copy) + return data_copy + + +class jq: + _extra_functions = [ + # ensures that expression at "pth" is an array + 'def mkarr(pth): . | pth as $tgt | . | pth = if $tgt | type != "array" then [$tgt] else $tgt end', + # recursively converts all number-like strings to numbers + "def mknum(pth):. | pth as $tgt | . | pth = " + '($tgt | walk(if type == "string" and test("[+-]?([0-9]*[.])?[0-9]+") then . | tonumber else . end))', + 'def mknum: walk(if type == "string" and test("[+-]?([0-9]*[.])?[0-9]+") then . | tonumber else . 
end)', + ] + + @classmethod + def _add_extra_functions(cls, expression): + extra_funcs = ";".join(cls._extra_functions) + return f"{extra_funcs};{expression}" + + @classmethod + def first(cls, expression, data): + return pyjq.first(cls._add_extra_functions(expression), data) + + @classmethod + def all(cls, expression, data): + return pyjq.all(cls._add_extra_functions(expression), data) + + @classmethod + def compile(cls, expression): + return pyjq.compile(cls._add_extra_functions(expression)) + + def __init__(self, *args, **kwargs) -> None: + raise TypeError("jq is not callable") diff --git a/validity/views/poller.py b/validity/views/poller.py index 260c74c..d46b2a1 100644 --- a/validity/views/poller.py +++ b/validity/views/poller.py @@ -4,6 +4,7 @@ from utilities.views import register_model_view from validity import filtersets, forms, models, tables +from validity.pollers.default_credentials import all_credentials from .base import TableMixin @@ -40,3 +41,7 @@ class PollerBulkDeleteView(generic.BulkDeleteView): class PollerEditView(generic.ObjectEditView): queryset = models.Poller.objects.all() form = forms.PollerForm + template_name = "validity/poller_edit.html" + + def get_extra_context(self, request, instance): + return {"default_credentials": all_credentials} diff --git a/validity/views/serializer.py b/validity/views/serializer.py index e65189d..88249bf 100644 --- a/validity/views/serializer.py +++ b/validity/views/serializer.py @@ -1,7 +1,7 @@ from dcim.filtersets import DeviceFilterSet from dcim.models import Device, DeviceType, Manufacturer from dcim.tables import DeviceTable -from django.db.models import Q +from django.db.models import Count, Q from netbox.views import generic from utilities.views import register_model_view @@ -10,7 +10,7 @@ class SerializerListView(generic.ObjectListView): - queryset = models.Serializer.objects.all() + queryset = models.Serializer.objects.annotate(command_count=Count("commands")) table = tables.SerializerTable filterset = filtersets.SerializerFilterSet filterset_form = forms.SerializerFilterForm
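# Usage sketch (illustration only, not part of the patch): how the jq wrapper and the
# transform_json helper added in validity/utils/json.py are expected to be used.
# The behaviour shown mirrors the shipped tests; all data values below are made up.
from validity.utils.json import jq, transform_json

# mkarr(path) wraps a scalar found at "path" into a one-element list;
# values that are already lists are left untouched.
assert jq.first(". | mkarr(.dns)", {"dns": "1.1.1.1"}) == {"dns": ["1.1.1.1"]}
assert jq.first(". | mkarr(.dns)", {"dns": ["1.1.1.1"]}) == {"dns": ["1.1.1.1"]}

# mknum converts number-like strings to numbers, either for the whole document
# or only under a given sub-path.
assert jq.first(". | mknum", {"mtu": "1500", "load": "10.5"}) == {"mtu": 1500, "load": 10.5}
assert jq.first(". | mknum(.b)", {"a": "10", "b": {"c": "20"}}) == {"a": "10", "b": {"c": 20}}

# transform_json() walks a JSON-like structure and applies transform_fn to every
# key/value pair accepted by match_fn; returning None from transform_fn deletes
# the matched item. The input is deep-copied, so the original stays unmodified.
doubled = transform_json(
    {"a": 1, "b": {"c": 2}},
    match_fn=lambda key, value: isinstance(value, int),
    transform_fn=lambda key, value: (key, value * 2),
)
assert doubled == {"a": 2, "b": {"c": 4}}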