diff --git a/.flake8 b/.flake8 index e3ba27d5..aaa63b60 100644 --- a/.flake8 +++ b/.flake8 @@ -1,4 +1,4 @@ [flake8] # E501: Line length is enforced by Black, so flake8 doesn't need to check it # W503: Black disagrees with this rule, as does PEP 8; Black wins -ignore = E501, W503 +ignore = E501, W503, F811, F401, F405, E203 diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 00000000..42c2d4a6 --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,173 @@ +--- +name: "CI" +on: # yamllint disable + - "push" + - "pull_request" + +jobs: + black: + runs-on: "ubuntu-20.04" + env: + INVOKE_NAUTOBOT_GOLDEN_CONFIG_LOCAL: "True" + steps: + - name: "Check out repository code" + uses: "actions/checkout@v2" + - name: "Setup environment" + uses: "networktocode/gh-action-setup-poetry-environment@v1" + - name: "Linting: black" + run: "poetry run invoke black" + bandit: + runs-on: "ubuntu-20.04" + env: + INVOKE_NAUTOBOT_GOLDEN_CONFIG_LOCAL: "True" + steps: + - name: "Check out repository code" + uses: "actions/checkout@v2" + - name: "Setup environment" + uses: "networktocode/gh-action-setup-poetry-environment@v1" + - name: "Linting: bandit" + run: "poetry run invoke bandit" + needs: + - "black" + pydocstyle: + runs-on: "ubuntu-20.04" + env: + INVOKE_NAUTOBOT_GOLDEN_CONFIG_LOCAL: "True" + steps: + - name: "Check out repository code" + uses: "actions/checkout@v2" + - name: "Setup environment" + uses: "networktocode/gh-action-setup-poetry-environment@v1" + - name: "Linting: pydocstyle" + run: "poetry run invoke pydocstyle" + needs: + - "black" + flake8: + runs-on: "ubuntu-20.04" + env: + INVOKE_NAUTOBOT_GOLDEN_CONFIG_LOCAL: "True" + steps: + - name: "Check out repository code" + uses: "actions/checkout@v2" + - name: "Setup environment" + uses: "networktocode/gh-action-setup-poetry-environment@v1" + - name: "Linting: flake8" + run: "poetry run invoke flake8" + needs: + - "black" + yamllint: + runs-on: "ubuntu-20.04" + env: + 
INVOKE_NAUTOBOT_GOLDEN_CONFIG_LOCAL: "True" + steps: + - name: "Check out repository code" + uses: "actions/checkout@v2" + - name: "Setup environment" + uses: "networktocode/gh-action-setup-poetry-environment@v1" + - name: "Linting: yamllint" + run: "poetry run invoke yamllint" + needs: + - "black" + build: + runs-on: "ubuntu-20.04" + steps: + - name: "Check out repository code" + uses: "actions/checkout@v2" + - name: "Setup environment" + uses: "networktocode/gh-action-setup-poetry-environment@v1" + - name: "Build Container" + run: "poetry run invoke build" + needs: + - "bandit" + - "pydocstyle" + - "flake8" + - "yamllint" + pylint: + runs-on: "ubuntu-20.04" + steps: + - name: "Check out repository code" + uses: "actions/checkout@v2" + - name: "Setup environment" + uses: "networktocode/gh-action-setup-poetry-environment@v1" + - name: "Build Container" + run: "poetry run invoke build" + - name: "Linting: Pylint" + run: "poetry run invoke pylint" + needs: + - "build" + unittest: + strategy: + fail-fast: true + matrix: + python-version: ["3.6", "3.7", "3.8", "3.9"] + nautobot-version: ["1.0.1"] + runs-on: "ubuntu-20.04" + env: + INVOKE_NAUTOBOT_GOLDEN_CONFIG_PYTHON_VER: "${{ matrix.python-version }}" + INVOKE_NAUTOBOT_GOLDEN_CONFIG_NAUTOBOT_VER: "${{ matrix.nautobot-version }}" + steps: + - name: "Check out repository code" + uses: "actions/checkout@v2" + - name: "Setup environment" + uses: "networktocode/gh-action-setup-poetry-environment@v1" + - name: "Build Container" + run: "poetry run invoke build" + - name: "Run Tests" + run: "poetry run invoke unittest" + needs: + - "pylint" + publish_gh: + name: "Publish to GitHub" + runs-on: "ubuntu-20.04" + if: "startsWith(github.ref, 'refs/tags/v')" + steps: + - name: "Check out repository code" + uses: "actions/checkout@v2" + - name: "Set up Python" + uses: "actions/setup-python@v2" + with: + python-version: "3.9" + - name: "Install Python Packages" + run: "pip install poetry" + - name: "Set env" + run: "echo 
RELEASE_VERSION=${GITHUB_REF:10} >> $GITHUB_ENV" + - name: "Run Poetry Version" + run: "poetry version $RELEASE_VERSION" + - name: "Run Poetry Build" + run: "poetry build" + - name: "Upload binaries to release" + uses: "svenstaro/upload-release-action@v2" + with: + repo_token: "${{ secrets.NTC_GITHUB_TOKEN }}" + file: "dist/*" + tag: "${{ github.ref }}" + overwrite: true + file_glob: true + needs: + - "unittest" + publish_pypi: + name: "Push Package to PyPI" + runs-on: "ubuntu-20.04" + if: "startsWith(github.ref, 'refs/tags/v')" + steps: + - name: "Check out repository code" + uses: "actions/checkout@v2" + - name: "Set up Python" + uses: "actions/setup-python@v2" + with: + python-version: "3.9" + - name: "Install Python Packages" + run: "pip install poetry" + - name: "Set env" + run: "echo RELEASE_VERSION=${GITHUB_REF:10} >> $GITHUB_ENV" + - name: "Run Poetry Version" + run: "poetry version $RELEASE_VERSION" + - name: "Run Poetry Build" + run: "poetry build" + - name: "Push to PyPI" + uses: "pypa/gh-action-pypi-publish@release/v1" + with: + user: "__token__" + password: "${{ secrets.PYPI_API_TOKEN }}" + needs: + - "unittest" diff --git a/.pydocstyle.ini b/.pydocstyle.ini index c4abbf85..71bf7596 100644 --- a/.pydocstyle.ini +++ b/.pydocstyle.ini @@ -1,3 +1,11 @@ [pydocstyle] convention = google inherit = false +match = (?!__init__).*\.py +match-dir = (?!tests)[^\.].* +# D212 is enabled by default in google convention, and complains if we have a docstring like: +# """ +# My docstring is on the line after the opening quotes instead of on the same line as them. +# """ +# We've discussed and concluded that we consider this to be a valid style choice. 
+add_ignore = D212, D417 \ No newline at end of file diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index e9a249f9..00000000 --- a/.travis.yml +++ /dev/null @@ -1,70 +0,0 @@ ---- -stages: - - name: "lint" - - name: "test" - - name: "deploy-github" - if: "tag IS present" - - name: "deploy_pypi" - if: "tag IS present" - -language: "python" -python: - - 3.6 - - 3.7 - - 3.8 - -services: - - "docker" - -# -------------------------------------------------------------------------- -# Tests -# -------------------------------------------------------------------------- -before_install: - - 'docker login -u "$DOCKER_HUB_USERNAME" --password "$DOCKER_HUB_PASSWORD"' # yamllint disable-line rule:quoted-strings -before_script: - - "pip install invoke toml docker-compose poetry" -script: - - "invoke build --python-ver $TRAVIS_PYTHON_VERSION" - - "invoke unittest" - -jobs: - include: - - stage: "lint" - before_script: - - "pip install invoke toml docker-compose" - - "invoke build" - script: - - "invoke black" - - "invoke bandit" - - "invoke pydocstyle" - - "invoke pylint" - - - stage: "deploy-github" - before_script: - - "pip install --upgrade pip" - - "pip install poetry" - script: - - "poetry version $TRAVIS_TAG" - - "poetry build" - deploy: - provider: "releases" - api_key: "$GITHUB_AUTH_TOKEN" - file_glob: true - file: "dist/*" - skip_cleanup: true - "on": - all_branches: true - - - stage: "deploy_pypi" - before_script: - - "pip install --upgrade pip" - - "pip install poetry" - script: - - "echo Deploying the release to PyPI" - - "poetry version $TRAVIS_TAG" - deploy: - provider: "script" - skip_cleanup: true - script: "poetry publish --build -u __token__ -p $PYPI_TOKEN" - "on": - all_branches: true diff --git a/.yamllint.yml b/.yamllint.yml new file mode 100644 index 00000000..58324ed1 --- /dev/null +++ b/.yamllint.yml @@ -0,0 +1,10 @@ +--- +extends: "default" +rules: + comments: "enable" + empty-values: "enable" + indentation: + indent-sequences: "consistent" 
+ line-length: "disable" + quoted-strings: + quote-type: "double" diff --git a/CHANGELOG.md b/CHANGELOG.md index d649b77f..ced8c3a7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,23 @@ # Changelog +## v0.9.6 - 2021-09 + +### Fixed + +- #95 Fix credential escaping issues on Git +- #113 Clean up and normalize GraphQL decorator +- #41 Fail Gracefully when platform is missing or wrong, bump nautobot-plugin-nornir version +- #104 Fail Gracefully when Device queryset is empty +- #109 Account for Nautobot 1.0/1.1 template change + +### Added + +- #103 Add manage commands for jobs +- #108 Update docs and add quick start guide +- #105 Added structure data config compliance +- #119 Migrate to Github Actions +- #121 Moved to Celery for development environment + ## v0.9.5 - 2021-07 ### Fixed diff --git a/README.md b/README.md index b919354a..53e2768c 100644 --- a/README.md +++ b/README.md @@ -4,27 +4,38 @@ A plugin for [Nautobot](https://github.com/nautobot/nautobot) that intends to pr **This version is currently in Beta and will require a rebuild of the database for a 1.0 release.** -# Overview -You may see the [Navigating Overview](./docs/navigating-golden.md) documentation for an overview of navigating through the different areas of this plugin. You may also see the [FAQ](./docs/FAQ.md) for commonly asked questions. +# Introduction -The golden configuration plugin performs four primary actions, each of which can be toggled on with a respective `enable_*` setting, covered in detail -later in the readme. +## What is the Golden Configuration Plugin? -* Configuration Backup - Is a Nornir process to connect to devices, optionally parse out lines/secrets, backup the configuration, and save to a Git repository. - * see [Navigating Backup](./docs/navigating-backup.md) for more information -* Configuration Intended - Is a Nornir process to generate configuration based on a Git repo of Jinja files and a Git repo to store the intended configuration. 
- * see [Navigating Intended](./docs/navigating-intended.md) for more information -* Source of Truth Aggregation - Is a GraphQL query per device with that creates a data structure used in the generation of configuration. - * see [Navigating SoTAgg](./docs/navigating-sot-agg.md) for more information -* Configuration Compliance - Is a Nornir process to run comparison of the actual (via backups) and intended (via Jinja file creation) CLI configurations. - * see [Navigating Compliance](./docs/navigating-compliance.md) for more information +The golden configuration plugin is a Nautobot plugin that aims to solve common configuration management challenges. -The operator's of their own Nautobot instance are welcome to use any combination of these features. Though the appearance may seem like they are tightly +## Key Use Cases + +This plugin enable four (4) key use cases. + + +1. **Configuration Backups** - Is a Nornir process to connect to devices, optionally parse out lines/secrets, backup the configuration, and save to a Git repository. +2. **Intended Configuration** - Is a Nornir process to generate configuration based on a Git repo of Jinja files to combine with a GraphQL generated data and a Git repo to store the intended configuration. +3. **Source of Truth Aggregation** - Is a GraphQL query per device that creates a data structure used in the generation of configuration. +4. **Configuration Compliance** - Is a Nornir process to run comparison of the actual (via backups) and intended (via Jinja file creation) CLI configurations. + +>Notice: The operator's of their own Nautobot instance are welcome to use any combination of these features. Though the appearance may seem like they are tightly coupled, this isn't actually the case. For example, one can obtain backup configurations from their current RANCID/Oxidized process and simply provide a Git Repo of the location of the backup configurations, and the compliance process would work the same way. 
Also, another user may only want to generate configurations, but not want to use other features, which is perfectly fine to do so. +## Documentation +- [Installation](./docs/installation.md) +- [Quick Start Guide](./docs/quick-start.md) +- [Navigating Overview](./docs/navigating-golden.md) +- [Navigating Backup](./docs/navigating-backup.md) +- [Navigating Intended](./docs/navigating-intended.md) +- [Navigating SoTAgg](./docs/navigating-sot-agg.md) +- [Navigating Compliance](./docs/navigating-compliance.md) +- [FAQ](./docs/FAQ.md) + ## Screenshots There are many features and capabilities the plugin provides into the Nautobot ecosystem. The following screenshots are intended to provide a quick visual overview of some of these features. @@ -44,72 +55,6 @@ Drilling into a specific device and feature, you can get an immediate detailed u ![Compliance Rule](./docs/img/compliance-rule.png) -## Plugin Settings - -There is a setting to determine the inclusion of any of the four given components. - -* The `enable_backup`, `enable_compliance`, `enable_intended`, and `enable_sotagg` will toggle inclusion of the entire component. - -# Installation - -Plugins can be installed manually or use Python's `pip`. See the [nautobot documentation](https://nautobot.readthedocs.io/en/latest/plugins/#install-the-package) for more details. The pip package name for this plugin is [`nautobot-golden-config`](https://pypi.org/project/nautobot-golden-config/) - -> The plugin is compatible with Nautobot 1.0.0 and higher - -**Prerequisite:** The plugin relies on [`nautobot_plugin_nornir`](https://pypi.org/project/nautobot-plugin-nornir/) to be installed and both plugins to be enabled in your configuration settings. 
- -**Required:** The following block of code below shows the additional configuration required to be added to your `nautobot_config.py` file: -- append `"nautobot_golden_config"` to the `PLUGINS` list -- append the `"nautobot_golden_config"` dictionary to the `PLUGINS_CONFIG` dictionary - -```python -PLUGINS = ["nautobot_plugin_nornir", "nautobot_golden_config"] - -PLUGINS_CONFIG = { - "nautobot_plugin_nornir": { - "nornir_settings": { - "credentials": "nautobot_plugin_nornir.plugins.credentials.env_vars.CredentialsEnvVars", - "runner": { - "plugin": "threaded", - "options": { - "num_workers": 20, - }, - }, - }, - }, - "nautobot_golden_config": { - "per_feature_bar_width": 0.15, - "per_feature_width": 13, - "per_feature_height": 4, - "enable_backup": True, - "enable_compliance": True, - "enable_intended": True, - "enable_sotagg": True, - "sot_agg_transposer": None, - "platform_slug_map": None, - }, -} - -``` - -The plugin behavior can be controlled with the following list of settings. - -| Key | Example | Default | Description | -| ------- | ------ | -------- | ------------------------------------- | -| enable_backup | True | True | A boolean to represent whether or not to run backup configurations within the plugin. | -| enable_compliance | True | True | A boolean to represent whether or not to run the compliance process within the plugin. | -| enable_intended | True | True | A boolean to represent whether or not to generate intended configurations within the plugin. | -| enable_sotagg | True | True | A boolean to represent whether or not to provide a GraphQL query per device to allow the intended configuration to provide data variables to the plugin. | -| platform_slug_map | {"cisco_wlc": "cisco_aireos"} | None | A dictionary in which the key is the platform slug and the value is what netutils uses in any "network_os" parameter. | -| sot_agg_transposer | mypkg.transposer | - | A string representation of a function that can post-process the graphQL data. 
| -| per_feature_bar_width | 0.15 | 0.15 | The width of the table bar within the overview report | -| per_feature_width | 13 | 13 | The width in inches that the overview table can be. | -| per_feature_height | 4 | 4 | The height in inches that the overview table can be. | - -> Note: Over time the intention is to make the compliance report more dynamic, but for now allow users to configure the `per_*` configs in a way that fits best for them. - -> Note: Review [`nautobot_plugin_nornir`](https://pypi.org/project/nautobot-plugin-nornir/) for Nornir and dispatcher configuration options. - # Contributing Pull requests are welcomed and automatically built and tested against multiple versions of Python and Nautobot through TravisCI. @@ -129,7 +74,9 @@ The project features a CLI helper based on [invoke](http://www.pyinvoke.org/) to Each command can be executed with `invoke `. All commands support the arguments `--nautobot-ver` and `--python-ver` if you want to manually define the version of Python and Nautobot to use. Each command also has its own help `invoke --help` -### Local dev environment +> Note: to run the mysql (mariadb) development environment, set the environment variable as such `export NAUTOBOT_USE_MYSQL=1`. + +### Local Development Environment ``` build Build all docker images. diff --git a/development/Dockerfile b/development/Dockerfile index a08300df..4cb91cc3 100644 --- a/development/Dockerfile +++ b/development/Dockerfile @@ -14,4 +14,8 @@ RUN poetry install --no-interaction --no-ansi --no-root COPY . 
/source RUN poetry install --no-interaction --no-ansi +RUN apt update +RUN apt install -y libmariadb-dev-compat gcc +RUN pip install mysqlclient + COPY development/nautobot_config.py /opt/nautobot/nautobot_config.py \ No newline at end of file diff --git a/development/dev.env b/development/dev.env index e8c9c841..87323955 100644 --- a/development/dev.env +++ b/development/dev.env @@ -1,31 +1,41 @@ -ALLOWED_HOSTS=* -BANNER_TOP="Golden Config plugin dev" -CHANGELOG_RETENTION=0 -DEBUG=True -DEVELOPER=True -EMAIL_FROM=nautobot@example.com -EMAIL_PASSWORD= -EMAIL_PORT=25 -EMAIL_SERVER=localhost -EMAIL_TIMEOUT=5 -EMAIL_USERNAME=nautobot -MAX_PAGE_SIZE=0 -METRICS_ENABLED=True -NAPALM_TIMEOUT=5 -POSTGRES_DB=nautobot -POSTGRES_HOST=postgres -POSTGRES_PASSWORD=notverysecurepwd -POSTGRES_USER=nautobot -REDIS_HOST=redis -REDIS_PASSWORD=notverysecurepwd -REDIS_PORT=6379 -# REDIS_SSL=True +NAUTOBOT_ALLOWED_HOSTS=* +NAUTOBOT_CHANGELOG_RETENTION=0 +NAUTOBOT_CONFIG=/opt/nautobot/nautobot_config.py +NAUTOBOT_DB_HOST=db +NAUTOBOT_DB_NAME=nautobot +NAUTOBOT_DB_PASSWORD=decinablesprewad +NAUTOBOT_DB_USER=nautobot +NAUTOBOT_DB_TIMEOUT=300 +NAUTOBOT_DB_ENGINE=django.db.backends.postgresql +NAUTOBOT_MAX_PAGE_SIZE=0 +NAUTOBOT_NAPALM_TIMEOUT=5 +NAUTOBOT_REDIS_HOST=redis +NAUTOBOT_REDIS_PASSWORD=decinablesprewad +NAUTOBOT_REDIS_PORT=6379 # Uncomment REDIS_SSL if using SSL -SECRET_KEY=r8OwDznj!!dci#P9ghmRfdu1Ysxm0AiPeDCQhKE+N_rClfWNj +# NAUTOBOT_REDIS_SSL=True +NAUTOBOT_SECRET_KEY=012345678901234567890123456789012345678901234567890123456789 SUPERUSER_API_TOKEN=0123456789abcdef0123456789abcdef01234567 -SUPERUSER_EMAIL=admin@example.com -SUPERUSER_NAME=admin -SUPERUSER_PASSWORD=admin + +# Needed for Postgres should match the values for Nautobot above +PGPASSWORD=decinablesprewad +POSTGRES_DB=nautobot +POSTGRES_PASSWORD=decinablesprewad +POSTGRES_USER=nautobot + +# Needed for MARIADB should match the values for Nautobot above +MARIADB_ROOT_PASSWORD=decinablesprewad +MARIADB_DATABASE=nautobot 
+MARIADB_PASSWORD=decinablesprewad +MARIADB_USER=nautobot + +# Needed for Redis should match the values for Nautobot above +REDIS_PASSWORD=decinablesprewad + +# Needed for Selenium integration tests +NAUTOBOT_SELENIUM_URL=http://selenium:4444/wd/hub # WebDriver (Selenium client) +NAUTOBOT_SELENIUM_HOST=nautobot # LiveServer (Nautobot server) + # Golden Configuration specific PER_FEATURE_WIDTH=13 PER_FEATURE_HEIGHT=4 diff --git a/development/docker-compose.mysql.yml b/development/docker-compose.mysql.yml new file mode 100644 index 00000000..02966e34 --- /dev/null +++ b/development/docker-compose.mysql.yml @@ -0,0 +1,10 @@ +--- +services: + db: + image: "mariadb:10.6" + env_file: + - "dev.env" + volumes: + - "mysqldata_nautobot_golden_config:/var/lib/mysql" +volumes: + mysqldata_nautobot_golden_config: # yamllint disable-line rule:empty-values diff --git a/development/docker-compose.yml b/development/docker-compose.yml index 2c3e7f20..905bdcd5 100644 --- a/development/docker-compose.yml +++ b/development/docker-compose.yml @@ -1,4 +1,3 @@ - --- x-nautobot-build: &nautobot-build build: @@ -23,37 +22,33 @@ services: ports: - "8080:8080" depends_on: - - "postgres" + - "db" - "redis" <<: *nautobot-build <<: *nautobot-base - worker: - entrypoint: "nautobot-server rqworker" + celery: volumes: - "./nautobot_config.py:/opt/nautobot/nautobot_config.py" - "../:/source" - depends_on: - - "nautobot" healthcheck: disable: true + depends_on: + - "nautobot" + entrypoint: "nautobot-server celery worker -B -l INFO" <<: *nautobot-base - postgres: - image: "postgres:13-alpine" + db: + image: "postgres:13" env_file: - "dev.env" volumes: - "pgdata_nautobot_golden_config:/var/lib/postgresql/data" - ports: - - "5432:5432" redis: image: "redis:6-alpine" command: - "sh" - - "-c" # this is to evaluate the $REDIS_PASSWORD from the env + - "-c" # this is to evaluate the $REDIS_PASSWORD from the env - "redis-server --appendonly yes --requirepass $$REDIS_PASSWORD" env_file: - "dev.env" - ports: - 
- "6379:6379" volumes: - pgdata_nautobot_golden_config: # yamllint disable-line rule:empty-values \ No newline at end of file + pgdata_nautobot_golden_config: # yamllint disable-line rule:empty-values diff --git a/development/nautobot_config.py b/development/nautobot_config.py index 26b0791b..4c75b82c 100644 --- a/development/nautobot_config.py +++ b/development/nautobot_config.py @@ -1,291 +1,129 @@ -"""Nautobot config.""" +"""Nautobot development configuration file.""" # pylint: disable=invalid-envvar-default -######################### -# # -# Required settings # -# # -######################### - import os import sys -from distutils.util import strtobool -from django.core.exceptions import ImproperlyConfigured -from nautobot.core.settings import * # noqa: F401,F403 #pylint: disable=wildcard-import, unused-wildcard-import - -# Enforce required configuration parameters -for key in [ - "ALLOWED_HOSTS", - "POSTGRES_DB", - "POSTGRES_USER", - "POSTGRES_HOST", - "POSTGRES_PASSWORD", - "REDIS_HOST", - "REDIS_PASSWORD", - "SECRET_KEY", -]: - if not os.environ.get(key): - raise ImproperlyConfigured(f"Required environment variable {key} is missing.") - - -def is_truthy(arg): - """Convert "truthy" strings into Booleans. - - Examples: - >>> is_truthy('yes') - True +from nautobot.core.settings import * # noqa: F403 +from nautobot.core.settings_funcs import is_truthy, parse_redis_connection - Args: - arg (str): Truthy string (True values are y, yes, t, true, on and 1; false values are n, no, - f, false, off and 0. Raises ValueError if val is anything else. - """ - if isinstance(arg, bool): - return arg - return bool(strtobool(arg)) +# +# Misc. settings +# -TESTING = len(sys.argv) > 1 and sys.argv[1] == "test" +ALLOWED_HOSTS = os.getenv("NAUTOBOT_ALLOWED_HOSTS", "").split(" ") +HIDE_RESTRICTED_UI = os.getenv("HIDE_RESTRICTED_UI", False) +SECRET_KEY = os.getenv("NAUTOBOT_SECRET_KEY", "") -# This is a list of valid fully-qualified domain names (FQDNs) for the Nautobot server. 
Nautobot will not permit write -# access to the server via any other hostnames. The first FQDN in the list will be treated as the preferred name. # -# Example: ALLOWED_HOSTS = ['nautobot.example.com', 'nautobot.internal.local'] -ALLOWED_HOSTS = os.getenv("ALLOWED_HOSTS").split(" ") +# Databases +# -# PostgreSQL database configuration. See the Django documentation for a complete list of available parameters: -# https://docs.djangoproject.com/en/stable/ref/settings/#databases DATABASES = { "default": { - "NAME": os.getenv("POSTGRES_DB", "nautobot"), # Database name - "USER": os.getenv("POSTGRES_USER", ""), # Database username - "PASSWORD": os.getenv("POSTGRES_PASSWORD", ""), # Datbase password - "HOST": os.getenv("POSTGRES_HOST", "localhost"), # Database server - "PORT": os.getenv("POSTGRES_PORT", ""), # Database port (leave blank for default) - "CONN_MAX_AGE": os.getenv("POSTGRES_TIMEOUT", 300), # Database timeout - "ENGINE": "django.db.backends.postgresql", # Database driver (Postgres only supported!) + "NAME": os.getenv("NAUTOBOT_DB_NAME", "nautobot"), + "USER": os.getenv("NAUTOBOT_DB_USER", ""), + "PASSWORD": os.getenv("NAUTOBOT_DB_PASSWORD", ""), + "HOST": os.getenv("NAUTOBOT_DB_HOST", "localhost"), + "PORT": os.getenv("NAUTOBOT_DB_PORT", ""), + "CONN_MAX_AGE": int(os.getenv("NAUTOBOT_DB_TIMEOUT", 300)), + "ENGINE": os.getenv("NAUTOBOT_DB_ENGINE", "django.db.backends.postgresql"), } } -# Nautobot uses RQ for task scheduling. These are the following defaults. 
-# For detailed configuration see: https://github.com/rq/django-rq#installation -RQ_QUEUES = { - "default": { - "HOST": os.getenv("REDIS_HOST", "localhost"), - "PORT": os.getenv("REDIS_PORT", 6379), - "DB": 0, - "PASSWORD": os.getenv("REDIS_PASSWORD", ""), - "SSL": os.getenv("REDIS_SSL", False), - "DEFAULT_TIMEOUT": 300, - }, - "webhooks": { - "HOST": os.getenv("REDIS_HOST", "localhost"), - "PORT": os.getenv("REDIS_PORT", 6379), - "DB": 0, - "PASSWORD": os.getenv("REDIS_PASSWORD", ""), - "SSL": os.getenv("REDIS_SSL", False), - "DEFAULT_TIMEOUT": 300, - }, - "custom_fields": { - "HOST": os.getenv("REDIS_HOST", "localhost"), - "PORT": os.getenv("REDIS_PORT", 6379), - "DB": 0, - "PASSWORD": os.getenv("REDIS_PASSWORD", ""), - "SSL": os.getenv("REDIS_SSL", False), - "DEFAULT_TIMEOUT": 300, - }, - # "with-sentinel": { - # "SENTINELS": [ - # ("mysentinel.redis.example.com", 6379) - # ("othersentinel.redis.example.com", 6379) - # ], - # "MASTER_NAME": 'nautobot", - # "DB": 0, - # "PASSWORD": "", - # "SOCKET_TIMEOUT": None, - # 'CONNECTION_KWARGS': { - # 'socket_connect_timeout': 10, - # }, - # }, - "check_releases": { - "HOST": os.getenv("REDIS_HOST", "localhost"), - "PORT": os.getenv("REDIS_PORT", 6379), - "DB": 0, - "PASSWORD": os.getenv("REDIS_PASSWORD", ""), - "SSL": os.getenv("REDIS_SSL", False), - "DEFAULT_TIMEOUT": 300, - }, -} - -# Nautobot uses Cacheops for database query caching. These are the following defaults. -# For detailed configuration see: https://github.com/Suor/django-cacheops#setup -REDIS_HOST = os.getenv("REDIS_HOST", "localhost") -REDIS_PORT = os.getenv("REDIS_PORT", 6379) -REDIS_PASS = os.getenv("REDIS_PASSWORD", "") -CACHEOPS_REDIS = f"redis://:{REDIS_PASS}@{REDIS_HOST}:{REDIS_PORT}/1" - -# This key is used for secure generation of random numbers and strings. It must never be exposed outside of this file. -# For optimal security, SECRET_KEY should be at least 50 characters in length and contain a mix of letters, numbers, and -# symbols. 
Nautobot will not run without this defined. For more information, see -# https://docs.djangoproject.com/en/stable/ref/settings/#std:setting-SECRET_KEY -SECRET_KEY = os.environ["SECRET_KEY"] - - -######################### -# # -# Optional settings # -# # -######################### - -# Specify one or more name and email address tuples representing Nautobot administrators. These people will be notified of -# application errors (assuming correct email settings are provided). -ADMINS = [ - # ['John Doe', 'jdoe@example.com'], -] - -# URL schemes that are allowed within links in Nautobot -ALLOWED_URL_SCHEMES = ( - "file", - "ftp", - "ftps", - "http", - "https", - "irc", - "mailto", - "sftp", - "ssh", - "tel", - "telnet", - "tftp", - "vnc", - "xmpp", -) - -# Optionally display a persistent banner at the top and/or bottom of every page. HTML is allowed. To display the same -# content in both banners, define BANNER_TOP and set BANNER_BOTTOM = BANNER_TOP. -BANNER_TOP = os.environ.get("BANNER_TOP", "") -BANNER_BOTTOM = os.environ.get("BANNER_BOTTOM", "") - -# Text to include on the login page above the login form. HTML is allowed. -BANNER_LOGIN = os.environ.get("BANNER_LOGIN", "") - -# Base URL path if accessing Nautobot within a directory. For example, if installed at https://example.com/nautobot/, set: -# BASE_PATH = 'nautobot/' -BASE_PATH = os.environ.get("BASE_PATH", "") - -# Cache timeout in seconds. Cannot be 0. Defaults to 900 (15 minutes). To disable caching, set CACHEOPS_ENABLED to False -CACHEOPS_DEFAULTS = {"timeout": 900} - -# Set to False to disable caching with cacheops. (Default: True) -CACHEOPS_ENABLED = True - -# Maximum number of days to retain logged changes. Set to 0 to retain changes indefinitely. (Default: 90) -CHANGELOG_RETENTION = int(os.environ.get("CHANGELOG_RETENTION", 90)) - -# If True, all origins will be allowed. Other settings restricting allowed origins will be ignored. -# Defaults to False. 
Setting this to True can be dangerous, as it allows any website to make -# cross-origin requests to yours. Generally you'll want to restrict the list of allowed origins with -# CORS_ALLOWED_ORIGINS or CORS_ALLOWED_ORIGIN_REGEXES. -CORS_ORIGIN_ALLOW_ALL = is_truthy(os.environ.get("CORS_ORIGIN_ALLOW_ALL", False)) - -# A list of origins that are authorized to make cross-site HTTP requests. Defaults to []. -CORS_ALLOWED_ORIGINS = [ - # 'https://hostname.example.com', -] +# +# Debug +# -# A list of strings representing regexes that match Origins that are authorized to make cross-site -# HTTP requests. Defaults to []. -CORS_ALLOWED_ORIGIN_REGEXES = [ - # r'^(https?://)?(\w+\.)?example\.com$', -] +DEBUG = True -# The file path where jobs will be stored. A trailing slash is not needed. Note that the default value of -# this setting is inside the invoking user's home directory. -# JOBS_ROOT = os.path.expanduser('~/.nautobot/jobs') +# Django Debug Toolbar +DEBUG_TOOLBAR_CONFIG = {"SHOW_TOOLBAR_CALLBACK": lambda _request: DEBUG and not TESTING} -# Set to True to enable server debugging. WARNING: Debugging introduces a substantial performance penalty and may reveal -# sensitive information about your installation. Only enable debugging while performing testing. Never enable debugging -# on a production system. -DEBUG = is_truthy(os.environ.get("DEBUG", False)) +if "debug_toolbar" not in INSTALLED_APPS: # noqa: F405 + INSTALLED_APPS.append("debug_toolbar") # noqa: F405 +if "debug_toolbar.middleware.DebugToolbarMiddleware" not in MIDDLEWARE: # noqa: F405 + MIDDLEWARE.insert(0, "debug_toolbar.middleware.DebugToolbarMiddleware") # noqa: F405 -# Enforcement of unique IP space can be toggled on a per-VRF basis. To enforce unique IP space -# within the global table (all prefixes and IP addresses not assigned to a VRF), set -# ENFORCE_GLOBAL_UNIQUE to True. 
-ENFORCE_GLOBAL_UNIQUE = is_truthy(os.environ.get("ENFORCE_GLOBAL_UNIQUE", False)) +# +# Logging +# -# Exempt certain models from the enforcement of view permissions. Models listed here will be viewable by all users and -# by anonymous users. List models in the form `.`. Add '*' to this list to exempt all models. -EXEMPT_VIEW_PERMISSIONS = [ - # 'dcim.site', - # 'dcim.region', - # 'ipam.prefix', -] +LOG_LEVEL = "DEBUG" if DEBUG else "INFO" -# HTTP proxies Nautobot should use when sending outbound HTTP requests (e.g. for webhooks). -# HTTP_PROXIES = { -# 'http': 'http://10.10.1.10:3128', -# 'https': 'http://10.10.1.10:1080', -# } +TESTING = len(sys.argv) > 1 and sys.argv[1] == "test" -# IP addresses recognized as internal to the system. The debugging toolbar will be available only to clients accessing -# Nautobot from an internal IP. -INTERNAL_IPS = ("127.0.0.1", "::1") +# Verbose logging during normal development operation, but quiet logging during unit test execution +if not TESTING: + LOGGING = { + "version": 1, + "disable_existing_loggers": False, + "formatters": { + "normal": { + "format": "%(asctime)s.%(msecs)03d %(levelname)-7s %(name)s :\n %(message)s", + "datefmt": "%H:%M:%S", + }, + "verbose": { + "format": "%(asctime)s.%(msecs)03d %(levelname)-7s %(name)-20s %(filename)-15s %(funcName)30s() :\n %(message)s", + "datefmt": "%H:%M:%S", + }, + }, + "handlers": { + "normal_console": { + "level": "INFO", + "class": "rq.utils.ColorizingStreamHandler", + "formatter": "normal", + }, + "verbose_console": { + "level": "DEBUG", + "class": "rq.utils.ColorizingStreamHandler", + "formatter": "verbose", + }, + }, + "loggers": { + "django": {"handlers": ["normal_console"], "level": "INFO"}, + "nautobot": { + "handlers": ["verbose_console" if DEBUG else "normal_console"], + "level": LOG_LEVEL, + }, + "rq.worker": { + "handlers": ["verbose_console" if DEBUG else "normal_console"], + "level": LOG_LEVEL, + }, + }, + } -LOG_LEVEL = os.environ.get("LOG_LEVEL", "DEBUG" if DEBUG 
else "INFO") +# +# Redis +# -# Enable custom logging. Please see the Django documentation for detailed guidance on configuring custom logs: -# https://docs.djangoproject.com/en/stable/topics/logging/ -LOGGING = { - "version": 1, - "disable_existing_loggers": False, - "formatters": { - "verbose": { - "format": "{asctime} {levelname} {message} - {name} - {module} - {pathname}:{lineno}", - "datefmt": "%H:%M:%S", - "style": "{", +# The django-redis cache is used to establish concurrent locks using Redis. The +# django-rq settings will use the same instance/database by default. +# +# This "default" server is now used by RQ_QUEUES. +# >> See: nautobot.core.settings.RQ_QUEUES +CACHES = { + "default": { + "BACKEND": "django_redis.cache.RedisCache", + "LOCATION": parse_redis_connection(redis_database=0), + "TIMEOUT": 300, + "OPTIONS": { + "CLIENT_CLASS": "django_redis.client.DefaultClient", }, - }, - "handlers": {"console": {"level": "DEBUG", "class": "rq.utils.ColorizingStreamHandler", "formatter": "verbose"}}, - "root": {"handlers": ["console"], "level": LOG_LEVEL}, + } } -# Setting this to True will display a "maintenance mode" banner at the top of every page. -MAINTENANCE_MODE = False - -# An API consumer can request an arbitrary number of objects =by appending the "limit" parameter to the URL (e.g. -# "?limit=1000"). This setting defines the maximum limit. Setting it to 0 or None will allow an API consumer to request -# all objects by specifying "?limit=0". -MAX_PAGE_SIZE = int(os.environ.get("MAX_PAGE_SIZE", 1000)) - -# The file path where uploaded media such as image attachments are stored. A trailing slash is not needed. Note that -# the default value of this setting is within the invoking user's home directory -# MEDIA_ROOT = os.path.expanduser('~/.nautobot/media') - -# By default uploaded media is stored on the local filesystem. Using Django-storages is also supported. 
Provide the -# class path of the storage driver in STORAGE_BACKEND and any configuration options in STORAGE_CONFIG. For example: -# STORAGE_BACKEND = 'storages.backends.s3boto3.S3Boto3Storage' -# STORAGE_CONFIG = { -# 'AWS_ACCESS_KEY_ID': 'Key ID', -# 'AWS_SECRET_ACCESS_KEY': 'Secret', -# 'AWS_STORAGE_BUCKET_NAME': 'nautobot', -# 'AWS_S3_REGION_NAME': 'eu-west-1', -# } - -# Expose Prometheus monitoring metrics at the HTTP endpoint '/metrics' -METRICS_ENABLED = False - -# Credentials that Nautobot will uses to authenticate to devices when connecting via NAPALM. -NAPALM_USERNAME = os.environ.get("NAPALM_USERNAME", "") -NAPALM_PASSWORD = os.environ.get("NAPALM_PASSWORD", "") +# RQ_QUEUES is not set here because it just uses the default that gets imported +# up top via `from nautobot.core.settings import *`. -# NAPALM timeout (in seconds). (Default: 30) -NAPALM_TIMEOUT = int(os.environ.get("NAPALM_TIMEOUT", 30)) +# Redis Cacheops +CACHEOPS_REDIS = parse_redis_connection(redis_database=1) -# NAPALM optional arguments (see https://napalm.readthedocs.io/en/latest/support/#optional-arguments). Arguments must -# be provided as a dictionary. -NAPALM_ARGS = {} +# +# Celery settings are not defined here because they can be overloaded with +# environment variables. By default they use `CACHES["default"]["LOCATION"]`. +# -# Determine how many objects to display per page within a list. (Default: 50) -PAGINATE_COUNT = int(os.environ.get("PAGINATE_COUNT", 50)) # Enable installed plugins. Add the name of each plugin to the list. PLUGINS = ["nautobot_plugin_nornir", "nautobot_golden_config"] @@ -315,67 +153,3 @@ def is_truthy(arg): "sot_agg_transposer": os.environ.get("SOT_AGG_TRANSPOSER"), }, } - -# When determining the primary IP address for a device, IPv6 is preferred over IPv4 by default. Set this to True to -# prefer IPv4 instead. -PREFER_IPV4 = is_truthy(os.environ.get("PREFER_IPV4", False)) - -# Rack elevation size defaults, in pixels. 
For best results, the ratio of width to height should be roughly 10:1. -RACK_ELEVATION_DEFAULT_UNIT_HEIGHT = 22 -RACK_ELEVATION_DEFAULT_UNIT_WIDTH = 220 - -# Remote authentication support -REMOTE_AUTH_ENABLED = False -REMOTE_AUTH_BACKEND = "nautobot.core.authentication.RemoteUserBackend" -REMOTE_AUTH_HEADER = "HTTP_REMOTE_USER" -REMOTE_AUTH_AUTO_CREATE_USER = True -REMOTE_AUTH_DEFAULT_GROUPS = [] -REMOTE_AUTH_DEFAULT_PERMISSIONS = {} - -# This determines how often the GitHub API is called to check the latest release of Nautobot. Must be at least 1 hour. -RELEASE_CHECK_TIMEOUT = 24 * 3600 - -# This repository is used to check whether there is a new release of Nautobot available. Set to None to disable the -# version check or use the URL below to check for release in the official Nautobot repository. -RELEASE_CHECK_URL = None -# RELEASE_CHECK_URL = 'https://api.github.com/repos/nautobot/nautobot/releases' - -# Maximum execution time for background tasks, in seconds. -RQ_DEFAULT_TIMEOUT = 300 - -# The length of time (in seconds) for which a user will remain logged into the web UI before being prompted to -# re-authenticate. (Default: 1209600 [14 days]) -SESSION_COOKIE_AGE = 1209600 # 2 weeks, in seconds - - -# By default, Nautobot will store session data in the database. Alternatively, a file path can be specified here to use -# local file storage instead. (This can be useful for enabling authentication on a standby instance with read-only -# database access.) Note that the user as which Nautobot runs must have read and write permissions to this path. -SESSION_FILE_PATH = None - -# Configure SSO, for more information see docs/configuration/authentication/sso.md -SOCIAL_AUTH_ENABLED = False - -# Time zone (default: UTC) -TIME_ZONE = os.environ.get("TIME_ZONE", "UTC") - -# Date/time formatting. 
See the following link for supported formats: -# https://docs.djangoproject.com/en/stable/ref/templates/builtins/#date -DATE_FORMAT = os.environ.get("DATE_FORMAT", "N j, Y") -SHORT_DATE_FORMAT = os.environ.get("SHORT_DATE_FORMAT", "Y-m-d") -TIME_FORMAT = os.environ.get("TIME_FORMAT", "g:i a") -SHORT_TIME_FORMAT = os.environ.get("SHORT_TIME_FORMAT", "H:i:s") -DATETIME_FORMAT = os.environ.get("DATETIME_FORMAT", "N j, Y g:i a") -SHORT_DATETIME_FORMAT = os.environ.get("SHORT_DATETIME_FORMAT", "Y-m-d H:i") - -# A list of strings designating all applications that are enabled in this Django installation. Each string should be a dotted Python path to an application configuration class (preferred), or a package containing an application. -# https://nautobot.readthedocs.io/en/latest/configuration/optional-settings/#extra-applications -EXTRA_INSTALLED_APPS = os.environ["EXTRA_INSTALLED_APPS"].split(",") if os.environ.get("EXTRA_INSTALLED_APPS") else [] - -# Django Debug Toolbar -DEBUG_TOOLBAR_CONFIG = {"SHOW_TOOLBAR_CALLBACK": lambda _request: DEBUG and not TESTING} - -if "debug_toolbar" not in EXTRA_INSTALLED_APPS: - EXTRA_INSTALLED_APPS.append("debug_toolbar") -if "debug_toolbar.middleware.DebugToolbarMiddleware" not in MIDDLEWARE: - MIDDLEWARE.insert(0, "debug_toolbar.middleware.DebugToolbarMiddleware") diff --git a/docs/FAQ.md b/docs/FAQ.md index 830e2b4f..1d3df7ed 100644 --- a/docs/FAQ.md +++ b/docs/FAQ.md @@ -67,7 +67,7 @@ map multiple keys to a single netutils expected key. } ``` -_What are the supported platforms for Backup and Intended Configuratino jobs? How do I configure a device with a specific OS?_ +_What are the supported platforms for Backup and Intended Configuration jobs? 
How do I configure a device with a specific OS?_ The current supported platform and the associated *default* platform slug names are the following for: @@ -77,8 +77,9 @@ The current supported platform and the associated *default* platform slug names * cisco_nxos * juniper_junos -In many use cases, this can be extended with a custom dispatcher for nornir tasks, which is controlled in the [nornir-nautobot](https://github.com/nautobot/nornir-nautobot) repository. Additionally the [`nautobot_plugin_nornir`](https://pypi.org/project/nautobot-plugin-nornir/) provide the ability to leverage the -`dispatcher_mapping` configuration parameter to map and/or extend for your environment. Please see the instructions there for further details. +In many use cases, this can be extended with a custom dispatcher for nornir tasks, which is controlled in the [nornir-nautobot](https://github.com/nautobot/nornir-nautobot) +repository. Additionally the [`nautobot_plugin_nornir`](https://pypi.org/project/nautobot-plugin-nornir/) provide the ability to leverage the `dispatcher_mapping` +configuration parameter to map and/or extend for your environment. Please see the instructions there for further details. 
_Why not provide the corrective configurations?_ diff --git a/docs/img/00-navigating-compliance-json.png b/docs/img/00-navigating-compliance-json.png new file mode 100644 index 00000000..a74ce4fc Binary files /dev/null and b/docs/img/00-navigating-compliance-json.png differ diff --git a/docs/img/01-navigating-compliance-json.png b/docs/img/01-navigating-compliance-json.png new file mode 100644 index 00000000..a2221cad Binary files /dev/null and b/docs/img/01-navigating-compliance-json.png differ diff --git a/docs/img/02-navigating-compliance-json.png b/docs/img/02-navigating-compliance-json.png new file mode 100644 index 00000000..8bb16b1c Binary files /dev/null and b/docs/img/02-navigating-compliance-json.png differ diff --git a/docs/img/03-navigating-compliance-json.png b/docs/img/03-navigating-compliance-json.png new file mode 100644 index 00000000..59098129 Binary files /dev/null and b/docs/img/03-navigating-compliance-json.png differ diff --git a/docs/img/04-navigating-compliance-json.png b/docs/img/04-navigating-compliance-json.png new file mode 100644 index 00000000..0ad2ed8c Binary files /dev/null and b/docs/img/04-navigating-compliance-json.png differ diff --git a/docs/img/05-navigating-compliance-json.png b/docs/img/05-navigating-compliance-json.png new file mode 100644 index 00000000..011d3ac7 Binary files /dev/null and b/docs/img/05-navigating-compliance-json.png differ diff --git a/docs/img/06-navigating-compliance-json.png b/docs/img/06-navigating-compliance-json.png new file mode 100644 index 00000000..b208e200 Binary files /dev/null and b/docs/img/06-navigating-compliance-json.png differ diff --git a/docs/img/07-navigating-compliance-json.png b/docs/img/07-navigating-compliance-json.png new file mode 100644 index 00000000..dc897990 Binary files /dev/null and b/docs/img/07-navigating-compliance-json.png differ diff --git a/docs/img/08-navigating-compliance-json.png b/docs/img/08-navigating-compliance-json.png new file mode 100644 index 
00000000..ce21bddb Binary files /dev/null and b/docs/img/08-navigating-compliance-json.png differ diff --git a/docs/installation.md b/docs/installation.md new file mode 100644 index 00000000..a5c355c9 --- /dev/null +++ b/docs/installation.md @@ -0,0 +1,64 @@ +# Installation + +Plugins can be installed manually or use Python's `pip`. See the [nautobot documentation](https://nautobot.readthedocs.io/en/latest/plugins/#install-the-package) for more details. The pip package name for this plugin is [`nautobot-golden-config`](https://pypi.org/project/nautobot-golden-config/) + +> The plugin is compatible with Nautobot 1.0.0 and higher + +**Prerequisite:** The plugin relies on [`nautobot_plugin_nornir`](https://pypi.org/project/nautobot-plugin-nornir/) to be installed and both plugins to be enabled in your configuration settings. + +**Required:** The following block of code below shows the additional configuration required to be added to your `nautobot_config.py` file: +- append `"nautobot_golden_config"` to the `PLUGINS` list +- append the `"nautobot_golden_config"` dictionary to the `PLUGINS_CONFIG` dictionary + +```python +PLUGINS = ["nautobot_plugin_nornir", "nautobot_golden_config"] + +PLUGINS_CONFIG = { + "nautobot_plugin_nornir": { + "nornir_settings": { + "credentials": "nautobot_plugin_nornir.plugins.credentials.env_vars.CredentialsEnvVars", + "runner": { + "plugin": "threaded", + "options": { + "num_workers": 20, + }, + }, + }, + }, + "nautobot_golden_config": { + "per_feature_bar_width": 0.15, + "per_feature_width": 13, + "per_feature_height": 4, + "enable_backup": True, + "enable_compliance": True, + "enable_intended": True, + "enable_sotagg": True, + "sot_agg_transposer": None, + "platform_slug_map": None, + }, +} + +``` + +## Plugin Settings + +The plugin behavior can be controlled with the following list of settings. + +* The `enable_backup`, `enable_compliance`, `enable_intended`, and `enable_sotagg` will toggle inclusion of the entire component. 
+ + +| Key | Example | Default | Description | +| ------- | ------ | -------- | ------------------------------------- | +| enable_backup | True | True | A boolean to represent whether or not to run backup configurations within the plugin. | +| enable_compliance | True | True | A boolean to represent whether or not to run the compliance process within the plugin. | +| enable_intended | True | True | A boolean to represent whether or not to generate intended configurations within the plugin. | +| enable_sotagg | True | True | A boolean to represent whether or not to provide a GraphQL query per device to allow the intended configuration to provide data variables to the plugin. | +| platform_slug_map | {"cisco_wlc": "cisco_aireos"} | None | A dictionary in which the key is the platform slug and the value is what netutils uses in any "network_os" parameter. | +| sot_agg_transposer | mypkg.transposer | - | A string representation of a function that can post-process the graphQL data. | +| per_feature_bar_width | 0.15 | 0.15 | The width of the table bar within the overview report | +| per_feature_width | 13 | 13 | The width in inches that the overview table can be. | +| per_feature_height | 4 | 4 | The height in inches that the overview table can be. | + +> Note: Over time the intention is to make the compliance report more dynamic, but for now allow users to configure the `per_*` configs in a way that fits best for them. + +> Note: Review [`nautobot_plugin_nornir`](https://pypi.org/project/nautobot-plugin-nornir/) for Nornir and dispatcher configuration options. diff --git a/docs/navigating-backup.md b/docs/navigating-backup.md index 44881068..6b8b5627 100644 --- a/docs/navigating-backup.md +++ b/docs/navigating-backup.md @@ -9,7 +9,7 @@ and save the configuration. The high-level process to run backups is: * Store the backup configurations locally. * Push configurations to the remote Git repository. 
-# Configuration Backup Settings +## Configuration Backup Settings Backup configurations often need some amount of parsing to stay sane. The two obvious use cases are the ability to remove lines such as the "Last Configuration" changed date, as this will cause unnecessary changes the second is to strip out secrets from the configuration. In an effort to support these @@ -41,7 +41,16 @@ The credentials/secrets management is further described within the [nautbot-plug repo. For the simplist use case you can set environment variables for `NAPALM_USERNAME`, `NAPALM_PASSWORD`, and `DEVICE_SECRET`. For more complicated use cases, please refer to the plugin documentation linked above. -# Config Removals +## Starting a Backup Job + +To start a backup job manually: + +1. Navigate to the Plugin Home (Plugins->Home), with Home being in the `Golden Configuration` section +2. Select _Execute_ on the upper right buttons, then _Backup_ +3. Fill in the data that you wish to have backed up +4. Select _Run Job_ + +## Config Removals The line removals settings is a series of regex patterns to identify lines that should be removed. This is helpful as there are usually parts of the configurations that will change each time. A match simply means to remove. @@ -51,7 +60,7 @@ In order to specify line removals. Navigate to **Plugins -> Config Removals**. The remove setting is based on `Platform`. An example is shown below. ![Config Removals View](./img/00-navigating-backup.png) -# Config Replacements +## Config Replacements This is a replacement config with a regex pattern with a single capture groups to replace. This is helpful to strip out secrets. diff --git a/docs/navigating-compliance-json.md b/docs/navigating-compliance-json.md new file mode 100644 index 00000000..6ddf7b8a --- /dev/null +++ b/docs/navigating-compliance-json.md @@ -0,0 +1,81 @@ +# Navigating Compliance Using Structured Data + +> Warning: +This feature is in early development and is only implemented within the API. 
There are multiple caveats that need to be considered and they are listed below. + +## Caveats +- Must have a `Platform` created specifically for the feature. +- The compliance `rule` need to be unique for the JSON `config-type`. +- When creating `Compliance Rules` with the config type of JSON, the `config to match` field is not used it should be left blank. +- Does **NOT** use Git repositories for backup and intended configurations. +- Mixing/Matching CLI and JSON compliance rules is **NOT** supported. A device should only have compliance rules from one or the other, and it is up to the operator to enforce. + +## Quick Start Guide + +1. To get started with the structured data(JSON) compliance check the first step is to create a platform. + +![Example Platform Creation](./img/00-navigating-compliance-json.png) + +2. Next, the compliance feature needs to be created, the feature can be shared between a CLI rule and a JSON rule. + +![Example Feature Creation](./img/01-navigating-compliance-json.png) + +3. Link the feature that was just created to a rule definition. +![Example Rule Creation](./img/02-navigating-compliance-json.png) + +4. Now that the definitions are created and the rule is mapped to the platform we created the API execution can be performed. + +Here is an example of a `POST` to the `/plugins​/golden-config​/config-compliance​/` endpoint. + +```json +{ + "actual": {"aaa": {"servers": ["1.1.1.1"]}}, + "intended": {"aaa": {"servers": ["1.1.1.1"]}}, + "device": "472c3786-4499-4e34-8d8e-291c4c7f32a9", + "rule": "87cc003b-9025-472d-bf3c-e5b62bf91987" +} +``` + +> Notice: The UUID is needed for the device and rule to execute against. This data can be found in the UI or by querying the proper endpoints, `/plugins/golden-config/compliance-rule/` and `/dcim​/devices​/`. + +Once the API call is made the response data provides a quick snapshot. 
+```json +{ + "id": "be35a701-0cb0-418a-98e7-a82d99db857e", + "device": "472c3786-4499-4e34-8d8e-291c4c7f32a9", + "rule": "87cc003b-9025-472d-bf3c-e5b62bf91987", + "actual": {"aaa": {"servers": ["1.1.1.1"]}}, + "intended": {"aaa": {"servers": ["1.1.1.1"]}}, + "ordered": true, + "compliance": true, + "display": "nautobot-iosv-1 -> iosv-json - aaa-json -> True" +} +``` + +You can also see the compliance data in the UI once it is created via API. + +In the navigation menu: `Plugins -> Configuration Compliance`. + +![Example Compliance Run in UI](./img/03-navigating-compliance-json.png) + +With the detailed view: + +![Example Compliance Run in UI Detail](./img/04-navigating-compliance-json.png) + +Example of a Non-Compliant rule: + +![Example Non-Compliant Run in UI Detail](./img/05-navigating-compliance-json.png) + +> Note: +All subsequent updates to the compliance rule need to use the `PUT` or `PATCH` API methods and they need to provide the UUID that was created for the specific compliance execution. This UUID is generated by (unique_together = ("device", "rule")) + +![Example of Diff Icon](./img/06-navigating-compliance-json.png) + +Seeing the diff button alone will **only** show up for devices using JSON compliance rules. + + +![Show Updated Overview](./img/07-navigating-compliance-json.png) + +The detailed diff view will show a side by side diff, this looks the same as the CLI view. + +![Detail Diff View](./img/08-navigating-compliance-json.png) diff --git a/docs/navigating-compliance.md b/docs/navigating-compliance.md index cc188394..54ffbff1 100644 --- a/docs/navigating-compliance.md +++ b/docs/navigating-compliance.md @@ -1,5 +1,8 @@ # Configuration Compliance +> Note: +This document provides instructions based on `CLI` based compliance. The other option is `JSON` [structured data compliance](./navigating-compliance-json.md). + The following should be noted by what is meant by configuration compliance. 
Configurations are considered to be compliant if the generated configuration (generally by merging data and Jinja2, will be referred to as the intended configuration from hence forth) matches "exactly" as the actual configuration is on the backup. This may confusing to some, as for example to the average network engineer, there is no difference between `int g0/0` and @@ -17,7 +20,7 @@ There is no magic to determine the state of configuration. You still must define configuration may be as a network engineer wants it, but the tool correctly considers it non-compliant, since the tool is only comparing two configurations. The tool makes no assumptions to determine what an engineer may want to do, but did not document via the configuration generation process. -# Compliance Configuration Settings +## Compliance Configuration Settings In order to generate the intended configurations two repositories are needed. @@ -26,7 +29,16 @@ In order to generate the intended configurations two repositories are needed. 3. The [intended_path_template](./navigating-golden.md#application-settings) configuration parameter. 4. The [backup_path_template](./navigating-golden.md#application-settings) configuration parameter. -# Configuration Compliance Parsing Engine +## Starting a Compliance Job + +To start a compliance job manually: + +1. Navigate to the Plugin Home (Plugins->Home), with Home being in the `Golden Configuration` section +2. Select _Execute_ on the upper right buttons, then _Compliance_ +3. Fill in the data that you wish to have a compliance report generated for +4. Select _Run Job_ + +## Configuration Compliance Parsing Engine Configuration compliance is different than a simple UNIX diff. While the UI provides both, the compliance metrics are not influenced by the UNIX diff capabilities. 
One of the challenges of getting a device into compliance is the ramp up it takes to model and generate configurations for an entire @@ -96,7 +108,7 @@ router bgp 65250 > Note: A platform will not run successfully against a device unless at least one compliance rule is set. -# Configuration Compliance Settings +## Configuration Compliance Settings Configuration compliance requires the Git Repo settings for `config backups` and `intended configs`--which are covered in their respective sections--regardless if they are actually managed via the plugin or not. The same is true for the `Backup Path` and `Intended Path`. @@ -122,12 +134,12 @@ what a line starts with only. Meaning, there is an implicit greediness to the ma > Note: The mapping of "network_os" as defined by netutils is provided via the plugin settings in your nautobot_config.py, and documented on the primary Readme. -# Compliance View +## Compliance View The compliance overview will provide a per device and feature overview on the compliance of your network devices. From here you can navigate to the details view. ![Compliance Overview](./img/compliance-overview.png) -# Compliance Details View +## Compliance Details View Drilling into a specific device and feature, you can get an immediate detailed understanding of your device. @@ -142,13 +154,13 @@ Please note the following about the compliance details page. * The icon next to the status will indicate whether or not the configuration is ordered. * The icons on top of the page can be used to help navigate the page easier. -# Supported Platforms +## Supported Platforms Platforms support technically come from the options provided by [nornir-nautobot](https://github.com/nautobot/nornir-nautobot) for nornir dispatcher tasks and [netutils](https://github.com/networktocode/netutils) for configuration compliance and parsing. However, for reference, the valid slug's of the platforms are provided in the [FAQ](./FAQ.md). 
-# Overview Report +## Overview Report There is a global overview or executive summary that provides a high level snapshot of the compliance. There are 3 points of data captured. @@ -156,14 +168,14 @@ There is a global overview or executive summary that provides a high level snaps * Features - This is the total number of features for all devices, and how many are compliant, and how many are non-compliant. * Per Feature - This is a breakdown of that feature and how many within that feature are compliant of not. -# Detail Report +## Detail Report This can be accessed via the Plugins drop-down via `Compliance` details button. From there you can filter the devices via the form on the right side, limit the columns with the `Configure` button, or bulk delete with the `Delete` button. Additionally each device is click-able to view the details of that individual device. You can configure the columns to limit how much is showing on one screen. -# Device Details +## Device Details You can get to the device details form either the Compliance details page, or there is a `content_template` on the device model page is Nautobot's core instance. diff --git a/docs/navigating-golden.md b/docs/navigating-golden.md index 8e0b9ca6..19fdcd61 100644 --- a/docs/navigating-golden.md +++ b/docs/navigating-golden.md @@ -2,7 +2,7 @@ A navigation overview of the entire plugin. -# Home +## Home The Home view is a portal to understand what the status of the devices are. @@ -24,7 +24,7 @@ Some of the information described in this view, may not be immediately obvious. The first four bring up a "modal" or "dialogue box" which has a detailed view for a dedicated page. The run job brings the user to a job to run all three components against all of the devices. -# Jobs +## Jobs There are a series of Jobs that are registered via the Plugin. They can be viewed from the standard Jobs view. 
@@ -34,7 +34,7 @@ Each Job attempts to provide sane error handling, and respects the `debug` flag ![Job Result](./img/job-result.png) -# Application Settings +## Application Settings The golden configuration plugin settings can be found by navigating to `Plugins -> Settings` button. Under the `Golden Configuration` section. @@ -55,7 +55,7 @@ To configure or update the settings click the pencil icon to edit. > Note: Each of these will be further detailed in their respective sections. -## Scope +### Scope The scope, is a JSON blob that describes a filter that will provide the list of devices to be allowed whenever a job is ran. A job can optionally further refine the scope, but the outbound would be based on what is defined here. The options are best described by leveraging the Devices list view, search features (the filtering shown on the side of the Devices.) Building a query there, will provide the exact keys expected. @@ -90,7 +90,7 @@ Adding a "has_primary_ip" check. When viewing the settings, the scope of devices is actually a link to the query built in the Devices view. Click that link to understand which devices are permitted by the filter. -# Git Settings +## Git Settings The plugin makes heavy use of the Nautobot git data sources feature. There are up to three repositories used in the application. This set of instructions will walk an operator through setting up the backup repository. The steps are the same, except for the "Provides" field name chosen. @@ -125,33 +125,33 @@ Once you click `Create` and the repository syncs, the main page will now show th For their respective features, the "Provides" field could be backup intended configs and jinja templates. -# Plugins Buttons +## Plugins Buttons The plugins buttons provides you with the ability to navigate to Run the script, overview report, and detailed report. 
-# Run Script +## Run Script This can be accessed via the Plugins drop-down via `Run Script` button of the `Home` view, the user will be provided a form of the Job (as described above), which will allow the user to limit the scope of the request. -# Device Template Content +## Device Template Content The plugin makes use of template content `right_page` in order to use display in-line the status of that device in the traditional Nautobot view. From here you can click the link to see the detail compliance view. -# Site Template Content +## Site Template Content The plugin makes use of template content `right_page` in order to use display in-line the status of that entire site in the traditional Nautobot view. -# API +## API To run the job programmactially, reference the [nautobot documentation](https://nautobot.readthedocs.io/en/stable/additional-features/jobs/#via-the-api) for the proper API call. Pay special attention to the `class_path` defintion. -# Feature Enablement +## Feature Enablement Enabling features such as backup or compliance, will render those parts of the UI visible. It is worth noting that disabling features does not provide any garbage collection and it is up to the operator to remove such data. -# Network Operating System Support +## Network Operating System Support The version of OS's supported is documented in the [FAQ](./FAQ.md) and is controlled the platform slug. The platform slug must be exactly as expected or leverage a configuration option--which is described the the FAQ--for the plugin to work. \ No newline at end of file diff --git a/docs/navigating-intended.md b/docs/navigating-intended.md index a0e21621..8d9fcbf1 100644 --- a/docs/navigating-intended.md +++ b/docs/navigating-intended.md @@ -1,4 +1,6 @@ -# Configuration Generation +# Intended Configuration + +## Configuration Generation The Golden Config plugin provides the ability to generate configurations. 
The process is a Nornir play that points to a single Jinja template per device that generates the configurations. Data is provided via the Source of Truth aggregation and is currently a hard requirement to be turned on if @@ -27,7 +29,16 @@ or {% endfor %} ``` -# Intended Configuration Settings +## Starting a Intended Configuration Job + +To start a intended configuration job manually: + +1. Navigate to the Plugin Home (Plugins->Home), with Home being in the `Golden Configuration` section +2. Select _Execute_ on the upper right buttons, then _Intended_ +3. Fill in the data that you wish to have configurations generated for up +4. Select _Run Job_ + +## Intended Configuration Settings In order to generate the intended configurations two repositories are needed. @@ -36,6 +47,6 @@ In order to generate the intended configurations two repositories are needed. 3. The [intended_path_template](./navigating-golden.md#application-settings) configuration parameter. 4. The [jinja_path_template](./navigating-golden.md#application-settings) configuration parameter. -# Data +## Data The data provided while rendering the configuration of a device is described in the [SoT Aggregation](./navigating-sot-agg.md) overview. diff --git a/docs/navigating-sot-agg.md b/docs/navigating-sot-agg.md index f6262a68..bc7e7d64 100644 --- a/docs/navigating-sot-agg.md +++ b/docs/navigating-sot-agg.md @@ -6,7 +6,7 @@ The Source of Truth Aggregation Overview is driven by a few key components. * The ability to modify data with a "transposer" function. * The usage of config contexts and the Nautobot's native git platform. -# GraphQL +## GraphQL There is currently support to make an arbitrary GraphQL query that has "device_id" as a variable. It is likely best to use the GraphiQL interface to model your data, and then save that query to the configuration. The application configuration ensures the following two components. 
@@ -19,7 +19,7 @@ It is worth noting that the graphQL query returned is modified to remove the roo It is helpful to make adjustments to the query, and then view the data from the Plugin's home page and clicking on a given device's `code-json` icon. -# Transposer Function +## Transposer Function The transposer function is an optional function to make arbitrary changes to the data after the fact. There is a Plugin configuration that allows the operator to point to a function within the python path by a string. The function will receive a single variable, that by convention should be called @@ -45,17 +45,17 @@ PLUGINS_CONFIG["nautobot_golden_config"]["sot_agg_transposer"] = "nautobot_golde ``` The path described must be within the Python path of your worker. It is up to the operator to ensure that happens. -# Config Context +## Config Context Outside of the scope of this document, but it is worth mentioning the power that configuration context's with integration to Git can provide in this solution. -# Performance +## Performance The GraphQL and transposer functionality could seriously impact the performance of the server. There are no restrictions imposed as it is up to the operator to weigh the pros and cons of the solution. -# Sample Query +## Sample Query To test your query in the GraphiQL UI, obtain a device's uuid, which can be seen in the url of the detailed device view. Once you have a valid device uuid, you can use the "Query Variables" portion of the UI, which is on the bottom left-hand side of the screen. diff --git a/docs/quick-start.md b/docs/quick-start.md new file mode 100644 index 00000000..5dd995ec --- /dev/null +++ b/docs/quick-start.md @@ -0,0 +1,110 @@ +# Quick Start Guides + +- [Backup Configuration](#backup-configuration) +- [Intended Configuration](#intended-configuration) +- [Compliance](#compliance) + +# Backup Configuration + +Follow the steps below to get up and running for the configuration backup element of the plugin. + +1. 
Enable the feature in the `PLUGIN_SETTINGS`. The configuration should have `"enable_backup": True` set in the `PLUGINS_CONFIG` dictionary for `nautobot_golden_config`. + +2. Add the git repository that will be used to house the backup configurations. + + 1. In the UI `Extensibility -> Git Repositories`. Click Add. + 2. Populate the Git Repository data for the backup. [Git Settings](./navigating-golden.md#git-settings) + 3. Make sure to select the **provides** called `backup configs`. + 4. Click Create. + +3. Next, make sure to update the Plugins **Settings** with the backup details. + + 1. Navigate to `Plugins -> Settings` under the Golden Configuration Section. + 2. Fill out the Backup Repository. (The dropdown will show the repository that was just created.) + 3. Fill out Backup Path Template. Typically `{{obj.site.slug}}/{{obj.name}}.cfg`, see [Setting Details](./navigating-golden.md#application-settings) + 4. Select whether or not to do a connectivity check per device. + 5. Click Save. + +4. Create Configuration Removals and Replacements. + + 1. [Config Removals](./navigating-backup.md#config-removals) + 2. [Config Replacements](./navigating-backup.md#config-replacements) + +5. Execute the Backup. + + 1. Navigate to `Plugins -> Home`. + 2. Click on the `Execute` button and select `Backup`. + 3. Select what to run the backup on. + 4. Run the Job. + +> For in-depth details see [Navigating Backup](./navigating-backup.md) + +# Intended Configuration + +Follow the steps below to get up and running for the intended configuration element of the plugin. + +> Notice: Intended Configuration requires the `enable_intended` and `enable_sotagg` plugin features to be used. + +1. Enable the feature in the `PLUGIN_SETTINGS`. The configuration should have `"enable_intended": True` set in the `PLUGINS_CONFIG` dictionary for `nautobot_golden_config`. + +2. Add the git repository that will be used to house the intended configurations. + + 1. 
In the UI `Extensibility -> Git Repositories`. Click Add. + 2. Populate the Git Repository data for the intended. [Git Settings](./navigating-golden.md#git-settings) + 3. Make sure to select the **provides** called `intended configs`. + 4. Click Create. + +3. Add the git repository that will be used to house the Jinja2 templates. + + 1. In the UI `Extensibility -> Git Repositories`. Click Add. + 2. Populate the Git Repository data for the jinja2 templates. [Git Settings](./navigating-golden.md#git-settings) + 3. Make sure to select the **provides** called `jinja templates`. + 4. Click Create. + +4. Next, make sure to update the Plugins **Settings** with the intended and jinja2 template details. + + 1. Navigate to `Plugins -> Settings` under the Golden Configuration Section. + 2. Fill out the Intended Repository. (The dropdown will show the repository that was just created.) + 3. Fill out Intended Path Template. Typically `{{obj.site.slug}}/{{obj.name}}.cfg`, see [Setting Details](./navigating-golden.md#application-settings) + 4. Fill out Jinja Repository. (The dropdown will show the repository that was just created.) + 5. Fill out Jinja Path Template. Typically `{{obj.platform.slug}}.j2`. + +5. Determine what data (variables) the Jinja2 templates need from Nautobot. + + 1. See [Source of Truth Agg Details](./navigating-sot-agg.md) + 2. Populate the SoTAgg field in the `Plugin -> Settings`. + +6. Execute the Intended. + + 1. Navigate to `Plugins -> Home`. + 2. Click on the `Execute` button and select `Intended`. + 3. Select what to run the intended generation on. + 4. Run the Job. + +> For in-depth details see [Navigating Intended](./navigating-intended.md) + +# Compliance + +Compliance requires Backups and Intended Configurations in order to be executed. + +1. Enable the feature in the `PLUGIN_SETTINGS`. The configuration should have `"enable_compliance": True` set in the `PLUGINS_CONFIG` dictionary for `nautobot_golden_config`. +2. 
Follow the steps in [Backup Configuration](#backup-configuration). +3. Follow the steps in [Intended Configuration](#intended-configuration). +4. Create a Compliance Feature. + + 1. Navigate to `Plugins -> Compliance Feature`. + 2. Click Add and give the feature a name. Typically this is based on the configuration snippet or section. E.g. "aaa". + +5. Create a Compliance Rule. + + 1. Navigate to `Plugins -> Compliance Rules`. + 2. Click Add and populate the fields, make sure the rule is linked to the feature created previously. See [Configuration Compliance Settings](./navigating-compliance.md#configuration-compliance-settings) for details. + +6. Execute Compliance Check. + + 1. Navigate to `Plugins -> Configuration Compliance`. + 2. Click on the `Execute` button and select `Compliance`. + 3. Select what to run the compliance on. + 4. Run the Job. + +> For in-depth details see [Navigating Compliance](./navigating-compliance.md) diff --git a/nautobot_golden_config/__init__.py b/nautobot_golden_config/__init__.py index 090c32c0..45fd3dd7 100644 --- a/nautobot_golden_config/__init__.py +++ b/nautobot_golden_config/__init__.py @@ -1,6 +1,6 @@ """Plugin declaration for nautobot_golden_config.""" -__version__ = "0.9.5" +__version__ = "0.9.6" from nautobot.extras.plugins import PluginConfig diff --git a/nautobot_golden_config/api/serializers.py b/nautobot_golden_config/api/serializers.py index 5f3b4baa..0546fc67 100644 --- a/nautobot_golden_config/api/serializers.py +++ b/nautobot_golden_config/api/serializers.py @@ -1,6 +1,7 @@ """REST API serializer capabilities for graphql plugin.""" # pylint: disable=too-many-ancestors from rest_framework import serializers + from nautobot.extras.api.customfields import CustomFieldModelSerializer from nautobot.extras.api.serializers import TaggedObjectSerializer @@ -42,12 +43,8 @@ class Meta: class ConfigComplianceSerializer(TaggedObjectSerializer, CustomFieldModelSerializer): """Serializer for ConfigCompliance object.""" - url = 
serializers.HyperlinkedIdentityField( - view_name="plugins-api:nautobot_golden_config-api:configcompliance-detail" - ) - class Meta: - """Set Meta Data for ConfigCompliance, will serialize all fields.""" + """Set Meta Data for ConfigCompliance, will serialize fields.""" model = models.ConfigCompliance fields = "__all__" diff --git a/nautobot_golden_config/filters.py b/nautobot_golden_config/filters.py index 722d4280..1bbd77bb 100644 --- a/nautobot_golden_config/filters.py +++ b/nautobot_golden_config/filters.py @@ -2,7 +2,7 @@ import django_filters -from django.db.models import Q, Subquery +from django.db.models import Q from nautobot.dcim.models import Device, Platform, Region, Site, DeviceRole, DeviceType, Manufacturer, RackGroup, Rack from nautobot.extras.models import Status @@ -22,37 +22,37 @@ class GoldenConfigFilter(CreatedUpdatedFilterSet): ) tenant_group_id = TreeNodeMultipleChoiceFilter( queryset=TenantGroup.objects.all(), - field_name="device__tenant__group", + field_name="tenant__group", lookup_expr="in", label="Tenant Group (ID)", ) tenant_group = TreeNodeMultipleChoiceFilter( queryset=TenantGroup.objects.all(), - field_name="device__tenant__group", + field_name="tenant__group", to_field_name="slug", lookup_expr="in", label="Tenant Group (slug)", ) tenant_id = django_filters.ModelMultipleChoiceFilter( queryset=Tenant.objects.all(), - field_name="device__tenant_id", + field_name="tenant_id", label="Tenant (ID)", ) tenant = django_filters.ModelMultipleChoiceFilter( queryset=Tenant.objects.all(), - field_name="device__tenant__slug", + field_name="tenant__slug", to_field_name="slug", label="Tenant (slug)", ) region_id = TreeNodeMultipleChoiceFilter( queryset=Region.objects.all(), - field_name="device__site__region", + field_name="site__region", lookup_expr="in", label="Region (ID)", ) region = TreeNodeMultipleChoiceFilter( queryset=Region.objects.all(), - field_name="device__site__region", + field_name="site__region", lookup_expr="in", 
to_field_name="slug", label="Region (slug)", @@ -62,40 +62,40 @@ class GoldenConfigFilter(CreatedUpdatedFilterSet): label="Site (ID)", ) site = django_filters.ModelMultipleChoiceFilter( - field_name="device__site__slug", + field_name="site__slug", queryset=Site.objects.all(), to_field_name="slug", label="Site name (slug)", ) rack_group_id = TreeNodeMultipleChoiceFilter( queryset=RackGroup.objects.all(), - field_name="device__rack__group", + field_name="rack__group", lookup_expr="in", label="Rack group (ID)", ) rack_id = django_filters.ModelMultipleChoiceFilter( - field_name="device__rack", + field_name="rack", queryset=Rack.objects.all(), label="Rack (ID)", ) role_id = django_filters.ModelMultipleChoiceFilter( - field_name="device__device_role_id", + field_name="device_role_id", queryset=DeviceRole.objects.all(), label="Role (ID)", ) role = django_filters.ModelMultipleChoiceFilter( - field_name="device__device_role__slug", + field_name="device_role__slug", queryset=DeviceRole.objects.all(), to_field_name="slug", label="Role (slug)", ) manufacturer_id = django_filters.ModelMultipleChoiceFilter( - field_name="device__device_type__manufacturer", + field_name="device_type__manufacturer", queryset=Manufacturer.objects.all(), label="Manufacturer (ID)", ) manufacturer = django_filters.ModelMultipleChoiceFilter( - field_name="device__device_type__manufacturer__slug", + field_name="device_type__manufacturer__slug", queryset=Manufacturer.objects.all(), to_field_name="slug", label="Manufacturer (slug)", @@ -105,28 +105,28 @@ class GoldenConfigFilter(CreatedUpdatedFilterSet): label="Platform (ID)", ) platform = django_filters.ModelMultipleChoiceFilter( - field_name="device__platform__slug", + field_name="platform__slug", queryset=Platform.objects.all(), to_field_name="slug", label="Platform (slug)", ) device_status_id = StatusFilter( - field_name="device__status", + field_name="status", queryset=Status.objects.all(), label="Device Status", ) device_type_id = 
django_filters.ModelMultipleChoiceFilter( - field_name="device__device_type_id", + field_name="device_type_id", queryset=DeviceType.objects.all(), label="Device type (ID)", ) device_id = django_filters.ModelMultipleChoiceFilter( field_name="device", queryset=Device.objects.all(), - label="Device Name", + label="Device (ID)", ) device = django_filters.ModelMultipleChoiceFilter( - field_name="device__name", + field_name="name", queryset=Device.objects.all(), label="Device Name", ) @@ -136,13 +136,13 @@ def search(self, queryset, name, value): # pylint: disable=unused-argument,no-s if not value.strip(): return queryset # Chose only device, can be convinced more should be included - qs_filter = Q(device__name__icontains=value) + qs_filter = Q(name__icontains=value) return queryset.filter(qs_filter) class Meta: """Meta class attributes for GoldenConfig.""" - model = models.GoldenConfig + model = Device distinct = True fields = [ "q", @@ -169,18 +169,131 @@ class Meta: ] -class ConfigComplianceFilter(GoldenConfigFilter): +class ConfigComplianceFilter(CreatedUpdatedFilterSet): """Filter capabilities for ConfigCompliance instances.""" + q = django_filters.CharFilter( + method="search", + label="Search", + ) + tenant_group_id = TreeNodeMultipleChoiceFilter( + queryset=TenantGroup.objects.all(), + field_name="device__tenant__group", + lookup_expr="in", + label="Tenant Group (ID)", + ) + tenant_group = TreeNodeMultipleChoiceFilter( + queryset=TenantGroup.objects.all(), + field_name="device__tenant__group", + to_field_name="slug", + lookup_expr="in", + label="Tenant Group (slug)", + ) + tenant_id = django_filters.ModelMultipleChoiceFilter( + queryset=Tenant.objects.all(), + field_name="device__tenant_id", + label="Tenant (ID)", + ) + tenant = django_filters.ModelMultipleChoiceFilter( + queryset=Tenant.objects.all(), + field_name="device__tenant__slug", + to_field_name="slug", + label="Tenant (slug)", + ) + region_id = TreeNodeMultipleChoiceFilter( + 
queryset=Region.objects.all(), + field_name="device__site__region", + lookup_expr="in", + label="Region (ID)", + ) + region = TreeNodeMultipleChoiceFilter( + queryset=Region.objects.all(), + field_name="device__site__region", + lookup_expr="in", + to_field_name="slug", + label="Region (slug)", + ) + site_id = django_filters.ModelMultipleChoiceFilter( + queryset=Site.objects.all(), + label="Site (ID)", + ) + site = django_filters.ModelMultipleChoiceFilter( + field_name="device__site__slug", + queryset=Site.objects.all(), + to_field_name="slug", + label="Site name (slug)", + ) + rack_group_id = TreeNodeMultipleChoiceFilter( + queryset=RackGroup.objects.all(), + field_name="device__rack__group", + lookup_expr="in", + label="Rack group (ID)", + ) + rack_id = django_filters.ModelMultipleChoiceFilter( + field_name="device__rack", + queryset=Rack.objects.all(), + label="Rack (ID)", + ) + role_id = django_filters.ModelMultipleChoiceFilter( + field_name="device__device_role_id", + queryset=DeviceRole.objects.all(), + label="Role (ID)", + ) + role = django_filters.ModelMultipleChoiceFilter( + field_name="device__device_role__slug", + queryset=DeviceRole.objects.all(), + to_field_name="slug", + label="Role (slug)", + ) + manufacturer_id = django_filters.ModelMultipleChoiceFilter( + field_name="device__device_type__manufacturer", + queryset=Manufacturer.objects.all(), + label="Manufacturer (ID)", + ) + manufacturer = django_filters.ModelMultipleChoiceFilter( + field_name="device__device_type__manufacturer__slug", + queryset=Manufacturer.objects.all(), + to_field_name="slug", + label="Manufacturer (slug)", + ) + platform_id = django_filters.ModelMultipleChoiceFilter( + queryset=Platform.objects.all(), + label="Platform (ID)", + ) + platform = django_filters.ModelMultipleChoiceFilter( + field_name="device__platform__slug", + queryset=Platform.objects.all(), + to_field_name="slug", + label="Platform (slug)", + ) + device_status_id = StatusFilter( + field_name="device__status", + 
queryset=Status.objects.all(), + label="Device Status", + ) + device_type_id = django_filters.ModelMultipleChoiceFilter( + field_name="device__device_type_id", + queryset=DeviceType.objects.all(), + label="Device type (ID)", + ) + device_id = django_filters.ModelMultipleChoiceFilter( + queryset=Device.objects.all(), + label="Device Name", + ) device = django_filters.ModelMultipleChoiceFilter( field_name="device__name", - queryset=Device.objects.filter( - id__in=Subquery(models.ConfigCompliance.objects.distinct("device").values("device")) - ), - to_field_name="name", + queryset=Device.objects.all(), label="Device Name", ) + def search(self, queryset, name, value): # pylint: disable=unused-argument,no-self-use + """Perform the filtered search.""" + if not value.strip(): + return queryset + # Chose only device, can be convinced more should be included + qs_filter = Q(device__name__icontains=value) + return queryset.filter(qs_filter) + class Meta: """Meta class attributes for ConfigComplianceFilter.""" diff --git a/nautobot_golden_config/forms.py b/nautobot_golden_config/forms.py index c0eacac2..2fa46376 100644 --- a/nautobot_golden_config/forms.py +++ b/nautobot_golden_config/forms.py @@ -1,7 +1,6 @@ """Forms for Device Configuration Backup.""" from django import forms -from django.db.models import Subquery import nautobot.extras.forms as extras_forms import nautobot.utilities.forms as utilities_forms @@ -15,7 +14,7 @@ class GoldenConfigFilterForm(utilities_forms.BootstrapMixin, extras_forms.CustomFieldFilterForm): """Filter Form for GoldenConfig instances.""" - model = models.GoldenConfig + model = Device class Meta: """Meta definitions of searchable fields.""" @@ -106,14 +105,6 @@ class ConfigComplianceFilterForm(GoldenConfigFilterForm): """Filter Form for ConfigCompliance instances.""" model = models.ConfigCompliance - device = utilities_forms.DynamicModelMultipleChoiceField( - queryset=Device.objects.filter( - 
id__in=Subquery(models.ConfigCompliance.objects.distinct("device").values("device")) - ), - to_field_name="name", - required=False, - null_option="None", - ) # ComplianceRule diff --git a/nautobot_golden_config/graphql/types.py b/nautobot_golden_config/graphql/types.py deleted file mode 100644 index 90b294fe..00000000 --- a/nautobot_golden_config/graphql/types.py +++ /dev/null @@ -1,82 +0,0 @@ -"""GraphQL implementation for golden config plugin.""" -import graphene -from graphene_django import DjangoObjectType -from graphene_django.converter import convert_django_field -from taggit.managers import TaggableManager - -from nautobot.extras.graphql.types import TagType -from nautobot_golden_config import models -from nautobot_golden_config import filters - - -@convert_django_field.register(TaggableManager) -def convert_field_to_list_tags(field, registry=None): - """Convert TaggableManager to List of Tags.""" - return graphene.List(TagType) - - -class ConfigComplianceType(DjangoObjectType): - """Graphql Type Object for Config Compliance model.""" - - class Meta: - """Meta object boilerplate for ConfigComplianceType.""" - - model = models.ConfigCompliance - filterset_class = filters.ConfigComplianceFilter - - -class GoldenConfigType(DjangoObjectType): - """Graphql Type Object for Golden Configuration model.""" - - class Meta: - """Meta object boilerplate for GoldenConfigType.""" - - model = models.GoldenConfig - filterset_class = filters.GoldenConfigFilter - - -class ComplianceRuleType(DjangoObjectType): - """Graphql Type Object for Compliance Rule model.""" - - class Meta: - """Meta object boilerplate for GoldenConfigType.""" - - model = models.ComplianceRule - filterset_class = filters.ComplianceRuleFilter - - -class GoldenConfigSettingType(DjangoObjectType): - """Graphql Type Object for Golden Config Settings model.""" - - class Meta: - """Meta object boilerplate for GoldenConfigSettingType.""" - - model = models.GoldenConfigSetting - - -class 
ConfigRemoveType(DjangoObjectType): - """Graphql Type Object for Backup Config Line Remove model.""" - - class Meta: - """Meta object boilerplate for ConfigRemoveType.""" - - model = models.ConfigRemove - - -class ConfigReplaceType(DjangoObjectType): - """Graphql Type Object for Backup Config Line Replace model.""" - - class Meta: - """Meta object boilerplate for ConfigReplaceType.""" - - model = models.ConfigReplace - - -graphql_types = [ - ConfigComplianceType, - GoldenConfigType, - ComplianceRuleType, - GoldenConfigSettingType, - ConfigRemoveType, - ConfigReplaceType, -] diff --git a/nautobot_golden_config/management/commands/run_config_backup.py b/nautobot_golden_config/management/commands/run_config_backup.py new file mode 100644 index 00000000..348c3949 --- /dev/null +++ b/nautobot_golden_config/management/commands/run_config_backup.py @@ -0,0 +1,22 @@ +"""Add the run_config_backup command to nautobot-server.""" + +from django.core.management.base import BaseCommand +from nautobot.extras.jobs import get_job + +from nautobot_golden_config.utilities.management import job_runner + + +class Command(BaseCommand): + """Boilerplate Command to inherit from BaseCommand.""" + + help = "Run Config Backup from Golden Config Plugin." 
+ + def add_arguments(self, parser): + """Add arguments for run_config_backup.""" + parser.add_argument("-u", "--user", type=str, required=True, help="User to run the Job as.") + parser.add_argument("-d", "--device", type=str, help="Define a uniquely defined device name") + + def handle(self, *args, **kwargs): + """Add handler for run_config_backup.""" + job_class = get_job("plugins/nautobot_golden_config.jobs/BackupJob") + job_runner(self, job_class, kwargs.get("device"), kwargs.get("user")) diff --git a/nautobot_golden_config/management/commands/run_config_compliance.py b/nautobot_golden_config/management/commands/run_config_compliance.py new file mode 100644 index 00000000..488ecccc --- /dev/null +++ b/nautobot_golden_config/management/commands/run_config_compliance.py @@ -0,0 +1,22 @@ +"""Add the run_config_compliance command to nautobot-server.""" + +from django.core.management.base import BaseCommand +from nautobot.extras.jobs import get_job + +from nautobot_golden_config.utilities.management import job_runner + + +class Command(BaseCommand): + """Boilerplate Command to inherit from BaseCommand.""" + + help = "Run Config Compliance Job from Golden Config Plugin." 
+ + def add_arguments(self, parser): + """Add arguments for run_config_compliance.""" + parser.add_argument("-u", "--user", type=str, required=True, help="User to run the Job as.") + parser.add_argument("-d", "--device", type=str, help="Define a uniquely defined device name") + + def handle(self, *args, **kwargs): + """Add handler for run_config_compliance.""" + job_class = get_job("plugins/nautobot_golden_config.jobs/ComplianceJob") + job_runner(self, job_class, kwargs.get("device"), kwargs.get("user")) diff --git a/nautobot_golden_config/management/commands/run_generate_config.py b/nautobot_golden_config/management/commands/run_generate_config.py new file mode 100644 index 00000000..68c70c25 --- /dev/null +++ b/nautobot_golden_config/management/commands/run_generate_config.py @@ -0,0 +1,22 @@ +"""Add the run_generate_config command to nautobot-server.""" + +from django.core.management.base import BaseCommand +from nautobot.extras.jobs import get_job + +from nautobot_golden_config.utilities.management import job_runner + + +class Command(BaseCommand): + """Boilerplate Command to inherit from BaseCommand.""" + + help = "Run Job to generate your intended configuration from Golden Config Plugin." 
+ + def add_arguments(self, parser): + """Add arguments for run_generate_config.""" + parser.add_argument("-u", "--user", type=str, required=True, help="User to run the Job as.") + parser.add_argument("-d", "--device", type=str, help="Define a uniquely defined device name") + + def handle(self, *args, **kwargs): + """Add handler for run_generate_config.""" + job_class = get_job("plugins/nautobot_golden_config.jobs/IntendedJob") + job_runner(self, job_class, kwargs.get("device"), kwargs.get("user")) diff --git a/nautobot_golden_config/migrations/0005_json_compliance_rule.py b/nautobot_golden_config/migrations/0005_json_compliance_rule.py new file mode 100644 index 00000000..3bda4f90 --- /dev/null +++ b/nautobot_golden_config/migrations/0005_json_compliance_rule.py @@ -0,0 +1,52 @@ +from django.db import migrations, models +import json + +from nautobot_golden_config.models import ConfigCompliance + + +def jsonify(apps, schedma_editor): + """Converts textfield to json in preparation for migration.""" + queryset = ConfigCompliance.objects.all() + attrs = ["actual", "extra", "intended", "missing"] + for i in queryset: + for attr in attrs: + value = getattr(i, attr) + if value: + setattr(i, attr, json.dumps(value)) + i.save() + + +class Migration(migrations.Migration): + + dependencies = [ + ("nautobot_golden_config", "0004_auto_20210616_2234"), + ] + + operations = [ + migrations.RunPython(code=jsonify), + migrations.AlterField( + model_name="compliancerule", + name="match_config", + field=models.TextField(blank=True, null=True), + ), + migrations.AlterField( + model_name="configcompliance", + name="actual", + field=models.JSONField(blank=True), + ), + migrations.AlterField( + model_name="configcompliance", + name="extra", + field=models.JSONField(blank=True), + ), + migrations.AlterField( + model_name="configcompliance", + name="intended", + field=models.JSONField(blank=True), + ), + migrations.AlterField( + model_name="configcompliance", + name="missing", + 
field=models.JSONField(blank=True), + ), + ] diff --git a/nautobot_golden_config/models.py b/nautobot_golden_config/models.py index 37c22d74..3e77c9de 100644 --- a/nautobot_golden_config/models.py +++ b/nautobot_golden_config/models.py @@ -1,6 +1,7 @@ """Django Models for tracking the configuration compliance per feature and device.""" import logging +from deepdiff import DeepDiff from django.db import models from django.core.exceptions import ValidationError @@ -35,8 +36,8 @@ def null_to_empty(val): "custom_fields", "custom_validators", "export_templates", - "relationships", "graphql", + "relationships", "webhooks", ) class ComplianceFeature(PrimaryModel): @@ -70,8 +71,8 @@ def get_absolute_url(self): "custom_fields", "custom_validators", "export_templates", - "relationships", "graphql", + "relationships", "webhooks", ) class ComplianceRule(PrimaryModel): @@ -98,7 +99,7 @@ class ComplianceRule(PrimaryModel): ) match_config = models.TextField( null=True, - blank=False, + blank=True, verbose_name="Config to Match", help_text="The config to match that is matched based on the parent most configuration. e.g. 
`router bgp` or `ntp`.", ) @@ -143,8 +144,6 @@ def clean(self): """Verify that if cli, then match_config is set.""" if self.config_type == ComplianceRuleTypeChoice.TYPE_CLI and not self.match_config: raise ValidationError("CLI configuration set, but no configuration set to match.") - if self.config_type == ComplianceRuleTypeChoice.TYPE_JSON: - raise ValidationError("JSON currently not supported.") @extras_features( @@ -162,11 +161,12 @@ class ConfigCompliance(PrimaryModel): device = models.ForeignKey(to="dcim.Device", on_delete=models.CASCADE, help_text="The device", blank=False) rule = models.ForeignKey(to="ComplianceRule", on_delete=models.CASCADE, blank=False, related_name="rule") compliance = models.BooleanField(null=True, blank=True) - actual = models.TextField(blank=True, help_text="Actual Configuration for feature") - intended = models.TextField(blank=True, help_text="Intended Configuration for feature") - missing = models.TextField(blank=True, help_text="Configuration that should be on the device.") - extra = models.TextField(blank=True, help_text="Configuration that should not be on the device.") + actual = models.JSONField(blank=True, help_text="Actual Configuration for feature") + intended = models.JSONField(blank=True, help_text="Intended Configuration for feature") + missing = models.JSONField(blank=True, help_text="Configuration that should be on the device.") + extra = models.JSONField(blank=True, help_text="Configuration that should not be on the device.") ordered = models.BooleanField(default=True) + # Used for django-pivot, both compliance and compliance_int should be set. 
compliance_int = models.IntegerField(null=True, blank=True) csv_headers = ["Device Name", "Feature", "Compliance"] @@ -199,23 +199,47 @@ def __str__(self): return f"{self.device} -> {self.rule} -> {self.compliance}" def save(self, *args, **kwargs): - """Perform that actual compliance check.""" + """Performs the actual compliance check.""" feature = { "ordered": self.rule.config_ordered, - "name": self.rule.feature.name, - "section": self.rule.match_config.splitlines(), + "name": self.rule, } - value = feature_compliance(feature, self.actual, self.intended, get_platform(self.device.platform.slug)) - self.compliance = value["compliant"] - if self.compliance: - self.compliance_int = 1 + if self.rule.config_type == ComplianceRuleTypeChoice.TYPE_JSON: + feature.update({"section": self.rule.match_config}) + + diff = DeepDiff(self.actual, self.intended, ignore_order=self.ordered, report_repetition=True) + if not diff: + self.compliance_int = 1 + self.compliance = True + self.missing = "" + self.extra = "" + else: + self.compliance_int = 0 + self.compliance = False + self.missing = null_to_empty(self._normalize_diff(diff, "added")) + self.extra = null_to_empty(self._normalize_diff(diff, "removed")) else: - self.compliance_int = 0 - self.ordered: value["ordered_compliant"] - self.missing = null_to_empty(value["missing"]) - self.extra = null_to_empty(value["extra"]) + feature.update({"section": self.rule.match_config.splitlines()}) + value = feature_compliance(feature, self.actual, self.intended, get_platform(self.device.platform.slug)) + self.compliance = value["compliant"] + if self.compliance: + self.compliance_int = 1 + else: + self.compliance_int = 0 + self.ordered = value["ordered_compliant"] + self.missing = null_to_empty(value["missing"]) + self.extra = null_to_empty(value["extra"]) super().save(*args, **kwargs) + @staticmethod + def _normalize_diff(diff, path_to_diff): + """Normalizes the diff to a list of keys and list indexes that have changed.""" + 
dictionary_items = list(diff.get(f"dictionary_item_{path_to_diff}", [])) + list_items = list(diff.get(f"iterable_item_{path_to_diff}", {}).keys()) + values_changed = list(diff.get("values_changed", {}).keys()) + type_changes = list(diff.get("type_changes", {}).keys()) + return dictionary_items + list_items + values_changed + type_changes + @extras_features( "custom_fields", @@ -288,6 +312,9 @@ def __str__(self): return f"{self.device}" +@extras_features( + "graphql", +) class GoldenConfigSetting(PrimaryModel): """GoldenConfigSetting Model defintion. This provides global configs instead of via configs.py.""" @@ -346,7 +373,7 @@ class GoldenConfigSetting(PrimaryModel): encoder=DjangoJSONEncoder, blank=True, null=True, - help_text="Queryset filter matching the list of devices for the scope of devices to be considered.", + help_text="API filter in JSON format matching the list of devices for the scope of devices to be considered.", ) sot_agg_query = models.TextField( null=False, diff --git a/nautobot_golden_config/nornir_plays/config_backup.py b/nautobot_golden_config/nornir_plays/config_backup.py index 813fc502..7dfc69db 100644 --- a/nautobot_golden_config/nornir_plays/config_backup.py +++ b/nautobot_golden_config/nornir_plays/config_backup.py @@ -7,6 +7,7 @@ from nornir.core.task import Result, Task from nornir.core.plugins.inventory import InventoryPluginRegister +from nornir_nautobot.exceptions import NornirNautobotException from nornir_nautobot.plugins.tasks.dispatcher import dispatcher from nornir_nautobot.utils.logger import NornirLogger @@ -106,19 +107,23 @@ def config_backup(job_result, data, backup_root_folder): if not replace_regex_dict.get(regex.platform.slug): replace_regex_dict[regex.platform.slug] = [] replace_regex_dict[regex.platform.slug].append({"replace": regex.replace, "regex": regex.regex}) - nornir_obj = InitNornir( - runner=NORNIR_SETTINGS.get("runner"), - logging={"enabled": False}, - inventory={ - "plugin": "nautobot-inventory", - "options": { 
- "credentials_class": NORNIR_SETTINGS.get("credentials"), - "params": NORNIR_SETTINGS.get("inventory_params"), - "queryset": get_job_filter(data), - "defaults": {"now": now}, + try: + nornir_obj = InitNornir( + runner=NORNIR_SETTINGS.get("runner"), + logging={"enabled": False}, + inventory={ + "plugin": "nautobot-inventory", + "options": { + "credentials_class": NORNIR_SETTINGS.get("credentials"), + "params": NORNIR_SETTINGS.get("inventory_params"), + "queryset": get_job_filter(data), + "defaults": {"now": now}, + }, }, - }, - ) + ) + except NornirNautobotException as err: + logger.log_failure(None, err) + raise NornirNautobotException() nr_with_processors = nornir_obj.with_processors([ProcessGoldenConfig(logger)]) diff --git a/nautobot_golden_config/nornir_plays/config_compliance.py b/nautobot_golden_config/nornir_plays/config_compliance.py index 18dfea6a..d7a67440 100644 --- a/nautobot_golden_config/nornir_plays/config_compliance.py +++ b/nautobot_golden_config/nornir_plays/config_compliance.py @@ -11,8 +11,8 @@ from nornir.core.plugins.inventory import InventoryPluginRegister from nornir.core.task import Result, Task -from nornir_nautobot.utils.logger import NornirLogger from nornir_nautobot.exceptions import NornirNautobotException +from nornir_nautobot.utils.logger import NornirLogger from nautobot_plugin_nornir.plugins.inventory.nautobot_orm import NautobotORMInventory from nautobot_plugin_nornir.constants import NORNIR_SETTINGS @@ -31,16 +31,16 @@ LOGGER = logging.getLogger(__name__) -def get_features(): - """A serializer of sorts to return feature mappings as a dictionary.""" +def get_rules(): + """A serializer of sorts to return rule mappings as a dictionary.""" # TODO: Review if creating a proper serializer is the way to go. 
- features = {} - for obj in ComplianceRule.objects.all(): + rules = {} + for obj in ComplianceRule.objects.filter(config_type="cli"): platform = str(obj.platform.slug) - if not features.get(platform): - features[platform] = [] - features[platform].append({"ordered": obj.config_ordered, "obj": obj, "section": obj.match_config.splitlines()}) - return features + if not rules.get(platform): + rules[platform] = [] + rules[platform].append({"ordered": obj.config_ordered, "obj": obj, "section": obj.match_config.splitlines()}) + return rules def diff_files(backup_file, intended_file): @@ -58,7 +58,7 @@ def run_compliance( # pylint: disable=too-many-arguments,too-many-locals global_settings, backup_root_path, intended_root_folder, - features, + rules, ) -> Result: """Prepare data for compliance task. @@ -92,26 +92,28 @@ def run_compliance( # pylint: disable=too-many-arguments,too-many-locals raise NornirNautobotException() platform = obj.platform.slug - if not features.get(platform): - logger.log_failure(obj, f"There is no `user` defined feature mapping for platform slug {platform}.") + if not rules.get(platform): + logger.log_failure(obj, f"There is no defined `Configuration Rule` for platform slug `{platform}`.") raise NornirNautobotException() if get_platform(platform) not in parser_map.keys(): - logger.log_failure(obj, f"There is currently no parser support for platform slug {get_platform(platform)}.") + logger.log_failure(obj, f"There is currently no parser support for platform slug `{get_platform(platform)}`.") raise NornirNautobotException() backup_cfg = _open_file_config(backup_file) intended_cfg = _open_file_config(intended_file) # TODO: Make this atomic with compliance_obj step. - for feature in features[obj.platform.slug]: + for rule in rules[obj.platform.slug]: # using update_or_create() method to conveniently update actual obj or create new one. 
ConfigCompliance.objects.update_or_create( device=obj, - rule=feature["obj"], + rule=rule["obj"], defaults={ - "actual": section_config(feature, backup_cfg, get_platform(platform)), - "intended": section_config(feature, intended_cfg, get_platform(platform)), + "actual": section_config(rule, backup_cfg, get_platform(platform)), + "intended": section_config(rule, intended_cfg, get_platform(platform)), + "missing": "", + "extra": "", }, ) @@ -126,23 +128,27 @@ def run_compliance( # pylint: disable=too-many-arguments,too-many-locals def config_compliance(job_result, data, backup_root_path, intended_root_folder): """Nornir play to generate configurations.""" now = datetime.now() - features = get_features() + rules = get_rules() logger = NornirLogger(__name__, job_result, data.get("debug")) global_settings = GoldenConfigSetting.objects.first() verify_global_settings(logger, global_settings, ["backup_path_template", "intended_path_template"]) - nornir_obj = InitNornir( - runner=NORNIR_SETTINGS.get("runner"), - logging={"enabled": False}, - inventory={ - "plugin": "nautobot-inventory", - "options": { - "credentials_class": NORNIR_SETTINGS.get("credentials"), - "params": NORNIR_SETTINGS.get("inventory_params"), - "queryset": get_job_filter(data), - "defaults": {"now": now}, + try: + nornir_obj = InitNornir( + runner=NORNIR_SETTINGS.get("runner"), + logging={"enabled": False}, + inventory={ + "plugin": "nautobot-inventory", + "options": { + "credentials_class": NORNIR_SETTINGS.get("credentials"), + "params": NORNIR_SETTINGS.get("inventory_params"), + "queryset": get_job_filter(data), + "defaults": {"now": now}, + }, }, - }, - ) + ) + except NornirNautobotException as err: + logger.log_failure(None, err) + raise NornirNautobotException() nr_with_processors = nornir_obj.with_processors([ProcessGoldenConfig(logger)]) nr_with_processors.run( @@ -152,7 +158,7 @@ def config_compliance(job_result, data, backup_root_path, intended_root_folder): global_settings=global_settings, 
backup_root_path=backup_root_path, intended_root_folder=intended_root_folder, - features=features, + rules=rules, ) logger.log_debug("Completed Compliance for devices.") diff --git a/nautobot_golden_config/nornir_plays/config_intended.py b/nautobot_golden_config/nornir_plays/config_intended.py index 3bd8d92e..b12de455 100644 --- a/nautobot_golden_config/nornir_plays/config_intended.py +++ b/nautobot_golden_config/nornir_plays/config_intended.py @@ -9,9 +9,9 @@ from nornir.core.task import Result, Task +from nornir_nautobot.exceptions import NornirNautobotException from nornir_nautobot.plugins.tasks.dispatcher import dispatcher from nornir_nautobot.utils.logger import NornirLogger -from nornir_nautobot.exceptions import NornirNautobotException from nautobot_plugin_nornir.plugins.inventory.nautobot_orm import NautobotORMInventory from nautobot_plugin_nornir.constants import NORNIR_SETTINGS @@ -88,19 +88,23 @@ def config_intended(job_result, data, jinja_root_path, intended_root_folder): logger = NornirLogger(__name__, job_result, data.get("debug")) global_settings = GoldenConfigSetting.objects.first() verify_global_settings(logger, global_settings, ["jinja_path_template", "intended_path_template", "sot_agg_query"]) - nornir_obj = InitNornir( - runner=NORNIR_SETTINGS.get("runner"), - logging={"enabled": False}, - inventory={ - "plugin": "nautobot-inventory", - "options": { - "credentials_class": NORNIR_SETTINGS.get("credentials"), - "params": NORNIR_SETTINGS.get("inventory_params"), - "queryset": get_job_filter(data), - "defaults": {"now": now}, + try: + nornir_obj = InitNornir( + runner=NORNIR_SETTINGS.get("runner"), + logging={"enabled": False}, + inventory={ + "plugin": "nautobot-inventory", + "options": { + "credentials_class": NORNIR_SETTINGS.get("credentials"), + "params": NORNIR_SETTINGS.get("inventory_params"), + "queryset": get_job_filter(data), + "defaults": {"now": now}, + }, }, - }, - ) + ) + except NornirNautobotException as err: + logger.log_failure(None, 
err) + raise NornirNautobotException() nr_with_processors = nornir_obj.with_processors([ProcessGoldenConfig(logger)]) diff --git a/nautobot_golden_config/tables.py b/nautobot_golden_config/tables.py index cbbf46c9..abb1834d 100644 --- a/nautobot_golden_config/tables.py +++ b/nautobot_golden_config/tables.py @@ -5,6 +5,7 @@ from django_tables2 import Column, LinkColumn, TemplateColumn from django_tables2.utils import A +from nautobot.dcim.models import Device from nautobot.utilities.tables import ( BaseTable, ToggleColumn, @@ -12,89 +13,62 @@ from nautobot_golden_config import models from nautobot_golden_config.utilities.constant import ENABLE_BACKUP, ENABLE_COMPLIANCE, ENABLE_INTENDED, CONFIG_FEATURES -BACKUP_SUCCESS = """ -{% if record.backup_last_success_date and record.backup_last_attempt_date == record.backup_last_success_date %} - -{% else %} - -{% endif %} -{% if record.backup_last_success_date %} - {{ record.backup_last_success_date|date:"SHORT_DATETIME_FORMAT" }} -{% else %} - -- -{% endif %} - {{ record.backup_last_attempt_date|date:"SHORT_DATETIME_FORMAT" }} - -""" - -INTENDED_SUCCESS = """ -{% if record.intended_last_success_date and record.intended_last_attempt_date == record.intended_last_success_date %} - -{% else %} - -{% endif %} -{% if record.intended_last_success_date %} - {{ record.intended_last_success_date|date:"SHORT_DATETIME_FORMAT" }} -{% else %} - -- -{% endif %} - {{ record.intended_last_attempt_date|date:"SHORT_DATETIME_FORMAT" }} - -""" - - -COMPLIANCE_SUCCESS = """ -{% if record.compliance_last_success_date and record.compliance_last_attempt_date == record.compliance_last_success_date %} - -{% else %} - -{% endif %} -{% if record.compliance_last_success_date %} - {{ record.compliance_last_success_date|date:"SHORT_DATETIME_FORMAT" }} -{% else %} - -- -{% endif %} - {{ record.compliance_last_attempt_date|date:"SHORT_DATETIME_FORMAT" }} - -""" ALL_ACTIONS = """ {% if backup == True %} - {% if record.backup_config %} - - - - {% else %} + {% 
if record.configcompliance_set.first.rule.config_type == 'json' %} + {% else %} + {% if record.goldenconfig_set.first.backup_config %} + + + + {% else %} + + {% endif %} {% endif %} {% endif %} {% if intended == True %} - {% if record.intended_config %} - - - - {% else %} + {% if record.configcompliance_set.first.rule.config_type == 'json' %} + {% else %} + {% if record.goldenconfig_set.first.intended_config %} + + + + {% else %} + + {% endif %} {% endif %} {% endif %} {% if compliance == True %} - {% if record.compliance_config %} - - - + {% if record.configcompliance_set.first.rule.config_type == 'json' %} + + + {% else %} - + {% if record.goldenconfig_set.first.compliance_config %} + + + + {% else %} + + {% endif %} {% endif %} {% endif %} {% if sotagg == True %} - + - - - - + {% if record.configcompliance_set.first.rule.config_type == 'json' %} + + {% else %} + + + + + {% endif %} {% endif %} """ @@ -103,7 +77,7 @@ def actual_fields(): """Convienance function to conditionally toggle columns.""" - active_fields = ["pk", "device__name"] + active_fields = ["pk", "name"] if ENABLE_BACKUP: active_fields.append("backup_last_success_date") if ENABLE_INTENDED: @@ -227,27 +201,55 @@ class GoldenConfigTable(BaseTable): """Table to display Config Management Status.""" pk = ToggleColumn() - device__name = TemplateColumn( - template_code="""{{ record.device }}""", + name = TemplateColumn( + template_code="""{{ record.name }}""", verbose_name="Device", ) + if ENABLE_BACKUP: - backup_last_success_date = TemplateColumn(verbose_name="Backup Status", template_code=BACKUP_SUCCESS) + backup_last_success_date = Column(verbose_name="Backup Status", empty_values=()) if ENABLE_INTENDED: - intended_last_success_date = TemplateColumn(verbose_name="Intended Status", template_code=INTENDED_SUCCESS) + intended_last_success_date = Column(verbose_name="Intended Status", empty_values=()) if ENABLE_COMPLIANCE: - compliance_last_success_date = TemplateColumn( - verbose_name="Compliance Status", 
template_code=COMPLIANCE_SUCCESS - ) + compliance_last_success_date = Column(verbose_name="Compliance Status", empty_values=()) actions = TemplateColumn( template_code=ALL_ACTIONS, verbose_name="Actions", extra_context=CONFIG_FEATURES, orderable=False ) + def _render_last_success_date(self, record, column, value): # pylint: disable=no-self-use + """Abstract method to get last success per row record.""" + entry = record.goldenconfig_set.first() + last_success_date = getattr(entry, f"{value}_last_success_date", None) + last_attempt_date = getattr(entry, f"{value}_last_attempt_date", None) + if not last_success_date or not last_attempt_date: + column.attrs = {"td": {"style": "color:black"}} + return "--" + if not last_success_date and not last_attempt_date: + column.attrs = {"td": {"style": "color:black"}} + return "--" + if last_success_date and last_attempt_date == last_success_date: + column.attrs = {"td": {"style": "color:green"}} + return last_success_date + column.attrs = {"td": {"style": "color:red"}} + return last_success_date + + def render_backup_last_success_date(self, record, column): # pylint: disable=no-self-use + """Pull back backup last success per row record.""" + return self._render_last_success_date(record, column, "backup") + + def render_intended_last_success_date(self, record, column): # pylint: disable=no-self-use + """Pull back intended last success per row record.""" + return self._render_last_success_date(record, column, "intended") + + def render_compliance_last_success_date(self, record, column): # pylint: disable=no-self-use + """Pull back compliance last success per row record.""" + return self._render_last_success_date(record, column, "compliance") + class Meta(BaseTable.Meta): - """Meta for class CircuitMaintenanceTable.""" + """Meta for class GoldenConfigTable.""" - model = models.GoldenConfig + model = Device fields = actual_fields() diff --git a/nautobot_golden_config/templates/nautobot_golden_config/configcompliancedetails.html 
b/nautobot_golden_config/templates/nautobot_golden_config/configcompliancedetails.html index bc75b270..4a94a5dd 100644 --- a/nautobot_golden_config/templates/nautobot_golden_config/configcompliancedetails.html +++ b/nautobot_golden_config/templates/nautobot_golden_config/configcompliancedetails.html @@ -14,7 +14,7 @@ {% if config_type == "sotagg" %}

Aggregate Data - {{ device_name }}

-{% elif config_type == "compliance" %} +{% elif config_type in "compliance,json_compliance" %}

Compliance Details - {{ device_name }}

{% elif config_type == "backup" %}

Backup Configuration Details - {{ device_name }}

@@ -25,15 +25,15 @@

Intended Configuration Details - {{ device_name }}

{% if config_type == "sotagg" and format == 'yaml' %}
Output - {% include 'extras/inc/configcontext_format.html' %} + {% include include_file %}
{{ output }}
{% elif config_type == "sotagg" and format == 'json' %}
Output - {% include 'extras/inc/configcontext_format.html' %} + {% include include_file %}
{{ output }}
- {% elif config_type == "compliance" %} + {% elif config_type in "compliance,json_compliance" %}
{% else %} diff --git a/nautobot_golden_config/templates/nautobot_golden_config/configcompliancedetails_modal.html b/nautobot_golden_config/templates/nautobot_golden_config/configcompliancedetails_modal.html index e3ef9c77..928a0145 100644 --- a/nautobot_golden_config/templates/nautobot_golden_config/configcompliancedetails_modal.html +++ b/nautobot_golden_config/templates/nautobot_golden_config/configcompliancedetails_modal.html @@ -10,7 +10,7 @@ {% if config_type == "sotagg"%}

Aggregate Data - {{ device_name }}

-{% elif config_type == "compliance"%} +{% elif "compliance" in config_type %}

Compliance Details - {{ device_name }}

{% elif config_type == "backup"%}

Backup Configuration Details - {{ device_name }}

@@ -18,7 +18,7 @@

Backup Configuration Details - {{ device_name }}

Intended Configuration Details - {{ device_name }}

{% endif %}
- {% if config_type == "compliance" %} + {% if "compliance" in config_type %}
{% else %} diff --git a/nautobot_golden_config/tests/conftest.py b/nautobot_golden_config/tests/conftest.py new file mode 100644 index 00000000..28622a54 --- /dev/null +++ b/nautobot_golden_config/tests/conftest.py @@ -0,0 +1,45 @@ +"""Params for testing.""" +from nautobot.dcim.models import Device, Site, Manufacturer, DeviceType, DeviceRole, Region, Platform + +from nautobot_golden_config.models import ConfigCompliance, ComplianceFeature, ComplianceRule +from nautobot_golden_config.choices import ComplianceRuleTypeChoice + + +def create_device(name="foobaz"): + """Creates a Device to be used with tests.""" + parent_region, _ = Region.objects.get_or_create(name="Parent Region", slug="parent_region") + child_region, _ = Region.objects.get_or_create(name="Child Region", slug="child_region", parent=parent_region) + site, _ = Site.objects.get_or_create(name="foo", slug="foo", region=child_region) + manufacturer, _ = Manufacturer.objects.get_or_create(name="bar") + device_role, _ = DeviceRole.objects.get_or_create(name="baz") + device_type, _ = DeviceType.objects.get_or_create(manufacturer=manufacturer) + platform, _ = Platform.objects.get_or_create(manufacturer=manufacturer) + device = Device.objects.create( + name=name, platform=platform, site=site, device_role=device_role, device_type=device_type + ) + return device + + +def create_feature_rule_json(device, feature="foo", rule="json"): + """Creates a Feature/Rule Mapping and Returns the rule.""" + feature_obj = ComplianceFeature(slug=feature, name=feature) + feature_obj.save() + rule = ComplianceRule( + feature=feature_obj, + platform=device.platform, + config_type=ComplianceRuleTypeChoice.TYPE_JSON, + config_ordered=False, + ) + rule.save() + return rule + + +def create_config_compliance(device, compliance_rule=None, actual=None, intended=None): + """Creates a ConfigCompliance to be used with tests.""" + config_compliance = ConfigCompliance.objects.create( + device=device, + rule=compliance_rule, + actual=actual, + 
intended=intended, + ) + return config_compliance diff --git a/nautobot_golden_config/tests/test_api.py b/nautobot_golden_config/tests/test_api.py index deefcd21..ff744ec2 100644 --- a/nautobot_golden_config/tests/test_api.py +++ b/nautobot_golden_config/tests/test_api.py @@ -6,6 +6,8 @@ from nautobot.utilities.testing import APITestCase +from .conftest import create_device, create_feature_rule_json, create_config_compliance + User = get_user_model() @@ -13,10 +15,69 @@ class GoldenConfigAPITest(APITestCase): # pylint: disable=too-many-ancestors """Test the ConfigCompliance API.""" + def setUp(self): + """Create a superuser and token for API calls.""" + super().setUp() + self.device = create_device() + self.compliance_rule_json = create_feature_rule_json(self.device) + self.base_view = reverse("plugins-api:nautobot_golden_config-api:configcompliance-list") + + def test_root(self): + """Validate the root for Nautobot Chatops API.""" + url = reverse("plugins-api:nautobot_golden_config-api:api-root") + response = self.client.get("{}?format=api".format(url), **self.header) + self.assertEqual(response.status_code, 200) + def test_device_list(self): """Verify that devices can be listed.""" url = reverse("dcim-api:device-list") self.add_permissions("dcim.view_device") response = self.client.get(url, **self.header) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(response.data["count"], 0) + self.assertEqual(response.data["count"], 1) + + def test_config_compliance_list_view(self): + """Verify that config compliance objects can be listed.""" + actual = '{"foo": {"bar-1": "baz"}}' + intended = '{"foo": {"bar-2": "baz"}}' + create_config_compliance( + self.device, actual=actual, intended=intended, compliance_rule=self.compliance_rule_json + ) + self.add_permissions("nautobot_golden_config.view_configcompliance") + response = self.client.get(self.base_view, **self.header) + self.assertEqual(response.status_code, status.HTTP_200_OK) + 
self.assertEqual(response.data["count"], 1) + + def test_config_compliance_post_new_json_compliant(self): + """Verify that config compliance detail view.""" + self.add_permissions("nautobot_golden_config.add_configcompliance") + response = self.client.post( + self.base_view, + data={ + "device": self.device.id, + "intended": '{"foo": {"bar-1": "baz"}}', + "actual": '{"foo": {"bar-1": "baz"}}', + "rule": self.compliance_rule_json.id, + }, + format="json", + **self.header, + ) + self.assertEqual(response.status_code, status.HTTP_201_CREATED) + self.assertTrue(response.data["compliance"]) + + def test_config_compliance_post_new_json_not_compliant(self): + """Verify that config compliance detail view.""" + self.add_permissions("nautobot_golden_config.add_configcompliance") + response = self.client.post( + self.base_view, + data={ + "device": self.device.id, + "intended": '{"foo": {"bar-1": "baz"}}', + "actual": '{"foo": {"bar-2": "baz"}}', + "rule": self.compliance_rule_json.id, + }, + format="json", + **self.header, + ) + self.assertEqual(response.status_code, status.HTTP_201_CREATED) + self.assertFalse(response.data["compliance"]) diff --git a/nautobot_golden_config/tests/test_models.py b/nautobot_golden_config/tests/test_models.py index d99407b2..f315baa1 100644 --- a/nautobot_golden_config/tests/test_models.py +++ b/nautobot_golden_config/tests/test_models.py @@ -2,18 +2,74 @@ from django.test import TestCase from django.core.exceptions import ValidationError - +from django.db.utils import IntegrityError from nautobot.dcim.models import Platform from nautobot_golden_config.models import ( + ConfigCompliance, GoldenConfigSetting, ConfigRemove, ConfigReplace, ) +from .conftest import create_device, create_feature_rule_json, create_config_compliance + class ConfigComplianceModelTestCase(TestCase): - """Test ConfigCompliance Model.""" + """Test CRUD operations for ConfigCompliance Model.""" + + def setUp(self): + """Set up base objects.""" + self.device = 
create_device() + self.compliance_rule_json = create_feature_rule_json(self.device) + + def test_create_config_compliance_success_json(self): + """Successful.""" + actual = {"foo": {"bar-1": "baz"}} + intended = {"foo": {"bar-2": "baz"}} + cc_obj = create_config_compliance( + self.device, actual=actual, intended=intended, compliance_rule=self.compliance_rule_json + ) + + self.assertFalse(cc_obj.compliance) + self.assertEqual(cc_obj.actual, {"foo": {"bar-1": "baz"}}) + self.assertEqual(cc_obj.intended, {"foo": {"bar-2": "baz"}}) + self.assertEqual(cc_obj.missing, ["root['foo']['bar-2']"]) + self.assertEqual(cc_obj.extra, ["root['foo']['bar-1']"]) + + def test_create_config_compliance_unique_failure(self): + """Raises error when attempting to create duplicate.""" + ConfigCompliance.objects.create( + device=self.device, + rule=self.compliance_rule_json, + actual={"foo": {"bar-1": "baz"}}, + intended={"foo": {"bar-2": "baz"}}, + missing={}, + extra={}, + ) + with self.assertRaises(IntegrityError): + ConfigCompliance.objects.create( + device=self.device, + rule=self.compliance_rule_json, + compliance=False, + actual={"foo": {"bar-1": "baz"}}, + intended={"foo": {"bar-2": "baz"}}, + missing={}, + extra={}, + ) + + def test_create_config_compliance_success_compliant(self): + """Successful.""" + cc_obj = ConfigCompliance.objects.create( + device=self.device, + rule=self.compliance_rule_json, + actual={"foo": {"bar-1": "baz"}}, + intended={"foo": {"bar-1": "baz"}}, + ) + + self.assertTrue(cc_obj.compliance) + self.assertEqual(cc_obj.missing, "") + self.assertEqual(cc_obj.extra, "") class GoldenConfigTestCase(TestCase): diff --git a/nautobot_golden_config/tests/test_nornir_plays/test_config_compliance.py b/nautobot_golden_config/tests/test_nornir_plays/test_config_compliance.py index c1511fee..6a74f8b5 100644 --- a/nautobot_golden_config/tests/test_nornir_plays/test_config_compliance.py +++ b/nautobot_golden_config/tests/test_nornir_plays/test_config_compliance.py @@ -2,22 
+2,22 @@ import unittest from unittest.mock import patch, Mock -from nautobot_golden_config.nornir_plays.config_compliance import get_features +from nautobot_golden_config.nornir_plays.config_compliance import get_rules class ConfigComplianceTest(unittest.TestCase): """Test Nornir Compliance Task.""" @patch("nautobot_golden_config.nornir_plays.config_compliance.ComplianceRule", autospec=True) - def test_get_features(self, mock_compliance_rule): + def test_get_rules(self, mock_compliance_rule): """Test proper return when Features are returned.""" features = {"config_ordered": "test_ordered", "match_config": "aaa\nsnmp\n"} mock_obj = Mock(**features) mock_obj.name = "test_name" mock_obj.platform = Mock(slug="test_slug") - mock_compliance_rule.objects.all.return_value = [mock_obj] - features = get_features() - mock_compliance_rule.objects.all.assert_called_once() + mock_compliance_rule.objects.filter.return_value = [mock_obj] + features = get_rules() + mock_compliance_rule.objects.filter.assert_called_once() self.assertEqual( features, {"test_slug": [{"obj": mock_obj, "ordered": "test_ordered", "section": ["aaa", "snmp"]}]} ) diff --git a/nautobot_golden_config/tests/test_utilities/test_git.py b/nautobot_golden_config/tests/test_utilities/test_git.py index f9c9ec35..cc7b9e1f 100644 --- a/nautobot_golden_config/tests/test_utilities/test_git.py +++ b/nautobot_golden_config/tests/test_utilities/test_git.py @@ -14,11 +14,13 @@ def setUp(self): mock_obj.filesystem_path = "/fake/path" mock_obj.remote_url = "/fake/remote" mock_obj._token = "fake token" # pylint: disable=protected-access + mock_obj.username = None self.mock_obj = mock_obj @patch("nautobot_golden_config.utilities.git.Repo", autospec=True) def test_gitrepo_path_noexist(self, mock_repo): """Test Repo is not called when path isn't valid, ensure clone is called.""" + self.mock_obj.username = None GitRepo(self.mock_obj) mock_repo.assert_not_called() mock_repo.clone_from.assert_called_with("/fake/remote", 
to_path="/fake/path") @@ -28,6 +30,35 @@ def test_gitrepo_path_noexist(self, mock_repo): def test_gitrepo_path_exist(self, mock_repo, mock_os): """Test Repo is not called when path is valid, ensure Repo is called.""" mock_os.path.isdir.return_value = True + self.mock_obj.username = None GitRepo(self.mock_obj) mock_repo.assert_called_once() mock_repo.assert_called_with(path="/fake/path") + + @patch("nautobot_golden_config.utilities.git.os") + @patch("nautobot_golden_config.utilities.git.Repo", autospec=True) + def test_path_exist_token_and_username(self, mock_repo, mock_os): + """Test Repo is not called when path is valid, ensure Repo is called.""" + mock_os.path.isdir.return_value = True + self.mock_obj.username = "Test User" + GitRepo(self.mock_obj) + mock_repo.assert_called_once() + mock_repo.assert_called_with(path="/fake/path") + + @patch("nautobot_golden_config.utilities.git.os") + @patch("nautobot_golden_config.utilities.git.Repo", autospec=True) + def test_username_with_symbols(self, mock_repo, mock_os): + """Test Repo is not called when path is valid, ensure Repo is called.""" + mock_os.path.isdir.return_value = True + self.mock_obj.username = "user@fakeemail.local" + GitRepo(self.mock_obj) + mock_repo.assert_called_once() + mock_repo.assert_called_with(path="/fake/path") + + @patch("nautobot_golden_config.utilities.git.os") + @patch("nautobot_golden_config.utilities.git.Repo", autospec=True) + def test_git_with_username(self, mock_repo, mock_os): # pylint: disable=unused-argument + """Test username with special character works.""" + self.mock_obj.username = "admin@ntc.com" + GitRepo(self.mock_obj) + mock_repo.assert_called_once() diff --git a/nautobot_golden_config/tests/test_utilities/test_utils.py b/nautobot_golden_config/tests/test_utilities/test_utils.py new file mode 100644 index 00000000..bc2712d4 --- /dev/null +++ b/nautobot_golden_config/tests/test_utilities/test_utils.py @@ -0,0 +1,28 @@ +"""Test Utils Functions.""" +import unittest +from 
unittest.mock import patch + +from nautobot_golden_config.utilities.utils import get_platform + + +class GetPlatformTest(unittest.TestCase): + """Test Get Platform and User Defined Option.""" + + def test_get_platform_no_settings_definition(self): + """Test defaults when settings platform_slug_map not used.""" + self.assertEqual(get_platform("cisco"), "cisco") + + @patch("nautobot_golden_config.utilities.utils.PLUGIN_CFG", {"platform_slug_map": None}) + def test_get_platform_with_key_none(self): + """Test user defined platform mappings and defaults key defined and set to None.""" + self.assertEqual(get_platform("cisco"), "cisco") + + @patch("nautobot_golden_config.utilities.utils.PLUGIN_CFG", {"platform_slug_map": {"cisco": "cisco_ios"}}) + def test_get_platform_user_defined(self): + """Test user defined platform mappings.""" + self.assertEqual(get_platform("cisco"), "cisco_ios") + + @patch("nautobot_golden_config.utilities.utils.PLUGIN_CFG", {"platform_slug_map": {"cisco_xe": "cisco_ios"}}) + def test_get_platform_defined_but_not_relevant(self): + """Test user defined platform mappings not relevant.""" + self.assertEqual(get_platform("cisco_ios"), "cisco_ios") diff --git a/nautobot_golden_config/utilities/git.py b/nautobot_golden_config/utilities/git.py index 489e062e..556c7853 100644 --- a/nautobot_golden_config/utilities/git.py +++ b/nautobot_golden_config/utilities/git.py @@ -4,8 +4,10 @@ import re import logging +from urllib.parse import quote from git import Repo + LOGGER = logging.getLogger(__name__) @@ -25,10 +27,12 @@ def __init__(self, obj): if self.token and self.token not in self.url: # Some Git Providers require a user as well as a token. if self.token_user: - self.url = re.sub("//", f"//{self.token_user}:{self.token}@", self.url) + self.url = re.sub( + "//", f"//{quote(str(self.token_user), safe='')}:{quote(str(self.token), safe='')}@", self.url + ) else: # Github only requires the token. 
- self.url = re.sub("//", f"//{self.token}@", self.url) + self.url = re.sub("//", f"//{quote(str(self.token), safe='')}@", self.url) self.branch = obj.branch self.obj = obj diff --git a/nautobot_golden_config/utilities/helper.py b/nautobot_golden_config/utilities/helper.py index 65c628b6..2e933538 100644 --- a/nautobot_golden_config/utilities/helper.py +++ b/nautobot_golden_config/utilities/helper.py @@ -17,7 +17,6 @@ "tenant", "region", "site", - "platform", "role", "rack", "rack_group", diff --git a/nautobot_golden_config/utilities/management.py b/nautobot_golden_config/utilities/management.py new file mode 100644 index 00000000..572b50d1 --- /dev/null +++ b/nautobot_golden_config/utilities/management.py @@ -0,0 +1,97 @@ +"""Util functions that are leveraged by the managed commands.""" +# pylint: disable=too-many-branches,bad-option-value +import time +import uuid + +from django.contrib.contenttypes.models import ContentType +from django.utils import timezone +from django.test.client import RequestFactory + +from nautobot.extras.choices import JobResultStatusChoices +from nautobot.extras.models import JobResult +from nautobot.extras.jobs import run_job +from nautobot.dcim.models import Device +from nautobot.users.models import User + + +# Largely based on nautobot core run_job command, which does not allow variables to be sent +# so copied instead of used directly. 
+def job_runner(handle_class, job_class, device=None, user=None): + """Function to make management command code more DRY.""" + data = {} + + if device: + data["device"] = Device.objects.get(name=device) + + request = RequestFactory().request(SERVER_NAME="WebRequestContext") + request.id = uuid.uuid4() + request.user = User.objects.get(username=user) + + job_content_type = ContentType.objects.get(app_label="extras", model="job") + + # Run the job and create a new JobResult + handle_class.stdout.write("[{:%H:%M:%S}] Running {}...".format(timezone.now(), job_class.class_path)) + + job_result = JobResult.enqueue_job( + run_job, + job_class.class_path, + job_content_type, + request.user, + data=data, + request=request, + commit=True, + ) + + # Wait on the job to finish + while job_result.status not in JobResultStatusChoices.TERMINAL_STATE_CHOICES: + time.sleep(1) + job_result = JobResult.objects.get(pk=job_result.pk) + + # Report on success/failure + for test_name, attrs in job_result.data.items(): + + if test_name in ["total", "output"]: + continue + + handle_class.stdout.write( + "\t{}: {} success, {} info, {} warning, {} failure".format( + test_name, + attrs["success"], + attrs["info"], + attrs["warning"], + attrs["failure"], + ) + ) + + for log_entry in attrs["log"]: + status = log_entry[1] + if status == "success": + status = handle_class.style.SUCCESS(status) + elif status == "info": + status = status # pylint: disable=self-assigning-variable + elif status == "warning": + status = handle_class.style.WARNING(status) + elif status == "failure": + status = handle_class.style.NOTICE(status) + + if log_entry[2]: # object associated with log entry + handle_class.stdout.write(f"\t\t{status}: {log_entry[2]}: {log_entry[-1]}") + else: + handle_class.stdout.write(f"\t\t{status}: {log_entry[-1]}") + + if job_result.data["output"]: + handle_class.stdout.write(job_result.data["output"]) + + if job_result.status == JobResultStatusChoices.STATUS_FAILED: + status = 
handle_class.style.ERROR("FAILED") + elif job_result.status == JobResultStatusChoices.STATUS_ERRORED: + status = handle_class.style.ERROR("ERRORED") + else: + status = handle_class.style.SUCCESS("SUCCESS") + handle_class.stdout.write("[{:%H:%M:%S}] {}: {}".format(timezone.now(), job_class.class_path, status)) + + # Wrap things up + handle_class.stdout.write( + "[{:%H:%M:%S}] {}: Duration {}".format(timezone.now(), job_class.class_path, job_result.duration) + ) + handle_class.stdout.write("[{:%H:%M:%S}] Finished".format(timezone.now())) diff --git a/nautobot_golden_config/utilities/utils.py b/nautobot_golden_config/utilities/utils.py index b6c12b01..a3df5500 100644 --- a/nautobot_golden_config/utilities/utils.py +++ b/nautobot_golden_config/utilities/utils.py @@ -5,6 +5,6 @@ def get_platform(platform): """Utility method to map user defined platform slug to netutils named entity.""" - if PLUGIN_CFG.get("platform_slug_map", {}).get(platform): - return PLUGIN_CFG["platform_slug_map"][platform] - return platform + if not PLUGIN_CFG.get("platform_slug_map"): + return platform + return PLUGIN_CFG.get("platform_slug_map").get(platform, platform) diff --git a/nautobot_golden_config/views.py b/nautobot_golden_config/views.py index 9ee2daa6..16f9eaa3 100644 --- a/nautobot_golden_config/views.py +++ b/nautobot_golden_config/views.py @@ -1,15 +1,17 @@ """Django views for Nautobot Golden Configuration.""" -from datetime import datetime +from datetime import datetime, timezone import base64 import io import json import logging import urllib +import difflib import yaml import matplotlib.pyplot as plt import numpy as np +from packaging.version import Version from django.contrib import messages from django.db.models import F, Q, Max @@ -17,8 +19,10 @@ from django.shortcuts import render, redirect from django_pivot.pivot import pivot +import nautobot from nautobot.dcim.models import Device from nautobot.core.views import generic + from nautobot.utilities.utils import csv_format from 
nautobot.utilities.error_handlers import handle_protectederror from nautobot.utilities.views import ContentTypePermissionRequiredMixin @@ -32,6 +36,7 @@ GREEN = "#D5E8D4" RED = "#F8CECC" +NAUTOBOT_VERSION = Version(nautobot.__version__) # # GoldenConfig @@ -44,13 +49,17 @@ class GoldenConfigListView(generic.ObjectListView): table = tables.GoldenConfigTable filterset = filters.GoldenConfigFilter filterset_form = forms.GoldenConfigFilterForm - queryset = models.GoldenConfig.objects.all() + queryset = Device.objects.all() template_name = "nautobot_golden_config/goldenconfig_list.html" def extra_context(self): """Boilerplace code to modify data before returning.""" return CONFIG_FEATURES + def alter_queryset(self, request): + """Build actual runtime queryset as the build time queryset provides no information.""" + return self.queryset.filter(id__in=models.GoldenConfigSetting.objects.first().get_queryset()) + class GoldenConfigBulkDeleteView(generic.BulkDeleteView): """Standard view for bulk deletion of data.""" @@ -258,41 +267,27 @@ def get_required_permission(self): """Manually set permission when not tied to a model for config details.""" return "nautobot_golden_config.view_goldenconfig" - def get(self, request, pk, config_type): # pylint: disable=invalid-name,too-many-branches + def get( + self, request, pk, config_type + ): # pylint: disable=invalid-name,too-many-branches,too-many-locals,too-many-statements """Read request into a view of a single device.""" + + def diff_structured_data(backup_data, intended_data): + """Utility function to provide `Unix Diff` between two JSON snippets.""" + backup_yaml = yaml.safe_dump(json.loads(backup_data)) + intend_yaml = yaml.safe_dump(json.loads(intended_data)) + + for line in difflib.unified_diff(backup_yaml.splitlines(), intend_yaml.splitlines(), lineterm=""): + yield line + device = Device.objects.get(pk=pk) config_details = models.GoldenConfig.objects.filter(device=device).first() + if not config_details and config_type == 
"json_compliance": + # Create the GoldenConfig object for the device only for JSON compliance. + config_details = models.GoldenConfig.objects.create(device=device) structure_format = "json" - if not config_details: - output = "" - elif config_type == "backup": - output = config_details.backup_config - elif config_type == "intended": - output = config_details.intended_config - elif config_type == "compliance": - output = config_details.compliance_config - if config_details.backup_last_success_date: - backup_date = str(config_details.backup_last_success_date.strftime("%b %d %Y")) - else: - backup_date = datetime.now().strftime("%b %d %Y") - if config_details.intended_last_success_date: - intended_date = str(config_details.intended_last_success_date.strftime("%b %d %Y")) - else: - intended_date = datetime.now().strftime("%b %d %Y") - first_occurence = output.index("@@") - second_occurence = output.index("@@", first_occurence) - # This is logic to match diff2html's expected input. - output = ( - "--- Backup File - " - + backup_date - + "\n+++ Intended File - " - + intended_date - + "\n" - + output[first_occurence:second_occurence] - + "@@" - + output[second_occurence + 2 :] - ) - elif config_type == "sotagg": + + if config_type == "sotagg": if request.GET.get("format") in ["json", "yaml"]: structure_format = request.GET.get("format") @@ -303,10 +298,78 @@ def get(self, request, pk, config_type): # pylint: disable=invalid-name,too-man output = yaml.dump(json.loads(json.dumps(output)), default_flow_style=False) else: output = json.dumps(output, indent=4) + elif not config_details: + output = "" + elif config_type == "backup": + output = config_details.backup_config + elif config_type == "intended": + output = config_details.intended_config + # Compliance type is broken up into JSON(json_compliance) and CLI(compliance) compliance. 
+ elif "compliance" in config_type: + if config_type == "compliance": + # This section covers the steps to run regular CLI compliance which is a diff of 2 files (backup and intended). + diff_type = "File" + output = config_details.compliance_config + if config_details.backup_last_success_date: + backup_date = str(config_details.backup_last_success_date.strftime("%b %d %Y")) + else: + backup_date = datetime.now().strftime("%b %d %Y") + if config_details.intended_last_success_date: + intended_date = str(config_details.intended_last_success_date.strftime("%b %d %Y")) + else: + intended_date = datetime.now().strftime("%b %d %Y") + elif config_type == "json_compliance": + # The JSON compliance runs differently then CLI, it grabs all configcompliance objects for + # a given device and merges them, sorts them, and diffs them. + diff_type = "JSON" + # Get all compliance objects for a device. + compliance_objects = models.ConfigCompliance.objects.filter(device=device.id) + actual = {} + intended = {} + # Set a starting time that will be older than all last updated objects in compliance objects. + most_recent_time = datetime(1970, 1, 1, tzinfo=timezone.utc) + # Loop through config compliance objects and merge the data into one dataset. + for obj in compliance_objects: + actual[obj.rule.feature.slug] = obj.actual + intended[obj.rule.feature.slug] = obj.intended + # Update most_recent_time each time the compliance objects time is more recent then previous. + if obj.last_updated > most_recent_time: + most_recent_time = obj.last_updated + config_details.compliance_last_attempt_date = most_recent_time + config_details.compliance_last_success_date = most_recent_time + # Generate the diff between both JSON objects and sort keys for accurate diff. 
+ config_details.compliance_config = "\n".join( + diff_structured_data(json.dumps(actual, sort_keys=True), json.dumps(intended, sort_keys=True)) + ) + config_details.save() + output = config_details.compliance_config + backup_date = intended_date = str(most_recent_time.strftime("%b %d %Y")) + if output == "": + # This is used if all config snippets are in compliance and no diff exist. + output = f"--- Backup {diff_type} - " + backup_date + f"\n+++ Intended {diff_type} - " + intended_date + else: + first_occurence = output.index("@@") + second_occurence = output.index("@@", first_occurence) + # This is logic to match diff2html's expected input. + output = ( + f"--- Backup {diff_type} - " + + backup_date + + f"\n+++ Intended {diff_type} - " + + intended_date + + "\n" + + output[first_occurence:second_occurence] + + "@@" + + output[second_occurence + 2 :] + ) template_name = "nautobot_golden_config/configcompliancedetails.html" if request.GET.get("modal") == "true": template_name = "nautobot_golden_config/configcompliancedetails_modal.html" + include_file = "extras/inc/json_format.html" + + # Nautobot core update template name, for backwards compat + if NAUTOBOT_VERSION < Version("1.1"): + include_file = "extras/inc/configcontext_format.html" return render( request, @@ -317,6 +380,7 @@ def get(self, request, pk, config_type): # pylint: disable=invalid-name,too-man "config_type": config_type, "format": structure_format, "device": device, + "include_file": include_file, }, ) diff --git a/poetry.lock b/poetry.lock index 4faa37d0..68497dae 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,3 +1,25 @@ +[[package]] +name = "aiocontextvars" +version = "0.2.2" +description = "Asyncio support for PEP-567 contextvars backport." 
+category = "main" +optional = false +python-versions = ">=3.5" + +[package.dependencies] +contextvars = {version = "2.4", markers = "python_version < \"3.7\""} + +[[package]] +name = "amqp" +version = "5.0.6" +description = "Low-level AMQP client for Python (fork of amqplib)." +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +vine = "5.0.0" + [[package]] name = "aniso8601" version = "7.0.0" @@ -30,7 +52,7 @@ tests = ["pytest", "pytest-asyncio", "mypy (>=0.800)"] [[package]] name = "astroid" -version = "2.6.2" +version = "2.6.6" description = "An abstract syntax tree for Python with inference support." category = "main" optional = false @@ -87,6 +109,14 @@ six = ">=1.4.1" tests = ["pytest (>=3.2.1,!=3.3.0)"] typecheck = ["mypy"] +[[package]] +name = "billiard" +version = "3.6.4.0" +description = "Python multiprocessing fork with improvements and bugfixes" +category = "main" +optional = false +python-versions = "*" + [[package]] name = "black" version = "20.8b1" @@ -110,6 +140,65 @@ typing-extensions = ">=3.7.4" colorama = ["colorama (>=0.4.3)"] d = ["aiohttp (>=3.3.2)", "aiohttp-cors"] +[[package]] +name = "cached-property" +version = "1.5.2" +description = "A decorator for caching properties in classes." +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "celery" +version = "5.1.2" +description = "Distributed Task Queue." 
+category = "main" +optional = false +python-versions = ">=3.6," + +[package.dependencies] +billiard = ">=3.6.4.0,<4.0" +click = ">=7.0,<8.0" +click-didyoumean = ">=0.0.3" +click-plugins = ">=1.1.1" +click-repl = ">=0.1.6" +kombu = ">=5.1.0,<6.0" +pytz = ">0.0-dev" +vine = ">=5.0.0,<6.0" + +[package.extras] +arangodb = ["pyArango (>=1.3.2)"] +auth = ["cryptography"] +azureblockblob = ["azure-storage-blob (==12.6.0)"] +brotli = ["brotli (>=1.0.0)", "brotlipy (>=0.7.0)"] +cassandra = ["cassandra-driver (<3.21.0)"] +consul = ["python-consul2"] +cosmosdbsql = ["pydocumentdb (==2.3.2)"] +couchbase = ["couchbase (>=3.0.0)"] +couchdb = ["pycouchdb"] +django = ["Django (>=1.11)"] +dynamodb = ["boto3 (>=1.9.178)"] +elasticsearch = ["elasticsearch"] +eventlet = ["eventlet (>=0.26.1)"] +gevent = ["gevent (>=1.0.0)"] +librabbitmq = ["librabbitmq (>=1.5.0)"] +memcache = ["pylibmc"] +mongodb = ["pymongo[srv] (>=3.3.0)"] +msgpack = ["msgpack"] +pymemcache = ["python-memcached"] +pyro = ["pyro4"] +pytest = ["pytest-celery"] +redis = ["redis (>=3.2.0)"] +s3 = ["boto3 (>=1.9.125)"] +slmq = ["softlayer-messaging (>=1.0.3)"] +solar = ["ephem"] +sqlalchemy = ["sqlalchemy"] +sqs = ["boto3 (>=1.9.125)", "pycurl (==7.43.0.5)"] +tblib = ["tblib (>=1.3.0)", "tblib (>=1.5.0)"] +yaml = ["PyYAML (>=3.10)"] +zookeeper = ["kazoo (>=1.3.1)"] +zstd = ["zstandard"] + [[package]] name = "certifi" version = "2021.5.30" @@ -131,7 +220,7 @@ pycparser = "*" [[package]] name = "charset-normalizer" -version = "2.0.1" +version = "2.0.6" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
category = "main" optional = false @@ -142,8 +231,8 @@ unicode_backport = ["unicodedata2"] [[package]] name = "ciscoconfparse" -version = "1.5.36" -description = "Parse, Audit, Query, Build, and Modify Cisco IOS-style configurations" +version = "1.5.46" +description = "" category = "main" optional = false python-versions = "*" @@ -151,19 +240,54 @@ python-versions = "*" [package.dependencies] colorama = "*" dnspython = "*" +loguru = "*" passlib = "*" [[package]] name = "click" -version = "8.0.1" +version = "7.1.2" description = "Composable command line interface toolkit" category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[[package]] +name = "click-didyoumean" +version = "0.0.3" +description = "Enable git-like did-you-mean feature in click." +category = "main" +optional = false +python-versions = "*" [package.dependencies] -colorama = {version = "*", markers = "platform_system == \"Windows\""} -importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} +click = "*" + +[[package]] +name = "click-plugins" +version = "1.1.1" +description = "An extension module for click to enable registering CLI commands via setuptools entry-points." 
+category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +click = ">=4.0" + +[package.extras] +dev = ["pytest (>=3.6)", "pytest-cov", "wheel", "coveralls"] + +[[package]] +name = "click-repl" +version = "0.2.0" +description = "REPL plugin for Click" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +click = "*" +prompt-toolkit = "*" +six = "*" [[package]] name = "colorama" @@ -173,6 +297,17 @@ category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +[[package]] +name = "contextvars" +version = "2.4" +description = "PEP 567 Backport" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +immutables = ">=0.9" + [[package]] name = "coreapi" version = "2.3.3" @@ -200,7 +335,7 @@ jinja2 = "*" [[package]] name = "cryptography" -version = "3.4.7" +version = "3.4.8" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." category = "main" optional = false @@ -236,6 +371,20 @@ category = "main" optional = false python-versions = ">=3.6, <3.7" +[[package]] +name = "deepdiff" +version = "5.5.0" +description = "Deep Difference and Search of any Python object/data." +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +ordered-set = "4.0.2" + +[package.extras] +cli = ["click (==7.1.2)", "pyyaml (==5.4)", "toml (==0.10.2)", "clevercsv (==0.6.7)"] + [[package]] name = "defusedxml" version = "0.7.1" @@ -309,9 +458,20 @@ python-versions = ">=3.5" cryptography = "*" django-appconf = "*" +[[package]] +name = "django-db-file-storage" +version = "0.5.5" +description = "Custom FILE_STORAGE for Django. Saves files in your database instead of your file system." 
+category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +Django = "*" + [[package]] name = "django-debug-toolbar" -version = "3.2.1" +version = "3.2.2" description = "A configurable set of panels that display various debug information about the current request/response." category = "main" optional = false @@ -332,6 +492,29 @@ python-versions = ">=3.5" [package.dependencies] Django = ">=2.2" +[[package]] +name = "django-health-check" +version = "3.16.4" +description = "Run checks on services like databases, queue servers, celery processes, etc." +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +django = ">=2.2" + +[[package]] +name = "django-jinja" +version = "2.7.1" +description = "Jinja2 templating language integrated in Django." +category = "main" +optional = false +python-versions = ">=3.5" + +[package.dependencies] +django = ">=2.2" +jinja2 = ">=2.10" + [[package]] name = "django-js-asset" version = "1.2.2" @@ -553,7 +736,7 @@ smmap = ">=3.0.1,<5" [[package]] name = "gitpython" -version = "3.1.18" +version = "3.1.20" description = "Python Git Library" category = "main" optional = false @@ -561,11 +744,11 @@ python-versions = ">=3.6" [package.dependencies] gitdb = ">=4.0.1,<5" -typing-extensions = {version = ">=3.7.4.0", markers = "python_version < \"3.8\""} +typing-extensions = {version = ">=3.7.4.3", markers = "python_version < \"3.10\""} [[package]] name = "graphene" -version = "2.1.8" +version = "2.1.9" description = "GraphQL Framework for Python" category = "main" optional = false @@ -580,7 +763,7 @@ six = ">=1.10.0,<2" [package.extras] django = ["graphene-django"] sqlalchemy = ["graphene-sqlalchemy"] -test = ["pytest", "pytest-benchmark", "pytest-cov", "pytest-mock", "snapshottest", "coveralls", "promise", "six", "mock", "pytz", "iso8601"] +test = ["pytest", "pytest-benchmark", "pytest-cov", "pytest-mock", "fastdiff (==0.2.0)", "snapshottest", "coveralls", "promise", "six", "mock", "pytz", 
"iso8601"] [[package]] name = "graphene-django" @@ -642,6 +825,20 @@ category = "main" optional = false python-versions = ">=3.5" +[[package]] +name = "immutables" +version = "0.16" +description = "Immutable Collections" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +typing-extensions = {version = ">=3.7.4.3", markers = "python_version < \"3.8\""} + +[package.extras] +test = ["flake8 (>=3.8.4,<3.9.0)", "pycodestyle (>=2.6.0,<2.7.0)", "mypy (>=0.910)", "pytest (>=6.2.4,<6.3.0)"] + [[package]] name = "importlib-metadata" version = "3.4.0" @@ -660,7 +857,7 @@ testing = ["pytest (>=3.5,!=3.7.3)", "pytest-checkdocs (>=1.2.3)", "pytest-flake [[package]] name = "importlib-resources" -version = "5.2.0" +version = "5.2.2" description = "Read resources from Python packages" category = "main" optional = false @@ -744,16 +941,16 @@ format_nongpl = ["idna", "jsonpointer (>1.13)", "webcolors", "rfc3986-validator [[package]] name = "junos-eznc" -version = "2.6.2" +version = "2.5.4" description = "Junos 'EZ' automation for non-programmers" category = "main" optional = false -python-versions = ">=3.*, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [package.dependencies] jinja2 = ">=2.7.1" lxml = ">=3.2.4" -ncclient = "0.6.9" +ncclient = ">=0.6.3" netaddr = "*" paramiko = ">=1.15.2" pyparsing = "*" @@ -772,6 +969,36 @@ category = "main" optional = false python-versions = ">=3.6" +[[package]] +name = "kombu" +version = "5.1.0" +description = "Messaging library for Python." 
+category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +amqp = ">=5.0.6,<6.0.0" +cached-property = {version = "*", markers = "python_version < \"3.8\""} +importlib-metadata = {version = ">=0.18", markers = "python_version < \"3.8\""} +vine = "*" + +[package.extras] +azureservicebus = ["azure-servicebus (>=7.0.0)"] +azurestoragequeues = ["azure-storage-queue"] +consul = ["python-consul (>=0.6.0)"] +librabbitmq = ["librabbitmq (>=1.5.2)"] +mongodb = ["pymongo (>=3.3.0)"] +msgpack = ["msgpack"] +pyro = ["pyro4"] +qpid = ["qpid-python (>=0.26)", "qpid-tools (>=0.26)"] +redis = ["redis (>=3.3.11)"] +slmq = ["softlayer-messaging (>=1.0.3)"] +sqlalchemy = ["sqlalchemy"] +sqs = ["boto3 (>=1.4.4)", "pycurl (==7.43.0.2)", "urllib3 (<1.26)"] +yaml = ["PyYAML (>=3.10)"] +zookeeper = ["kazoo (>=1.3.1)"] + [[package]] name = "lazy-object-proxy" version = "1.6.0" @@ -780,6 +1007,22 @@ category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +[[package]] +name = "loguru" +version = "0.5.3" +description = "Python logging made (stupidly) simple" +category = "main" +optional = false +python-versions = ">=3.5" + +[package.dependencies] +aiocontextvars = {version = ">=0.2.0", markers = "python_version < \"3.7\""} +colorama = {version = ">=0.3.4", markers = "sys_platform == \"win32\""} +win32-setctime = {version = ">=1.0.0", markers = "sys_platform == \"win32\""} + +[package.extras] +dev = ["codecov (>=2.0.15)", "colorama (>=0.3.4)", "flake8 (>=3.7.7)", "tox (>=3.9.0)", "tox-travis (>=0.12)", "pytest (>=4.6.2)", "pytest-cov (>=2.7.1)", "Sphinx (>=2.2.1)", "sphinx-autobuild (>=0.7.1)", "sphinx-rtd-theme (>=0.4.3)", "black (>=19.10b0)", "isort (>=5.1.1)"] + [[package]] name = "lxml" version = "4.6.3" @@ -875,18 +1118,22 @@ textfsm = "*" [[package]] name = "nautobot" -version = "1.0.3" +version = "1.1.3" description = "Source of truth and network automation platform." 
category = "main" optional = false python-versions = ">=3.6,<4.0" [package.dependencies] +celery = ">=5.1.0,<5.2.0" Django = ">=3.1.12,<3.2.0" django-cacheops = ">=5.1,<5.2" django-cors-headers = ">=3.7.0,<3.8.0" django-cryptography = ">=1.0,<1.1" +django-db-file-storage = ">=0.5.5,<0.6.0" django-filter = ">=2.4.0,<2.5.0" +django-health-check = ">=3.16.4,<4.0.0" +django-jinja = "<2.8.0" django-mptt = ">=0.11.0,<0.12.0" django-prometheus = ">=2.1.0,<2.2.0" django-redis = ">=4.12.1,<4.13.0" @@ -901,9 +1148,10 @@ GitPython = ">=3.1.15,<3.2.0" graphene-django = ">=2.15.0,<2.16.0" importlib-metadata = {version = ">=3.4.0,<3.5.0", markers = "python_version < \"3.8\""} Jinja2 = ">=2.11.3,<2.12.0" +jsonschema = ">=3.2.0,<3.3.0" Markdown = ">=3.3.4,<3.4.0" netaddr = ">=0.8.0,<0.9.0" -Pillow = ">=8.1.0,<8.2.0" +Pillow = ">=8.3.2,<8.4.0" psycopg2-binary = ">=2.8.6,<2.9.0" pycryptodome = ">=3.10.1,<3.11.0" pyuwsgi = ">=2.0.19.1.post0,<2.1.0.0" @@ -911,9 +1159,12 @@ PyYAML = ">=5.4.1,<5.5.0" social-auth-app-django = ">=4.0.0,<5.0.0" svgwrite = ">=1.4.1,<1.5.0" +[package.extras] +mysql = ["mysqlclient (>=2.0.3,<2.1.0)"] + [[package]] name = "nautobot-plugin-nornir" -version = "0.9.3" +version = "0.9.4" description = "Nautobot Nornir plugin." category = "main" optional = false @@ -925,7 +1176,7 @@ nornir-nautobot = ">=2.0.0,<3.0.0" [[package]] name = "ncclient" -version = "0.6.9" +version = "0.6.12" description = "Python library for NETCONF clients" category = "main" optional = false @@ -968,7 +1219,7 @@ test = ["pyyaml (>=5.1.2)", "pytest (>=5.1.2)"] [[package]] name = "netutils" -version = "0.2.1" +version = "0.2.2" description = "Common helper functions useful in network automation." category = "main" optional = false @@ -1058,7 +1309,7 @@ nornir = ">=3,<4" [[package]] name = "ntc-templates" -version = "2.1.0" +version = "2.3.2" description = "TextFSM Templates for Network Devices, and Python wrapper for TextFSM's CliTable." 
category = "main" optional = false @@ -1088,6 +1339,14 @@ rsa = ["cryptography (>=3.0.0,<4)"] signals = ["blinker (>=1.4.0)"] signedtoken = ["cryptography (>=3.0.0,<4)", "pyjwt (>=2.0.0,<3)"] +[[package]] +name = "ordered-set" +version = "4.0.2" +description = "A set that remembers its order, and allows looking up its items by their index in that order." +category = "main" +optional = false +python-versions = ">=3.5" + [[package]] name = "packaging" version = "21.0" @@ -1134,11 +1393,11 @@ totp = ["cryptography"] [[package]] name = "pathspec" -version = "0.8.1" +version = "0.9.0" description = "Utility library for gitignore style pattern matching of file paths." category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" [[package]] name = "pbr" @@ -1150,7 +1409,7 @@ python-versions = ">=2.6" [[package]] name = "pillow" -version = "8.1.2" +version = "8.3.2" description = "Python Imaging Library (Fork)" category = "main" optional = false @@ -1181,6 +1440,17 @@ six = "*" [package.extras] test = ["pytest (>=2.7.3)", "pytest-cov", "coveralls", "futures", "pytest-benchmark", "mock"] +[[package]] +name = "prompt-toolkit" +version = "3.0.3" +description = "Library for building powerful interactive command lines in Python" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +wcwidth = "*" + [[package]] name = "psycopg2-binary" version = "2.8.6" @@ -1324,7 +1594,7 @@ tests = ["pytest (>=3.2.1,!=3.3.0)", "hypothesis (>=3.27.0)"] [[package]] name = "pynautobot" -version = "1.0.2" +version = "1.0.3" description = "Nautobot API client library" category = "main" optional = false @@ -1365,7 +1635,7 @@ cp2110 = ["hidapi"] [[package]] name = "python-dateutil" -version = "2.8.1" +version = "2.8.2" description = "Extensions to the standard Python datetime module" category = "main" optional = false @@ -1426,7 +1696,7 @@ hiredis = 
["hiredis (>=0.1.3)"] [[package]] name = "regex" -version = "2021.7.6" +version = "2021.8.28" description = "Alternative regular expression module, to replace re." category = "dev" optional = false @@ -1467,7 +1737,7 @@ rsa = ["oauthlib[signedtoken] (>=3.0.0)"] [[package]] name = "rq" -version = "1.9.0" +version = "1.10.0" description = "RQ is a simple, lightweight, library for creating background jobs, and processing them." category = "main" optional = false @@ -1510,7 +1780,7 @@ python-versions = "*" [[package]] name = "scp" -version = "0.13.6" +version = "0.14.1" description = "scp module for paramiko" category = "main" optional = false @@ -1521,7 +1791,7 @@ paramiko = "*" [[package]] name = "singledispatch" -version = "3.6.2" +version = "3.7.0" description = "Backport functools.singledispatch from Python 3.4 to Python 2.6-3.3." category = "main" optional = false @@ -1596,7 +1866,7 @@ saml = ["python3-saml (>=1.2.1)"] [[package]] name = "sqlparse" -version = "0.4.1" +version = "0.4.2" description = "A non-validating SQL parser." category = "main" optional = false @@ -1604,7 +1874,7 @@ python-versions = ">=3.5" [[package]] name = "stevedore" -version = "3.3.0" +version = "3.4.0" description = "Manage dynamic plugins for Python applications" category = "dev" optional = false @@ -1676,7 +1946,7 @@ python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" [[package]] name = "transitions" -version = "0.8.8" +version = "0.8.9" description = "A lightweight, object-oriented Python state machine implementation with many extensions." 
category = "main" optional = false @@ -1699,7 +1969,7 @@ python-versions = "*" [[package]] name = "typing-extensions" -version = "3.10.0.0" +version = "3.10.0.2" description = "Backported and Experimental Type Hints for Python 3.5+" category = "main" optional = false @@ -1726,6 +1996,33 @@ brotli = ["brotlipy (>=0.6.0)"] secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"] socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] +[[package]] +name = "vine" +version = "5.0.0" +description = "Promises, promises, promises." +category = "main" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "wcwidth" +version = "0.2.5" +description = "Measures the displayed width of unicode strings in a terminal" +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "win32-setctime" +version = "1.0.3" +description = "A small Python utility to set file creation time on Windows" +category = "main" +optional = false +python-versions = ">=3.5" + +[package.extras] +dev = ["pytest (>=4.6.2)", "black (>=19.3b0)"] + [[package]] name = "wrapt" version = "1.12.1" @@ -1734,6 +2031,18 @@ category = "main" optional = false python-versions = "*" +[[package]] +name = "yamllint" +version = "1.26.3" +description = "A linter for YAML files." 
+category = "dev" +optional = false +python-versions = ">=3.5" + +[package.dependencies] +pathspec = ">=0.5.3" +pyyaml = "*" + [[package]] name = "yamlordereddictloader" version = "0.4.0" @@ -1760,9 +2069,17 @@ testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytes [metadata] lock-version = "1.1" python-versions = "^3.6" -content-hash = "66e595bea5b6e950a015d99b66e33041bfa2dd06a5a9837f04f92066684effd6" +content-hash = "3030f4c4b6b3552026b38ac3cfaa31c771ffe1c2f8b2898eaeff9d36bea38915" [metadata.files] +aiocontextvars = [ + {file = "aiocontextvars-0.2.2-py2.py3-none-any.whl", hash = "sha256:885daf8261818767d8f7cbd79f9d4482d118f024b6586ef6e67980236a27bfa3"}, + {file = "aiocontextvars-0.2.2.tar.gz", hash = "sha256:f027372dc48641f683c559f247bd84962becaacdc9ba711d583c3871fb5652aa"}, +] +amqp = [ + {file = "amqp-5.0.6-py3-none-any.whl", hash = "sha256:493a2ac6788ce270a2f6a765b017299f60c1998f5a8617908ee9be082f7300fb"}, + {file = "amqp-5.0.6.tar.gz", hash = "sha256:03e16e94f2b34c31f8bf1206d8ddd3ccaa4c315f7f6a1879b7b1210d229568c2"}, +] aniso8601 = [ {file = "aniso8601-7.0.0-py2.py3-none-any.whl", hash = "sha256:d10a4bf949f619f719b227ef5386e31f49a2b6d453004b21f02661ccc8670c7b"}, {file = "aniso8601-7.0.0.tar.gz", hash = "sha256:513d2b6637b7853806ae79ffaca6f3e8754bdd547048f5ccc1420aec4b714f1e"}, @@ -1776,8 +2093,8 @@ asgiref = [ {file = "asgiref-3.4.1.tar.gz", hash = "sha256:4ef1ab46b484e3c706329cedeff284a5d40824200638503f5768edb6de7d58e9"}, ] astroid = [ - {file = "astroid-2.6.2-py3-none-any.whl", hash = "sha256:606b2911d10c3dcf35e58d2ee5c97360e8477d7b9f3efc3f24811c93e6fc2cd9"}, - {file = "astroid-2.6.2.tar.gz", hash = "sha256:38b95085e9d92e2ca06cf8b35c12a74fa81da395a6f9e65803742e6509c05892"}, + {file = "astroid-2.6.6-py3-none-any.whl", hash = "sha256:ab7f36e8a78b8e54a62028ba6beef7561db4cdb6f2a5009ecc44a6f42b5697ef"}, + {file = "astroid-2.6.6.tar.gz", hash = "sha256:3975a0bd5373bdce166e60c851cfcbaf21ee96de80ec518c1f4cb3e94c3fb334"}, ] attrs = [ {file 
= "attrs-21.2.0-py2.py3-none-any.whl", hash = "sha256:149e90d6d8ac20db7a955ad60cf0e6881a3f20d37096140088356da6c716b0b1"}, @@ -1796,9 +2113,21 @@ bcrypt = [ {file = "bcrypt-3.2.0-cp36-abi3-win_amd64.whl", hash = "sha256:81fec756feff5b6818ea7ab031205e1d323d8943d237303baca2c5f9c7846f34"}, {file = "bcrypt-3.2.0.tar.gz", hash = "sha256:5b93c1726e50a93a033c36e5ca7fdcd29a5c7395af50a6892f5d9e7c6cfbfb29"}, ] +billiard = [ + {file = "billiard-3.6.4.0-py3-none-any.whl", hash = "sha256:87103ea78fa6ab4d5c751c4909bcff74617d985de7fa8b672cf8618afd5a875b"}, + {file = "billiard-3.6.4.0.tar.gz", hash = "sha256:299de5a8da28a783d51b197d496bef4f1595dd023a93a4f59dde1886ae905547"}, +] black = [ {file = "black-20.8b1.tar.gz", hash = "sha256:1c02557aa099101b9d21496f8a914e9ed2222ef70336404eeeac8edba836fbea"}, ] +cached-property = [ + {file = "cached-property-1.5.2.tar.gz", hash = "sha256:9fa5755838eecbb2d234c3aa390bd80fbd3ac6b6869109bfc1b499f7bd89a130"}, + {file = "cached_property-1.5.2-py2.py3-none-any.whl", hash = "sha256:df4f613cf7ad9a588cc381aaf4a512d26265ecebd5eb9e1ba12f1319eb85a6a0"}, +] +celery = [ + {file = "celery-5.1.2-py3-none-any.whl", hash = "sha256:9dab2170b4038f7bf10ef2861dbf486ddf1d20592290a1040f7b7a1259705d42"}, + {file = "celery-5.1.2.tar.gz", hash = "sha256:8d9a3de9162965e97f8e8cc584c67aad83b3f7a267584fa47701ed11c3e0d4b0"}, +] certifi = [ {file = "certifi-2021.5.30-py2.py3-none-any.whl", hash = "sha256:50b1e4f8446b06f41be7dd6338db18e0990601dce795c2b1686458aa7e8fa7d8"}, {file = "certifi-2021.5.30.tar.gz", hash = "sha256:2bbf76fd432960138b3ef6dda3dde0544f27cbf8546c458e60baf371917ba9ee"}, @@ -1851,21 +2180,35 @@ cffi = [ {file = "cffi-1.14.6.tar.gz", hash = "sha256:c9a875ce9d7fe32887784274dd533c57909b7b1dcadcc128a2ac21331a9765dd"}, ] charset-normalizer = [ - {file = "charset-normalizer-2.0.1.tar.gz", hash = "sha256:ad0da505736fc7e716a8da15bf19a985db21ac6415c26b34d2fafd3beb3d927e"}, - {file = "charset_normalizer-2.0.1-py3-none-any.whl", hash = 
"sha256:b68b38179052975093d71c1b5361bf64afd80484697c1f27056e50593e695ceb"}, + {file = "charset-normalizer-2.0.6.tar.gz", hash = "sha256:5ec46d183433dcbd0ab716f2d7f29d8dee50505b3fdb40c6b985c7c4f5a3591f"}, + {file = "charset_normalizer-2.0.6-py3-none-any.whl", hash = "sha256:5d209c0a931f215cee683b6445e2d77677e7e75e159f78def0db09d68fafcaa6"}, ] ciscoconfparse = [ - {file = "ciscoconfparse-1.5.36-py3-none-any.whl", hash = "sha256:c17ec6106698209477065a54845c4c5baafeb06921afb0a04e8208517b39b2da"}, - {file = "ciscoconfparse-1.5.36.tar.gz", hash = "sha256:e06ef83fe77d62d7d7d5b89d2f95a9bbe04b5ca059432e59e7b1549a672ef874"}, + {file = "ciscoconfparse-1.5.46-py3-none-any.whl", hash = "sha256:3e04ebbd8afefa3b653a7e3fd56ab4d8a1485f21c5a742a0a865e562f2cd4de2"}, + {file = "ciscoconfparse-1.5.46.tar.gz", hash = "sha256:3eb0327ee341927be06ec5bc3f53e30edd280dcd42513fd2a0c7b1de64f02f46"}, ] click = [ - {file = "click-8.0.1-py3-none-any.whl", hash = "sha256:fba402a4a47334742d782209a7c79bc448911afe1149d07bdabdf480b3e2f4b6"}, - {file = "click-8.0.1.tar.gz", hash = "sha256:8c04c11192119b1ef78ea049e0a6f0463e4c48ef00a30160c704337586f3ad7a"}, + {file = "click-7.1.2-py2.py3-none-any.whl", hash = "sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc"}, + {file = "click-7.1.2.tar.gz", hash = "sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a"}, +] +click-didyoumean = [ + {file = "click-didyoumean-0.0.3.tar.gz", hash = "sha256:112229485c9704ff51362fe34b2d4f0b12fc71cc20f6d2b3afabed4b8bfa6aeb"}, +] +click-plugins = [ + {file = "click-plugins-1.1.1.tar.gz", hash = "sha256:46ab999744a9d831159c3411bb0c79346d94a444df9a3a3742e9ed63645f264b"}, + {file = "click_plugins-1.1.1-py2.py3-none-any.whl", hash = "sha256:5d262006d3222f5057fd81e1623d4443e41dcda5dc815c06b442aa3c02889fc8"}, +] +click-repl = [ + {file = "click-repl-0.2.0.tar.gz", hash = "sha256:cd12f68d745bf6151210790540b4cb064c7b13e571bc64b6957d98d120dacfd8"}, + {file = "click_repl-0.2.0-py3-none-any.whl", 
hash = "sha256:94b3fbbc9406a236f176e0506524b2937e4b23b6f4c0c0b2a0a83f8a64e9194b"}, ] colorama = [ {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, ] +contextvars = [ + {file = "contextvars-2.4.tar.gz", hash = "sha256:f38c908aaa59c14335eeea12abea5f443646216c4e29380d7bf34d2018e2c39e"}, +] coreapi = [ {file = "coreapi-2.3.3-py2.py3-none-any.whl", hash = "sha256:bf39d118d6d3e171f10df9ede5666f63ad80bba9a29a8ec17726a66cf52ee6f3"}, {file = "coreapi-2.3.3.tar.gz", hash = "sha256:46145fcc1f7017c076a2ef684969b641d18a2991051fddec9458ad3f78ffc1cb"}, @@ -1875,18 +2218,23 @@ coreschema = [ {file = "coreschema-0.0.4.tar.gz", hash = "sha256:9503506007d482ab0867ba14724b93c18a33b22b6d19fb419ef2d239dd4a1607"}, ] cryptography = [ - {file = "cryptography-3.4.7-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:3d8427734c781ea5f1b41d6589c293089704d4759e34597dce91014ac125aad1"}, - {file = "cryptography-3.4.7-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:8e56e16617872b0957d1c9742a3f94b43533447fd78321514abbe7db216aa250"}, - {file = "cryptography-3.4.7-cp36-abi3-manylinux2010_x86_64.whl", hash = "sha256:37340614f8a5d2fb9aeea67fd159bfe4f5f4ed535b1090ce8ec428b2f15a11f2"}, - {file = "cryptography-3.4.7-cp36-abi3-manylinux2014_aarch64.whl", hash = "sha256:240f5c21aef0b73f40bb9f78d2caff73186700bf1bc6b94285699aff98cc16c6"}, - {file = "cryptography-3.4.7-cp36-abi3-manylinux2014_x86_64.whl", hash = "sha256:1e056c28420c072c5e3cb36e2b23ee55e260cb04eee08f702e0edfec3fb51959"}, - {file = "cryptography-3.4.7-cp36-abi3-win32.whl", hash = "sha256:0f1212a66329c80d68aeeb39b8a16d54ef57071bf22ff4e521657b27372e327d"}, - {file = "cryptography-3.4.7-cp36-abi3-win_amd64.whl", hash = "sha256:de4e5f7f68220d92b7637fc99847475b59154b7a1b3868fb7385337af54ac9ca"}, - {file = 
"cryptography-3.4.7-pp36-pypy36_pp73-manylinux2010_x86_64.whl", hash = "sha256:26965837447f9c82f1855e0bc8bc4fb910240b6e0d16a664bb722df3b5b06873"}, - {file = "cryptography-3.4.7-pp36-pypy36_pp73-manylinux2014_x86_64.whl", hash = "sha256:eb8cc2afe8b05acbd84a43905832ec78e7b3873fb124ca190f574dca7389a87d"}, - {file = "cryptography-3.4.7-pp37-pypy37_pp73-manylinux2010_x86_64.whl", hash = "sha256:7ec5d3b029f5fa2b179325908b9cd93db28ab7b85bb6c1db56b10e0b54235177"}, - {file = "cryptography-3.4.7-pp37-pypy37_pp73-manylinux2014_x86_64.whl", hash = "sha256:ee77aa129f481be46f8d92a1a7db57269a2f23052d5f2433b4621bb457081cc9"}, - {file = "cryptography-3.4.7.tar.gz", hash = "sha256:3d10de8116d25649631977cb37da6cbdd2d6fa0e0281d014a5b7d337255ca713"}, + {file = "cryptography-3.4.8-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:a00cf305f07b26c351d8d4e1af84ad7501eca8a342dedf24a7acb0e7b7406e14"}, + {file = "cryptography-3.4.8-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:f44d141b8c4ea5eb4dbc9b3ad992d45580c1d22bf5e24363f2fbf50c2d7ae8a7"}, + {file = "cryptography-3.4.8-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0a7dcbcd3f1913f664aca35d47c1331fce738d44ec34b7be8b9d332151b0b01e"}, + {file = "cryptography-3.4.8-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34dae04a0dce5730d8eb7894eab617d8a70d0c97da76b905de9efb7128ad7085"}, + {file = "cryptography-3.4.8-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1eb7bb0df6f6f583dd8e054689def236255161ebbcf62b226454ab9ec663746b"}, + {file = "cryptography-3.4.8-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:9965c46c674ba8cc572bc09a03f4c649292ee73e1b683adb1ce81e82e9a6a0fb"}, + {file = "cryptography-3.4.8-cp36-abi3-win32.whl", hash = "sha256:21ca464b3a4b8d8e86ba0ee5045e103a1fcfac3b39319727bc0fc58c09c6aff7"}, + {file = "cryptography-3.4.8-cp36-abi3-win_amd64.whl", hash = "sha256:3520667fda779eb788ea00080124875be18f2d8f0848ec00733c0ec3bb8219fc"}, + {file = 
"cryptography-3.4.8-pp36-pypy36_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d2a6e5ef66503da51d2110edf6c403dc6b494cc0082f85db12f54e9c5d4c3ec5"}, + {file = "cryptography-3.4.8-pp36-pypy36_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a305600e7a6b7b855cd798e00278161b681ad6e9b7eca94c721d5f588ab212af"}, + {file = "cryptography-3.4.8-pp36-pypy36_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:3fa3a7ccf96e826affdf1a0a9432be74dc73423125c8f96a909e3835a5ef194a"}, + {file = "cryptography-3.4.8-pp37-pypy37_pp73-macosx_10_10_x86_64.whl", hash = "sha256:d9ec0e67a14f9d1d48dd87a2531009a9b251c02ea42851c060b25c782516ff06"}, + {file = "cryptography-3.4.8-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5b0fbfae7ff7febdb74b574055c7466da334a5371f253732d7e2e7525d570498"}, + {file = "cryptography-3.4.8-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94fff993ee9bc1b2440d3b7243d488c6a3d9724cc2b09cdb297f6a886d040ef7"}, + {file = "cryptography-3.4.8-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:8695456444f277af73a4877db9fc979849cd3ee74c198d04fc0776ebc3db52b9"}, + {file = "cryptography-3.4.8-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:cd65b60cfe004790c795cc35f272e41a3df4631e2fb6b35aa7ac6ef2859d554e"}, + {file = "cryptography-3.4.8.tar.gz", hash = "sha256:94cc5ed4ceaefcbe5bf38c8fba6a21fc1d365bb8fb826ea1688e3370b2e24a1c"}, ] cycler = [ {file = "cycler-0.10.0-py2.py3-none-any.whl", hash = "sha256:1d8a5ae1ff6c5cf9b93e8811e581232ad8920aeec647c37316ceac982b08cb2d"}, @@ -1896,6 +2244,10 @@ dataclasses = [ {file = "dataclasses-0.7-py3-none-any.whl", hash = "sha256:3459118f7ede7c8bea0fe795bff7c6c2ce287d01dd226202f7c9ebc0610a7836"}, {file = "dataclasses-0.7.tar.gz", hash = "sha256:494a6dcae3b8bcf80848eea2ef64c0cc5cd307ffc263e17cdf42f3e5420808e6"}, ] +deepdiff = [ + {file = "deepdiff-5.5.0-py3-none-any.whl", hash = 
"sha256:e054fed9dfe0d83d622921cbb3a3d0b3a6dd76acd2b6955433a0a2d35147774a"}, + {file = "deepdiff-5.5.0.tar.gz", hash = "sha256:dd79b81c2d84bfa33aa9d94d456b037b68daff6bb87b80dfaa1eca04da68b349"}, +] defusedxml = [ {file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"}, {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"}, @@ -1920,14 +2272,25 @@ django-cryptography = [ {file = "django-cryptography-1.0.tar.gz", hash = "sha256:13de5cf8f1250744c104b9e24774d03aa6d8488959dd40cdc016934043652445"}, {file = "django_cryptography-1.0-py3-none-any.whl", hash = "sha256:0a99980b1cee7cc5e52f9b20b322620fea7cc124d770273e7bd285b20fd9d222"}, ] +django-db-file-storage = [ + {file = "django-db-file-storage-0.5.5.tar.gz", hash = "sha256:5d5da694b78ab202accab4508b958e0e37b3d146310e76f6f6125e1bdeaaad14"}, +] django-debug-toolbar = [ - {file = "django-debug-toolbar-3.2.1.tar.gz", hash = "sha256:a5ff2a54f24bf88286f9872836081078f4baa843dc3735ee88524e89f8821e33"}, - {file = "django_debug_toolbar-3.2.1-py3-none-any.whl", hash = "sha256:e759e63e3fe2d3110e0e519639c166816368701eab4a47fed75d7de7018467b9"}, + {file = "django-debug-toolbar-3.2.2.tar.gz", hash = "sha256:8c5b13795d4040008ee69ba82dcdd259c49db346cf7d0de6e561a49d191f0860"}, + {file = "django_debug_toolbar-3.2.2-py3-none-any.whl", hash = "sha256:d7bab7573fab35b0fd029163371b7182f5826c13da69734beb675c761d06a4d3"}, ] django-filter = [ {file = "django-filter-2.4.0.tar.gz", hash = "sha256:84e9d5bb93f237e451db814ed422a3a625751cbc9968b484ecc74964a8696b06"}, {file = "django_filter-2.4.0-py3-none-any.whl", hash = "sha256:e00d32cebdb3d54273c48f4f878f898dced8d5dfaad009438fe61ebdf535ace1"}, ] +django-health-check = [ + {file = "django-health-check-3.16.4.tar.gz", hash = "sha256:334bcbbb9273a6dbd9c928e78474306e623dfb38cc442281cb9fd230a20a7fdb"}, + {file = "django_health_check-3.16.4-py2.py3-none-any.whl", hash = 
"sha256:86a8869d67e72394a1dd73e37819a7d2cfd915588b96927fda611d7451fd4735"}, +] +django-jinja = [ + {file = "django-jinja-2.7.1.tar.gz", hash = "sha256:0d2c90ccc4763f67b07ace2b8a2f23df16d2995b4dc841597443fb4eea746505"}, + {file = "django_jinja-2.7.1-py3-none-any.whl", hash = "sha256:fa7650ca289544caf441cc922876986cc0df8f111047aaef6836a978c0c66304"}, +] django-js-asset = [ {file = "django-js-asset-1.2.2.tar.gz", hash = "sha256:c163ae80d2e0b22d8fb598047cd0dcef31f81830e127cfecae278ad574167260"}, {file = "django_js_asset-1.2.2-py2.py3-none-any.whl", hash = "sha256:8ec12017f26eec524cab436c64ae73033368a372970af4cf42d9354fcb166bdd"}, @@ -1996,12 +2359,12 @@ gitdb = [ {file = "gitdb-4.0.7.tar.gz", hash = "sha256:96bf5c08b157a666fec41129e6d327235284cca4c81e92109260f353ba138005"}, ] gitpython = [ - {file = "GitPython-3.1.18-py3-none-any.whl", hash = "sha256:fce760879cd2aebd2991b3542876dc5c4a909b30c9d69dfc488e504a8db37ee8"}, - {file = "GitPython-3.1.18.tar.gz", hash = "sha256:b838a895977b45ab6f0cc926a9045c8d1c44e2b653c1fcc39fe91f42c6e8f05b"}, + {file = "GitPython-3.1.20-py3-none-any.whl", hash = "sha256:b1e1c269deab1b08ce65403cf14e10d2ef1f6c89e33ea7c5e5bb0222ea593b8a"}, + {file = "GitPython-3.1.20.tar.gz", hash = "sha256:df0e072a200703a65387b0cfdf0466e3bab729c0458cf6b7349d0e9877636519"}, ] graphene = [ - {file = "graphene-2.1.8-py2.py3-none-any.whl", hash = "sha256:09165f03e1591b76bf57b133482db9be6dac72c74b0a628d3c93182af9c5a896"}, - {file = "graphene-2.1.8.tar.gz", hash = "sha256:2cbe6d4ef15cfc7b7805e0760a0e5b80747161ce1b0f990dfdc0d2cf497c12f9"}, + {file = "graphene-2.1.9-py2.py3-none-any.whl", hash = "sha256:3d446eb1237c551052bc31155cf1a3a607053e4f58c9172b83a1b597beaa0868"}, + {file = "graphene-2.1.9.tar.gz", hash = "sha256:b9f2850e064eebfee9a3ef4a1f8aa0742848d97652173ab44c82cc8a62b9ed93"}, ] graphene-django = [ {file = "graphene-django-2.15.0.tar.gz", hash = "sha256:b78c9b05bc899016b9cc5bf13faa1f37fe1faa8c5407552c6ddd1a28f46fc31a"}, @@ -2019,13 +2382,42 @@ idna = [ {file = 
"idna-3.2-py3-none-any.whl", hash = "sha256:14475042e284991034cb48e06f6851428fb14c4dc953acd9be9a5e95c7b6dd7a"}, {file = "idna-3.2.tar.gz", hash = "sha256:467fbad99067910785144ce333826c71fb0e63a425657295239737f7ecd125f3"}, ] +immutables = [ + {file = "immutables-0.16-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:acbfa79d44228d96296279068441f980dc63dbed52522d9227ff9f4d96c6627e"}, + {file = "immutables-0.16-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29c9ed003eacb92e630ef200e31f47236c2139b39476894f7963b32bd39bafa3"}, + {file = "immutables-0.16-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0a396314b9024fa55bf83a27813fd76cf9f27dce51f53b0f19b51de035146251"}, + {file = "immutables-0.16-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:4a2a71678348fb95b13ca108d447f559a754c41b47bd1e7e4fb23974e735682d"}, + {file = "immutables-0.16-cp36-cp36m-win32.whl", hash = "sha256:064001638ab5d36f6aa05b6101446f4a5793fb71e522bc81b8fc65a1894266ff"}, + {file = "immutables-0.16-cp36-cp36m-win_amd64.whl", hash = "sha256:1de393f1b188740ca7b38f946f2bbc7edf3910d2048f03bbb8d01f17a038d67c"}, + {file = "immutables-0.16-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:fcf678a3074613119385a02a07c469ec5130559f5ea843c85a0840c80b5b71c6"}, + {file = "immutables-0.16-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a307eb0984eb43e815dcacea3ac50c11d00a936ecf694c46991cd5a23bcb0ec0"}, + {file = "immutables-0.16-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7a58825ff2254e2612c5a932174398a4ea8fbddd8a64a02c880cc32ee28b8820"}, + {file = "immutables-0.16-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:798b095381eb42cf40db6876339e7bed84093e5868018a9e73d8e1f7ab4bb21e"}, + {file = "immutables-0.16-cp37-cp37m-win32.whl", hash = "sha256:19bdede174847c2ef1292df0f23868ab3918b560febb09fcac6eec621bd4812b"}, + {file = "immutables-0.16-cp37-cp37m-win_amd64.whl", hash = 
"sha256:9ccf4c0e3e2e3237012b516c74c49de8872ccdf9129739f7a0b9d7444a8c4862"}, + {file = "immutables-0.16-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:d59beef203a3765db72b1d0943547425c8318ecf7d64c451fd1e130b653c2fbb"}, + {file = "immutables-0.16-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0020aaa4010b136056c20a46ce53204e1407a9e4464246cb2cf95b90808d9161"}, + {file = "immutables-0.16-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edd9f67671555af1eb99ad3c7550238487dd7ac0ac5205b40204ed61c9a922ac"}, + {file = "immutables-0.16-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:298a301f85f307b4c056a0825eb30f060e64d73605e783289f3df37dd762bab8"}, + {file = "immutables-0.16-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:b779617f5b94486bfd0f22162cd72eb5f2beb0214a14b75fdafb7b2c908ed0cb"}, + {file = "immutables-0.16-cp38-cp38-win32.whl", hash = "sha256:511c93d8b1bbbf103ff3f1f120c5a68a9866ce03dea6ac406537f93ca9b19139"}, + {file = "immutables-0.16-cp38-cp38-win_amd64.whl", hash = "sha256:b651b61c1af6cda2ee201450f2ffe048a5959bc88e43e6c312f4c93e69c9e929"}, + {file = "immutables-0.16-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:aa7bf572ae1e006104c584be70dc634849cf0dc62f42f4ee194774f97e7fd17d"}, + {file = "immutables-0.16-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:50793a44ba0d228ed8cad4d0925e00dfd62ea32f44ddee8854f8066447272d05"}, + {file = "immutables-0.16-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:799621dcdcdcbb2516546a40123b87bf88de75fe7459f7bd8144f079ace6ec3e"}, + {file = "immutables-0.16-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7bcf52aeb983bd803b7c6106eae1b2d9a0c7ab1241bc6b45e2174ba2b7283031"}, + {file = "immutables-0.16-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:734c269e82e5f307fb6e17945953b67659d1731e65309787b8f7ba267d1468f2"}, + {file = "immutables-0.16-cp39-cp39-win32.whl", hash = 
"sha256:a454d5d3fee4b7cc627345791eb2ca4b27fa3bbb062ccf362ecaaa51679a07ed"}, + {file = "immutables-0.16-cp39-cp39-win_amd64.whl", hash = "sha256:2505d93395d3f8ae4223e21465994c3bc6952015a38dc4f03cb3e07a2b8d8325"}, + {file = "immutables-0.16.tar.gz", hash = "sha256:d67e86859598eed0d926562da33325dac7767b7b1eff84e232c22abea19f4360"}, +] importlib-metadata = [ {file = "importlib_metadata-3.4.0-py3-none-any.whl", hash = "sha256:ace61d5fc652dc280e7b6b4ff732a9c2d40db2c0f92bc6cb74e07b73d53a1771"}, {file = "importlib_metadata-3.4.0.tar.gz", hash = "sha256:fa5daa4477a7414ae34e95942e4dd07f62adf589143c875c133c1e53c4eff38d"}, ] importlib-resources = [ - {file = "importlib_resources-5.2.0-py3-none-any.whl", hash = "sha256:a0143290bef3cbc99de9e40176e4987780939a955b8632f02ce6c935f42e9bfc"}, - {file = "importlib_resources-5.2.0.tar.gz", hash = "sha256:22a2c42d8c6a1d30aa8a0e1f57293725bfd5c013d562585e46aff469e0ff78b3"}, + {file = "importlib_resources-5.2.2-py3-none-any.whl", hash = "sha256:2480d8e07d1890056cb53c96e3de44fead9c62f2ba949b0f2e4c4345f4afa977"}, + {file = "importlib_resources-5.2.2.tar.gz", hash = "sha256:a65882a4d0fe5fbf702273456ba2ce74fe44892c25e42e057aca526b702a6d4b"}, ] inflection = [ {file = "inflection-0.5.1-py2.py3-none-any.whl", hash = "sha256:f38b2b640938a4f35ade69ac3d053042959b62a0f1076a5bbaa1b9526605a8a2"}, @@ -2053,8 +2445,8 @@ jsonschema = [ {file = "jsonschema-3.2.0.tar.gz", hash = "sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a"}, ] junos-eznc = [ - {file = "junos-eznc-2.6.2.tar.gz", hash = "sha256:878c479c933346cc8cc60b6d145973568ac23e7c453e193cf55625e7921a9b62"}, - {file = "junos_eznc-2.6.2-py2.py3-none-any.whl", hash = "sha256:bc8a5406f8583de077334d007afd884ffc9ce6f433c037bec16c1ca16726a090"}, + {file = "junos-eznc-2.5.4.tar.gz", hash = "sha256:bf036d0af9ee5c5e4f517cb5fc902fe891fa120e18f459805862c53d4a97193a"}, + {file = "junos_eznc-2.5.4-py2.py3-none-any.whl", hash = 
"sha256:e05c36d56d8b8d13b1fb3bb763828bb3ee80fa1dcadc3a6762e8e2568504676d"}, ] kiwisolver = [ {file = "kiwisolver-1.3.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:fd34fbbfbc40628200730bc1febe30631347103fc8d3d4fa012c21ab9c11eca9"}, @@ -2071,14 +2463,18 @@ kiwisolver = [ {file = "kiwisolver-1.3.1-cp37-cp37m-manylinux2014_ppc64le.whl", hash = "sha256:1e1bc12fb773a7b2ffdeb8380609f4f8064777877b2225dec3da711b421fda31"}, {file = "kiwisolver-1.3.1-cp37-cp37m-win32.whl", hash = "sha256:72c99e39d005b793fb7d3d4e660aed6b6281b502e8c1eaf8ee8346023c8e03bc"}, {file = "kiwisolver-1.3.1-cp37-cp37m-win_amd64.whl", hash = "sha256:8be8d84b7d4f2ba4ffff3665bcd0211318aa632395a1a41553250484a871d454"}, + {file = "kiwisolver-1.3.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:24cc411232d14c8abafbd0dddb83e1a4f54d77770b53db72edcfe1d611b3bf11"}, {file = "kiwisolver-1.3.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:31dfd2ac56edc0ff9ac295193eeaea1c0c923c0355bf948fbd99ed6018010b72"}, + {file = "kiwisolver-1.3.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ef6eefcf3944e75508cdfa513c06cf80bafd7d179e14c1334ebdca9ebb8c2c66"}, {file = "kiwisolver-1.3.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:563c649cfdef27d081c84e72a03b48ea9408c16657500c312575ae9d9f7bc1c3"}, {file = "kiwisolver-1.3.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:78751b33595f7f9511952e7e60ce858c6d64db2e062afb325985ddbd34b5c131"}, {file = "kiwisolver-1.3.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:a357fd4f15ee49b4a98b44ec23a34a95f1e00292a139d6015c11f55774ef10de"}, {file = "kiwisolver-1.3.1-cp38-cp38-manylinux2014_ppc64le.whl", hash = "sha256:5989db3b3b34b76c09253deeaf7fbc2707616f130e166996606c284395da3f18"}, {file = "kiwisolver-1.3.1-cp38-cp38-win32.whl", hash = "sha256:c08e95114951dc2090c4a630c2385bef681cacf12636fb0241accdc6b303fd81"}, {file = "kiwisolver-1.3.1-cp38-cp38-win_amd64.whl", hash = "sha256:44a62e24d9b01ba94ae7a4a6c3fb215dc4af1dde817e7498d901e229aaf50e4e"}, + {file = 
"kiwisolver-1.3.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:6d9d8d9b31aa8c2d80a690693aebd8b5e2b7a45ab065bb78f1609995d2c79240"}, {file = "kiwisolver-1.3.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:50af681a36b2a1dee1d3c169ade9fdc59207d3c31e522519181e12f1b3ba7000"}, + {file = "kiwisolver-1.3.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:792e69140828babe9649de583e1a03a0f2ff39918a71782c76b3c683a67c6dfd"}, {file = "kiwisolver-1.3.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:a53d27d0c2a0ebd07e395e56a1fbdf75ffedc4a05943daf472af163413ce9598"}, {file = "kiwisolver-1.3.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:834ee27348c4aefc20b479335fd422a2c69db55f7d9ab61721ac8cd83eb78882"}, {file = "kiwisolver-1.3.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:5c3e6455341008a054cccee8c5d24481bcfe1acdbc9add30aa95798e95c65621"}, @@ -2088,8 +2484,13 @@ kiwisolver = [ {file = "kiwisolver-1.3.1-pp36-pypy36_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0cd53f403202159b44528498de18f9285b04482bab2a6fc3f5dd8dbb9352e30d"}, {file = "kiwisolver-1.3.1-pp36-pypy36_pp73-manylinux2010_x86_64.whl", hash = "sha256:33449715e0101e4d34f64990352bce4095c8bf13bed1b390773fc0a7295967b3"}, {file = "kiwisolver-1.3.1-pp36-pypy36_pp73-win32.whl", hash = "sha256:401a2e9afa8588589775fe34fc22d918ae839aaaf0c0e96441c0fdbce6d8ebe6"}, + {file = "kiwisolver-1.3.1-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d6563ccd46b645e966b400bb8a95d3457ca6cf3bba1e908f9e0927901dfebeb1"}, {file = "kiwisolver-1.3.1.tar.gz", hash = "sha256:950a199911a8d94683a6b10321f9345d5a3a8433ec58b217ace979e18f16e248"}, ] +kombu = [ + {file = "kombu-5.1.0-py3-none-any.whl", hash = "sha256:e2dedd8a86c9077c350555153825a31e456a0dc20c15d5751f00137ec9c75f0a"}, + {file = "kombu-5.1.0.tar.gz", hash = "sha256:01481d99f4606f6939cdc9b637264ed353ee9e3e4f62cfb582324142c41a572d"}, +] lazy-object-proxy = [ {file = "lazy-object-proxy-1.6.0.tar.gz", hash = 
"sha256:489000d368377571c6f982fba6497f2aa13c6d1facc40660963da62f5c379726"}, {file = "lazy_object_proxy-1.6.0-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:c6938967f8528b3668622a9ed3b31d145fab161a32f5891ea7b84f6b790be05b"}, @@ -2114,6 +2515,10 @@ lazy-object-proxy = [ {file = "lazy_object_proxy-1.6.0-cp39-cp39-win32.whl", hash = "sha256:1fee665d2638491f4d6e55bd483e15ef21f6c8c2095f235fef72601021e64f61"}, {file = "lazy_object_proxy-1.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:f5144c75445ae3ca2057faac03fda5a902eff196702b0a24daf1d6ce0650514b"}, ] +loguru = [ + {file = "loguru-0.5.3-py3-none-any.whl", hash = "sha256:f8087ac396b5ee5f67c963b495d615ebbceac2796379599820e324419d53667c"}, + {file = "loguru-0.5.3.tar.gz", hash = "sha256:b28e72ac7a98be3d28ad28570299a393dfcd32e5e3f6a353dec94675767b6319"}, +] lxml = [ {file = "lxml-4.6.3-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:df7c53783a46febb0e70f6b05df2ba104610f2fb0d27023409734a3ecbb78fb2"}, {file = "lxml-4.6.3-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:1b7584d421d254ab86d4f0b13ec662a9014397678a7c4265a02a6d7c2b18a75f"}, @@ -2122,6 +2527,8 @@ lxml = [ {file = "lxml-4.6.3-cp27-cp27m-win_amd64.whl", hash = "sha256:8157dadbb09a34a6bd95a50690595e1fa0af1a99445e2744110e3dca7831c4ee"}, {file = "lxml-4.6.3-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:7728e05c35412ba36d3e9795ae8995e3c86958179c9770e65558ec3fdfd3724f"}, {file = "lxml-4.6.3-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:4bff24dfeea62f2e56f5bab929b4428ae6caba2d1eea0c2d6eb618e30a71e6d4"}, + {file = "lxml-4.6.3-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:64812391546a18896adaa86c77c59a4998f33c24788cadc35789e55b727a37f4"}, + {file = "lxml-4.6.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:c1a40c06fd5ba37ad39caa0b3144eb3772e813b5fb5b084198a985431c2f1e8d"}, {file = "lxml-4.6.3-cp35-cp35m-manylinux1_i686.whl", hash = 
"sha256:74f7d8d439b18fa4c385f3f5dfd11144bb87c1da034a466c5b5577d23a1d9b51"}, {file = "lxml-4.6.3-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:f90ba11136bfdd25cae3951af8da2e95121c9b9b93727b1b896e3fa105b2f586"}, {file = "lxml-4.6.3-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:4c61b3a0db43a1607d6264166b230438f85bfed02e8cff20c22e564d0faff354"}, @@ -2167,12 +2574,22 @@ markdown = [ {file = "Markdown-3.3.4.tar.gz", hash = "sha256:31b5b491868dcc87d6c24b7e3d19a0d730d59d3e46f4eea6430a321bed387a49"}, ] markupsafe = [ + {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d8446c54dc28c01e5a2dbac5a25f071f6653e6e40f3a8818e8b45d790fe6ef53"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36bc903cbb393720fad60fc28c10de6acf10dc6cc883f3e24ee4012371399a38"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d7d807855b419fc2ed3e631034685db6079889a1f01d5d9dac950f764da3dad"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:add36cb2dbb8b736611303cd3bfcee00afd96471b09cda130da3581cbdc56a6d"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:168cd0a3642de83558a5153c8bd34f175a9a6e7f6dc6384b9655d2697312a646"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-win32.whl", hash = "sha256:99df47edb6bda1249d3e80fdabb1dab8c08ef3975f69aed437cb69d0a5de1e28"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:e0f138900af21926a02425cf736db95be9f4af72ba1bb21453432a07f6082134"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f9081981fe268bd86831e5c75f7de206ef275defcb82bc70740ae6dc507aee51"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:0955295dd5eec6cb6cc2fe1698f4c6d84af2e92de33fbcac4111913cd100a6ff"}, {file = 
"MarkupSafe-2.0.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:0446679737af14f45767963a1a9ef7620189912317d095f2d9ffa183a4d25d2b"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:f826e31d18b516f653fe296d967d700fddad5901ae07c622bb3705955e1faa94"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:fa130dd50c57d53368c9d59395cb5526eda596d3ffe36666cd81a44d56e48872"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:905fec760bd2fa1388bb5b489ee8ee5f7291d692638ea5f67982d968366bef9f"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf5d821ffabf0ef3533c39c518f3357b171a1651c1ff6827325e4489b0e46c3c"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0d4b31cc67ab36e3392bbf3862cfbadac3db12bdd8b02a2731f509ed5b829724"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:baa1a4e8f868845af802979fcdbf0bb11f94f1cb7ced4c4b8a351bb60d108145"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-win32.whl", hash = "sha256:6c4ca60fa24e85fe25b912b01e62cb969d69a23a5d5867682dd3e80b5b02581d"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b2f4bf27480f5e5e8ce285a8c8fd176c0b03e93dcc6646477d4630e83440c6a9"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0717a7390a68be14b8c793ba258e075c6f4ca819f15edfc2a3a027c823718567"}, @@ -2181,14 +2598,21 @@ markupsafe = [ {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:d7f9850398e85aba693bb640262d3611788b1f29a79f0c93c565694658f4071f"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:6a7fae0dd14cf60ad5ff42baa2e95727c3d81ded453457771d02b7d2b3f9c0c2"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = 
"sha256:b7f2d075102dc8c794cbde1947378051c4e5180d52d276987b8d28a3bd58c17d"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e9936f0b261d4df76ad22f8fee3ae83b60d7c3e871292cd42f40b81b70afae85"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2a7d351cbd8cfeb19ca00de495e224dea7e7d919659c2841bbb7f420ad03e2d6"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:60bf42e36abfaf9aff1f50f52644b336d4f0a3fd6d8a60ca0d054ac9f713a864"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-win32.whl", hash = "sha256:a30e67a65b53ea0a5e62fe23682cfe22712e01f453b95233b25502f7c61cb415"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:611d1ad9a4288cf3e3c16014564df047fe08410e628f89805e475368bd304914"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5bb28c636d87e840583ee3adeb78172efc47c8b26127267f54a9c0ec251d41a9"}, {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:be98f628055368795d818ebf93da628541e10b75b41c559fdf36d104c5787066"}, {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1d609f577dc6e1aa17d746f8bd3c31aa4d258f4070d61b2aa5c4166c1539de35"}, {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7d91275b0245b1da4d4cfa07e0faedd5b0812efc15b702576d103293e252af1b"}, {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:01a9b8ea66f1658938f65b93a85ebe8bc016e6769611be228d797c9d998dd298"}, {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:47ab1e7b91c098ab893b828deafa1203de86d0bc6ab587b160f78fe6c4011f75"}, {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:97383d78eb34da7e1fa37dd273c20ad4320929af65d156e35a5e2d89566d9dfb"}, + {file = 
"MarkupSafe-2.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6fcf051089389abe060c9cd7caa212c707e58153afa2c649f00346ce6d260f1b"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5855f8438a7d1d458206a2466bf82b0f104a3724bf96a1c781ab731e4201731a"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3dd007d54ee88b46be476e293f48c85048603f5f516008bee124ddd891398ed6"}, {file = "MarkupSafe-2.0.1-cp38-cp38-win32.whl", hash = "sha256:023cb26ec21ece8dc3907c0e8320058b2e0cb3c55cf9564da612bc325bed5e64"}, {file = "MarkupSafe-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:984d76483eb32f1bcb536dc27e4ad56bba4baa70be32fa87152832cdd9db0833"}, {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2ef54abee730b502252bcdf31b10dacb0a416229b72c18b19e24a4509f273d26"}, @@ -2198,6 +2622,9 @@ markupsafe = [ {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:4efca8f86c54b22348a5467704e3fec767b2db12fc39c6d963168ab1d3fc9135"}, {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:ab3ef638ace319fa26553db0624c4699e31a28bb2a835c5faca8f8acf6a5a902"}, {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:f8ba0e8349a38d3001fae7eadded3f6606f0da5d748ee53cc1dab1d6527b9509"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c47adbc92fc1bb2b3274c4b3a43ae0e4573d9fbff4f54cd484555edbf030baf1"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:37205cac2a79194e3750b0af2a5720d95f786a55ce7df90c3af697bfa100eaac"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = 
"sha256:1f2ade76b9903f39aa442b4aadd2177decb66525062db244b35d71d0ee8599b6"}, {file = "MarkupSafe-2.0.1-cp39-cp39-win32.whl", hash = "sha256:10f82115e21dc0dfec9ab5c0223652f7197feb168c940f3ef61563fc2d6beb74"}, {file = "MarkupSafe-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:693ce3f9e70a6cf7d2fb9e6c9d8b204b6b39897a2c4a1aa65728d5ac97dcc1d8"}, {file = "MarkupSafe-2.0.1.tar.gz", hash = "sha256:594c67807fb16238b30c44bdf74f36c02cdf22d1c8cda91ef8a0ed8dabf5620a"}, @@ -2242,15 +2669,15 @@ napalm = [ {file = "napalm-3.3.1.tar.gz", hash = "sha256:6fe46d5b4ca761a2bde8ea2bea3e0b808a907afc808c6c67178060eb323320e8"}, ] nautobot = [ - {file = "nautobot-1.0.3-py3-none-any.whl", hash = "sha256:d3553004dd4e0ea27d054c72b155992848a329412139548df24ad0e647f6b0a5"}, - {file = "nautobot-1.0.3.tar.gz", hash = "sha256:866c777bfa52ca062a5ed36528aa571e2baacc11f97eec7813027a0d12fef99d"}, + {file = "nautobot-1.1.3-py3-none-any.whl", hash = "sha256:826cb6b9ba97e06c6085fc8fec9436ec69590fab4ad17d9a02dbeb6347af1714"}, + {file = "nautobot-1.1.3.tar.gz", hash = "sha256:497a0ed6ab6e31566e831650d328b77db999aca161caa3ab98357351de8c87bd"}, ] nautobot-plugin-nornir = [ - {file = "nautobot-plugin-nornir-0.9.3.tar.gz", hash = "sha256:8260ded81dcd5800a3eaca0ae9c30ecaa1a095229109b76e9db3df677a6e929a"}, - {file = "nautobot_plugin_nornir-0.9.3-py3-none-any.whl", hash = "sha256:5904b04aff729ade601df7ccb7b4d33702c7629a1fa092ab7aad3175c7cb93dc"}, + {file = "nautobot-plugin-nornir-0.9.4.tar.gz", hash = "sha256:08217646a5b8c973c9379f51a9633d8b5962e95cb28afd4cfde52c3b5ce9204a"}, + {file = "nautobot_plugin_nornir-0.9.4-py3-none-any.whl", hash = "sha256:87c8863229e83a12e99aa302eaf5f9489835b8c45ea434cf622d9cb4ea1136bc"}, ] ncclient = [ - {file = "ncclient-0.6.9.tar.gz", hash = "sha256:0112f2ad41fb658f52446d870853a63691d69299c73c7351c520d38dbd8dc0c4"}, + {file = "ncclient-0.6.12.tar.gz", hash = "sha256:37c8a9f9a44f0346144119ab17ae6559e44b5a991f4c34ea3765c678079e4beb"}, ] netaddr = [ {file = 
"netaddr-0.8.0-py2.py3-none-any.whl", hash = "sha256:9666d0232c32d2656e5e5f8d735f58fd6c7457ce52fc21c98d45f2af78f990ac"}, @@ -2261,8 +2688,8 @@ netmiko = [ {file = "netmiko-3.4.0.tar.gz", hash = "sha256:acadb9dd97864ee848e2032f1f0e301c7b31e7a4153757d98f5c8ba1b9614993"}, ] netutils = [ - {file = "netutils-0.2.1-py3-none-any.whl", hash = "sha256:8dae11f46e3d00cc6c38e97113e3ea9ef9e18f0f71cdb7c12e2c00b9f95fabcb"}, - {file = "netutils-0.2.1.tar.gz", hash = "sha256:7ed1f46d5cd7383b86a50c7a05d8f6536230c8af46fd6eceab05fdac4a621a24"}, + {file = "netutils-0.2.2-py3-none-any.whl", hash = "sha256:a0335a8d7e5210877cdc9017d2984a0abef23529b8f0147915f0ea5147b6a2c3"}, + {file = "netutils-0.2.2.tar.gz", hash = "sha256:b848ed9536e13931761ec641346f5330e18f78a91d880253cf86d860a084555b"}, ] nornir = [ {file = "nornir-3.1.1-py3-none-any.whl", hash = "sha256:217199f923c810f4a54dec8d440eb08682c8a4ea4746325bd3067dca2e32cf9f"}, @@ -2289,8 +2716,8 @@ nornir-utils = [ {file = "nornir_utils-0.1.2.tar.gz", hash = "sha256:23ae95c4805b0ce8a5ed32935f3f86027e5701175e7740ab8b3a79946c5d90b2"}, ] ntc-templates = [ - {file = "ntc_templates-2.1.0-py3-none-any.whl", hash = "sha256:b42c0d32cf33ccc2ba89b2ec4268ad43d3d872ff569ecefe727b6649adacd175"}, - {file = "ntc_templates-2.1.0.tar.gz", hash = "sha256:6ce17e48d951d531afa83ad3b68fda822a3d8937e8c955387053c501edfec41f"}, + {file = "ntc_templates-2.3.2-py3-none-any.whl", hash = "sha256:1a957a4ecc0a0091c91b20e8fa36cc9c07f7c40fbe9ac96428452fe733c2069d"}, + {file = "ntc_templates-2.3.2.tar.gz", hash = "sha256:5c09282ee98a3970904b3cbbfda861a83a14b023e7aaf5a88e711b5dfd4a16a7"}, ] numpy = [ {file = "numpy-1.19.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:cc6bd4fd593cb261332568485e20a0712883cf631f6f5e8e86a52caa8b2b50ff"}, @@ -2332,6 +2759,9 @@ oauthlib = [ {file = "oauthlib-3.1.1-py2.py3-none-any.whl", hash = "sha256:42bf6354c2ed8c6acb54d971fce6f88193d97297e18602a3a886603f9d7730cc"}, {file = "oauthlib-3.1.1.tar.gz", hash = 
"sha256:8f0215fcc533dd8dd1bee6f4c412d4f0cd7297307d43ac61666389e3bc3198a3"}, ] +ordered-set = [ + {file = "ordered-set-4.0.2.tar.gz", hash = "sha256:ba93b2df055bca202116ec44b9bead3df33ea63a7d5827ff8e16738b97f33a95"}, +] packaging = [ {file = "packaging-21.0-py3-none-any.whl", hash = "sha256:c86254f9220d55e31cc94d69bade760f0847da8000def4dfe1c6b872fd14ff14"}, {file = "packaging-21.0.tar.gz", hash = "sha256:7dc96269f53a4ccec5c0670940a4281106dd0bb343f47b7471f779df49c2fbe7"}, @@ -2345,47 +2775,67 @@ passlib = [ {file = "passlib-1.7.4.tar.gz", hash = "sha256:defd50f72b65c5402ab2c573830a6978e5f202ad0d984793c8dde2c4152ebe04"}, ] pathspec = [ - {file = "pathspec-0.8.1-py2.py3-none-any.whl", hash = "sha256:aa0cb481c4041bf52ffa7b0d8fa6cd3e88a2ca4879c533c9153882ee2556790d"}, - {file = "pathspec-0.8.1.tar.gz", hash = "sha256:86379d6b86d75816baba717e64b1a3a3469deb93bb76d613c9ce79edc5cb68fd"}, + {file = "pathspec-0.9.0-py2.py3-none-any.whl", hash = "sha256:7d15c4ddb0b5c802d161efc417ec1a2558ea2653c2e8ad9c19098201dc1c993a"}, + {file = "pathspec-0.9.0.tar.gz", hash = "sha256:e564499435a2673d586f6b2130bb5b95f04a3ba06f81b8f895b651a3c76aabb1"}, ] pbr = [ {file = "pbr-5.6.0-py2.py3-none-any.whl", hash = "sha256:c68c661ac5cc81058ac94247278eeda6d2e6aecb3e227b0387c30d277e7ef8d4"}, {file = "pbr-5.6.0.tar.gz", hash = "sha256:42df03e7797b796625b1029c0400279c7c34fd7df24a7d7818a1abb5b38710dd"}, ] pillow = [ - {file = "Pillow-8.1.2-cp36-cp36m-macosx_10_10_x86_64.whl", hash = "sha256:5cf03b9534aca63b192856aa601c68d0764810857786ea5da652581f3a44c2b0"}, - {file = "Pillow-8.1.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:f91b50ad88048d795c0ad004abbe1390aa1882073b1dca10bfd55d0b8cf18ec5"}, - {file = "Pillow-8.1.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:5762ebb4436f46b566fc6351d67a9b5386b5e5de4e58fdaa18a1c83e0e20f1a8"}, - {file = "Pillow-8.1.2-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:e2cd8ac157c1e5ae88b6dd790648ee5d2777e76f1e5c7d184eaddb2938594f34"}, - {file = 
"Pillow-8.1.2-cp36-cp36m-win32.whl", hash = "sha256:72027ebf682abc9bafd93b43edc44279f641e8996fb2945104471419113cfc71"}, - {file = "Pillow-8.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:d1d6bca39bb6dd94fba23cdb3eeaea5e30c7717c5343004d900e2a63b132c341"}, - {file = "Pillow-8.1.2-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:90882c6f084ef68b71bba190209a734bf90abb82ab5e8f64444c71d5974008c6"}, - {file = "Pillow-8.1.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:89e4c757a91b8c55d97c91fa09c69b3677c227b942fa749e9a66eef602f59c28"}, - {file = "Pillow-8.1.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:8c4e32218c764bc27fe49b7328195579581aa419920edcc321c4cb877c65258d"}, - {file = "Pillow-8.1.2-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:a01da2c266d9868c4f91a9c6faf47a251f23b9a862dce81d2ff583135206f5be"}, - {file = "Pillow-8.1.2-cp37-cp37m-win32.whl", hash = "sha256:30d33a1a6400132e6f521640dd3f64578ac9bfb79a619416d7e8802b4ce1dd55"}, - {file = "Pillow-8.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:71b01ee69e7df527439d7752a2ce8fb89e19a32df484a308eca3e81f673d3a03"}, - {file = "Pillow-8.1.2-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:5a2d957eb4aba9d48170b8fe6538ec1fbc2119ffe6373782c03d8acad3323f2e"}, - {file = "Pillow-8.1.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:87f42c976f91ca2fc21a3293e25bd3cd895918597db1b95b93cbd949f7d019ce"}, - {file = "Pillow-8.1.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:15306d71a1e96d7e271fd2a0737038b5a92ca2978d2e38b6ced7966583e3d5af"}, - {file = "Pillow-8.1.2-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:71f31ee4df3d5e0b366dd362007740106d3210fb6a56ec4b581a5324ba254f06"}, - {file = "Pillow-8.1.2-cp38-cp38-win32.whl", hash = "sha256:98afcac3205d31ab6a10c5006b0cf040d0026a68ec051edd3517b776c1d78b09"}, - {file = "Pillow-8.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:328240f7dddf77783e72d5ed79899a6b48bc6681f8d1f6001f55933cb4905060"}, - {file = "Pillow-8.1.2-cp39-cp39-macosx_10_10_x86_64.whl", hash = 
"sha256:bead24c0ae3f1f6afcb915a057943ccf65fc755d11a1410a909c1fefb6c06ad1"}, - {file = "Pillow-8.1.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:81b3716cc9744ffdf76b39afb6247eae754186838cedad0b0ac63b2571253fe6"}, - {file = "Pillow-8.1.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:63cd413ac52ee3f67057223d363f4f82ce966e64906aea046daf46695e3c8238"}, - {file = "Pillow-8.1.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:8565355a29655b28fdc2c666fd9a3890fe5edc6639d128814fafecfae2d70910"}, - {file = "Pillow-8.1.2-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:1940fc4d361f9cc7e558d6f56ff38d7351b53052fd7911f4b60cd7bc091ea3b1"}, - {file = "Pillow-8.1.2-cp39-cp39-win32.whl", hash = "sha256:46c2bcf8e1e75d154e78417b3e3c64e96def738c2a25435e74909e127a8cba5e"}, - {file = "Pillow-8.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:aeab4cd016e11e7aa5cfc49dcff8e51561fa64818a0be86efa82c7038e9369d0"}, - {file = "Pillow-8.1.2-pp36-pypy36_pp73-macosx_10_10_x86_64.whl", hash = "sha256:74cd9aa648ed6dd25e572453eb09b08817a1e3d9f8d1bd4d8403d99e42ea790b"}, - {file = "Pillow-8.1.2-pp36-pypy36_pp73-manylinux2010_i686.whl", hash = "sha256:e5739ae63636a52b706a0facec77b2b58e485637e1638202556156e424a02dc2"}, - {file = "Pillow-8.1.2-pp36-pypy36_pp73-manylinux2010_x86_64.whl", hash = "sha256:903293320efe2466c1ab3509a33d6b866dc850cfd0c5d9cc92632014cec185fb"}, - {file = "Pillow-8.1.2-pp37-pypy37_pp73-macosx_10_10_x86_64.whl", hash = "sha256:5daba2b40782c1c5157a788ec4454067c6616f5a0c1b70e26ac326a880c2d328"}, - {file = "Pillow-8.1.2-pp37-pypy37_pp73-manylinux2010_i686.whl", hash = "sha256:1f93f2fe211f1ef75e6f589327f4d4f8545d5c8e826231b042b483d8383e8a7c"}, - {file = "Pillow-8.1.2-pp37-pypy37_pp73-manylinux2010_x86_64.whl", hash = "sha256:6efac40344d8f668b6c4533ae02a48d52fd852ef0654cc6f19f6ac146399c733"}, - {file = "Pillow-8.1.2-pp37-pypy37_pp73-win32.whl", hash = "sha256:f36c3ff63d6fc509ce599a2f5b0d0732189eed653420e7294c039d342c6e204a"}, - {file = "Pillow-8.1.2.tar.gz", hash = 
"sha256:b07c660e014852d98a00a91adfbe25033898a9d90a8f39beb2437d22a203fc44"}, + {file = "Pillow-8.3.2-cp310-cp310-macosx_10_10_universal2.whl", hash = "sha256:c691b26283c3a31594683217d746f1dad59a7ae1d4cfc24626d7a064a11197d4"}, + {file = "Pillow-8.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f514c2717012859ccb349c97862568fdc0479aad85b0270d6b5a6509dbc142e2"}, + {file = "Pillow-8.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be25cb93442c6d2f8702c599b51184bd3ccd83adebd08886b682173e09ef0c3f"}, + {file = "Pillow-8.3.2-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d675a876b295afa114ca8bf42d7f86b5fb1298e1b6bb9a24405a3f6c8338811c"}, + {file = "Pillow-8.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59697568a0455764a094585b2551fd76bfd6b959c9f92d4bdec9d0e14616303a"}, + {file = "Pillow-8.3.2-cp310-cp310-win32.whl", hash = "sha256:2d5e9dc0bf1b5d9048a94c48d0813b6c96fccfa4ccf276d9c36308840f40c228"}, + {file = "Pillow-8.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:11c27e74bab423eb3c9232d97553111cc0be81b74b47165f07ebfdd29d825875"}, + {file = "Pillow-8.3.2-cp36-cp36m-macosx_10_10_x86_64.whl", hash = "sha256:11eb7f98165d56042545c9e6db3ce394ed8b45089a67124298f0473b29cb60b2"}, + {file = "Pillow-8.3.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f23b2d3079522fdf3c09de6517f625f7a964f916c956527bed805ac043799b8"}, + {file = "Pillow-8.3.2-cp36-cp36m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:19ec4cfe4b961edc249b0e04b5618666c23a83bc35842dea2bfd5dfa0157f81b"}, + {file = "Pillow-8.3.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e5a31c07cea5edbaeb4bdba6f2b87db7d3dc0f446f379d907e51cc70ea375629"}, + {file = "Pillow-8.3.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:15ccb81a6ffc57ea0137f9f3ac2737ffa1d11f786244d719639df17476d399a7"}, + {file = 
"Pillow-8.3.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:8f284dc1695caf71a74f24993b7c7473d77bc760be45f776a2c2f4e04c170550"}, + {file = "Pillow-8.3.2-cp36-cp36m-win32.whl", hash = "sha256:4abc247b31a98f29e5224f2d31ef15f86a71f79c7f4d2ac345a5d551d6393073"}, + {file = "Pillow-8.3.2-cp36-cp36m-win_amd64.whl", hash = "sha256:a048dad5ed6ad1fad338c02c609b862dfaa921fcd065d747194a6805f91f2196"}, + {file = "Pillow-8.3.2-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:06d1adaa284696785375fa80a6a8eb309be722cf4ef8949518beb34487a3df71"}, + {file = "Pillow-8.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd24054aaf21e70a51e2a2a5ed1183560d3a69e6f9594a4bfe360a46f94eba83"}, + {file = "Pillow-8.3.2-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27a330bf7014ee034046db43ccbb05c766aa9e70b8d6c5260bfc38d73103b0ba"}, + {file = "Pillow-8.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13654b521fb98abdecec105ea3fb5ba863d1548c9b58831dd5105bb3873569f1"}, + {file = "Pillow-8.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a1bd983c565f92779be456ece2479840ec39d386007cd4ae83382646293d681b"}, + {file = "Pillow-8.3.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:4326ea1e2722f3dc00ed77c36d3b5354b8fb7399fb59230249ea6d59cbed90da"}, + {file = "Pillow-8.3.2-cp37-cp37m-win32.whl", hash = "sha256:085a90a99404b859a4b6c3daa42afde17cb3ad3115e44a75f0d7b4a32f06a6c9"}, + {file = "Pillow-8.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:18a07a683805d32826c09acfce44a90bf474e6a66ce482b1c7fcd3757d588df3"}, + {file = "Pillow-8.3.2-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:4e59e99fd680e2b8b11bbd463f3c9450ab799305d5f2bafb74fefba6ac058616"}, + {file = "Pillow-8.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4d89a2e9219a526401015153c0e9dd48319ea6ab9fe3b066a20aa9aee23d9fd3"}, + {file = 
"Pillow-8.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56fd98c8294f57636084f4b076b75f86c57b2a63a8410c0cd172bc93695ee979"}, + {file = "Pillow-8.3.2-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b11c9d310a3522b0fd3c35667914271f570576a0e387701f370eb39d45f08a4"}, + {file = "Pillow-8.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0412516dcc9de9b0a1e0ae25a280015809de8270f134cc2c1e32c4eeb397cf30"}, + {file = "Pillow-8.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bcb04ff12e79b28be6c9988f275e7ab69f01cc2ba319fb3114f87817bb7c74b6"}, + {file = "Pillow-8.3.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:0b9911ec70731711c3b6ebcde26caea620cbdd9dcb73c67b0730c8817f24711b"}, + {file = "Pillow-8.3.2-cp38-cp38-win32.whl", hash = "sha256:ce2e5e04bb86da6187f96d7bab3f93a7877830981b37f0287dd6479e27a10341"}, + {file = "Pillow-8.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:35d27687f027ad25a8d0ef45dd5208ef044c588003cdcedf05afb00dbc5c2deb"}, + {file = "Pillow-8.3.2-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:04835e68ef12904bc3e1fd002b33eea0779320d4346082bd5b24bec12ad9c3e9"}, + {file = "Pillow-8.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:10e00f7336780ca7d3653cf3ac26f068fa11b5a96894ea29a64d3dc4b810d630"}, + {file = "Pillow-8.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cde7a4d3687f21cffdf5bb171172070bb95e02af448c4c8b2f223d783214056"}, + {file = "Pillow-8.3.2-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c3ff00110835bdda2b1e2b07f4a2548a39744bb7de5946dc8e95517c4fb2ca6"}, + {file = "Pillow-8.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:35d409030bf3bd05fa66fb5fdedc39c521b397f61ad04309c90444e893d05f7d"}, + {file = "Pillow-8.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:6bff50ba9891be0a004ef48828e012babaaf7da204d81ab9be37480b9020a82b"}, + {file = "Pillow-8.3.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7dbfbc0020aa1d9bc1b0b8bcf255a7d73f4ad0336f8fd2533fcc54a4ccfb9441"}, + {file = "Pillow-8.3.2-cp39-cp39-win32.whl", hash = "sha256:963ebdc5365d748185fdb06daf2ac758116deecb2277ec5ae98139f93844bc09"}, + {file = "Pillow-8.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:cc9d0dec711c914ed500f1d0d3822868760954dce98dfb0b7382a854aee55d19"}, + {file = "Pillow-8.3.2-pp36-pypy36_pp73-macosx_10_10_x86_64.whl", hash = "sha256:2c661542c6f71dfd9dc82d9d29a8386287e82813b0375b3a02983feac69ef864"}, + {file = "Pillow-8.3.2-pp36-pypy36_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:548794f99ff52a73a156771a0402f5e1c35285bd981046a502d7e4793e8facaa"}, + {file = "Pillow-8.3.2-pp36-pypy36_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8b68f565a4175e12e68ca900af8910e8fe48aaa48fd3ca853494f384e11c8bcd"}, + {file = "Pillow-8.3.2-pp36-pypy36_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:838eb85de6d9307c19c655c726f8d13b8b646f144ca6b3771fa62b711ebf7624"}, + {file = "Pillow-8.3.2-pp36-pypy36_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:feb5db446e96bfecfec078b943cc07744cc759893cef045aa8b8b6d6aaa8274e"}, + {file = "Pillow-8.3.2-pp37-pypy37_pp73-macosx_10_10_x86_64.whl", hash = "sha256:fc0db32f7223b094964e71729c0361f93db43664dd1ec86d3df217853cedda87"}, + {file = "Pillow-8.3.2-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:fd4fd83aa912d7b89b4b4a1580d30e2a4242f3936882a3f433586e5ab97ed0d5"}, + {file = "Pillow-8.3.2-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d0c8ebbfd439c37624db98f3877d9ed12c137cadd99dde2d2eae0dab0bbfc355"}, + {file = "Pillow-8.3.2-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6cb3dd7f23b044b0737317f892d399f9e2f0b3a02b22b2c692851fb8120d82c6"}, + 
{file = "Pillow-8.3.2-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a66566f8a22561fc1a88dc87606c69b84fa9ce724f99522cf922c801ec68f5c1"}, + {file = "Pillow-8.3.2-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:ce651ca46d0202c302a535d3047c55a0131a720cf554a578fc1b8a2aff0e7d96"}, + {file = "Pillow-8.3.2.tar.gz", hash = "sha256:dde3f3ed8d00c72631bc19cbfff8ad3b6215062a5eed402381ad365f82f0c18c"}, ] prometheus-client = [ {file = "prometheus_client-0.11.0-py2.py3-none-any.whl", hash = "sha256:b014bc76815eb1399da8ce5fc84b7717a3e63652b0c0f8804092c9363acab1b2"}, @@ -2394,6 +2844,10 @@ prometheus-client = [ promise = [ {file = "promise-2.3.tar.gz", hash = "sha256:dfd18337c523ba4b6a58801c164c1904a9d4d1b1747c7d5dbf45b693a49d93d0"}, ] +prompt-toolkit = [ + {file = "prompt_toolkit-3.0.3-py3-none-any.whl", hash = "sha256:c93e53af97f630f12f5f62a3274e79527936ed466f038953dfa379d4941f651a"}, + {file = "prompt_toolkit-3.0.3.tar.gz", hash = "sha256:a402e9bf468b63314e37460b68ba68243d55b2f8c4d0192f85a019af3945050e"}, +] psycopg2-binary = [ {file = "psycopg2-binary-2.8.6.tar.gz", hash = "sha256:11b9c0ebce097180129e422379b824ae21c8f2a6596b159c7659e2e5a00e1aa0"}, {file = "psycopg2_binary-2.8.6-cp27-cp27m-macosx_10_6_intel.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:d14b140a4439d816e3b1229a4a525df917d6ea22a0771a2a78332273fd9528a4"}, @@ -2519,8 +2973,8 @@ pynacl = [ {file = "PyNaCl-1.4.0.tar.gz", hash = "sha256:54e9a2c849c742006516ad56a88f5c74bf2ce92c9f67435187c3c5953b346505"}, ] pynautobot = [ - {file = "pynautobot-1.0.2-py3-none-any.whl", hash = "sha256:19dd6fbb4663440f0d890137ff58dd5206cb4452f885f3eba7874888ca22727d"}, - {file = "pynautobot-1.0.2.tar.gz", hash = "sha256:8cb0afe97a48f16301a7a4bd58e409b4517fb5c9c9fc808f2cce6ea4e99e1408"}, + {file = "pynautobot-1.0.3-py3-none-any.whl", hash = "sha256:8ef66962ad8143ff2c9b01b1f66165f18d52302e97d18c32b47a9b67e0b7fe7c"}, + {file = "pynautobot-1.0.3.tar.gz", hash = 
"sha256:01fc6cf32e6ad520c7847d304dde356890960a4d95de48af6a1ce1f43fac6a65"}, ] pyparsing = [ {file = "pyparsing-2.4.7-py2.py3-none-any.whl", hash = "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"}, @@ -2554,8 +3008,8 @@ pyserial = [ {file = "pyserial-3.5.tar.gz", hash = "sha256:3c77e014170dfffbd816e6ffc205e9842efb10be9f58ec16d3e8675b4925cddb"}, ] python-dateutil = [ - {file = "python-dateutil-2.8.1.tar.gz", hash = "sha256:73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c"}, - {file = "python_dateutil-2.8.1-py2.py3-none-any.whl", hash = "sha256:75bb3f31ea686f1197762692a9ee6a7550b59fc6ca3a1f4b5d7e32fb98e2da2a"}, + {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, + {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, ] python3-openid = [ {file = "python3-openid-3.2.0.tar.gz", hash = "sha256:33fbf6928f401e0b790151ed2b5290b02545e8775f982485205a066f874aaeaf"}, @@ -2617,47 +3071,47 @@ redis = [ {file = "redis-3.5.3.tar.gz", hash = "sha256:0e7e0cfca8660dea8b7d5cd8c4f6c5e29e11f31158c0b0ae91a397f00e5a05a2"}, ] regex = [ - {file = "regex-2021.7.6-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:e6a1e5ca97d411a461041d057348e578dc344ecd2add3555aedba3b408c9f874"}, - {file = "regex-2021.7.6-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:6afe6a627888c9a6cfbb603d1d017ce204cebd589d66e0703309b8048c3b0854"}, - {file = "regex-2021.7.6-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:ccb3d2190476d00414aab36cca453e4596e8f70a206e2aa8db3d495a109153d2"}, - {file = "regex-2021.7.6-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:ed693137a9187052fc46eedfafdcb74e09917166362af4cc4fddc3b31560e93d"}, - {file = "regex-2021.7.6-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:99d8ab206a5270c1002bfcf25c51bf329ca951e5a169f3b43214fdda1f0b5f0d"}, - {file = 
"regex-2021.7.6-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:b85ac458354165405c8a84725de7bbd07b00d9f72c31a60ffbf96bb38d3e25fa"}, - {file = "regex-2021.7.6-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:3f5716923d3d0bfb27048242a6e0f14eecdb2e2a7fac47eda1d055288595f222"}, - {file = "regex-2021.7.6-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5983c19d0beb6af88cb4d47afb92d96751fb3fa1784d8785b1cdf14c6519407"}, - {file = "regex-2021.7.6-cp36-cp36m-win32.whl", hash = "sha256:c92831dac113a6e0ab28bc98f33781383fe294df1a2c3dfd1e850114da35fd5b"}, - {file = "regex-2021.7.6-cp36-cp36m-win_amd64.whl", hash = "sha256:791aa1b300e5b6e5d597c37c346fb4d66422178566bbb426dd87eaae475053fb"}, - {file = "regex-2021.7.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:59506c6e8bd9306cd8a41511e32d16d5d1194110b8cfe5a11d102d8b63cf945d"}, - {file = "regex-2021.7.6-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:564a4c8a29435d1f2256ba247a0315325ea63335508ad8ed938a4f14c4116a5d"}, - {file = "regex-2021.7.6-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:59c00bb8dd8775473cbfb967925ad2c3ecc8886b3b2d0c90a8e2707e06c743f0"}, - {file = "regex-2021.7.6-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:9a854b916806c7e3b40e6616ac9e85d3cdb7649d9e6590653deb5b341a736cec"}, - {file = "regex-2021.7.6-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:db2b7df831c3187a37f3bb80ec095f249fa276dbe09abd3d35297fc250385694"}, - {file = "regex-2021.7.6-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:173bc44ff95bc1e96398c38f3629d86fa72e539c79900283afa895694229fe6a"}, - {file = "regex-2021.7.6-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:15dddb19823f5147e7517bb12635b3c82e6f2a3a6b696cc3e321522e8b9308ad"}, - {file = "regex-2021.7.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ddeabc7652024803666ea09f32dd1ed40a0579b6fbb2a213eba590683025895"}, - {file = "regex-2021.7.6-cp37-cp37m-win32.whl", hash = 
"sha256:f080248b3e029d052bf74a897b9d74cfb7643537fbde97fe8225a6467fb559b5"}, - {file = "regex-2021.7.6-cp37-cp37m-win_amd64.whl", hash = "sha256:d8bbce0c96462dbceaa7ac4a7dfbbee92745b801b24bce10a98d2f2b1ea9432f"}, - {file = "regex-2021.7.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:edd1a68f79b89b0c57339bce297ad5d5ffcc6ae7e1afdb10f1947706ed066c9c"}, - {file = "regex-2021.7.6-cp38-cp38-manylinux1_i686.whl", hash = "sha256:422dec1e7cbb2efbbe50e3f1de36b82906def93ed48da12d1714cabcd993d7f0"}, - {file = "regex-2021.7.6-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:cbe23b323988a04c3e5b0c387fe3f8f363bf06c0680daf775875d979e376bd26"}, - {file = "regex-2021.7.6-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:0eb2c6e0fcec5e0f1d3bcc1133556563222a2ffd2211945d7b1480c1b1a42a6f"}, - {file = "regex-2021.7.6-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:1c78780bf46d620ff4fff40728f98b8afd8b8e35c3efd638c7df67be2d5cddbf"}, - {file = "regex-2021.7.6-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:bc84fb254a875a9f66616ed4538542fb7965db6356f3df571d783f7c8d256edd"}, - {file = "regex-2021.7.6-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:598c0a79b4b851b922f504f9f39a863d83ebdfff787261a5ed061c21e67dd761"}, - {file = "regex-2021.7.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:875c355360d0f8d3d827e462b29ea7682bf52327d500a4f837e934e9e4656068"}, - {file = "regex-2021.7.6-cp38-cp38-win32.whl", hash = "sha256:e586f448df2bbc37dfadccdb7ccd125c62b4348cb90c10840d695592aa1b29e0"}, - {file = "regex-2021.7.6-cp38-cp38-win_amd64.whl", hash = "sha256:2fe5e71e11a54e3355fa272137d521a40aace5d937d08b494bed4529964c19c4"}, - {file = "regex-2021.7.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6110bab7eab6566492618540c70edd4d2a18f40ca1d51d704f1d81c52d245026"}, - {file = "regex-2021.7.6-cp39-cp39-manylinux1_i686.whl", hash = "sha256:4f64fc59fd5b10557f6cd0937e1597af022ad9b27d454e182485f1db3008f417"}, - {file = 
"regex-2021.7.6-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:89e5528803566af4df368df2d6f503c84fbfb8249e6631c7b025fe23e6bd0cde"}, - {file = "regex-2021.7.6-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:2366fe0479ca0e9afa534174faa2beae87847d208d457d200183f28c74eaea59"}, - {file = "regex-2021.7.6-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:f9392a4555f3e4cb45310a65b403d86b589adc773898c25a39184b1ba4db8985"}, - {file = "regex-2021.7.6-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:2bceeb491b38225b1fee4517107b8491ba54fba77cf22a12e996d96a3c55613d"}, - {file = "regex-2021.7.6-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:f98dc35ab9a749276f1a4a38ab3e0e2ba1662ce710f6530f5b0a6656f1c32b58"}, - {file = "regex-2021.7.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:319eb2a8d0888fa6f1d9177705f341bc9455a2c8aca130016e52c7fe8d6c37a3"}, - {file = "regex-2021.7.6-cp39-cp39-win32.whl", hash = "sha256:eaf58b9e30e0e546cdc3ac06cf9165a1ca5b3de8221e9df679416ca667972035"}, - {file = "regex-2021.7.6-cp39-cp39-win_amd64.whl", hash = "sha256:4c9c3155fe74269f61e27617529b7f09552fbb12e44b1189cebbdb24294e6e1c"}, - {file = "regex-2021.7.6.tar.gz", hash = "sha256:8394e266005f2d8c6f0bc6780001f7afa3ef81a7a2111fa35058ded6fce79e4d"}, + {file = "regex-2021.8.28-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9d05ad5367c90814099000442b2125535e9d77581855b9bee8780f1b41f2b1a2"}, + {file = "regex-2021.8.28-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3bf1bc02bc421047bfec3343729c4bbbea42605bcfd6d6bfe2c07ade8b12d2a"}, + {file = "regex-2021.8.28-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f6a808044faae658f546dd5f525e921de9fa409de7a5570865467f03a626fc0"}, + {file = "regex-2021.8.28-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:a617593aeacc7a691cc4af4a4410031654f2909053bd8c8e7db837f179a630eb"}, + {file = 
"regex-2021.8.28-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:79aef6b5cd41feff359acaf98e040844613ff5298d0d19c455b3d9ae0bc8c35a"}, + {file = "regex-2021.8.28-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0fc1f8f06977c2d4f5e3d3f0d4a08089be783973fc6b6e278bde01f0544ff308"}, + {file = "regex-2021.8.28-cp310-cp310-win32.whl", hash = "sha256:6eebf512aa90751d5ef6a7c2ac9d60113f32e86e5687326a50d7686e309f66ed"}, + {file = "regex-2021.8.28-cp310-cp310-win_amd64.whl", hash = "sha256:ac88856a8cbccfc14f1b2d0b829af354cc1743cb375e7f04251ae73b2af6adf8"}, + {file = "regex-2021.8.28-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:c206587c83e795d417ed3adc8453a791f6d36b67c81416676cad053b4104152c"}, + {file = "regex-2021.8.28-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8690ed94481f219a7a967c118abaf71ccc440f69acd583cab721b90eeedb77c"}, + {file = "regex-2021.8.28-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:328a1fad67445550b982caa2a2a850da5989fd6595e858f02d04636e7f8b0b13"}, + {file = "regex-2021.8.28-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c7cb4c512d2d3b0870e00fbbac2f291d4b4bf2634d59a31176a87afe2777c6f0"}, + {file = "regex-2021.8.28-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:66256b6391c057305e5ae9209941ef63c33a476b73772ca967d4a2df70520ec1"}, + {file = "regex-2021.8.28-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8e44769068d33e0ea6ccdf4b84d80c5afffe5207aa4d1881a629cf0ef3ec398f"}, + {file = "regex-2021.8.28-cp36-cp36m-win32.whl", hash = "sha256:08d74bfaa4c7731b8dac0a992c63673a2782758f7cfad34cf9c1b9184f911354"}, + {file = "regex-2021.8.28-cp36-cp36m-win_amd64.whl", hash = 
"sha256:abb48494d88e8a82601af905143e0de838c776c1241d92021e9256d5515b3645"}, + {file = "regex-2021.8.28-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b4c220a1fe0d2c622493b0a1fd48f8f991998fb447d3cd368033a4b86cf1127a"}, + {file = "regex-2021.8.28-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4a332404baa6665b54e5d283b4262f41f2103c255897084ec8f5487ce7b9e8e"}, + {file = "regex-2021.8.28-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c61dcc1cf9fd165127a2853e2c31eb4fb961a4f26b394ac9fe5669c7a6592892"}, + {file = "regex-2021.8.28-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:ee329d0387b5b41a5dddbb6243a21cb7896587a651bebb957e2d2bb8b63c0791"}, + {file = "regex-2021.8.28-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f60667673ff9c249709160529ab39667d1ae9fd38634e006bec95611f632e759"}, + {file = "regex-2021.8.28-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b844fb09bd9936ed158ff9df0ab601e2045b316b17aa8b931857365ea8586906"}, + {file = "regex-2021.8.28-cp37-cp37m-win32.whl", hash = "sha256:4cde065ab33bcaab774d84096fae266d9301d1a2f5519d7bd58fc55274afbf7a"}, + {file = "regex-2021.8.28-cp37-cp37m-win_amd64.whl", hash = "sha256:1413b5022ed6ac0d504ba425ef02549a57d0f4276de58e3ab7e82437892704fc"}, + {file = "regex-2021.8.28-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ed4b50355b066796dacdd1cf538f2ce57275d001838f9b132fab80b75e8c84dd"}, + {file = "regex-2021.8.28-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28fc475f560d8f67cc8767b94db4c9440210f6958495aeae70fac8faec631797"}, + {file = "regex-2021.8.28-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bdc178caebd0f338d57ae445ef8e9b737ddf8fbc3ea187603f65aec5b041248f"}, + {file = 
"regex-2021.8.28-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:999ad08220467b6ad4bd3dd34e65329dd5d0df9b31e47106105e407954965256"}, + {file = "regex-2021.8.28-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:808ee5834e06f57978da3e003ad9d6292de69d2bf6263662a1a8ae30788e080b"}, + {file = "regex-2021.8.28-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d5111d4c843d80202e62b4fdbb4920db1dcee4f9366d6b03294f45ed7b18b42e"}, + {file = "regex-2021.8.28-cp38-cp38-win32.whl", hash = "sha256:473858730ef6d6ff7f7d5f19452184cd0caa062a20047f6d6f3e135a4648865d"}, + {file = "regex-2021.8.28-cp38-cp38-win_amd64.whl", hash = "sha256:31a99a4796bf5aefc8351e98507b09e1b09115574f7c9dbb9cf2111f7220d2e2"}, + {file = "regex-2021.8.28-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:04f6b9749e335bb0d2f68c707f23bb1773c3fb6ecd10edf0f04df12a8920d468"}, + {file = "regex-2021.8.28-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b006628fe43aa69259ec04ca258d88ed19b64791693df59c422b607b6ece8bb"}, + {file = "regex-2021.8.28-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:121f4b3185feaade3f85f70294aef3f777199e9b5c0c0245c774ae884b110a2d"}, + {file = "regex-2021.8.28-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:a577a21de2ef8059b58f79ff76a4da81c45a75fe0bfb09bc8b7bb4293fa18983"}, + {file = "regex-2021.8.28-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1743345e30917e8c574f273f51679c294effba6ad372db1967852f12c76759d8"}, + {file = "regex-2021.8.28-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e1e8406b895aba6caa63d9fd1b6b1700d7e4825f78ccb1e5260551d168db38ed"}, + {file = 
"regex-2021.8.28-cp39-cp39-win32.whl", hash = "sha256:ed283ab3a01d8b53de3a05bfdf4473ae24e43caee7dcb5584e86f3f3e5ab4374"}, + {file = "regex-2021.8.28-cp39-cp39-win_amd64.whl", hash = "sha256:610b690b406653c84b7cb6091facb3033500ee81089867ee7d59e675f9ca2b73"}, + {file = "regex-2021.8.28.tar.gz", hash = "sha256:f585cbbeecb35f35609edccb95efd95a3e35824cd7752b586503f7e6087303f1"}, ] requests = [ {file = "requests-2.26.0-py2.py3-none-any.whl", hash = "sha256:6c1246513ecd5ecd4528a0906f910e8f0f9c6b8ec72030dc9fd154dc1a6efd24"}, @@ -2669,8 +3123,8 @@ requests-oauthlib = [ {file = "requests_oauthlib-1.3.0-py3.7.egg", hash = "sha256:fa6c47b933f01060936d87ae9327fead68768b69c6c9ea2109c48be30f2d4dbc"}, ] rq = [ - {file = "rq-1.9.0-py2.py3-none-any.whl", hash = "sha256:7af1e9706dbe6f1eac16dffacd8271ec27c1369950941f14dab6bb08a62979d7"}, - {file = "rq-1.9.0.tar.gz", hash = "sha256:bdfef943de838955e474cfd0e25b9b8c53ed4b9c361fe4bb11cf56d17a87acc5"}, + {file = "rq-1.10.0-py2.py3-none-any.whl", hash = "sha256:92950a3e60863de48dd1800882939bbaf089a37497ebf9f2ecf7c9fd0a4c4a95"}, + {file = "rq-1.10.0.tar.gz", hash = "sha256:be09ec43fae9a75a4d26ea3cd520e5fa3ea2ea8cf481be33e6ec9416f0369cac"}, ] "ruamel.yaml" = [ {file = "ruamel.yaml-0.16.13-py2.py3-none-any.whl", hash = "sha256:64b06e7873eb8e1125525ecef7345447d786368cadca92a7cd9b59eae62e95a3"}, @@ -2704,12 +3158,12 @@ rx = [ {file = "Rx-1.6.1.tar.gz", hash = "sha256:13a1d8d9e252625c173dc795471e614eadfe1cf40ffc684e08b8fff0d9748c23"}, ] scp = [ - {file = "scp-0.13.6-py2.py3-none-any.whl", hash = "sha256:5e23f22b00bdbeed83a982c6b2dfae98c125b80019c15fbb16dd64dfd864a452"}, - {file = "scp-0.13.6.tar.gz", hash = "sha256:0a72f9d782e968b09b114d5607f96b1f16fe9942857afb355399edd55372fcf1"}, + {file = "scp-0.14.1-py2.py3-none-any.whl", hash = "sha256:e4e0b9b41b73ebcc4e988e8f43039dc3715e88f3ee7b3e2d21521975bcfc82ee"}, + {file = "scp-0.14.1.tar.gz", hash = "sha256:b776bd6ce8c8385aa9a025b64a9815b5d798f12d4ef0d712d569503f62aece8b"}, ] singledispatch = [ - 
{file = "singledispatch-3.6.2-py2.py3-none-any.whl", hash = "sha256:0d428477703d8386eb6aeed6e522c9f22d49f4363cdf4ed6a2ba3dc276053e20"}, - {file = "singledispatch-3.6.2.tar.gz", hash = "sha256:d5bb9405a4b8de48e36709238e8b91b4f6f300f81a5132ba2531a9a738eca391"}, + {file = "singledispatch-3.7.0-py2.py3-none-any.whl", hash = "sha256:bc77afa97c8a22596d6d4fc20f1b7bdd2b86edc2a65a4262bdd7cc3cc19aa989"}, + {file = "singledispatch-3.7.0.tar.gz", hash = "sha256:c1a4d5c1da310c3fd8fccfb8d4e1cb7df076148fd5d858a819e37fffe44f3092"}, ] six = [ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, @@ -2733,12 +3187,12 @@ social-auth-core = [ {file = "social_auth_core-4.1.0-py3-none-any.whl", hash = "sha256:983b53167ac56e7ba4909db555602a6e7a98c97ca47183bb222eb85ba627bf2b"}, ] sqlparse = [ - {file = "sqlparse-0.4.1-py3-none-any.whl", hash = "sha256:017cde379adbd6a1f15a61873f43e8274179378e95ef3fede90b5aa64d304ed0"}, - {file = "sqlparse-0.4.1.tar.gz", hash = "sha256:0f91fd2e829c44362cbcfab3e9ae12e22badaa8a29ad5ff599f9ec109f0454e8"}, + {file = "sqlparse-0.4.2-py3-none-any.whl", hash = "sha256:48719e356bb8b42991bdbb1e8b83223757b93789c00910a616a071910ca4a64d"}, + {file = "sqlparse-0.4.2.tar.gz", hash = "sha256:0c00730c74263a94e5a9919ade150dfc3b19c574389985446148402998287dae"}, ] stevedore = [ - {file = "stevedore-3.3.0-py3-none-any.whl", hash = "sha256:50d7b78fbaf0d04cd62411188fa7eedcb03eb7f4c4b37005615ceebe582aa82a"}, - {file = "stevedore-3.3.0.tar.gz", hash = "sha256:3a5bbd0652bf552748871eaa73a4a8dc2899786bc497a2aa1fcb4dcdb0debeee"}, + {file = "stevedore-3.4.0-py3-none-any.whl", hash = "sha256:920ce6259f0b2498aaa4545989536a27e4e4607b8318802d7ddc3a533d3d069e"}, + {file = "stevedore-3.4.0.tar.gz", hash = "sha256:59b58edb7f57b11897f150475e7bc0c39c5381f0b8e3fa9f5c20ce6c89ec4aa1"}, ] svgwrite = [ {file = "svgwrite-1.4.1-py3-none-any.whl", hash = "sha256:4b21652a1d9c543a6bf4f9f2a54146b214519b7540ca60cb99968ad09ef631d0"}, @@ 
-2765,8 +3219,8 @@ toml = [ {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, ] transitions = [ - {file = "transitions-0.8.8-py2.py3-none-any.whl", hash = "sha256:f35efa070fbdf9a0f3f093b19f1258068786af75786a8cbcc884444f3d1a66d4"}, - {file = "transitions-0.8.8.tar.gz", hash = "sha256:e7a86b31a161a76133f189b3ae9dad2755a80ea4c1e0eee1805648d021fb677d"}, + {file = "transitions-0.8.9-py2.py3-none-any.whl", hash = "sha256:6e03db07fb29bfdb2f560c9da834d995782a06e50dda26e103cfdd5bcb1c56b7"}, + {file = "transitions-0.8.9.tar.gz", hash = "sha256:fc2ec6d6b6f986cd7e28e119eeb9ba1c9cc51ab4fbbdb7f2dedad01983fd2de0"}, ] typed-ast = [ {file = "typed_ast-1.4.3-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:2068531575a125b87a41802130fa7e29f26c09a2833fea68d9a40cf33902eba6"}, @@ -2801,9 +3255,9 @@ typed-ast = [ {file = "typed_ast-1.4.3.tar.gz", hash = "sha256:fb1bbeac803adea29cedd70781399c99138358c26d05fcbd23c13016b7f5ec65"}, ] typing-extensions = [ - {file = "typing_extensions-3.10.0.0-py2-none-any.whl", hash = "sha256:0ac0f89795dd19de6b97debb0c6af1c70987fd80a2d62d1958f7e56fcc31b497"}, - {file = "typing_extensions-3.10.0.0-py3-none-any.whl", hash = "sha256:779383f6086d90c99ae41cf0ff39aac8a7937a9283ce0a414e5dd782f4c94a84"}, - {file = "typing_extensions-3.10.0.0.tar.gz", hash = "sha256:50b6f157849174217d0656f99dc82fe932884fb250826c18350e159ec6cdf342"}, + {file = "typing_extensions-3.10.0.2-py2-none-any.whl", hash = "sha256:d8226d10bc02a29bcc81df19a26e56a9647f8b0a6d4a83924139f4a8b01f17b7"}, + {file = "typing_extensions-3.10.0.2-py3-none-any.whl", hash = "sha256:f1d25edafde516b146ecd0613dabcc61409817af4766fbbcfb8d1ad4ec441a34"}, + {file = "typing_extensions-3.10.0.2.tar.gz", hash = "sha256:49f75d16ff11f1cd258e1b988ccff82a3ca5570217d7ad8c5f48205dd99a677e"}, ] uritemplate = [ {file = "uritemplate-3.0.1-py2.py3-none-any.whl", hash = "sha256:07620c3f3f8eed1f12600845892b0e036a2420acf513c53f7de0abd911a5894f"}, @@ -2813,9 +3267,24 @@ 
urllib3 = [ {file = "urllib3-1.26.6-py2.py3-none-any.whl", hash = "sha256:39fb8672126159acb139a7718dd10806104dec1e2f0f6c88aab05d17df10c8d4"}, {file = "urllib3-1.26.6.tar.gz", hash = "sha256:f57b4c16c62fa2760b7e3d97c35b255512fb6b59a259730f36ba32ce9f8e342f"}, ] +vine = [ + {file = "vine-5.0.0-py2.py3-none-any.whl", hash = "sha256:4c9dceab6f76ed92105027c49c823800dd33cacce13bdedc5b914e3514b7fb30"}, + {file = "vine-5.0.0.tar.gz", hash = "sha256:7d3b1624a953da82ef63462013bbd271d3eb75751489f9807598e8f340bd637e"}, +] +wcwidth = [ + {file = "wcwidth-0.2.5-py2.py3-none-any.whl", hash = "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784"}, + {file = "wcwidth-0.2.5.tar.gz", hash = "sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83"}, +] +win32-setctime = [ + {file = "win32_setctime-1.0.3-py3-none-any.whl", hash = "sha256:dc925662de0a6eb987f0b01f599c01a8236cb8c62831c22d9cada09ad958243e"}, + {file = "win32_setctime-1.0.3.tar.gz", hash = "sha256:4e88556c32fdf47f64165a2180ba4552f8bb32c1103a2fafd05723a0bd42bd4b"}, +] wrapt = [ {file = "wrapt-1.12.1.tar.gz", hash = "sha256:b62ffa81fb85f4332a4f609cab4ac40709470da05643a082ec1eb88e6d9b97d7"}, ] +yamllint = [ + {file = "yamllint-1.26.3.tar.gz", hash = "sha256:3934dcde484374596d6b52d8db412929a169f6d9e52e20f9ade5bf3523d9b96e"}, +] yamlordereddictloader = [ {file = "yamlordereddictloader-0.4.0.tar.gz", hash = "sha256:7f30f0b99ea3f877f7cb340c570921fa9d639b7f69cba18be051e27f8de2080e"}, ] diff --git a/pyproject.toml b/pyproject.toml index cbe01698..c549c8ab 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "nautobot-golden-config" -version = "0.9.5" +version = "0.9.6" description = "A plugin for configuration on nautobot" authors = ["Network to Code, LLC", ""] @@ -30,11 +30,12 @@ packages = [ [tool.poetry.dependencies] python = "^3.6" -matplotlib = "^3.3.2" -nautobot-plugin-nornir = ">=0.9.3" -django-debug-toolbar = "^3.2.1" astroid = "^2.5.5" +deepdiff = "^5.5.0" 
+django-debug-toolbar = "^3.2.1" django-pivot = "^1.8.1" +matplotlib = "^3.3.2" +nautobot-plugin-nornir = ">=0.9.4" [tool.poetry.dev-dependencies] black = "^20.8b1" @@ -44,6 +45,7 @@ pylint-django = "^2.0.15" pydocstyle = "^5.0.2" flake8 = "^3.8.4" invoke = "^1.5.0" +yamllint = "^1.26.3" [tool.black] diff --git a/tasks.py b/tasks.py index 74552c80..ec788c51 100644 --- a/tasks.py +++ b/tasks.py @@ -1,236 +1,239 @@ """Tasks for use with Invoke.""" import os -from invoke import task +from distutils.util import strtobool +from invoke import Collection, task as invoke_task -PYTHON_VER = os.getenv("PYTHON_VER", "3.7") -NAUTOBOT_VER = os.getenv("NAUTOBOT_VER", "1.0.1") -NAUTOBOT_SRC_URL = os.getenv("NAUTOBOT_SRC_URL", f"https://github.com/nautobot/nautobot/archive/{NAUTOBOT_VER}.tar.gz") -# Name of the docker image/container -NAME = os.getenv("IMAGE_NAME", "nautobot-golden-config") -PWD = os.getcwd() +def is_truthy(arg): + """Convert "truthy" strings into Booleans. -COMPOSE_FILE = "development/docker-compose.yml" -COMPOSE_OVERRIDE = "docker-compose.override.yml" -BUILD_NAME = "nautobot_golden_config" + Examples: + >>> is_truthy('yes') + True + Args: + arg (str): Truthy string (True values are y, yes, t, true, on and 1; false values are n, no, + f, false, off and 0. Raises ValueError if val is anything else. 
+ """ + if isinstance(arg, bool): + return arg + return bool(strtobool(arg)) + + +COMPOSE_FILES = ["docker-compose.yml", "../docker-compose.override.yml"] +if os.getenv("NAUTOBOT_USE_MYSQL"): + COMPOSE_FILES.append("docker-compose.mysql.yml") + +namespace = Collection("nautobot_golden_config") +namespace.configure( + { + "nautobot_golden_config": { + "nautobot_ver": "1.0.1", + "project_name": "nautobot_golden_config", + "python_ver": "3.7", + "local": False, + "compose_dir": os.path.join(os.path.dirname(__file__), "development"), + "compose_files": COMPOSE_FILES, + } + } +) + + +def task(function=None, *args, **kwargs): + """Task decorator to override the default Invoke task decorator and add each task to the invoke namespace.""" + + def task_wrapper(function=None): + """Wrapper around invoke.task to add the task to the namespace as well.""" + if args or kwargs: + task_func = invoke_task(*args, **kwargs)(function) + else: + task_func = invoke_task(function) + namespace.add_task(task_func) + return task_func + + if function: + # The decorator was called with no arguments + return task_wrapper(function) + # The decorator was called with arguments + return task_wrapper + + +def docker_compose(context, command, **kwargs): + """Helper function for running a specific docker-compose command with all appropriate parameters and environment. + + Args: + context (obj): Used to run specific commands + command (str): Command string to append to the "docker-compose ..." command, such as "build", "up", etc. + **kwargs: Passed through to the context.run() call. 
+ """ + build_env = { + "NAUTOBOT_VER": context.nautobot_golden_config.nautobot_ver, + "PYTHON_VER": context.nautobot_golden_config.python_ver, + } + compose_command = f'docker-compose --project-name {context.nautobot_golden_config.project_name} --project-directory "{context.nautobot_golden_config.compose_dir}"' + for compose_file in context.nautobot_golden_config.compose_files: + compose_file_path = os.path.join(context.nautobot_golden_config.compose_dir, compose_file) + if os.path.isfile(compose_file_path): + compose_command += f' -f "{compose_file_path}"' + compose_command += f" {command}" + print(f'Running docker-compose command "{command}"') + return context.run(compose_command, env=build_env, **kwargs) + + +def run_command(context, command, **kwargs): + """Wrapper to run a command locally or inside the nautobot container.""" + if is_truthy(context.nautobot_golden_config.local): + context.run(command, **kwargs) + else: + # Check if nautobot is running, no need to start another nautobot container to run a command + docker_compose_status = "ps --services --filter status=running" + results = docker_compose(context, docker_compose_status, hide="out") + if "nautobot" in results.stdout: + compose_command = f"exec nautobot {command}" + else: + compose_command = f"run --entrypoint '{command}' nautobot" -DEFAULT_ENV = { - "NAUTOBOT_VER": NAUTOBOT_VER, - "PYTHON_VER": PYTHON_VER, - "NAUTOBOT_SRC_URL": NAUTOBOT_SRC_URL, -} + docker_compose(context, compose_command, pty=True) -COMPOSE_APPEND = "" -if os.path.isfile(COMPOSE_OVERRIDE): - COMPOSE_APPEND = f"-f {COMPOSE_OVERRIDE}" -COMPOSE_COMMAND = f"docker-compose -f {COMPOSE_FILE} {COMPOSE_APPEND} -p {BUILD_NAME}" -environment = DEFAULT_ENV # ------------------------------------------------------------------------------ # BUILD # ------------------------------------------------------------------------------ -@task -def build(context, nautobot_ver=NAUTOBOT_VER, python_ver=PYTHON_VER): +@task( + help={ + "force_rm": "Always 
remove intermediate containers", + "cache": "Whether to use Docker's cache when building the image (defaults to enabled)", + } +) +def build(context, force_rm=False, cache=True): """Build all docker images. Args: context (obj): Used to run specific commands - nautobot_ver (str): Nautobot version to use to build the container - python_ver (str): Will use the Python version docker image to build from """ - DEFAULT_ENV[NAUTOBOT_VER] = nautobot_ver - DEFAULT_ENV[PYTHON_VER] = python_ver + command = "build" - context.run( - f"{COMPOSE_COMMAND} build", - env=DEFAULT_ENV, - ) + if not cache: + command += " --no-cache" + if force_rm: + command += " --force-rm" + + print(f"Building Nautobot with Python {context.nautobot_golden_config.python_ver}...") + docker_compose(context, command) # ------------------------------------------------------------------------------ # START / STOP / DEBUG # ------------------------------------------------------------------------------ @task -def debug(context, nautobot_ver=NAUTOBOT_VER, python_ver=PYTHON_VER): +def debug(context): """Start Nautobot and its dependencies in debug mode. Args: context (obj): Used to run specific commands - nautobot_ver (str): Nautobot version to use to build the container - python_ver (str): Will use the Python version docker image to build from """ - DEFAULT_ENV[NAUTOBOT_VER] = nautobot_ver - DEFAULT_ENV[PYTHON_VER] = python_ver - - print("Starting Nautobot .. ") - context.run( - f"{COMPOSE_COMMAND} up", - env=DEFAULT_ENV, - ) + print("Starting Nautobot in debug mode...") + docker_compose(context, "up") @task -def start(context, nautobot_ver=NAUTOBOT_VER, python_ver=PYTHON_VER): +def start(context): """Start Nautobot and its dependencies in detached mode. 
Args: context (obj): Used to run specific commands - nautobot_ver (str): Nautobot version to use to build the container - python_ver (str): Will use the Python version docker image to build from """ - DEFAULT_ENV[NAUTOBOT_VER] = nautobot_ver - DEFAULT_ENV[PYTHON_VER] = python_ver - print("Starting Nautobot in detached mode.. ") - context.run( - f"{COMPOSE_COMMAND} up -d", - env=DEFAULT_ENV, - ) + docker_compose(context, "up --detach") @task -def stop(context, nautobot_ver=NAUTOBOT_VER, python_ver=PYTHON_VER): +def stop(context): """Stop Nautobot and its dependencies. Args: context (obj): Used to run specific commands - nautobot_ver (str): Nautobot version to use to build the container - python_ver (str): Will use the Python version docker image to build from """ - DEFAULT_ENV[NAUTOBOT_VER] = nautobot_ver - DEFAULT_ENV[PYTHON_VER] = python_ver - - print("Stopping Nautobot .. ") - context.run( - f"{COMPOSE_COMMAND} down", - env=DEFAULT_ENV, - ) + print("Stopping Nautobot...") + docker_compose(context, "down") @task -def destroy(context, nautobot_ver=NAUTOBOT_VER, python_ver=PYTHON_VER): +def destroy(context): """Destroy all containers and volumes. 
Args: context (obj): Used to run specific commands - nautobot_ver (str): Nautobot version to use to build the container - python_ver (str): Will use the Python version docker image to build from """ - DEFAULT_ENV[NAUTOBOT_VER] = nautobot_ver - DEFAULT_ENV[PYTHON_VER] = python_ver - - context.run( - f"{COMPOSE_COMMAND} down", - env=DEFAULT_ENV, - ) - context.run( - f"docker volume rm -f {BUILD_NAME}_pgdata_nautobot_golden_config", - env=DEFAULT_ENV, - ) + print("Destroying Nautobot...") + docker_compose(context, "down --volumes") # ------------------------------------------------------------------------------ # ACTIONS # ------------------------------------------------------------------------------ @task -def nbshell(context, nautobot_ver=NAUTOBOT_VER, python_ver=PYTHON_VER): +def nbshell(context): """Launch a nbshell session. Args: context (obj): Used to run specific commands - nautobot_ver (str): Nautobot version to use to build the container - python_ver (str): Will use the Python version docker image to build from """ - DEFAULT_ENV[NAUTOBOT_VER] = nautobot_ver - DEFAULT_ENV[PYTHON_VER] = python_ver - - context.run( - f"{COMPOSE_COMMAND} run nautobot nautobot-server nbshell", - env=DEFAULT_ENV, - pty=True, - ) + command = "nautobot-server nbshell" + run_command(context, command) @task -def cli(context, nautobot_ver=NAUTOBOT_VER, python_ver=PYTHON_VER): +def cli(context): """Launch a bash shell inside the running Nautobot container. 
Args: context (obj): Used to run specific commands - nautobot_ver (str): Nautobot version to use to build the container - python_ver (str): Will use the Python version docker image to build from """ - DEFAULT_ENV[NAUTOBOT_VER] = nautobot_ver - DEFAULT_ENV[PYTHON_VER] = python_ver + run_command(context, "bash") - context.run( - f"{COMPOSE_COMMAND} run nautobot bash", - env=DEFAULT_ENV, - pty=True, - ) - -@task -def create_user(context, user="admin", nautobot_ver=NAUTOBOT_VER, python_ver=PYTHON_VER): +@task( + help={ + "user": "name of the superuser to create (default: admin)", + } +) +def create_user(context, user="admin"): """Create a new user in django (default: admin), will prompt for password. Args: context (obj): Used to run specific commands user (str): name of the superuser to create - nautobot_ver (str): Nautobot version to use to build the container - python_ver (str): Will use the Python version docker image to build from """ - DEFAULT_ENV[NAUTOBOT_VER] = nautobot_ver - DEFAULT_ENV[PYTHON_VER] = python_ver - - context.run( - f"{COMPOSE_COMMAND} run nautobot nautobot-server createsuperuser --username {user}", - env=DEFAULT_ENV, - pty=True, - ) + command = f"nautobot-server createsuperuser --username {user}" + run_command(context, command) -@task -def makemigrations(context, name="", nautobot_ver=NAUTOBOT_VER, python_ver=PYTHON_VER): +@task( + help={ + "name": "name of the migration to be created; if unspecified, will autogenerate a name", + } +) +def makemigrations(context, name=""): """Run Make Migration in Django. 
Args: context (obj): Used to run specific commands name (str): Name of the migration to be created - nautobot_ver (str): Nautobot version to use to build the container - python_ver (str): Will use the Python version docker image to build from """ - DEFAULT_ENV[NAUTOBOT_VER] = nautobot_ver - DEFAULT_ENV[PYTHON_VER] = python_ver - - context.run( - f"{COMPOSE_COMMAND} up -d postgres", - env=DEFAULT_ENV, - ) + command = "nautobot-server makemigrations nautobot_golden_config" if name: - context.run( - f"{COMPOSE_COMMAND} run nautobot nautobot-server makemigrations nautobot_golden_config --name {name}", - env=DEFAULT_ENV, - ) - else: - context.run( - f"{COMPOSE_COMMAND} run nautobot nautobot-server makemigrations nautobot_golden_config", - env=DEFAULT_ENV, - ) + command += f" --name {name}" - context.run( - f"{COMPOSE_COMMAND} down", - env=DEFAULT_ENV, - ) + run_command(context, command) # ------------------------------------------------------------------------------ # TESTS / LINTING # ------------------------------------------------------------------------------ @task -def unittest(context, nautobot_ver=NAUTOBOT_VER, python_ver=PYTHON_VER): +def unittest(context): """Run Django unit tests for the plugin. Args: @@ -238,122 +241,96 @@ def unittest(context, nautobot_ver=NAUTOBOT_VER, python_ver=PYTHON_VER): nautobot_ver (str): Nautobot version to use to build the container python_ver (str): Will use the Python version docker image to build from """ - DEFAULT_ENV[NAUTOBOT_VER] = nautobot_ver - DEFAULT_ENV[PYTHON_VER] = python_ver - - docker = f"{COMPOSE_COMMAND} run --entrypoint='' nautobot " - context.run( - f'{docker} sh -c "nautobot-server test nautobot_golden_config"', - env=DEFAULT_ENV, - pty=True, - ) + command = "nautobot-server test nautobot_golden_config" + run_command(context, command) @task -def pylint(context, nautobot_ver=NAUTOBOT_VER, python_ver=PYTHON_VER): +def pylint(context): """Run pylint code analysis. 
Args: context (obj): Used to run specific commands - nautobot_ver (str): Nautobot version to use to build the container - python_ver (str): Will use the Python version docker image to build from """ - DEFAULT_ENV[NAUTOBOT_VER] = nautobot_ver - DEFAULT_ENV[PYTHON_VER] = python_ver - - docker = f"{COMPOSE_COMMAND} run --entrypoint='' nautobot " - # We exclude the /migrations/ directory since it is autogenerated code - context.run( - f"{docker} sh -c \"cd /source && find . -name '*.py' -not -path '*/migrations/*' | " - 'PYTHONPATH=/source/development DJANGO_SETTINGS_MODULE=nautobot_config xargs pylint"', - env=DEFAULT_ENV, - pty=True, - ) + command = 'pylint --init-hook "import nautobot; nautobot.setup()" --rcfile pyproject.toml nautobot_golden_config' + run_command(context, command) @task -def black(context, nautobot_ver=NAUTOBOT_VER, python_ver=PYTHON_VER): +def black(context): """Run black to check that Python files adhere to its style standards. Args: context (obj): Used to run specific commands - nautobot_ver (str): Nautobot version to use to build the container - python_ver (str): Will use the Python version docker image to build from """ - DEFAULT_ENV[NAUTOBOT_VER] = nautobot_ver - DEFAULT_ENV[PYTHON_VER] = python_ver - - docker = f"{COMPOSE_COMMAND} run --entrypoint='' nautobot " - context.run( - f'{docker} sh -c "cd /source && black --check --diff ."', - env=DEFAULT_ENV, - pty=True, - ) + command = "black --check --diff ." + run_command(context, command) @task -def pydocstyle(context, nautobot_ver=NAUTOBOT_VER, python_ver=PYTHON_VER): +def pydocstyle(context): """Run pydocstyle to validate docstring formatting adheres to NTC defined standards. 
Args: context (obj): Used to run specific commands - nautobot_ver (str): Nautobot version to use to build the container - python_ver (str): Will use the Python version docker image to build from """ - DEFAULT_ENV[NAUTOBOT_VER] = nautobot_ver - DEFAULT_ENV[PYTHON_VER] = python_ver - - docker = f"{COMPOSE_COMMAND} run --entrypoint='' nautobot " - # We exclude the /migrations/ directory since it is autogenerated code - context.run( - f"{docker} sh -c \"cd /source && find . -name '*.py' -not -path '*/migrations/*' | xargs pydocstyle\"", - env=DEFAULT_ENV, - pty=True, - ) + command = 'pydocstyle --config=.pydocstyle.ini --match-dir="^(?!migrations).*"' + run_command(context, command) @task -def bandit(context, nautobot_ver=NAUTOBOT_VER, python_ver=PYTHON_VER): +def bandit(context): """Run bandit to validate basic static code security analysis. Args: context (obj): Used to run specific commands - nautobot_ver (str): Nautobot version to use to build the container - python_ver (str): Will use the Python version docker image to build from """ - DEFAULT_ENV[NAUTOBOT_VER] = nautobot_ver - DEFAULT_ENV[PYTHON_VER] = python_ver + command = "bandit --recursive . --configfile .bandit.yml" + run_command(context, command) + - docker = f"{COMPOSE_COMMAND} run --entrypoint='' nautobot " - context.run( - f'{docker} sh -c "cd /source && bandit --recursive ./ --configfile .bandit.yml"', - env=DEFAULT_ENV, - pty=True, - ) +@task +def yamllint(context): + """Run yamllint to validate formating adheres to NTC defined YAML standards. + + Args: + context (obj): Used to run specific commands + """ + command = "yamllint . --format standard" + run_command(context, command) + + +@task +def flake8(context): + """Check for PEP8 compliance and other style issues.""" + command = "flake8 ." + run_command(context, command) @task -def tests(context, nautobot_ver=NAUTOBOT_VER, python_ver=PYTHON_VER): +def tests(context): """Run all tests for this plugin. 
Args: context (obj): Used to run specific commands - nautobot_ver (str): Nautobot version to use to build the container - python_ver (str): Will use the Python version docker image to build from """ - DEFAULT_ENV[NAUTOBOT_VER] = nautobot_ver - DEFAULT_ENV[PYTHON_VER] = python_ver - + if not is_truthy(context.nautobot_golden_config.local): + print("Starting Docker Containers...") + start(context) # Sorted loosely from fastest to slowest print("Running black...") - black(context, nautobot_ver=nautobot_ver, python_ver=python_ver) + black(context) print("Running bandit...") - bandit(context, nautobot_ver=nautobot_ver, python_ver=python_ver) + bandit(context) print("Running pydocstyle...") - pydocstyle(context, nautobot_ver=nautobot_ver, python_ver=python_ver) + pydocstyle(context) + print("Running yamllint...") + yamllint(context) + print("Running flake8...") + flake8(context) print("Running pylint...") - pylint(context, nautobot_ver=nautobot_ver, python_ver=python_ver) + pylint(context) print("Running unit tests...") - unittest(context, nautobot_ver=nautobot_ver, python_ver=python_ver) + unittest(context) print("All tests have passed!")