From 4aff26af0c743317519f0fb098ed91963f5f5c49 Mon Sep 17 00:00:00 2001 From: Chandrika Sivaramakrishnan Date: Wed, 29 Mar 2023 18:27:29 -0700 Subject: [PATCH] Initial working version of postgresql historian --- .gitignore | 6 + LICENSE | 216 +++++++++ README.md | 229 ++++++++++ config | 12 + dev/pyproject.toml | 55 +++ docs/Makefile | 22 + docs/source/conf.py | 60 +++ docs/source/index.rst | 264 +++++++++++ pyproject.toml | 53 +++ src/historian/postgresql/__init__.py | 34 ++ src/historian/postgresql/postgresqlfuncts.py | 438 +++++++++++++++++++ tests/conftest.py | 44 ++ 12 files changed, 1433 insertions(+) create mode 100644 .gitignore create mode 100644 LICENSE create mode 100644 README.md create mode 100644 config create mode 100644 dev/pyproject.toml create mode 100644 docs/Makefile create mode 100644 docs/source/conf.py create mode 100644 docs/source/index.rst create mode 100644 pyproject.toml create mode 100644 src/historian/postgresql/__init__.py create mode 100644 src/historian/postgresql/postgresqlfuncts.py create mode 100644 tests/conftest.py diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..b82b999 --- /dev/null +++ b/.gitignore @@ -0,0 +1,6 @@ +/dev/poetry.lock +/.venv/ +/poetry.lock +/docs/build/ +/dev/.venv/ +/.idea/ \ No newline at end of file diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..9464da7 --- /dev/null +++ b/LICENSE @@ -0,0 +1,216 @@ +Copyright 2022 Battelle Memorial Institute + +Licensed under the Apache License, Version 2.0 (the "License"); you may not +use this file except in compliance with the License. You may obtain a copy +of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +License for the specific language governing permissions and limitations +under the License. + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/README.md b/README.md new file mode 100644 index 0000000..f1f9136 --- /dev/null +++ b/README.md @@ -0,0 +1,229 @@ +[![Run Pytests](https://github.com/eclipse-volttron/volttron-postgresql-historian/actions/workflows/run-test.yml/badge.svg)](https://github.com/eclipse-volttron/volttron-postgresql-historian/actions/workflows/run-test.yml) +[![pypi version](https://img.shields.io/pypi/v/volttron-postgresql-historian.svg)](https://pypi.org/project/volttron-postgresql-historian/) + +VOLTTRON historian agent that stores data into a PostgreSQL database + +### Installation notes + +1. The PostgreSQL database driver supports recent PostgreSQL versions. + It was tested on 10.x, but should work with 9.x and 11.x. +2. The user must have SELECT, INSERT, and UPDATE privileges on + historian tables. +3. The tables in the database are created as part of the execution of + the SQLHistorianAgent, but this will fail if the database user does + not have CREATE privileges. +4. 
Care must be exercised when using multiple historians with the same + database. This configuration may be used only if there is no overlap + in the topics handled by each instance. Otherwise, duplicate topic + IDs may be created, producing strange results. + + +### Configuration + +PostgreSQL historian supports two configuration parameters + + - connection - This is a mandatory parameter with type indicating the type of sql historian (i.e. postgresql) and + params containing the database access details + + - tables_def - Optional parameter to provide custom table names for topics, data, and metadata. + +The configuration can be in a json or yaml formatted file. The following examples show minimal connection +configurations for a psycopg2-based historian. Other options are available and are +[documented here](https://www.psycopg.org/docs/module.html#psycopg2.connect) **Not all parameters have been tested, +use at your own risk**. + +#### Local PostgreSQL Database + +The following snippet demonstrates how to configure the historian to use a PostgreSQL database on the local system that +is configured to use Unix domain sockets. The user executing volttron must have appropriate privileges. + + +##### Yaml Format: +```yaml + connection: + # type should be postgresql + type: postgresql + params: + # Relative to the agents data directory + dbname: "volttron" + + tables_def: + # prefix for data, topics, and (in version < 4.0.0 metadata tables) + # default is "" + table_prefix: "" + # table name for time series data. default "data" + data_table: data + # table name for list of topics. default "topics" + topics_table: topics + ``` + +##### JSON format: +```json + { + "connection": { + "type": "postgresql", + "params": { "dbname": "volttron" } + } + } +``` + +#### Remote PostgreSQL Database + +The following snippet demonstrates how to configure the historian to use a remote PostgreSQL database. +```json + { + "connection": { + "type": "postgresql", + "params": { + "dbname": "volttron", + "host": "historian.example.com", + "port": 5432, + "user": "volttron", + "password": "secret" } + } + } +``` + +#### TimescaleDB Support + +Both of the above PostgreSQL connection types can make use of TimescaleDB\'s high performance Hypertable backend for +the primary timeseries table. The agent assumes you have completed the TimescaleDB installation and setup the +database by following the instructions here: To use, simply +add \'timescale_dialect: true\' to the connection params in the agent config as below + +```json + { + "connection": { + "type": "postgresql", + "params": { + "dbname": "volttron", + "host": "historian.example.com", + "port": 5432, + "user": "volttron", + "password": "secret" , + "timescale_dialect": true } + } + + } +``` + +## Requirements + + - Python >= 3.8 + - psycopg2 library + +## Installation + +1. Create and activate a virtual environment. + + ```shell + python -m venv env + source env/bin/activate + ``` + +2. Installing volttron-postgresql-historian requires a running volttron instance and the psycopg2 library + + ```shell + pip install volttron + pip install psycopg2-binary + + # Start platform with output going to volttron.log + volttron -vv -l volttron.log & + ``` +3. Setup database + + If this is not a development environment we highly recommend that you create the database and database tables using + a user with appropriate permissions. This way the database user used by the historian need not have CREATE privileges + Postgres historian expects two tables + a. 
A topics tables that stores the list of unique topics and its metadata. The default name is "topics". If you use + a different name please specify it as part of "tables_def" configuration parameter in agent config. See (#Yaml-Format) + b. A data table that stores the timeseries data and refers to the topic table using a topic id. The default name is + "data". If you use a different name please specify it as part of "tables_def" configuration parameter in + agent config. See (#Yaml-Format) + + Below are the sql statements to create database and tables + Create Database + ``` + CREATE DATABASE volttron + ``` + TOPICS tables: + ``` + CREATE TABLE IF NOT EXISTS topics ( + topic_id SERIAL PRIMARY KEY NOT NULL, + topic_name VARCHAR(512) NOT NULL, + metadata TEXT, + UNIQUE (topic_name) + ) + ``` + + DATA table: + ``` + CREATE TABLE IF NOT EXISTS data ( + ts TIMESTAMP NOT NULL, + topic_id INTEGER NOT NULL, + value_string TEXT NOT NULL, + UNIQUE (topic_id, ts) + ) + ``` + Optional timescale hypertable + ``` + SELECT create_hypertable(data, 'ts', if_not_exists => true) + ``` + Create index to speed up data access + If using hypertables: + ``` + CREATE INDEX IF NOT EXISTS idx_data ON data (topic_id, ts) + ``` + If not using hypertables: + ``` + CREATE INDEX IF NOT EXISTS idx_data ON data (ts ASC) + ``` + Provide correct user permissions for database user to be used by historian agent + ``` + CREATE USER with encrypted password + GRANT SELECT, INSERT, UPDATE on database to + ``` + **NOTE** + For development environments, you can create a test database and test user, grant all privileges on that test + database to the test user and let the historian create tables and indexes at startup. We do not recommend this for + production environments + +4. Create an agent configuration file + + Create an agent configuration with appropriate connection parameters as described in [Configurations section](#Configuration) + +5. Install and start the volttron-postgresql-historian. + + ```shell + vctl install volttron-postgresql-historian --agent-config --start + ``` + +6. View the status of the installed agent + + ```shell + vctl status + ``` + +## Development + +Please see the following for contributing guidelines [contributing](https://github.com/eclipse-volttron/volttron-core/blob/develop/CONTRIBUTING.md). + +Please see the following helpful guide about [developing modular VOLTTRON agents](https://github.com/eclipse-volttron/volttron-core/blob/develop/DEVELOPING_ON_MODULAR.md) + +# Disclaimer Notice + +This material was prepared as an account of work sponsored by an agency of the +United States Government. Neither the United States Government nor the United +States Department of Energy, nor Battelle, nor any of their employees, nor any +jurisdiction or organization that has cooperated in the development of these +materials, makes any warranty, express or implied, or assumes any legal +liability or responsibility for the accuracy, completeness, or usefulness or any +information, apparatus, product, software, or process disclosed, or represents +that its use would not infringe privately owned rights. + +Reference herein to any specific commercial product, process, or service by +trade name, trademark, manufacturer, or otherwise does not necessarily +constitute or imply its endorsement, recommendation, or favoring by the United +States Government or any agency thereof, or Battelle Memorial Institute. 
The +views and opinions of authors expressed herein do not necessarily state or +reflect those of the United States Government or any agency thereof. diff --git a/config b/config new file mode 100644 index 0000000..80cf3b0 --- /dev/null +++ b/config @@ -0,0 +1,12 @@ +{ + "connection": { + "type": "postgresql", + "params": { + "dbname": "test_historian", + "host": "127.0.0.1", + "port": 5432, + "user": "historian", + "password": "historian" + } + } +} diff --git a/dev/pyproject.toml b/dev/pyproject.toml new file mode 100644 index 0000000..f045d72 --- /dev/null +++ b/dev/pyproject.toml @@ -0,0 +1,55 @@ +[tool.poetry] +name = "volttron-postgresql-historian" +version = "0.1.0-rc0" +description = "VOLTTRON historian agent that stores data in a PostgreSQL database. It extends the SQLHistorian class." +authors = ["VOLTTRON Team "] +license = "Apache License 2.0" +readme = "../README.md" +repository = "https://github.com/eclipse-volttron/volttron-postgresql-historian" +homepage = "https://github.com/eclipse-volttron/volttron-postgresql-historian" +keywords = [] +packages = [ { include = "historian", from = "../src" } ] + +[tool.poetry.dependencies] +python = ">=3.10,<4.0" +psycopg2-binary = "^2.9.5" + +[tool.poetry.group.dev.dependencies] +# formatting, quality, tests +pytest = "^6.2.5" +mock = "^4.0.3" +pre-commit = "^2.17.0" +yapf = "^0.32.0" +toml = "^0.10.2" +isort = "^5.10.1" +safety = "^1.10.3" +mypy = "^0.942" +coverage = "^6.3.2" +pytest-cov = "^3.0.0" +Sphinx = "^4.5.0" +sphinx-rtd-theme = "^1.0.0" +volttron = {path = "../../volttron-core", develop = true} +volttron-testing = {path = "../../volttron-testing", develop = true} +volttron-lib-base-historian = {path = "../../volttron-lib-base-historian", develop = true} +volttron-lib-sql-historian = {path = "../../volttron-lib-sql-historian", develop = true} + +[tool.yapfignore] +ignore_patterns = [ + ".venv/**", + ".pytest_cache/**", + "dist/**", + "docs/**" +] + +[tool.yapf] +based_on_style = "pep8" +spaces_before_comment = 4 +column_limit = 99 +split_before_logical_operator = true + +[tool.poetry.scripts] +volttron-postgresql-historian = "historian.sql.historian:main" + +[build-system] +requires = ["poetry-core>=1.0.0"] +build-backend = "poetry.core.masonry.api" diff --git a/docs/Makefile b/docs/Makefile new file mode 100644 index 0000000..ca28711 --- /dev/null +++ b/docs/Makefile @@ -0,0 +1,22 @@ +# Minimal makefile for Sphinx documentation +# + +# You can set these variables from the command line, and also +# from the environment for the first two. +SPHINXOPTS ?= +SPHINXBUILD ?= sphinx-build +SOURCEDIR = source +BUILDDIR = build +SPHINXPROJ = VOLTTRON Sqlite Historian + +# Put it first so that "make" without argument is like "make help". +help: + @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + +.PHONY: help Makefile + +# Catch-all target: route all unknown targets to Sphinx using the new +# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 
+%: Makefile + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + diff --git a/docs/source/conf.py b/docs/source/conf.py new file mode 100644 index 0000000..a3fea84 --- /dev/null +++ b/docs/source/conf.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- {{{ +# ===----------------------------------------------------------------------=== +# +# Installable Component of Eclipse VOLTTRON +# +# ===----------------------------------------------------------------------=== +# +# Copyright 2022 Battelle Memorial Institute +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy +# of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. +# +# ===----------------------------------------------------------------------=== +# }}} + +# Configuration file for the Sphinx documentation builder. + +# -- Project information + +project = 'VOLTTRON PostgreSQL Historian' +copyright = '2022, PNNL' +author = 'PNNL' + +release = '0.1.0-rc0' +version = '0.1.0' + +# -- General configuration + +extensions = [ + 'sphinx.ext.duration', + 'sphinx.ext.doctest', + 'sphinx.ext.autodoc', + 'sphinx.ext.autosummary', + 'sphinx.ext.intersphinx', +] + +intersphinx_mapping = { + 'python': ('https://docs.python.org/3/', None), + 'sphinx': ('https://www.sphinx-doc.org/en/master/', None), +} +intersphinx_disabled_domains = ['std'] + +templates_path = ['_templates'] + +# -- Options for HTML output + +html_theme = 'sphinx_rtd_theme' + +# -- Options for EPUB output +# epub_show_urls = 'footnote' + diff --git a/docs/source/index.rst b/docs/source/index.rst new file mode 100644 index 0000000..a0a141c --- /dev/null +++ b/docs/source/index.rst @@ -0,0 +1,264 @@ + +PostgreSQL Historian +==================== + +This is a VOLTTRON historian agent that stores its data in a PostgreSQL database. It depends on +`volttron-lib-sql-historian `_ and extends the class +`SQLHistorian `_ +This historian also supports TimescaleDB\'s high performance Hypertable backend for the primary timeseries table +The PostgreSQL database driver supports recent PostgreSQL versions. It was tested on 10.x, but should work with 9.x +and 11.x. + +User Access Requirements +************************ +1. The user must have SELECT, INSERT, and UPDATE privileges on historian tables. +2. In a development environment, the tables in the database could be created as part of the execution of + the SQLHistorianAgent, if the database user has CREATE privileges. We don't recommend this for production environments + +.. _postgresql-configuration: + +Configuration +************* + +PostgreSQL historian supports two configuration parameters + + - connection - This is a mandatory parameter with type indicating the type of sql historian (i.e. postgresql) and + params containing the database access details + + - tables_def - Optional parameter to provide custom table names for topics, data, and metadata. + +The configuration can be in a json or yaml formatted file. The following examples show minimal connection +configurations for a psycopg2-based historian. 
Other options are available and are +`documented here `_ +**Not all parameters have been tested, use at your own risk**. + +Local PostgreSQL Database +-------------------------- +The following snippet demonstrates how to configure the historian to use +a PostgreSQL database on the local system that is configured to use Unix +domain sockets. The user executing volttron must have appropriate +privileges. + +.. note:: + Care must be exercised when using multiple historians with the same database and table names. This configuration may + be used only if there is no overlap in the topics handled by each instance. Otherwise, duplicate topic + IDs may be created, producing strange results. Different table name can be used for different historian instances by + using the optional tables_def configurations + +.. _postgresql-configuration-yaml-example: + +Yaml Format: +^^^^^^^^^^^^^ + +.. code:: yaml + + connection: + # type should be postgresql + type: postgresql + params: + # Relative to the agents data directory + dbname: "volttron" + + tables_def: + # prefix for data and topics table + # default is "". If configured, table name would be . and . + # useful when multiple historian instances use same database + table_prefix: "" + # table name for time series data. default "data" + data_table: data + # table name for list of topics. default "topics" + topics_table: topics + +JSON format: +^^^^^^^^^^^^^ + +.. code:: json + + { + "connection": { + "type": "postgresql", + "params": { "dbname": "volttron" } + } + } + + +Remote PostgreSQL Database +--------------------------- + +The following snippet demonstrates how to configure the historian to use +a remote PostgreSQL database. + +.. code:: json + + { + "connection": { + "type": "postgresql", + "params": { + "dbname": "volttron", + "host": "historian.example.com", + "port": 5432, + "user": "volttron", + "password": "secret" } + } + } + +TimescaleDB Support +-------------------- + +Both of the above PostgreSQL connection types can make use of +TimescaleDB's high performance Hypertable backend for the primary +timeseries table. The agent assumes you have completed the TimescaleDB +installation and setup the database by following the instructions here: +https://docs.timescale.com/latest/getting-started/setup To use, simply +add 'timescale_dialect: true' to the connection params in the agent +config as below + +.. code:: json + + { + "connection": { + "type": "postgresql", + "params": { + "dbname": "volttron", + "host": "historian.example.com", + "port": 5432, + "user": "volttron", + "password": "secret" , + "timescale_dialect": true } + } + + } + +Optional Configuration +*********************** + +In addition to the above configuration, SQLite Historian can optionally be configured using all the available +configurations exposed by the SQLHistorian and BaseHistorian. Please refer to +:ref:`SQL Historian ` and :ref:`Base Historian Configurations ` +for more details + +Requirements +************ + +- Python >= 3.8 +- psycopg2 library + +Installation +************ + +1. Create and activate a virtual environment. + + .. code:: shell + + python -m venv env + source env/bin/activate + +2. Installing volttron-postgresql-historian requires a running volttron + instance and the psycopg2 library + + .. code:: shell + + pip install volttron + pip install psycopg2-binary + + # Start platform with output going to volttron.log + volttron -vv -l volttron.log & + +3. 
Setup database + + If this is not a development environment we highly recommend that you + create the database and database tables using a user with appropriate + permissions. This way the database user used by the historian need + not have CREATE privileges. + + | Postgres historian expects two tables + + a. A topics tables that stores the list of unique topics and its + metadata. The default name is "topics". If you use a different + name please specify it as part of "tables_def" configuration + parameter in agent config. See (:ref:`example configuration`) + b. A data table that stores the timeseries data and refers to the + topic table using a topic id. The default name is "data". If you + use a different name please specify it as part of "tables_def" + configuration parameter in agent config. See (:ref:`example configuration`) + + Below are the sql statements to create database and tables. + + Create Database: + + :: + + CREATE DATABASE volttron + + TOPICS tables: + + :: + + CREATE TABLE IF NOT EXISTS topics ( + topic_id SERIAL PRIMARY KEY NOT NULL, + topic_name VARCHAR(512) NOT NULL, + metadata TEXT, + UNIQUE (topic_name) + ) + + DATA table: + + :: + + CREATE TABLE IF NOT EXISTS data ( + ts TIMESTAMP NOT NULL, + topic_id INTEGER NOT NULL, + value_string TEXT NOT NULL, + UNIQUE (topic_id, ts) + ) + + Optional timescale hypertable: + + :: + + SELECT create_hypertable(data, 'ts', if_not_exists => true) + + + Create index to speed up data access: + If using hypertables: + + :: + + CREATE INDEX IF NOT EXISTS idx_data ON data (topic_id, ts) + + If not using hypertables: + + :: + + CREATE INDEX IF NOT EXISTS idx_data ON data (ts ASC) + + Provide correct user permissions for database user to be used by + historian agent + + :: + + CREATE USER with encrypted password + GRANT SELECT, INSERT, UPDATE on database to + + .. note:: + For development environments, you can create a test database + and test user, grant all privileges on that test database to the test + user and let the historian create tables and indexes at startup. We + do not recommend this for production environments + +4. Create an agent configuration file + + Create an agent configuration with appropriate connection parameters + as described in :ref:`Configuration section` + +5. Install and start the volttron-postgresql-historian. + + .. code:: shell + + vctl install volttron-postgresql-historian --agent-config --start + +6. View the status of the installed agent + + .. code:: shell + + vctl status diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..7864eb7 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,53 @@ +[tool.poetry] +name = "volttron-postgresql-historian" +version = "0.1.0-rc0" +description = "VOLTTRON historian agent that stores data in a PostgreSQL database. It extends the SQLHistorian class." 
+authors = ["VOLTTRON Team "] +license = "Apache License 2.0" +readme = "README.md" +repository = "https://github.com/eclipse-volttron/volttron-postgresql-historian" +homepage = "https://github.com/eclipse-volttron/volttron-postgresql-historian" +keywords = [] +packages = [ { include = "historian", from = "src" } ] + +[tool.poetry.dependencies] +python = ">=3.10,<4.0" +volttron-lib-sql-historian="^0.2.0rc0" +psycopg2-binary = "^2.9.5" + +[tool.poetry.group.dev.dependencies] +# formatting, quality, tests +pytest = "^6.2.5" +mock = "^4.0.3" +pre-commit = "^2.17.0" +yapf = "^0.32.0" +toml = "^0.10.2" +isort = "^5.10.1" +safety = "^1.10.3" +mypy = "^0.942" +coverage = "^6.3.2" +pytest-cov = "^3.0.0" +Sphinx = "^4.5.0" +sphinx-rtd-theme = "^1.0.0" +volttron-testing = "^0.4.0rc1" + +[tool.yapfignore] +ignore_patterns = [ + ".venv/**", + ".pytest_cache/**", + "dist/**", + "docs/**" +] + +[tool.yapf] +based_on_style = "pep8" +spaces_before_comment = 4 +column_limit = 99 +split_before_logical_operator = true + +[tool.poetry.scripts] +volttron-postgresql-historian = "historian.sql.historian:main" + +[build-system] +requires = ["poetry-core>=1.0.0"] +build-backend = "poetry.core.masonry.api" diff --git a/src/historian/postgresql/__init__.py b/src/historian/postgresql/__init__.py new file mode 100644 index 0000000..5797b4e --- /dev/null +++ b/src/historian/postgresql/__init__.py @@ -0,0 +1,34 @@ +# -*- coding: utf-8 -*- {{{ +# ===----------------------------------------------------------------------=== +# +# Installable Component of Eclipse VOLTTRON +# +# ===----------------------------------------------------------------------=== +# +# Copyright 2022 Battelle Memorial Institute +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy +# of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. +# +# ===----------------------------------------------------------------------=== +# }}} + +""" +VOLTTRON PostgreSQL Historian package. + +None + +""" + +from typing import List + +__all__: List[str] = [] # noqa: WPS410 (the only __variable__ we use) diff --git a/src/historian/postgresql/postgresqlfuncts.py b/src/historian/postgresql/postgresqlfuncts.py new file mode 100644 index 0000000..c4b61c3 --- /dev/null +++ b/src/historian/postgresql/postgresqlfuncts.py @@ -0,0 +1,438 @@ +# -*- coding: utf-8 -*- {{{ +# ===----------------------------------------------------------------------=== +# +# Installable Component of Eclipse VOLTTRON +# +# ===----------------------------------------------------------------------=== +# +# Copyright 2022 Battelle Memorial Institute +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy +# of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the +# License for the specific language governing permissions and limitations +# under the License. +# +# ===----------------------------------------------------------------------=== +# }}} + +import ast +import contextlib +import logging +import copy + +import pytz +import psycopg2 +from psycopg2 import InterfaceError, ProgrammingError, errorcodes +from psycopg2.sql import Identifier, Literal, SQL +from psycopg2.extras import execute_values + +from volttron.utils import (jsonapi, setup_logging) + +from historian.sql import DbDriver + +setup_logging() +_log = logging.getLogger(__name__) + + +""" +Implementation of PostgreSQL database operation for +:py:class:`historian.sql.SQLHistorian` +For method details please refer to base class +:py:class:`historian.sql.DbDriver` +""" + + +class PostgreSqlFuncts(DbDriver): + def __init__(self, connect_params, table_names): + if table_names: + self.data_table = table_names['data_table'] + self.topics_table = table_names['topics_table'] + self.meta_table = table_names['meta_table'] + self.agg_topics_table = table_names.get('agg_topics_table') + self.agg_meta_table = table_names.get('agg_meta_table') + connect_params = copy.deepcopy(connect_params) + if "timescale_dialect" in connect_params: + self.timescale_dialect = connect_params.get("timescale_dialect", False) + del connect_params["timescale_dialect"] + else: + self.timescale_dialect = False + + def connect(): + connection = psycopg2.connect(**connect_params) + connection.autocommit = True + with connection.cursor() as cursor: + cursor.execute('SET TIME ZONE UTC') + return connection + connect.__name__ = 'psycopg2' + super(PostgreSqlFuncts, self).__init__(connect) + + @contextlib.contextmanager + def bulk_insert(self): + """ + This function implements the bulk insert requirements for postgresql historian by overriding the + DbDriver::bulk_insert() in basedb.py and yields necessary data insertion method needed for bulk inserts + + :yields: insert method + """ + records = [] + + def insert_data(ts, topic_id, data): + """ + Inserts data records to the list + + :param ts: time stamp + :type string + :param topic_id: topic ID + :type string + :param data: data value + :type any valid JSON serializable value + :return: Returns True after insert + :rtype: bool + """ + value = jsonapi.dumps(data) + records.append((ts, topic_id, value)) + return True + + yield insert_data + + if records: + query = SQL('INSERT INTO {} VALUES %s ' + 'ON CONFLICT (ts, topic_id) DO UPDATE ' + 'SET value_string = EXCLUDED.value_string').format( + Identifier(self.data_table)) + execute_values(self.cursor(), query, records) + + @contextlib.contextmanager + def bulk_insert_meta(self): + """ + This function implements the bulk insert requirements for Redshift historian by overriding the + DbDriver::bulk_insert_meta() in basedb.py and yields necessary data insertion method needed for bulk inserts + + :yields: insert method + """ + records = [] + + def insert_meta(topic_id, metadata): + """ + Inserts metadata records to the list + + :param topic_id: topic ID + :type string + :param metadata: metadata dictionary + :type dict + :return: Returns True after insert + :rtype: bool + """ + value = jsonapi.dumps(metadata) + records.append((topic_id, value)) + return True + + yield insert_meta + + if records: + _log.debug(f"###DEBUG bulk inserting meta of len {len(records)}") + _log.debug(f"###DEBUG bulk inserting meta of len {records}") + + query = SQL('INSERT INTO {} VALUES %s' + 'ON CONFLICT (topic_id) DO UPDATE ' + 'SET metadata = 
EXCLUDED.metadata').format( + Identifier(self.meta_table)) + execute_values(self.cursor(), query, records) + + def rollback(self): + try: + return super(PostgreSqlFuncts, self).rollback() + except InterfaceError: + return False + + def setup_historian_tables(self): + rows = self.select(f"""SELECT table_name FROM information_schema.tables + WHERE table_catalog = 'test_historian' and table_schema = 'public' + AND table_name = '{self.data_table}'""") + if rows: + _log.debug("Found table {}. Historian table exists".format( + self.data_table)) + rows = self.select(f"""SELECT column_name FROM information_schema.columns + WHERE table_name = '{self.topics_table}' and column_name = 'metadata'""") + if rows: + # metadata is in topics table + self.meta_table = self.topics_table + else: + self.execute_stmt(SQL( + 'CREATE TABLE IF NOT EXISTS {} (' + 'ts TIMESTAMP NOT NULL, ' + 'topic_id INTEGER NOT NULL, ' + 'value_string TEXT NOT NULL, ' + 'UNIQUE (topic_id, ts)' + ')').format(Identifier(self.data_table))) + if self.timescale_dialect: + _log.debug("trying to create hypertable") + self.execute_stmt(SQL( + "SELECT create_hypertable({}, 'ts', if_not_exists => true)").format( + Literal(self.data_table))) + self.execute_stmt(SQL( + 'CREATE INDEX IF NOT EXISTS {} ON {} (topic_id, ts)').format( + Identifier(f"idx_{self.data_table}"), + Identifier(self.data_table)) + ) + else: + self.execute_stmt(SQL( + 'CREATE INDEX IF NOT EXISTS {} ON {} (ts ASC)').format( + Identifier('idx_' + self.data_table), + Identifier(self.data_table))) + + self.execute_stmt(SQL( + 'CREATE TABLE IF NOT EXISTS {} (' + 'topic_id SERIAL PRIMARY KEY NOT NULL, ' + 'topic_name VARCHAR(512) NOT NULL, ' + 'metadata TEXT, ' + 'UNIQUE (topic_name)' + ')').format(Identifier(self.topics_table))) + # metadata is in topics table + self.meta_table = self.topics_table + self.commit() + + def setup_aggregate_historian_tables(self): + + self.execute_stmt(SQL( + 'CREATE TABLE IF NOT EXISTS {} (' + 'agg_topic_id SERIAL PRIMARY KEY NOT NULL, ' + 'agg_topic_name VARCHAR(512) NOT NULL, ' + 'agg_type VARCHAR(20) NOT NULL, ' + 'agg_time_period VARCHAR(20) NOT NULL, ' + 'UNIQUE (agg_topic_name, agg_type, agg_time_period)' + ')').format(Identifier(self.agg_topics_table))) + self.execute_stmt(SQL( + 'CREATE TABLE IF NOT EXISTS {} (' + 'agg_topic_id INTEGER PRIMARY KEY NOT NULL, ' + 'metadata TEXT NOT NULL' + ')').format(Identifier(self.agg_meta_table))) + self.commit() + + def query(self, topic_ids, id_name_map, start=None, end=None, skip=0, + agg_type=None, agg_period=None, count=None, + order='FIRST_TO_LAST'): + if agg_type and agg_period: + table_name = agg_type + '_' + agg_period + value_col = 'agg_value' + else: + table_name = self.data_table + value_col = 'value_string' + + topic_id = Literal(0) + query = [SQL( + '''SELECT to_char(ts, 'YYYY-MM-DD"T"HH24:MI:SS.USOF:00'), ''' + value_col + ' \n' + 'FROM {}\n' + 'WHERE topic_id = {}' + ).format(Identifier(table_name), topic_id)] + if start and start.tzinfo != pytz.UTC: + start = start.astimezone(pytz.UTC) + if end and end.tzinfo != pytz.UTC: + end = end.astimezone(pytz.UTC) + if start and start == end: + query.append(SQL(' AND ts = {}').format(Literal(start))) + else: + if start: + query.append(SQL(' AND ts >= {}').format(Literal(start))) + if end: + query.append(SQL(' AND ts < {}').format(Literal(end))) + query.append(SQL('ORDER BY ts {}'.format( + 'DESC' if order == 'LAST_TO_FIRST' else 'ASC'))) + if skip or count: + query.append(SQL('LIMIT {} OFFSET {}').format( + Literal(None if not count or count < 0 
else count), + Literal(None if not skip or skip < 0 else skip))) + query = SQL('\n').join(query) + values = {} + if value_col == 'agg_value': + for topic_id._wrapped in topic_ids: + name = id_name_map[topic_id.wrapped] + with self.select(query, fetch_all=False) as cursor: + values[name] = [(ts, value) + for ts, value in cursor] + else: + for topic_id._wrapped in topic_ids: + name = id_name_map[topic_id.wrapped] + with self.select(query, fetch_all=False) as cursor: + values[name] = [(ts, jsonapi.loads(value)) + for ts, value in cursor] + return values + + def insert_topic(self, topic, **kwargs): + meta = kwargs.get('metadata') + with self.cursor() as cursor: + if self.meta_table == self.topics_table and topic and meta: + cursor.execute(self.insert_topic_and_meta_query(), (topic, jsonapi.dumps(meta))) + else: + cursor.execute(self.insert_topic_query(), {'topic': topic}) + return cursor.fetchone()[0] + + def insert_agg_topic(self, topic, agg_type, agg_time_period): + with self.cursor() as cursor: + cursor.execute(self.insert_agg_topic_stmt(), + (topic, agg_type, agg_time_period)) + return cursor.fetchone()[0] + + def insert_meta_query(self): + return SQL( + 'INSERT INTO {} VALUES (%s, %s) ' + 'ON CONFLICT (topic_id) DO UPDATE ' + 'SET metadata = EXCLUDED.metadata').format( + Identifier(self.meta_table)) + + def insert_data_query(self): + return SQL( + 'INSERT INTO {} VALUES (%s, %s, %s) ' + 'ON CONFLICT (ts, topic_id) DO UPDATE ' + 'SET value_string = EXCLUDED.value_string').format( + Identifier(self.data_table)) + + def insert_topic_query(self): + return SQL( + 'INSERT INTO {} (topic_name) VALUES (%(topic)s) ' + 'RETURNING topic_id').format(Identifier(self.topics_table)) + + def insert_topic_and_meta_query(self): + return SQL( + 'INSERT INTO {} (topic_name, metadata) VALUES (%s, %s) ' + 'RETURNING topic_id').format(Identifier(self.topics_table)) + + def update_topic_query(self): + return SQL( + 'UPDATE {} SET topic_name = %s ' + 'WHERE topic_id = %s').format(Identifier(self.topics_table)) + + def update_topic_and_meta_query(self): + return SQL( + 'UPDATE {} SET topic_name = %s , metadata= %s ' + 'WHERE topic_id = %s').format(Identifier(self.topics_table)) + + def update_meta_query(self): + return SQL( + 'UPDATE {} SET metadata= %s ' + 'WHERE topic_id = %s').format(Identifier(self.meta_table)) + + def get_aggregation_list(self): + return ['AVG', 'MIN', 'MAX', 'COUNT', 'SUM', 'BIT_AND', 'BIT_OR', + 'BOOL_AND', 'BOOL_OR', 'MEDIAN', 'STDDEV', 'STDDEV_POP', + 'STDDEV_SAMP', 'VAR_POP', 'VAR_SAMP', 'VARIANCE'] + + def insert_agg_topic_stmt(self): + return SQL( + 'INSERT INTO {} (agg_topic_name, agg_type, agg_time_period) ' + 'VALUES (%s, %s, %s)' + 'RETURNING agg_topic_id').format(Identifier(self.agg_topics_table)) + + def update_agg_topic_stmt(self): + return SQL( + 'UPDATE {} SET agg_topic_name = %s ' + 'WHERE agg_topic_id = %s').format( + Identifier(self.agg_topics_table)) + + def replace_agg_meta_stmt(self): + return SQL( + 'INSERT INTO {} VALUES (%s, %s) ' + 'ON CONFLICT (agg_topic_id) DO UPDATE ' + 'SET metadata = EXCLUDED.metadata').format( + Identifier(self.agg_meta_table)) + + def get_topic_map(self): + query = SQL( + 'SELECT topic_id, topic_name, LOWER(topic_name) ' + 'FROM {}').format(Identifier(self.topics_table)) + rows = self.select(query) + id_map = {key: tid for tid, _, key in rows} + name_map = {key: name for _, name, key in rows} + return id_map, name_map + + def get_topic_meta_map(self): + query = SQL( + 'SELECT topic_id, metadata ' + 'FROM 
{}').format(Identifier(self.meta_table)) + rows = self.select(query) + meta_map = {tid: jsonapi.loads(meta) if meta else None for tid, meta in rows} + return meta_map + + def get_agg_topics(self): + query = SQL( + 'SELECT agg_topic_name, agg_type, agg_time_period, metadata ' + 'FROM {} as t, {} as m ' + 'WHERE t.agg_topic_id = m.agg_topic_id').format( + Identifier(self.agg_topics_table), Identifier(self.agg_meta_table)) + try: + rows = self.select(query) + except ProgrammingError as exc: + if exc.pgcode == errorcodes.UNDEFINED_TABLE: + return [] + raise + return [(name, type_, tp, ast.literal_eval(meta)['configured_topics']) + for name, type_, tp, meta in rows] + + def get_agg_topic_map(self): + query = SQL( + 'SELECT agg_topic_id, LOWER(agg_topic_name), ' + 'agg_type, agg_time_period ' + 'FROM {}').format(Identifier(self.agg_topics_table)) + try: + rows = self.select(query) + except ProgrammingError as exc: + if exc.pgcode == errorcodes.UNDEFINED_TABLE: + return {} + raise + return {(name, type_, tp): id_ for id_, name, type_, tp in rows} + + def query_topics_by_pattern(self, topic_pattern): + query = SQL( + 'SELECT topic_name, topic_id ' + 'FROM {} ' + 'WHERE topic_name ~* %s').format(Identifier(self.topics_table)) + return dict(self.select(query, (topic_pattern,))) + + def create_aggregate_store(self, agg_type, agg_time_period): + table_name = agg_type + '_' + agg_time_period + self.execute_stmt(SQL( + 'CREATE TABLE IF NOT EXISTS {} (' + 'ts TIMESTAMP NOT NULL, ' + 'topic_id INTEGER NOT NULL, ' + 'agg_value DOUBLE PRECISION NOT NULL, ' + 'topics_list TEXT, ' + 'UNIQUE (ts, topic_id)' + ')').format(Identifier(table_name))) + self.execute_stmt(SQL( + 'CREATE INDEX IF NOT EXISTS {} ON {} (ts ASC)').format( + Identifier('idx_' + table_name), + Identifier(table_name))) + self.commit() + + def insert_aggregate_stmt(self, table_name): + return SQL( + 'INSERT INTO {} VALUES (%s, %s, %s, %s) ' + 'ON CONFLICT (ts, topic_id) DO UPDATE ' + 'SET agg_value = EXCLUDED.agg_value, ' + 'topics_list = EXCLUDED.topics_list').format( + Identifier(table_name)) + + def collect_aggregate(self, topic_ids, agg_type, start=None, end=None): + if (isinstance(agg_type, str) and + agg_type.upper() not in self.get_aggregation_list()): + raise ValueError('Invalid aggregation type {}'.format(agg_type)) + query = [ + SQL('SELECT {}(CAST(value_string as float)), COUNT(value_string)'.format( + agg_type.upper())), + SQL('FROM {}').format(Identifier(self.data_table)), + SQL('WHERE topic_id in ({})').format( + SQL(', ').join(Literal(tid) for tid in topic_ids)), + ] + if start is not None: + query.append(SQL(' AND ts >= {}').format(Literal(start))) + if end is not None: + query.append(SQL(' AND ts < {}').format(Literal(end))) + rows = self.select(SQL('\n').join(query)) + return rows[0] if rows else (0, 0) diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 0000000..7e07da1 --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- {{{ +# ===----------------------------------------------------------------------=== +# +# Installable Component of Eclipse VOLTTRON +# +# ===----------------------------------------------------------------------=== +# +# Copyright 2022 Battelle Memorial Institute +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. 
You may obtain a copy
+# of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+#
+# ===----------------------------------------------------------------------===
+# }}}
+
+import os
+from pathlib import Path
+import shutil
+import sys
+import tempfile
+
+import pytest
+from volttrontesting.fixtures.volttron_platform_fixtures import volttron_instance
+
+
+# the following assumes that conftest.py is in the tests directory.
+volttron_src_path = Path(__file__).resolve().parent.parent.joinpath("src")
+
+assert volttron_src_path.exists()
+
+print(sys.path)
+if str(volttron_src_path) not in sys.path:
+    print(f"Adding source path {volttron_src_path}")
+    sys.path.insert(0, str(volttron_src_path))
+
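
For reviewers trying out this patch, below is a minimal, hypothetical pytest sketch showing how the new `PostgreSqlFuncts` driver added in `src/historian/postgresql/postgresqlfuncts.py` could be exercised end to end. It is not part of the patch: the file name `tests/test_postgresqlfuncts.py`, the fixture, and the assumption that the `test_historian` database and `historian` user from the sample `config` file already exist are all illustrative. The constructor, `setup_historian_tables()`, `insert_topic()`, `bulk_insert()`, and `query()` calls are the ones defined in the driver itself.

```python
# tests/test_postgresqlfuncts.py -- illustrative sketch only (not part of this patch).
# Assumes the local test_historian database and historian user from the sample
# `config` file exist, and that this package and psycopg2-binary are installed.
from datetime import datetime, timezone
from uuid import uuid4

import pytest

from historian.postgresql.postgresqlfuncts import PostgreSqlFuncts

# Connection details assumed to match the sample `config` file in this repo.
CONNECT_PARAMS = {
    "dbname": "test_historian",
    "host": "127.0.0.1",
    "port": 5432,
    "user": "historian",
    "password": "historian",
}
# Default table names; metadata lives in the topics table in this schema.
TABLE_NAMES = {
    "data_table": "data",
    "topics_table": "topics",
    "meta_table": "topics",
}


@pytest.fixture()
def functs():
    # The driver opens an autocommit connection (SET TIME ZONE UTC) and, here,
    # creates the historian tables if they do not already exist.
    db = PostgreSqlFuncts(CONNECT_PARAMS, TABLE_NAMES)
    db.setup_historian_tables()
    return db


def test_insert_and_query_roundtrip(functs):
    # Use a unique topic name so the sketch can be re-run against the same database.
    topic = f"devices/campus/building/temp_{uuid4().hex[:8]}"
    topic_id = functs.insert_topic(topic, metadata={"units": "F"})

    # bulk_insert() yields an insert function; the batched rows are written
    # with execute_values when the context manager exits.
    ts = datetime(2023, 3, 29, 18, 0, tzinfo=timezone.utc)
    with functs.bulk_insert() as insert:
        assert insert(ts, topic_id, 72.5)

    # query() returns {topic_name: [(timestamp_string, value), ...]}.
    results = functs.query([topic_id], {topic_id: topic})
    assert results[topic][0][1] == 72.5
```

Because the driver's `connect()` helper enables autocommit and pins the session time zone to UTC, the sketch needs no explicit commit after the bulk insert.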