diff --git a/.coveragerc b/.coveragerc new file mode 100644 index 0000000..fda03bc --- /dev/null +++ b/.coveragerc @@ -0,0 +1,30 @@ +# .coveragerc to control coverage.py +[run] +branch = True +omit = src/core/tests/*, **/__init__.py, src/samples/** + +[report] +# Regexes for lines to exclude from consideration +exclude_lines = + # Have to re-enable the standard pragma + pragma: no cover + + # Don't complain about missing debug-only code: + def __repr__ + if self\.debug + + # Don't complain if tests don't hit defensive assertion code: + raise AssertionError + raise NotImplementedError + + # Don't complain if non-runnable code isn't run: + if 0: + if __name__ == .__main__.: + +ignore_errors = True +fail_under = 85 +show_missing = True + +[html] +title = Longitude Core Coverage +directory = coverage_html_report diff --git a/.env.sample b/.env.sample new file mode 100644 index 0000000..41a60f7 --- /dev/null +++ b/.env.sample @@ -0,0 +1,14 @@ +# Remote db +CARTO_API_KEY= +CARTO_USER= +CARTO_TABLE= + +# Local db +POSTGRES_HOST=localhost +POSTGRES_PORT=5432 +POSTGRES_DB=longitude +POSTGRES_USER=longitude +POSTGRES_PASS=longitude + +# Cache +REDIS_PASSWORD=longitude \ No newline at end of file diff --git a/.gitignore b/.gitignore index 24482e2..d34324f 100644 --- a/.gitignore +++ b/.gitignore @@ -114,3 +114,11 @@ dmypy.json # Pyre type checker .pyre/ +# VScode IDE +.vscode + +# PyCharm IDE +.idea + +# Coverage report +coverage_html_report diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..67ad195 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,25 @@ +FROM python:3.6.6-slim + +ENV PYTHONUNBUFFERED=1 + +WORKDIR /usr/src/app +ENV PATH="$PATH:/usr/src/app" + + +# Install anything missing in the slim image, install dependencies +# Remove anything only needed for building +# This is run as one line so docker caches it as a single layer. + +COPY pyproject.toml . 
+ +RUN set -x \ + && apt-get update \ + && apt-get install -y --no-install-recommends git gcc curl \ + && curl -sSL https://raw.githubusercontent.com/sdispater/poetry/master/get-poetry.py | python \ + && $HOME/.poetry/bin/poetry install \ + && apt-get remove -y --purge git gcc curl \ + && apt-get autoremove -y \ + && apt-get clean -y \ + && rm -rf /var/lib/apt/lists/* + +COPY . . diff --git a/Jenkinsfile b/Jenkinsfile new file mode 100644 index 0000000..d95bcc3 --- /dev/null +++ b/Jenkinsfile @@ -0,0 +1,127 @@ +#!/usr/bin/env groovy + +// Global Environment variables +FAILURE_EMAIL = "build@geographica.gs" +DESIRED_REPOSITORY = "https://github.com/GeographicaGS/Longitude.git" +PUBLISH_BRANCH = "publish" +REPO_NAME = "longitude" + +pipeline{ + agent { node { + label 'master' + } } + + options { + ansiColor('xterm') + } + + stages { + stage('Preparing for build') { + agent { node { + label 'master' + } } + steps { + prepareBuild() + } + } + stage ('Building') { + agent { node { + label 'docker' + } } + steps { + sh "docker build --pull=true -t geographica/${REPO_NAME}:${git_commit} ." 
+ } + } + stage('Linter') + { + agent { node { + label 'docker' + } } + steps { + sh "docker run --rm geographica/${REPO_NAME}:${git_commit} /root/.poetry/bin/poetry run pylint --ignore=samples -E longitude" + } + } + stage('Testing') + { + agent { node { + label 'docker' + } } + steps { + sh "docker run --rm geographica/${REPO_NAME}:${git_commit} /root/.poetry/bin/poetry run pytest --cov=longitude.core longitude/core/tests/" + } + } + stage ('Publish') { + agent { node { + label 'docker' + } } + when { anyOf { + branch "${PUBLISH_BRANCH}" + } } + steps{ + // TODO: this must be "publish" but we keep "build" while testing the Jenkins pipeline + sh "docker run --rm geographica/${REPO_NAME}:${git_commit} /root/.poetry/bin/poetry build" + } + } + // TODO: Stage to check that module can be imported + } + post { + always { + deleteDir() /* clean up our workspace */ + } + unstable { + notifyStatus(currentBuild.currentResult) + } + failure { + notifyStatus(currentBuild.currentResult) + } + } +} + +def prepareBuild() { + script { + checkout scm + + sh "git rev-parse --short HEAD > .git/git_commit" + sh "git --no-pager show -s --format='%ae' HEAD > .git/git_committer_email" + + workspace = pwd() + branch_name = "${ env.BRANCH_NAME }".replaceAll("/", "_") + git_commit = readFile(".git/git_commit").replaceAll("\n", "").replaceAll("\r", "") + //git_commit = sh(returnStdout: true, script: "git describe").trim() + build_name = "${git_commit}" + job_name = "${ env.JOB_NAME }".replaceAll("%2F", "/") + committer_email = readFile(".git/git_committer_email").replaceAll("\n", "").replaceAll("\r", "") + GIT_URL = sh(returnStdout: true, script: "git config --get remote.origin.url").trim() + if ( GIT_URL != DESIRED_REPOSITORY ) { + error("This jenkinsfile is configured for '${ DESIRED_REPOSITORY }' but it was executed from '${ GIT_URL }'.") + } + } +} + +def notifyStatus(buildStatus) { + def status + def send_to + + try { + switch (branch_name) { + case 'master': + send_to = "${ 
committer_email }, ${ FAILURE_EMAIL }" + break + default: + send_to = "${ committer_email }" + break + } + } catch(Exception ex) { + send_to = "${ FAILURE_EMAIL }" + } + + echo "Sending error email to: ${ send_to }" + try { + mail to: "${ send_to }", + from: "Jenkins Geographica ", + subject: "[${ buildStatus }] ${currentBuild.fullDisplayName}", + body: "Something is wrong in '${currentBuild.fullDisplayName}'. \n\nSee ${env.BUILD_URL} for more details." + } catch(Exception ex) { + echo "Something was wrong sending error email :(" + } +} diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..7d63f15 --- /dev/null +++ b/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2017 Geografía Aplicada S.L + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/README.md b/README.md new file mode 100644 index 0000000..b365a78 --- /dev/null +++ b/README.md @@ -0,0 +1,121 @@ +# Longitude + +A **new** bunch of middleware functions to build applications on top of CARTO. 
+ +## Roadmap + +- [ ] Database model + - [x] CARTO data source + - [x] Basic parametrized queries (i.e. templated queries) + - [x] Protected parametrized queries (i.e. avoiding injection) + - [ ] Bind/dynamic parameters in queries (server-side render) + - [x] Postgres data source + - [x] psycopg2 + - [x] SQLAlchemy + - [x] Cache + - [x] Base cache + - [x] Put + - [x] Get + - [x] Key generation + - [x] Flush + - [x] Tests + - [x] Ram Cache + - [x] Tests + - [x] Redis Cache + - [x] Tests + - [x] Documentation + - [x] Sample scripts + - [x] Unit tests + - [x] Sample scripts + +- [x] Config + +- [x] CI PyPi versioning + +- [ ] Data manipulation + - [ ] Carto + - [ ] DataFrame read/write + - [ ] COPY + - [ ] Postgres + - [ ] DataFrame read/write + - [ ] COPY + +- [ ] Validations + - [ ] Marshmallow + - [ ] Wrapper (?) + - [ ] Documentation + +- [ ] Swagger + - [ ] Decorators + - [ ] Flasgger (?) + - [ ] OAuth integration + - [ ] Postman integration + - [ ] Documentation + +- [ ] SQL Alchemy + - [ ] Model definition + - [ ] Jenkins integration + - [ ] Documentation + +- [ ] OAuth + - [ ] Role mapping + - [ ] Token storage + - [ ] Documentation + +## As final user... + +How to use: +```bash +pip install longitude +``` + +Or: +```bash +pipenv install longitude +``` + +Or: +```bash +poetry add longitude +``` + +Or install from GitHub: +```bash +pip install -e git+https://github.com/GeographicaGS/Longitude#egg=longitude +``` + +## As developer... + +### First time + +1. Install ```poetry``` using the [recommended process](https://github.com/sdispater/poetry#installation) + 1. poetry is installed globally as a tool + 1. It works along with virtualenvironments +1. Create a virtual environment for Python 3.x (check the current development version in ```pyproject.toml```) + 1. You can create it wherever you want but do not put it inside the project + 1. A nice place is ```$HOME/virtualenvs/longitude``` +1. Clone the ```longitude``` repo +1. `cd` to the repo and: + 1. 
Activate the virtual environment: `. ~/virtualenvs/longitude/bin/activate` + 1. Run `poetry install` +1. Configure your IDE to use the virtual environment + +### Daily + +1. Remember to activate the virtual environment + +### Why Poetry? + +Because it handles development dependencies and packaging with a single file (```pyproject.toml```), which is [already standard](https://flit.readthedocs.io/en/latest/pyproject_toml.html). + +## Sample scripts + +These are intended to be used with real databases (i.e. those in your profile) to check features of the library. They must be run from the virtual environment. + +## Testing and coverage + +The [```pytest-cov```](https://pytest-cov.readthedocs.io/en/latest/) plugin is being used. Coverage configuration is at ```.coveragerc``` (including output folder). + +You can run something like: ```pytest --cov-report=html --cov=core core``` and the results will go in the defined html folder. + +There is a bash script called ```generate_core_coverage.sh``` that runs the coverage analysis and shows the report in your browser. 
diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000..2b21bec --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,28 @@ +version: "3" +services: + python: + image: python:3.6.3-onbuild + command: bash + volumes: + - .:/usr/longitude/app + + cache: + image: redis:alpine + ports: + - "6379:6379" + command: + - redis-server + - --requirepass longitude + - --maxmemory 256mb + - --maxmemory-policy allkeys-lru + restart: unless-stopped + + postgres: + image: kartoza/postgis + ports: + - "5432:5432" + environment: + POSTGRES_USER: longitude + POSTGRES_PASS: longitude + volumes: + - ./data/:/var/lib/postgresql/data/pgdata diff --git a/generate_core_coverage.sh b/generate_core_coverage.sh new file mode 100755 index 0000000..fa2f0ce --- /dev/null +++ b/generate_core_coverage.sh @@ -0,0 +1,3 @@ +#!/usr/bin/env bash +pytest --cov-report=html --cov=longitude.core longitude/core/tests/ +sensible-browser coverage_html_report/index.html diff --git a/longitude/__init__.py b/longitude/__init__.py new file mode 100644 index 0000000..bb67a43 --- /dev/null +++ b/longitude/__init__.py @@ -0,0 +1 @@ +from .core import * diff --git a/longitude/core/__init__.py b/longitude/core/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/longitude/core/caches/__init__.py b/longitude/core/caches/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/longitude/core/caches/base.py b/longitude/core/caches/base.py new file mode 100644 index 0000000..c2f4c1d --- /dev/null +++ b/longitude/core/caches/base.py @@ -0,0 +1,86 @@ +import hashlib +import logging +import pickle + +from longitude.core.common.query_response import LongitudeQueryResponse +from ..common.config import LongitudeConfigurable + + +class LongitudeCache(LongitudeConfigurable): + _default_config = {} + + def __init__(self, config=None): + super().__init__(config=config) + self.logger = logging.getLogger(self.__class__.__module__) + + @staticmethod + def 
generate_key(query_template, params): + """ + This is the default key generation algorithm, based in a digest from the sha256 hash of the query and parameters + + Override this method to provide your own key generation in case you need a specific way to store your cache. + + :param query_template: Query template (including placeholders) as it should be asked to the database + :param params: Dictionary of values to be replaced in the placeholders in a safe manner + :return: A (most likely) unique hash, generated from the query text + """ + query_payload = str(query_template) + str(params) + return hashlib.sha256(query_payload.encode('utf-8')).hexdigest() + + def setup(self): + raise NotImplementedError + + @property + def is_ready(self): + raise NotImplementedError + + def get(self, query_template, query_params=None): + if query_params is None: + query_params = {} + payload = self.execute_get(self.generate_key(query_template, query_params)) + return self.deserialize_payload(payload) + + def put(self, query_template, payload, query_params=None, expiration_time_s=None): + if query_params is None: + query_params = {} + if not isinstance(payload, LongitudeQueryResponse): + raise TypeError('Payloads must be instances of LongitudeQueryResponse!') + return self.execute_put(self.generate_key(query_template, query_params), + self.serialize_payload(payload), + expiration_time_s=expiration_time_s) + + def execute_get(self, key): + """ + Custom get action over the cache. + + :return: Query response as it was saved if hit. None if miss. + """ + raise NotImplementedError + + def execute_put(self, key, payload, expiration_time_s=None): + """ + Custom put action over the cache. + + :return: True if key was overwritten. False if key was new in the cache. 
+ """ + raise NotImplementedError + + def flush(self): + """ + Custom action to make the cache empty + + :return: + """ + raise NotImplementedError + + @staticmethod + def serialize_payload(payload): + if payload: + return pickle.dumps(payload) + return None + + @staticmethod + def deserialize_payload(payload): + if payload: + return pickle.loads(payload) + return None diff --git a/longitude/core/caches/ram.py b/longitude/core/caches/ram.py new file mode 100644 index 0000000..ec5d1e4 --- /dev/null +++ b/longitude/core/caches/ram.py @@ -0,0 +1,36 @@ +from .base import LongitudeCache + + +class RamCache(LongitudeCache): + """ + This is the simplest cache we can use: a dictionary in memory. + """ + _values = {} + + def setup(self): + self.flush() + + @property + def is_ready(self): + return True + + def execute_get(self, key): + return self._values.get(key) + + def execute_put(self, key, payload, expiration_time_s=None): + if expiration_time_s: + self.logger.warning("RamCache does not support expiration time. 
Ignoring configuration.") + is_overwrite = key in self._values.keys() + self._values[key] = payload + return is_overwrite + + def flush(self): + self._values = {} + + @staticmethod + def serialize_payload(payload): + return payload + + @staticmethod + def deserialize_payload(payload): + return payload diff --git a/longitude/core/caches/redis.py b/longitude/core/caches/redis.py new file mode 100644 index 0000000..dd287b7 --- /dev/null +++ b/longitude/core/caches/redis.py @@ -0,0 +1,55 @@ +import redis + +from .base import LongitudeCache + + +class RedisCache(LongitudeCache): + _default_config = { + 'host': 'localhost', + 'port': 6379, + 'db': 0, + 'password': None + } + + _values = None + + def setup(self): + self._values = redis.Redis( + host=self.get_config('host'), + port=self.get_config('port'), + db=self.get_config('db'), + password=self.get_config('password') + ) + + @property + def is_ready(self): + try: + self._values.ping() + return True + except TimeoutError: + return False + except redis.exceptions.ConnectionError: + self.logger.error( + 'Cannot connect to Redis server at %s:%d.' % (self.get_config('host'), self.get_config('port'))) + return False + except redis.exceptions.ResponseError as e: + msg = str(e) + if str(e) == 'invalid password': + msg = 'Redis password is wrong.' + elif str(e) == "NOAUTH Authentication required.": + msg = 'Redis password required.' 
+ self.logger.error(msg) + return False + + def execute_get(self, key): + return self._values.get(name=key) + + def execute_put(self, key, payload, expiration_time_s=None): + overwrite = self._values.exists(key) == 1 + self._values.set(name=key, value=payload) + if expiration_time_s: + self._values.expire(name=key, time=expiration_time_s) + return overwrite + + def flush(self): + self._values.flushall() diff --git a/longitude/core/common/__init__.py b/longitude/core/common/__init__.py new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/longitude/core/common/__init__.py @@ -0,0 +1 @@ + diff --git a/longitude/core/common/config.py b/longitude/core/common/config.py new file mode 100644 index 0000000..d7d2e88 --- /dev/null +++ b/longitude/core/common/config.py @@ -0,0 +1,125 @@ +import logging +import os + +from .exceptions import LongitudeConfigError + + +class EnvironmentConfiguration: + prefix = 'LONGITUDE' + separator = '__' + config = None + + @classmethod + def _load_environment_variables(cls): + """ + It loads environment variables into the internal dictionary. + + Load is done by grouping and nesting environment variables following this convention: + 1. Only variables starting with the prefix are taken (i.e. LONGITUDE) + 2. For each separator used, a new nested object is created inside its parent (i.e. SEPARATOR is '__') + 3. The prefix indicates the root object (i.e. LONGITUDE__ is the default root dictionary) + + :return: None + """ + cls.config = {} + for v in [k for k in os.environ.keys() if k.startswith(cls.prefix)]: + value_path = v.split(cls.separator)[1:] + cls._append_value(os.environ.get(v), value_path, cls.config) + + @classmethod + def get(cls, key=None): + """ + Returns a nested config value from the configuration. It allows getting values as a series of joined keys using + dot ('.') as separator. This will search for keys in nested dictionaries until a final value is found. + + :param key: String in the form of 'parent.child.value...'. 
It must replicate the configuration nested structure. + :return: It returns an integer, a string or a nested dictionary. If none of these is found, it returns None. + """ + + # We do a lazy load in the first access + if cls.config is None: + cls._load_environment_variables() + + if key is not None: + return cls._get_nested_key(key, cls.config) + else: + return cls.config + + @staticmethod + def _get_nested_key(key, d): + """ + + :param key: + :param d: + :return: + """ + key_path = key.split('.') + root_key = key_path[0] + + if root_key in d.keys(): + if len(key_path) == 1: + return d[root_key] # If a single node is in the path, it is the final one + # If there are more than one nodes left, keep digging... + return EnvironmentConfiguration._get_nested_key('.'.join(key_path[1:]), d[root_key]) + else: + return None # Nested key was not found in the config + + @staticmethod + def _append_value(value, value_path, d): + root_path = value_path[0].lower() + if len(value_path) == 1: + + try: + d[root_path] = int(value) + except ValueError: + d[root_path] = value + else: + if root_path not in d.keys(): + d[root_path] = {} + EnvironmentConfiguration._append_value(value, value_path[1:], d[root_path]) + + +class LongitudeConfigurable: + """ + Any subclass will have a nice get_config(key) method to retrieve configuration values + """ + _default_config = {} + _config = {} + + def __init__(self, config=None): + if config is not None and not isinstance(config, dict): + raise TypeError('Config object must be a dictionary') + + self._config = config or {} + self.logger = logging.getLogger(__class__.__module__) + default_keys = set(self._default_config.keys()) + config_keys = set(config.keys()) if config is not None else set([]) + unexpected_config_keys = list(config_keys.difference(default_keys)) + using_defaults_for = list(default_keys.difference(config_keys)) + + unexpected_config_keys.sort() + using_defaults_for.sort() + + for k in unexpected_config_keys: + 
self.logger.warning("%s is an unexpected config value" % k) + + for k in using_defaults_for: + self.logger.info("%s key is using default value" % k) + + def get_config(self, key=None): + """ + Getter for configuration values + :param key: Key in the configuration dictionary. If no key is provided, the full config is returned. + :return: Current value of the chosen key + """ + if key is None: + config_template = dict(self._default_config) + config_template.update(self._config) + return config_template + + if key not in self._default_config.keys(): + raise LongitudeConfigError("%s is not a valid config value. Check your defaults as reference." % key) + try: + return self._config[key] + except (TypeError, KeyError): + return self._default_config[key] diff --git a/longitude/core/common/exceptions.py b/longitude/core/common/exceptions.py new file mode 100644 index 0000000..335282b --- /dev/null +++ b/longitude/core/common/exceptions.py @@ -0,0 +1,18 @@ +class LongitudeBaseException(Exception): + pass + + +class LongitudeRetriesExceeded(LongitudeBaseException): + pass + + +class LongitudeQueryCannotBeExecutedException(LongitudeBaseException): + pass + + +class LongitudeWrongQueryException(LongitudeBaseException): + pass + + +class LongitudeConfigError(LongitudeBaseException): + pass diff --git a/longitude/core/common/helpers.py b/longitude/core/common/helpers.py new file mode 100644 index 0000000..b10ac8d --- /dev/null +++ b/longitude/core/common/helpers.py @@ -0,0 +1,16 @@ +from longitude.core.data_sources.base import DataSource + + +class DisabledCache: + data_source = None + + def __init__(self, ds): + if ds and not isinstance(ds, DataSource): + raise TypeError('DisabledCache can only be applied to DataSource subclasses.') + self.data_source = ds + + def __enter__(self): + self.data_source.disable_cache() + + def __exit__(self, *args): + self.data_source.enable_cache() diff --git a/longitude/core/common/query_response.py b/longitude/core/common/query_response.py new file 
mode 100644 index 0000000..c1395d6 --- /dev/null +++ b/longitude/core/common/query_response.py @@ -0,0 +1,16 @@ + +class LongitudeQueryResponse: + def __init__(self, rows=None, fields=None, profiling=None): + self.rows = rows or [] + self.fields = fields or {} + self.profiling = profiling or {} + self._from_cache = False + + + @property + def comes_from_cache(self): + return self._from_cache + + def mark_as_cached(self): + self._from_cache = True + diff --git a/longitude/core/data_sources/README.md b/longitude/core/data_sources/README.md new file mode 100644 index 0000000..e9e7290 --- /dev/null +++ b/longitude/core/data_sources/README.md @@ -0,0 +1,61 @@ +# How to add your own Data Source + +**IMPORTANT**: this is useful information if you are **developing the Longitude library**. If you are just using it, this information is irrelevant for you. + +If you want to add data source classes to Longitude you must create a new class that inherits from DataSource. + +* That class must implement ```execute_query()``` and ```parse_response()``` +* It can also override: + * ```__init___```: if it needs instance attributes to be defined + * ```setup()```: if it needs some process to be done **before** executing queries + * ```is_ready``` (*property*): if you need a ```setup()``` here you return if setup was successful. +* It can also define configuration fields using the ```_default_config``` dictionary + * Keep in mind that if you do not add a default key/value for a config, it will not be allowed as configuration value. + * Only existent keys in the default dictionary are allowed as configuration values. + +### Template + +Feel free to copy/paste this code and customize it: + +```python + +from core.data_sources.base import DataSource + + +class MyDatabaseTechnologyDataSource(DataSource): + _default_config = { + "...": "..." + } + + def __init__(self, config=None, cache_class=None): + # TODO: Here you can define instance values like cursors, connections, etc... 
+ super().__init__(config, cache_class=cache_class) + + def setup(self): + # TODO: Write how your database connection is established, how to log... + super().setup() + + @property + def is_ready(self): + # TODO: Write how, after setup, you can know if queries can be executed (return True) or not (return False) + return False + + def execute_query(self, query_template, params, needs_commit, query_config, **opts): + # TODO: Write how the database query is executed and return the response or None + pass + + def parse_response(self, response): + # TODO: Write how the database query response is converted into a LongitudeQueryResponse object + pass + +``` + +### Do I need to override the methods always? + +No. If your data source is, for example, some REST API (or any service without session or permanent connection), you do not need any preparation. You can just execute queries and parse responses. + +Sometimes the setup thing is needed for performance (i.e. instead of connecting/disconnecting a database in each query.) + +### Must I implement the execute and parse methods always? + +Yes. Those are the interface methods and are mandatory. 
\ No newline at end of file diff --git a/longitude/core/data_sources/__init__.py b/longitude/core/data_sources/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/longitude/core/data_sources/base.py b/longitude/core/data_sources/base.py new file mode 100644 index 0000000..379421d --- /dev/null +++ b/longitude/core/data_sources/base.py @@ -0,0 +1,199 @@ +import logging +from time import time +from typing import Type + +from ..caches.base import LongitudeCache +from ..common.config import LongitudeConfigurable +from ..common.exceptions import (LongitudeQueryCannotBeExecutedException, + LongitudeRetriesExceeded) + + +class DataSourceQueryConfig: + def __init__(self, retries=0, custom=None): + self.retries = retries + + # Depending on the specific interface (i.e.: CARTO, Postgres...), we might also need to specify per-query values + self.custom = custom or {} + + def copy(self): + return DataSourceQueryConfig(self.retries, self.custom) + + +class DataSource(LongitudeConfigurable): + + def __init__(self, config=None, cache_class: Type[LongitudeCache] = None): + super().__init__(config=config) + self.logger = logging.getLogger(self.__class__.__module__) + self._default_query_config = DataSourceQueryConfig() + self._use_cache = True + self._cache = None + + if cache_class: + if not issubclass(cache_class, LongitudeCache): + raise TypeError('Cache must derive from LongitudeCache or be None') + else: + self._cache = cache_class(config=config.get('cache')) + + def setup(self): + if self._cache: + self._cache.setup() + + @property + def tries(self): + return self._default_query_config.retries + 1 + + def set_retries(self, value=0): + """ + Sets the amount of times that a query will be re-asked in case of failure. + Zero means that there will be no RE-tries, BUT the first try will be done so the query is sent once at least. + + :param value: Amount of desired retries. Negative values will be forced to 0. 
+ """ + self._default_query_config.retries = max(0, value) + + def set_custom_query_default(self, key, value): + self._default_query_config.custom[key] = value + + def copy_default_query_config(self): + """ + Helper for custom queries. When doing a query with some different configuration, copy the default one, modify it + and pass it to the query. + + :return: A new object with the same content as the current default query config + """ + return self._default_query_config.copy() + + @property + def is_ready(self): + """ + This method must be implemented by children classes to reflect that setup was ok and must call super().is_ready + :return: True if setup() call was successful. False if not. + """ + return not self._cache or self._cache.is_ready + + def enable_cache(self): + self._use_cache = True + + def disable_cache(self): + self._use_cache = False + + def write_data_frame(self, data_frame, table_name): + """ + Writes a Pandas data frame in the specified table + :param data_frame: DataFrame to be written + :param table_name: String indicating target table + :return: + """ + raise NotImplementedError + + def read_data_frame(self, table_name): + """ + Reads the target table as a Pandas DataFrame + :param table_name: String indicating target table + :return: Data as DataFrame + """ + raise NotImplementedError + + def committed_query(self, query_template, params=None): + """ + This is a shortcut for INSERT queries and similar ones dealing with simple update operations. + + Makes a default non-cached query committing the result. If you need to specify more details such as cache or + query specific values, use .query(...) 
+ + :param query_template: Unformatted SQL query + :param params: Values to be passed to the query when formatting it + :return: + """ + return self.query(query_template, params=params, use_cache=False, needs_commit=True) + + def cached_query(self, query_template, params=None, expiration_time_s=None): + """ + This is a shortcut for SELECT queries and similar ones requesting simple data. + + Makes a default cached query. This means that no commit is done and no specific config for the query is + available. If you need any of these, use .query(...) + + :param query_template: Unformatted SQL query + :param params: Values to be passed to the query when formatting it + :param expiration_time_s: Amount of seconds for the payload to be stored (if cache supports this) + :return: Result of the query + """ + return self.query(query_template, params=params, expiration_time_s=expiration_time_s) + + def query(self, query_template, params=None, use_cache=True, expiration_time_s=None, needs_commit=False, + query_config=None, **opts): + """ + This method has to be called to interact with the data source. Each children class will have to implement + its own .execute_query(...) with the specific behavior for each interface. + + :param query_template: Unformatted SQL query + :param params: Values to be passed to the query when formatting it + :param use_cache: Boolean to indicate if this specific query should use cache or not (default: True) + :param expiration_time_s: If using cache and cache supports expiration, amount of seconds for the payload to be stored + :param needs_commit: Boolean to indicate if this specific query needs to commit to db (default: False) + :param query_config: Specific query configuration. If None, the default one will be used. 
+ :param opts: + :return: Result of querying the database + """ + if params is None: + params = {} + + if query_config is None: + query_config = self._default_query_config + + normalized_response = None + if self._cache and self._use_cache and use_cache: + start = time() + normalized_response = self._cache.get(query_template, params) + if normalized_response: + normalized_response.profiling['cache_time'] = time() - start + + if normalized_response: + normalized_response.mark_as_cached() + return normalized_response + else: + for r in range(self.tries): + try: + response = self.execute_query(query_template=query_template, + params=params, + needs_commit=needs_commit, + query_config=query_config, + **opts) + + normalized_response = self.parse_response(response) + if self._cache and self._use_cache and use_cache: + self._cache.put( + query_template, + payload=normalized_response, + query_params=params, + expiration_time_s=expiration_time_s + ) + + return normalized_response + except LongitudeQueryCannotBeExecutedException: + self.logger.error('Query could not be executed. 
Retries left: %d' % (self.tries - r)) + raise LongitudeRetriesExceeded + + def execute_query(self, query_template, params, needs_commit, query_config, **opts): + """ + + :raise LongitudeQueryCannotBeExecutedException + :param formatted_query: + :param needs_commit: + :param query_config: + :param opts: + :return: + """ + raise NotImplementedError + + def parse_response(self, response): + """" + :param response from an succesfully executed query + :return: A LongitudeQueryResponse object + """ + raise NotImplementedError + + def flush_cache(self): + if self._cache and self._cache.is_ready: + self._cache.flush() diff --git a/longitude/core/data_sources/carto.py b/longitude/core/data_sources/carto.py new file mode 100644 index 0000000..69e7901 --- /dev/null +++ b/longitude/core/data_sources/carto.py @@ -0,0 +1,87 @@ +from carto.auth import APIKeyAuthClient +from carto.exceptions import CartoException +from carto.sql import BatchSQLClient, SQLClient + +from ..common.query_response import LongitudeQueryResponse +from .base import DataSource, LongitudeQueryCannotBeExecutedException + + +class CartoDataSource(DataSource): + SUBDOMAIN_URL_PATTERN = "https://%s.carto.com" + ON_PREMISE_URL_PATTERN = "https://%s/user/%s" + _default_config = { + 'api_version': 'v2', + 'uses_batch': False, + 'on_premise_domain': '', + 'api_key': '', + 'user': '', + 'cache': None + } + + def __init__(self, config=None, cache_class=None): + super().__init__(config, cache_class=cache_class) + self._sql_client = None + self._batch_client = None + self.set_custom_query_default('do_post', False) + self.set_custom_query_default('parse_json', True) + self.set_custom_query_default('format', 'json') + + def setup(self): + auth_client = APIKeyAuthClient(api_key=self.get_config('api_key'), base_url=self.base_url) + self._sql_client = SQLClient(auth_client, api_version=self.get_config('api_version')) + + if self.get_config('uses_batch'): + self._batch_client = BatchSQLClient(auth_client) + super().setup() + 
+ + @property + def base_url(self): + user = self.get_config('user') + on_premise_domain = self.get_config('on_premise_domain') + if on_premise_domain: + base_url = self.ON_PREMISE_URL_PATTERN % (on_premise_domain, user) + else: + base_url = self.SUBDOMAIN_URL_PATTERN % user + return base_url + + @property + def is_ready(self): + if super().is_ready: + sql_setup_ready = self._sql_client is not None + batch_setup_ready = not self.get_config('uses_batch') or (self._batch_client is not None) + is_ready = sql_setup_ready and batch_setup_ready and self.get_config('user') != '' + return is_ready + else: + return False + + def execute_query(self, query_template, params, needs_commit, query_config, **opts): + # TODO: Here we are parsing the parameters and taking responsibility for it. We do not make any safe parsing as + # this will be used in a backend-to-backend context and we build our own queries. + # --- + # This is also problematic as quoting is not done and relies on the query template + # --- + # Can we use the .mogrify method in psycopg2 to render a query as it is going to be executed ? -> NO + # -> .mogrify is a cursor method but in CARTO connections we lack a cursor. 
+ # --- + # There is an open issue in CARTO about having separated parameters and binding them in the server: + # https://github.com/CartoDB/Geographica-Product-Coordination/issues/57 + formatted_query = query_template % params + + parse_json = query_config.custom['parse_json'] + do_post = query_config.custom['do_post'] + format_ = query_config.custom['format'] + try: + return self._sql_client.send(formatted_query, parse_json=parse_json, do_post=do_post, format=format_) + + except CartoException as e: + raise LongitudeQueryCannotBeExecutedException + + def parse_response(self, response): + return LongitudeQueryResponse( + rows=response['rows'], + fields=response['fields'], + profiling={ + 'response_time': response['time'], + 'total_rows': response['total_rows'] + } + ) diff --git a/longitude/core/data_sources/postgres/__init__.py b/longitude/core/data_sources/postgres/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/longitude/core/data_sources/postgres/common.py b/longitude/core/data_sources/postgres/common.py new file mode 100644 index 0000000..b86f37f --- /dev/null +++ b/longitude/core/data_sources/postgres/common.py @@ -0,0 +1,5 @@ +from psycopg2.extensions import string_types + + +def psycopg2_type_as_string(type_id): + return string_types[type_id] diff --git a/longitude/core/data_sources/postgres/default.py b/longitude/core/data_sources/postgres/default.py new file mode 100644 index 0000000..df51576 --- /dev/null +++ b/longitude/core/data_sources/postgres/default.py @@ -0,0 +1,75 @@ +from time import time + +import psycopg2 +import psycopg2.extensions +from .common import psycopg2_type_as_string + +from ...common.query_response import LongitudeQueryResponse +from ..base import DataSource + + +class DefaultPostgresDataSource(DataSource): + _default_config = { + 'host': 'localhost', + 'port': 5432, + 'db': '', + 'user': 'postgres', + 'password': '' + } + + def __init__(self, config=None, cache_class=None): + self._conn = None + self._cursor = 
None + super().__init__(config, cache_class=cache_class) + + def __del__(self): + if self._cursor: + self._cursor.close() + if self._conn: + self._conn.close() + + def setup(self): + self._conn = psycopg2.connect( + host=self.get_config('host'), + port=self.get_config('port'), + database=self.get_config('db'), + user=self.get_config('user'), + password=self.get_config('password') + ) + + self._cursor = self._conn.cursor() + super().setup() + + def is_ready(self): + return super().is_ready and self._conn and self._cursor + + def execute_query(self, query_template, params, needs_commit, query_config, **opts): + data = { + 'fields': [], + 'rows': [], + 'profiling': {} + } + + start = time() + self._cursor.execute(query_template, params) + data['profiling']['execute_time'] = time() - start + + if self._cursor.description: + data['fields'] = self._cursor.description + data['rows'] = self._cursor.fetchall() + + if needs_commit: + start = time() + self._conn.commit() + data['profiling']['commit_time'] = time() - start + + return data + + + def parse_response(self, response): + if response: + raw_fields = response['fields'] + fields_names = {n.name: {'type': psycopg2_type_as_string(n.type_code).name} for n in raw_fields} + rows = [{raw_fields[i].name: f for i, f in enumerate(row_data)} for row_data in response['rows']] + return LongitudeQueryResponse(rows=rows, fields=fields_names, profiling=response['profiling']) + return None diff --git a/longitude/core/data_sources/postgres/sqlalchemy.py b/longitude/core/data_sources/postgres/sqlalchemy.py new file mode 100644 index 0000000..a75807d --- /dev/null +++ b/longitude/core/data_sources/postgres/sqlalchemy.py @@ -0,0 +1,79 @@ +from time import time + +from sqlalchemy import create_engine +from sqlalchemy.ext.declarative import declarative_base +from .common import psycopg2_type_as_string +from longitude.core.common.query_response import LongitudeQueryResponse +from longitude.core.data_sources.base import DataSource + + +class 
SQLAlchemyDataSource(DataSource): + _default_config = { + 'host': 'localhost', + 'port': 5432, + 'db': '', + 'user': 'postgres', + 'password': '' + } + + _Base = None + + @property + def base_class(self): + if self._Base is None: + self._Base = declarative_base() + return self._Base + + def create_all(self): + self.base_class.metadata.create_all(self._engine) + + def __init__(self, config=None, cache_class=None): + # https://docs.sqlalchemy.org/en/latest/dialects/postgresql.html + + self._engine = None + self._connection = None + + super().__init__(config, cache_class=cache_class) + + def __del__(self): + if self._connection: + self._connection.close() + + def setup(self): + connection_string_template = 'postgresql://%(user)s:%(password)s@%(host)s:%(port)d/%(db)s' + self._engine = create_engine(connection_string_template % self.get_config(), echo=True) + self._connection = self._engine.connect() + + super().setup() + + @property + def is_ready(self): + return self._engine is not None and self._connection is not None + + def execute_query(self, query_template, params, needs_commit, query_config, **opts): + data = { + 'fields': [], + 'rows': [], + 'profiling': {} + } + + start = time() + response = self._connection.execute(query_template, params) + data['profiling']['execute_time'] = time() - start + + if response.returns_rows: + data['fields'] = response.cursor.description + data['rows'] = response.fetchall() + + # TODO: Check auto-commit feature. How do we want to implement this here? 
+ + return data + + def parse_response(self, response): + + if response: + raw_fields = response['fields'] + fields_names = {n.name: {'type': psycopg2_type_as_string(n.type_code).name} for n in raw_fields} + rows = [{raw_fields[i].name: f for i, f in enumerate(row_data)} for row_data in response['rows']] + return LongitudeQueryResponse(rows=rows, fields=fields_names, profiling=response['profiling']) + return None diff --git a/longitude/core/tests/__init__.py b/longitude/core/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/longitude/core/tests/test_cache_base.py b/longitude/core/tests/test_cache_base.py new file mode 100644 index 0000000..2a1a3b2 --- /dev/null +++ b/longitude/core/tests/test_cache_base.py @@ -0,0 +1,34 @@ +from unittest import TestCase + +from longitude.core.common.query_response import LongitudeQueryResponse +from ..caches.base import LongitudeCache + + +class TestLongitudeCache(TestCase): + def test_generate_key(self): + import string + import random + # The interesting point here is to ensure that it is extremely difficult to have collisions + # We will test really similar payloads and test for unique hashes + queries_population = 100000 + + QUERY_PATTERN = "SELECT * FROM table_%s" + random_queries = set([QUERY_PATTERN % ''.join(random.choices(string.ascii_uppercase + string.digits, k=20)) + for _ in range(queries_population)]) + + keys = set([LongitudeCache.generate_key(q, {}) for q in random_queries]) + + # By using sets we ensure uniqueness. 
+ self.assertEqual(len(random_queries), len(keys)) + + # Also, the function must return ALWAYS the same value for the same query, regardless of how many times the + # function is called + unique_key = set([LongitudeCache.generate_key('SOME_QUERY_OVER_AND_OVER', {}) for _ in range(100)]) + self.assertEqual(1, len(unique_key)) + + def test_get_nor_put_are_implemented_in_base_class(self): + cache = LongitudeCache() + with self.assertRaises(NotImplementedError): + cache.get('some_query', {}) + with self.assertRaises(NotImplementedError): + cache.put('some_query', payload=LongitudeQueryResponse()) diff --git a/longitude/core/tests/test_cache_ram.py b/longitude/core/tests/test_cache_ram.py new file mode 100644 index 0000000..8863176 --- /dev/null +++ b/longitude/core/tests/test_cache_ram.py @@ -0,0 +1,31 @@ +from unittest import TestCase, mock + +from longitude.core.common.query_response import LongitudeQueryResponse +from ..caches.ram import RamCache + + +class TestRedisCache(TestCase): + cache = None + + def setUp(self): + self.cache = RamCache() + + def test_setup_must_clean_cache(self): + with mock.patch('longitude.core.caches.ram.RamCache.flush') as fake_flush: + self.cache.setup() + fake_flush.assert_called_once() + self.assertTrue(self.cache.is_ready) + + def test_serialization_does_nothing(self): + self.assertEqual('value', self.cache.serialize_payload('value')) + self.assertEqual('value', self.cache.deserialize_payload('value')) + + def test_read_write_flush_cycle(self): + self.assertIsNone(self.cache.get('fake_key')) + payload = LongitudeQueryResponse() + payload.profiling['value'] = 42 + self.assertFalse(self.cache.put('key', payload)) + self.assertEqual(42, self.cache.get('key').profiling['value']) + + self.cache.flush() + self.assertIsNone(self.cache.get('key')) diff --git a/longitude/core/tests/test_cache_redis.py b/longitude/core/tests/test_cache_redis.py new file mode 100644 index 0000000..ab48a60 --- /dev/null +++ b/longitude/core/tests/test_cache_redis.py @@ 
-0,0 +1,71 @@ +from unittest import TestCase, mock + +import redis.exceptions + +from longitude.core.common.query_response import LongitudeQueryResponse +from ..caches.redis import RedisCache + + +@mock.patch('longitude.core.caches.redis.redis.Redis') +class TestRedisCache(TestCase): + cache = None + + def setUp(self): + self.cache = RedisCache(config={'host': 'some_host', 'port': 666, 'db': 0, 'password': 'some_pass'}) + + def test_is_ready_if_redis_returns_ping(self, redis_mock): + redis_mock.return_value.ping.return_value = True + self.cache.setup() + self.assertTrue(self.cache.is_ready) + + redis_mock.return_value.get.return_value = None + self.assertIsNone(self.cache.get('fake_key')) + redis_mock.return_value.get.assert_called_once() + + redis_mock.return_value.set.return_value = None + self.assertFalse(self.cache.put('some_key', LongitudeQueryResponse())) + redis_mock.return_value.exists.return_value = 1 + self.assertTrue(self.cache.put('some_key', LongitudeQueryResponse())) + self.assertEqual(2, redis_mock.return_value.set.call_count) + + redis_mock.return_value.flushall.return_value = None + self.cache.flush() + redis_mock.return_value.flushall.assert_called_once() + + def test_is_not_ready_if_redis_fails_ping_because_of_connection_error(self, redis_mock): + redis_mock.return_value.ping.side_effect = redis.exceptions.ConnectionError + self.cache.setup() + + with self.assertLogs(level='ERROR') as log_test: + self.assertFalse(self.cache.is_ready) + expected_log = [ + 'ERROR:longitude.core.caches.redis:Cannot connect to Redis server at some_host:666.' 
+ ] + + self.assertEqual(expected_log, log_test.output) + + def test_is_not_ready_if_redis_fails_ping_because_of_timeout(self, redis_mock): + redis_mock.return_value.ping.side_effect = TimeoutError + self.cache.setup() + self.assertFalse(self.cache.is_ready) + + def test_is_not_ready_because_no_password(self, redis_mock): + redis_mock.return_value.ping.side_effect = redis.exceptions.ResponseError('NOAUTH Authentication required.') + self.cache.setup() + with self.assertLogs(level='ERROR') as log_test: + self.assertFalse(self.cache.is_ready) + self.assertEqual(['ERROR:longitude.core.caches.redis:Redis password required.'], log_test.output) + + def test_is_not_ready_because_wrong_password(self, redis_mock): + redis_mock.return_value.ping.side_effect = redis.exceptions.ResponseError('invalid password') + self.cache.setup() + with self.assertLogs(level='ERROR') as log_test: + self.assertFalse(self.cache.is_ready) + self.assertEqual(['ERROR:longitude.core.caches.redis:Redis password is wrong.'], log_test.output) + + def test_is_not_ready_because_of_generic_response_error(self, redis_mock): + redis_mock.return_value.ping.side_effect = redis.exceptions.ResponseError('some error text') + self.cache.setup() + with self.assertLogs(level='ERROR') as log_test: + self.assertFalse(self.cache.is_ready) + self.assertEqual(['ERROR:longitude.core.caches.redis:some error text'], log_test.output) diff --git a/longitude/core/tests/test_configurable.py b/longitude/core/tests/test_configurable.py new file mode 100644 index 0000000..6a646a3 --- /dev/null +++ b/longitude/core/tests/test_configurable.py @@ -0,0 +1,49 @@ +from unittest import TestCase + +from longitude.core.common.config import LongitudeConfigurable +from longitude.core.common.exceptions import LongitudeConfigError + + +class TestConfig(TestCase): + def test_config(self): + # Config must be a dictionary + with self.assertRaises(TypeError): + LongitudeConfigurable(config=[]) + with self.assertRaises(TypeError): + 
LongitudeConfigurable(config="") + with self.assertRaises(TypeError): + LongitudeConfigurable(config=0) + + # Any values can go in the configuration dictionary but not expected ones trigger a warning + config = {"some_config_value": 0, "some_another_config_value": "tomato"} + with self.assertLogs(level='WARNING') as log_test: + ds = LongitudeConfigurable(config) + self.assertEqual(log_test.output, + [ + 'WARNING:longitude.core.common.config:some_another_config_value is an unexpected config value', + 'WARNING:longitude.core.common.config:some_config_value is an unexpected config value']) + + # Values in the config can be retrieved using get_config. If no default or config is defined, None is returned. + ds._default_config['some_config_value'] = 42 + ds._default_config['some_none_value'] = None + self.assertEqual(0, ds.get_config('some_config_value')) + self.assertEqual(None, ds.get_config('some_none_value')) + + # We do not allow trying to get a config value out of the default keys + with self.assertRaises(LongitudeConfigError): + self.assertIsNone(ds.get_config('some_random_value_that_does_not_exist_in_config_or_defaults')) + + def test_get_config_root(self): + class SomeConfigurableClass(LongitudeConfigurable): + _default_config = { + 'a': None, + 'b': 'this will not be overwritten' + } + + ds = SomeConfigurableClass(config={'a': 'custom_value'}) + c = ds.get_config() + expected_config = { + 'a': 'custom_value', + 'b': 'this will not be overwritten' + } + self.assertEqual(expected_config, c) diff --git a/longitude/core/tests/test_data_source_base.py b/longitude/core/tests/test_data_source_base.py new file mode 100644 index 0000000..b68834a --- /dev/null +++ b/longitude/core/tests/test_data_source_base.py @@ -0,0 +1,110 @@ +import os +from unittest import TestCase, mock + +from ..caches.base import LongitudeCache +from ..common.query_response import LongitudeQueryResponse +from ..data_sources.base import DataSource, DataSourceQueryConfig + + +def 
load_raw_text(filename): + file_path = os.path.join(os.path.dirname(__file__), 'raw_text', filename) + with open(file_path, 'r') as f: + return f.read() + + +class TestDataSourceQueryConfig(TestCase): + def test_copy(self): + a = DataSourceQueryConfig() + b = a.copy() + + self.assertNotEqual(a, b) + self.assertEqual(a.__dict__, b.__dict__) + + +class TestDataSource(TestCase): + def setUp(self): + import pickle + + class FakeCache(LongitudeCache): + + @staticmethod + def generate_key(query_template, query_parameters): + if query_template == 'some_query_in_cache': + return 'hit' + return 'miss' + + def setup(self): + pass + + @property + def is_ready(self): + return True + + def execute_get(self, key): + if key == 'hit': + return pickle.dumps(LongitudeQueryResponse()) + return None + + def execute_put(self, key, payload, expiration_time_s=None): + return True + + self._cache_class = FakeCache + + def test_cache_must_extend_longitude_cache(self): + class PoorlyImplementedCache: + pass + + with self.assertRaises(TypeError): + DataSource({}, cache_class=PoorlyImplementedCache) + + def test_cache_hit(self): + ds = DataSource({}, cache_class=self._cache_class) + ds.setup() + # At high level, ds.query will return a normalized LongitudeQueryResponse + # In this test we are interested in triggering that call to the parse function that would return such object, + # but we do not care, in the abstract class, about what content is generated there. 
+ self.assertTrue(ds.query('some_query_in_cache').comes_from_cache) + + @mock.patch('longitude.core.data_sources.base.DataSource.parse_response') + @mock.patch('longitude.core.data_sources.base.DataSource.execute_query') + def test_cache_miss(self, execute_query_mock, parse_response_mock): + ds = DataSource({}, cache_class=self._cache_class) + ds.setup() + execute_query_mock.return_value = 'some response from the server' + parse_response_mock.return_value = LongitudeQueryResponse(profiling={'value': 42}) + self.assertEqual(42, ds.query('some_query_not_in_cache').profiling['value']) + parse_response_mock.assert_called_once_with('some response from the server') + + def test_abstract_methods_are_not_implemented(self): + ds = DataSource({}) + + with self.assertRaises(NotImplementedError): + ds.query(query_template='whatever') + + def test_is_ready(self): + class FakeReadyCache(LongitudeCache): + def setup(self): + pass + + @property + def is_ready(self): + return True + + class FakeNotReadyCache(LongitudeCache): + def setup(self): + pass + + @property + def is_ready(self): + return False + + ds = DataSource(config={}, cache_class=FakeReadyCache) + self.assertTrue(ds.is_ready) + ds = DataSource(config={}, cache_class=FakeNotReadyCache) + self.assertFalse(ds.is_ready) + + def test_copy_default_query_config(self): + ds = DataSource({}) + the_copy = ds.copy_default_query_config() + self.assertNotEqual(the_copy, ds._default_query_config) + self.assertEqual(the_copy.__dict__, ds._default_query_config.__dict__) diff --git a/longitude/core/tests/test_data_source_carto.py b/longitude/core/tests/test_data_source_carto.py new file mode 100644 index 0000000..12c810f --- /dev/null +++ b/longitude/core/tests/test_data_source_carto.py @@ -0,0 +1,68 @@ +from unittest import TestCase, mock + +from carto.exceptions import CartoException + +from ..data_sources.base import LongitudeRetriesExceeded +from ..data_sources.carto import CartoDataSource + + +class TestCartoDataSource(TestCase): 
+ + def test_default_configuration_loads(self): + with self.assertLogs(level='INFO') as log_test: + carto_ds = CartoDataSource() + module_name = 'longitude.core.common.config' + self.assertEqual(log_test.output, + ['INFO:%s:api_key key is using default value' % module_name, + 'INFO:%s:api_version key is using default value' % module_name, + 'INFO:%s:cache key is using default value' % module_name, + 'INFO:%s:on_premise_domain key is using default value' % module_name, + 'INFO:%s:user key is using default value' % module_name, + 'INFO:%s:uses_batch key is using default value' % module_name] + ) + + self.assertEqual('', carto_ds.get_config('api_key')) + self.assertEqual('v2', carto_ds.get_config('api_version')) + self.assertEqual('', carto_ds.get_config('on_premise_domain')) + self.assertEqual('', carto_ds.get_config('user')) + self.assertFalse(carto_ds.get_config('uses_batch')) + + def test_setup_not_ready_if_empty_user(self): + carto_ds = CartoDataSource({ + 'uses_batch': True # Just to enable that coverage branch for now + }) + carto_ds.setup() + self.assertFalse(carto_ds.is_ready) + + def test_setup_needs_some_user(self): + carto_ds = CartoDataSource({ + 'user': 'some_user' + }) + carto_ds.setup() + self.assertTrue(carto_ds.is_ready) + self.assertEqual('https://some_user.carto.com', carto_ds.base_url) + + def test_setup_can_accept_on_premise_domain(self): + carto_ds = CartoDataSource({ + 'user': 'some_on_premise_user', + 'on_premise_domain': 'some_cool_domain.io' + }) + carto_ds.setup() + self.assertTrue(carto_ds.is_ready) + self.assertEqual('https://some_cool_domain.io/user/some_on_premise_user', carto_ds.base_url) + + def test_succesful_query(self): + ds = CartoDataSource() + ds._sql_client = mock.MagicMock() + ds._sql_client.send.return_value = {'rows': [], 'time': 42.0, 'fields': {}, 'total_rows': 0} + result = ds.query('some query') + ds._sql_client.send.assert_called_with('some query', do_post=False, format='json', parse_json=True) + self.assertEqual([], 
result.rows) + self.assertEqual(42, result.profiling['response_time']) + + def test_wrong_query(self): + ds = CartoDataSource() + ds._sql_client = mock.MagicMock() + ds._sql_client.send.side_effect = CartoException + with self.assertRaises(LongitudeRetriesExceeded): + ds.query('some irrelevant query') diff --git a/longitude/core/tests/test_data_source_postgres.py b/longitude/core/tests/test_data_source_postgres.py new file mode 100644 index 0000000..b1209be --- /dev/null +++ b/longitude/core/tests/test_data_source_postgres.py @@ -0,0 +1,3 @@ +from unittest import TestCase + +from ..data_sources.postgres.default import DefaultPostgresDataSource diff --git a/longitude/core/tests/test_data_source_sqlalchemy.py b/longitude/core/tests/test_data_source_sqlalchemy.py new file mode 100644 index 0000000..707c252 --- /dev/null +++ b/longitude/core/tests/test_data_source_sqlalchemy.py @@ -0,0 +1,47 @@ +from unittest import TestCase, mock +from ..data_sources.postgres.sqlalchemy import SQLAlchemyDataSource + +TESTED_MODULE_PATH = 'longitude.core.data_sources.postgres.sqlalchemy.%s' + + +class TestSQLAlchemyDataSource(TestCase): + + def test_default_configuration_loads(self): + with self.assertLogs(level='INFO') as log_test: + carto_ds = SQLAlchemyDataSource() + module_name = 'longitude.core.common.config' + self.assertEqual(log_test.output, + ['INFO:%s:db key is using default value' % module_name, + 'INFO:%s:host key is using default value' % module_name, + 'INFO:%s:password key is using default value' % module_name, + 'INFO:%s:port key is using default value' % module_name, + 'INFO:%s:user key is using default value' % module_name + ] + ) + + self.assertEqual('', carto_ds.get_config('db')) + self.assertEqual('localhost', carto_ds.get_config('host')) + self.assertEqual('', carto_ds.get_config('password')) + self.assertEqual(5432, carto_ds.get_config('port')) + self.assertEqual('postgres', carto_ds.get_config('user')) + + @mock.patch(TESTED_MODULE_PATH % 'declarative_base') + 
def test_base_class(self, alchemy_base_mock): + alchemy_base_mock.return_value = object() + carto_ds = SQLAlchemyDataSource() + self.assertIsNotNone(carto_ds.base_class) # Here, first time instance is created + self.assertIsNotNone(carto_ds.base_class) # Here, instance is recovered + alchemy_base_mock.assert_called_once() # Base class is only created once by our wrapper + + @mock.patch(TESTED_MODULE_PATH % 'SQLAlchemyDataSource.base_class') + def test_create_all(self, base_class_mock): + base_class_mock.metadata.create_all = mock.MagicMock() + carto_ds = SQLAlchemyDataSource() + carto_ds.create_all() + base_class_mock.metadata.create_all.assert_called_once() + + def test_setup(self): + carto_ds = SQLAlchemyDataSource() + with mock.patch(TESTED_MODULE_PATH % 'create_engine') as fake_create_engine: + carto_ds.setup() + fake_create_engine.assert_called_once() diff --git a/longitude/core/tests/test_environment_configuration_dictionary.py b/longitude/core/tests/test_environment_configuration_dictionary.py new file mode 100644 index 0000000..29f0fd7 --- /dev/null +++ b/longitude/core/tests/test_environment_configuration_dictionary.py @@ -0,0 +1,38 @@ +from unittest import TestCase, mock +from longitude.core.common.config import EnvironmentConfiguration as Config + +fake_environment = { + 'LONGITUDE__PARENT__CHILD__VALUE_A': '42', + 'LONGITUDE__PARENT__CHILD__VALUE_B': 'wut', + 'LONGITUDE__VALUE_A': '8008' +} + + +@mock.patch.dict('longitude.core.common.config.os.environ', fake_environment) +class TestConfigurationDictionary(TestCase): + + def test_existing_values_return_strings_or_integers(self): + self.assertEqual(42, Config.get('parent.child.value_a')) + self.assertEqual('wut', Config.get('parent.child.value_b')) + self.assertEqual(8008, Config.get('value_a')) + + def test_non_existing_values_return_none(self): + self.assertEqual(None, Config.get('wrong_value')) + self.assertEqual(None, Config.get('wrong_parent.child.value')) + self.assertEqual(None, 
Config.get('parent.wrong_child.value')) + self.assertEqual(None, Config.get('parent.child.wrong_value')) + self.assertEqual(None, Config.get('parent.wrong_child')) + + def test_existing_nested_values_return_dictionaries(self): + fake_config = { + 'parent': + {'child': + { + 'value_a': 42, + 'value_b': 'wut' + } + }, + 'value_a': 8008 + } + self.assertEqual(fake_config, Config.get()) + self.assertEqual(fake_config['parent']['child'], Config.get('parent.child')) diff --git a/longitude/core/tests/test_helpers.py b/longitude/core/tests/test_helpers.py new file mode 100644 index 0000000..10efa1d --- /dev/null +++ b/longitude/core/tests/test_helpers.py @@ -0,0 +1,20 @@ +from unittest import TestCase, mock +from longitude.core.common.helpers import DisabledCache +from longitude.core.data_sources.base import DataSource + + +class TestHelpers(TestCase): + + @mock.patch('longitude.core.data_sources.base.DataSource', spec=DataSource) + def test_disable_cache_context_manager_triggers_cache(self, fake_data_source): + fake_data_source.enable_cache.return_value = None + fake_data_source.disable_cache.return_value = None + with DisabledCache(fake_data_source): + fake_data_source.disable_cache.assert_called_once() + fake_data_source.enable_cache.assert_called_once() + + @mock.patch('longitude.core.data_sources.base.DataSource') + def test_disable_cache_context_manager_must_receive_a_data_source(self, fake_data_source): + with self.assertRaises(TypeError): + with DisabledCache(fake_data_source): + print('This text should never be printed') diff --git a/longitude/samples/.gitignore b/longitude/samples/.gitignore new file mode 100644 index 0000000..c4d4a8f --- /dev/null +++ b/longitude/samples/.gitignore @@ -0,0 +1 @@ +*_sample_config.py diff --git a/longitude/samples/README.md b/longitude/samples/README.md new file mode 100644 index 0000000..5c09f48 --- /dev/null +++ b/longitude/samples/README.md @@ -0,0 +1,7 @@ +# Longitude SAMPLE scripts + +In this folder you will find examples 
about how to use the features in the library. + +Please, keep in mind that these will be not so updated as the ```tests```. You should rely on the ```tests``` tp fully understand how the library works. + +Please, if you find any wrong example or something to improve, submit a PR :) \ No newline at end of file diff --git a/longitude/samples/__init__.py b/longitude/samples/__init__.py new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/longitude/samples/__init__.py @@ -0,0 +1 @@ + diff --git a/longitude/samples/carto_sample.py b/longitude/samples/carto_sample.py new file mode 100644 index 0000000..a07df9c --- /dev/null +++ b/longitude/samples/carto_sample.py @@ -0,0 +1,37 @@ +""" +██╗ ██╗ ██████╗ ██╗ ██╗ ████████╗ ██████╗ ██╗ ██╗███████╗███████╗ ████████╗██╗ ██╗██╗███████╗ +██║ ██║██╔═══██╗██║ ██║ ╚══██╔══╝██╔═══██╗ ██║ ██║██╔════╝██╔════╝ ╚══██╔══╝██║ ██║██║██╔════╝██╗ +███████║██║ ██║██║ █╗ ██║ ██║ ██║ ██║ ██║ ██║███████╗█████╗ ██║ ███████║██║███████╗╚═╝ +██╔══██║██║ ██║██║███╗██║ ██║ ██║ ██║ ██║ ██║╚════██║██╔══╝ ██║ ██╔══██║██║╚════██║██╗ +██║ ██║╚██████╔╝╚███╔███╔╝ ██║ ╚██████╔╝ ╚██████╔╝███████║███████╗ ██║ ██║ ██║██║███████║╚═╝ +╚═╝ ╚═╝ ╚═════╝ ╚══╝╚══╝ ╚═╝ ╚═════╝ ╚═════╝ ╚══════╝╚══════╝ ╚═╝ ╚═╝ ╚═╝╚═╝╚══════╝ + +You must create a 'carto_sample_config.py' file at this folder with the needed fields (look at the import) +That file will be ignored in git, so do not worry about pushing credentials anywhere (but BE CAREFUL!) +DO NOT REPLACE THIS WITH HARD CODED CREDENTIALS EVER AND ALWAYS REVIEW YOUR COMMITS! 
+""" +import os +import sys + +sys.path.append(os.path.join(os.path.dirname(__file__), '..', '..')) +from longitude.core.data_sources.base import LongitudeRetriesExceeded +from longitude.core.data_sources.carto import CartoDataSource +from longitude.samples.carto_sample_config import CARTO_API_KEY, CARTO_USER, CARTO_TABLE_NAME + +if __name__ == "__main__": + config = { + 'api_key': CARTO_API_KEY, + 'user': CARTO_USER + } + + ds = CartoDataSource(config) + ds.setup() + if ds.is_ready: + try: + data = ds.query('select * from %s limit 30' % CARTO_TABLE_NAME) + [print(r) for r in data.rows] + print(data.profiling) + except LongitudeRetriesExceeded: + print("Too many retries and no success...") + else: + print("Data source is not properly configured.") diff --git a/longitude/samples/carto_sample_with_ram_cache.py b/longitude/samples/carto_sample_with_ram_cache.py new file mode 100644 index 0000000..7a20e1c --- /dev/null +++ b/longitude/samples/carto_sample_with_ram_cache.py @@ -0,0 +1,74 @@ +""" +██╗ ██╗ ██████╗ ██╗ ██╗ ████████╗ ██████╗ ██╗ ██╗███████╗███████╗ ████████╗██╗ ██╗██╗███████╗ +██║ ██║██╔═══██╗██║ ██║ ╚══██╔══╝██╔═══██╗ ██║ ██║██╔════╝██╔════╝ ╚══██╔══╝██║ ██║██║██╔════╝██╗ +███████║██║ ██║██║ █╗ ██║ ██║ ██║ ██║ ██║ ██║███████╗█████╗ ██║ ███████║██║███████╗╚═╝ +██╔══██║██║ ██║██║███╗██║ ██║ ██║ ██║ ██║ ██║╚════██║██╔══╝ ██║ ██╔══██║██║╚════██║██╗ +██║ ██║╚██████╔╝╚███╔███╔╝ ██║ ╚██████╔╝ ╚██████╔╝███████║███████╗ ██║ ██║ ██║██║███████║╚═╝ +╚═╝ ╚═╝ ╚═════╝ ╚══╝╚══╝ ╚═╝ ╚═════╝ ╚═════╝ ╚══════╝╚══════╝ ╚═╝ ╚═╝ ╚═╝╚═╝╚══════╝ + +You must create a 'carto_sample_config.py' file at this folder with the needed fields (look at the import) +That file will be ignored in git, so do not worry about pushing credentials anywhere (but BE CAREFUL!) +DO NOT REPLACE THIS WITH HARD CODED CREDENTIALS EVER AND ALWAYS REVIEW YOUR COMMITS! 
"""
Carto sample using an in-process RAM cache.

You must create a 'carto_sample_config.py' file in this folder with the
needed fields (look at the import below). That file is git-ignored, so no
credentials get pushed anywhere -- but BE CAREFUL and always review your
commits!
"""

import time
import os
import sys

sys.path.append(os.path.join(os.path.dirname(__file__), '..', '..'))
from longitude.core.caches.ram import RamCache
from longitude.core.data_sources.base import LongitudeRetriesExceeded
from longitude.core.data_sources.carto import CartoDataSource
from longitude.samples.carto_sample_config import CARTO_API_KEY, CARTO_USER, CARTO_TABLE_NAME

if __name__ == "__main__":
    config = {
        'api_key': CARTO_API_KEY,
        'user': CARTO_USER
    }

    ds = CartoDataSource(config, cache_class=RamCache)
    ds.setup()
    if ds.is_ready:
        try:
            REPEATED_QUERY = 'select * from %s limit 30' % CARTO_TABLE_NAME
            start = time.time()
            data = ds.query(REPEATED_QUERY)
            elapsed = time.time() - start
            print("It took %s without cache" % elapsed)
            print('Uses cache? ' + str(data.comes_from_cache))

            # Repeated read queries return cached values
            start_with_cache = time.time()
            cached_data = ds.query(REPEATED_QUERY)
            elapsed_with_cache = time.time() - start_with_cache
            print("It took %s with cache" % elapsed_with_cache)
            print('Uses cache? ' + str(cached_data.comes_from_cache))

            # Data is the same...
            assert str(data) == str(cached_data)

            # You can also disable the cache for a while (nothing gets read or written).
            # FIX: re-enable it in a finally block so a failing query cannot leave
            # the data source with its cache permanently disabled.
            ds.disable_cache()
            try:
                start = time.time()
                data = ds.query(REPEATED_QUERY)
                elapsed = time.time() - start
                print('It took %s with disabled cache' % str(elapsed))
                print('Uses cache? ' + str(data.comes_from_cache))
            finally:
                ds.enable_cache()

            # Or disable specific queries via query_config (nothing gets read or written)
            query_config = ds.copy_default_query_config()
            start = time.time()
            data = ds.query(REPEATED_QUERY, query_config=query_config, use_cache=False)
            elapsed = time.time() - start
            print('It took %s with disabled cache (per-query)' % str(elapsed))
            print('Uses cache? ' + str(data.comes_from_cache))

            print('If you see decreasing times it is probably because CARTOs cache doing its job!')

        except LongitudeRetriesExceeded:
            print("Too many retries and no success...")
    else:
        print("Data source is not properly configured.")
"""
Carto sample using a Redis cache.

Requires 'carto_sample_config.py' in this folder with the needed fields
(see the import below). That file is git-ignored, so no credentials get
pushed anywhere -- but BE CAREFUL and always review your commits!

A local Redis container for the cache can be started from the provided
docker-compose configuration:

> sudo docker-compose up -d cache

and inspected with the CLI while this program runs:

> sudo docker exec -it longitude_cache_1 redis-cli
"""

import time
import os
import sys

sys.path.append(os.path.join(os.path.dirname(__file__), '..', '..'))
from longitude.core.common.helpers import DisabledCache
from longitude.core.caches.redis import RedisCache
from longitude.core.common.exceptions import LongitudeRetriesExceeded
from longitude.core.data_sources.carto import CartoDataSource
from longitude.samples.carto_sample_config import CARTO_API_KEY, CARTO_USER, CARTO_TABLE_NAME


def _timed_query(source, sql, **kwargs):
    """Run *sql* against *source*, returning (result, elapsed_seconds)."""
    t0 = time.time()
    result = source.query(sql, **kwargs)
    return result, time.time() - t0


if __name__ == "__main__":
    config = {
        'api_key': CARTO_API_KEY,
        'user': CARTO_USER,
        'cache': {'password': 'longitude'}
    }

    ds = CartoDataSource(config, cache_class=RedisCache)
    ds.setup()
    if ds.is_ready:
        try:
            REPEATED_QUERY = 'select * from %s limit 30' % CARTO_TABLE_NAME

            data, elapsed = _timed_query(ds, REPEATED_QUERY)
            print("It took %s without cache" % elapsed)
            print('Uses cache? ' + str(data.comes_from_cache))

            # A second, identical read query is served from the cache.
            cached_data, elapsed_with_cache = _timed_query(ds, REPEATED_QUERY)
            print("It took %s with cache" % elapsed_with_cache)
            print('Uses cache? ' + str(cached_data.comes_from_cache))

            # The cache can be switched off temporarily (nothing gets read or written)
            with DisabledCache(ds):
                data, elapsed = _timed_query(ds, REPEATED_QUERY)
                print('It took %s with disabled cache' % str(elapsed))
                print('Uses cache? ' + str(data.comes_from_cache))

            # ...or disabled for a single query via query_config
            query_config = ds.copy_default_query_config()
            data, elapsed = _timed_query(ds, REPEATED_QUERY, query_config=query_config, use_cache=False)
            print('It took %s with disabled cache (per-query)' % str(elapsed))
            print('Uses cache? ' + str(data.comes_from_cache))

            print('If you see decreasing times it is probably because CARTOs cache doing its job!')

            # Redis persists between runs of this script; flush so the next run
            # does not hit the cache at start.
            ds.flush_cache()

        except LongitudeRetriesExceeded:
            print("Too many retries and no success...")
    else:
        print("Data source is not properly configured.")
"""
Mixed data sources sample: read from CARTO, write into a local Postgres.

You must set the environment variables (copy .env.sample -> .env, fill in the
Carto credentials/table, then (re)activate your pipenv shell) and have a
'county_population' table in your Carto account -- only the cartodb_id field
is used, so its content does not matter. This sample focuses on the
configuration process, so there is no error flow control nor fancy query
construction; check the specific samples for those features.
"""
import os
import sys

sys.path.append(os.path.join(os.path.dirname(__file__), '..', '..'))

from longitude.core.caches.redis import RedisCache
from longitude.core.data_sources.postgres.default import DefaultPostgresDataSource
from longitude.core.data_sources.carto import CartoDataSource
from longitude.core.common.config import EnvironmentConfiguration as Config


def import_table_values_from_carto(limit):
    """Copy *limit* rows of 'county_population' from Carto into local Postgres.

    Uses the module-level `carto` and `postgres` data sources configured in
    the __main__ block below.
    """
    # Read from CARTO. Running this script twice quickly hits the cache; after
    # 3 seconds the Carto query is executed again if requested.
    carto_data = carto.query('select * from county_population limit %d' % limit, use_cache=True, expiration_time_s=3)
    print(carto_data.comes_from_cache)

    # Recreate the local table.
    # FIX: the drop was the only write query issued without needs_commit=True,
    # inconsistent with the create/insert beside it; commit it as well.
    postgres.query("drop table if exists county_population", needs_commit=True, use_cache=False)
    postgres.query(
        'create table county_population(id serial PRIMARY KEY, cartodb_id integer UNIQUE NOT NULL, the_geom text)',
        needs_commit=True,
        use_cache=False
    )

    # Multi-values insert through psycopg2: the template is
    # "(%s,%s),(%s,%s),..." and params is the flat list
    # [id0, geom0, id1, geom1, ...]. This is both efficient (one statement,
    # no COPY needed) and safe (psycopg2 quotes the parameters).
    values_template = ('(%s,%s),' * limit)[:-1]
    params = [None] * limit * 2  # places for all rows (=limit) of both columns (=2)
    params[::2] = [r['cartodb_id'] for r in carto_data.rows]  # even slots: ids
    params[1::2] = [r['the_geom'] for r in carto_data.rows]   # odd slots: geoms

    postgres.query(
        'insert into county_population (cartodb_id, the_geom) values %s' % values_template,
        params=params,
        needs_commit=True)

    res = postgres.query('select * from county_population')
    print(res.rows)


if __name__ == "__main__":

    print('REDIS password is %s' % Config.get('carto_main.cache.password'))
    carto = CartoDataSource(Config.get('carto_main'), cache_class=RedisCache)
    postgres = DefaultPostgresDataSource(Config.get('postgres_main'))
    carto.setup()
    postgres.setup()

    if carto.is_ready and postgres.is_ready:
        import_table_values_from_carto(limit=30)
"""
Local Postgres sample using the default (psycopg2-based) data source.

You must create a 'postgresql_sample_config.py' file in this folder with the
needed fields (look at the import below). That file is git-ignored, so no
credentials get pushed anywhere -- but BE CAREFUL and always review your
commits!
"""
import os
import sys

sys.path.append(os.path.join(os.path.dirname(__file__), '..', '..'))
from longitude.core.caches.ram import RamCache
from longitude.core.data_sources.base import LongitudeRetriesExceeded
from longitude.core.data_sources.postgres.default import DefaultPostgresDataSource
from longitude.samples.postgres_sample_config import POSTGRES_DB, POSTGRES_PORT, POSTGRES_HOST, POSTGRES_USER, POSTGRES_PASS

if __name__ == "__main__":
    config = {
        'host': POSTGRES_HOST or 'localhost',
        'port': POSTGRES_PORT or 5432,
        'db': POSTGRES_DB or 'longitude',
        'user': POSTGRES_USER or 'longitude',
        'password': POSTGRES_PASS or 'longitude'
    }

    ds = DefaultPostgresDataSource(config, cache_class=RamCache)
    ds.setup()
    if ds.is_ready:
        try:
            # FIX: commit the drop too (it was the only write issued without
            # needs_commit=True), and do not bind its unused result.
            ds.query("drop table if exists users", needs_commit=True, use_cache=False)
            r1 = ds.query(
                'create table users(id serial PRIMARY KEY, name varchar(50) UNIQUE NOT NULL, password varchar(50))',
                needs_commit=True,
                use_cache=False
            )
            print(r1.profiling)

            # Parameterized inserts: psycopg2 quotes the values; never build
            # them into the SQL string by hand.
            for i in range(10):
                r2 = ds.query("insert into users(name, password) values(%(user)s, %(password)s)",
                              needs_commit=True,
                              use_cache=False,
                              params={
                                  'user': 'longitude_user_' + str(i),
                                  'password': 'unsafe_password_' + str(i)
                              })
                print(r2.profiling)

            r3 = ds.query('select * from users', use_cache=True)
            print(r3.rows)
            print(r3.profiling)

            # Same read again: this one is served from the RAM cache.
            r4 = ds.query('select * from users', use_cache=True)
            print(r4.profiling)
            print('It is %f times faster using cache' % (r4.profiling['execute_time'] / r4.profiling['cache_time']))

        except LongitudeRetriesExceeded:
            print("Too many retries and no success...")
    else:
        print("Data source is not properly configured.")
"""
SQLAlchemy + Postgres sample.

You must create a 'sqlalchemy_sample_config.py' file in this folder with the
needed fields (look at the import below). That file is git-ignored, so no
credentials get pushed anywhere -- but BE CAREFUL and always review your
commits!
"""
import os
import sys

from sqlalchemy import text

from longitude.core.caches.ram import RamCache

sys.path.append(os.path.join(os.path.dirname(__file__), '..', '..'))
from longitude.core.data_sources.postgres.sqlalchemy import SQLAlchemyDataSource
from longitude.samples.sqlalchemy_sample_config import POSTGRES_DB, POSTGRES_PORT, POSTGRES_HOST, POSTGRES_USER, POSTGRES_PASS


def prepare_sample_table(engine):
    """(Re)create the demo 'avengers' table and return it.

    This is just one way to provide a table to show how queries work; you can
    generate your queries as you prefer by using any of the SQLAlchemy APIs.
    """

    class Avenger(ds.base_class):
        from sqlalchemy import Column, Integer, String
        __tablename__ = 'avengers'

        id = Column(Integer, primary_key=True)
        name = Column(String)
        fullname = Column(String)
        password = Column(String)

        def __repr__(self):
            # FIX: the original returned "" % (...), which raises TypeError
            # ("not all arguments converted") because the format string has no
            # conversion specifiers; restore a meaningful repr.
            return "<Avenger(name='%s', fullname='%s', password='%s')>" % (
                self.name, self.fullname, self.password)

    if Avenger.__table__.exists(engine):
        Avenger.__table__.drop(engine)
    Avenger.__table__.create(engine)
    return Avenger.__table__


if __name__ == "__main__":
    config = {
        'host': POSTGRES_HOST or 'localhost',
        'port': POSTGRES_PORT or 5432,
        'db': POSTGRES_DB or 'longitude',
        'user': POSTGRES_USER or 'longitude',
        'password': POSTGRES_PASS or 'longitude'
    }

    ds = SQLAlchemyDataSource(config, cache_class=RamCache)
    ds.setup()
    if ds.is_ready:
        # We prepare a table to play around
        table = prepare_sample_table(ds._engine)

        # Demo insert. Notice how values are passed as parameters instead of
        # just pasted into some string.
        q = table.insert()

        # With SQLAlchemy we can bind lists and subsequent rendered queries
        # will be executed.
        params = [
            {'name': 'tony', 'fullname': 'Tony Stark Jr.', 'password': 'smartestavenger'},
            {'name': 'hulk', 'fullname': 'Dr. Phd. Bruce Banner', 'password': 'smartestavenger'},
            {'name': 'cap', 'fullname': 'Capt. Steve Rogers', 'password': 'igotthatreference'}
        ]
        ds.query(q, params, use_cache=False)

        # Demo select. Again, the search is done by a parametrized query. In
        # this case, direct text is used as the where clause.
        q = table.select('password = :password')
        params = {'password': 'igotthatreference'}
        r = ds.query(q, params, use_cache=True)
        print(r.fields)
        print(r.rows)
        print("Cached? " + str(r.comes_from_cache))

        # Just repeat to check the cache working
        r = ds.query(q, params, use_cache=True)
        print(r.rows)
        print("Cached? " + str(r.comes_from_cache))
    else:
        print("Data source is not properly configured.")
+marker = "sys_platform == \"win32\"" +name = "colorama" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "0.4.1" + +[[package]] +category = "dev" +description = "Code coverage measurement for Python" +name = "coverage" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*, <4" +version = "4.5.2" + +[[package]] +category = "dev" +description = "execnet: rapid multi-Python deployment" +name = "execnet" +optional = false +python-versions = "*" +version = "1.5.0" + +[package.dependencies] +apipkg = ">=1.4" + +[[package]] +category = "main" +description = "Clean single-source support for Python 3 and 2" +name = "future" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +version = "0.17.1" + +[[package]] +category = "main" +description = "Internationalized Domain Names in Applications (IDNA)" +name = "idna" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "2.8" + +[[package]] +category = "dev" +description = "A Python utility / library to sort Python imports." +name = "isort" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "4.3.4" + +[[package]] +category = "dev" +description = "A fast and thorough lazy object proxy." +name = "lazy-object-proxy" +optional = false +python-versions = "*" +version = "1.3.1" + +[[package]] +category = "dev" +description = "A super-fast templating language that borrows the best ideas from the existing templating languages." +name = "mako" +optional = false +python-versions = "*" +version = "1.0.7" + +[package.dependencies] +MarkupSafe = ">=0.9.2" + +[[package]] +category = "dev" +description = "Python implementation of Markdown." +name = "markdown" +optional = false +python-versions = "*" +version = "2.4.1" + +[[package]] +category = "dev" +description = "Safely add untrusted strings to HTML/XML markup." 
+name = "markupsafe" +optional = false +python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*" +version = "1.1.0" + +[[package]] +category = "dev" +description = "McCabe checker, plugin for flake8" +name = "mccabe" +optional = false +python-versions = "*" +version = "0.6.1" + +[[package]] +category = "dev" +description = "More routines for operating on iterables, beyond itertools" +name = "more-itertools" +optional = false +python-versions = "*" +version = "5.0.0" + +[package.dependencies] +six = ">=1.0.0,<2.0.0" + +[[package]] +category = "dev" +description = "Core utilities for Python packages" +name = "packaging" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "19.0" + +[package.dependencies] +pyparsing = ">=2.0.2" +six = "*" + +[[package]] +category = "dev" +description = "A simple program and library to auto generate API documentation for Python modules." +name = "pdoc" +optional = false +python-versions = "*" +version = "0.3.2" + +[package.dependencies] +mako = "*" +markdown = "<2.5" + +[[package]] +category = "dev" +description = "plugin and hook calling mechanisms for python" +name = "pluggy" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "0.8.1" + +[[package]] +category = "main" +description = "psycopg2 - Python-PostgreSQL Database Adapter" +name = "psycopg2-binary" +optional = false +python-versions = "*" +version = "2.7.7" + +[[package]] +category = "dev" +description = "library with cross-python path, ini-parsing, io, code, log facilities" +name = "py" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "1.7.0" + +[[package]] +category = "dev" +description = "python code static checker" +name = "pylint" +optional = false +python-versions = ">=3.4.*" +version = "2.2.2" + +[package.dependencies] +astroid = ">=2.0.0" +colorama = "*" +isort = ">=4.2.5" +mccabe = "*" + +[[package]] +category = "dev" +description = "Python 
parsing module" +name = "pyparsing" +optional = false +python-versions = "*" +version = "2.3.1" + +[[package]] +category = "main" +description = "Generic REST client for Python" +name = "pyrestcli" +optional = false +python-versions = "*" +version = "0.6.8" + +[package.dependencies] +future = ">=0.15.2" +python-dateutil = ">=2.5.3" +requests = ">=2.10.0" + +[[package]] +category = "dev" +description = "pytest: simple powerful testing with Python" +name = "pytest" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "4.2.0" + +[package.dependencies] +atomicwrites = ">=1.0" +attrs = ">=17.4.0" +colorama = "*" +more-itertools = ">=4.0.0" +pluggy = ">=0.7" +py = ">=1.5.0" +setuptools = "*" +six = ">=1.10.0" + +[[package]] +category = "dev" +description = "Pytest plugin for measuring coverage." +name = "pytest-cov" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "2.6.1" + +[package.dependencies] +coverage = ">=4.4" +pytest = ">=3.6" + +[[package]] +category = "dev" +description = "run tests in isolated forked subprocesses" +name = "pytest-forked" +optional = false +python-versions = "*" +version = "1.0.1" + +[package.dependencies] +pytest = ">=3.1.0" + +[[package]] +category = "dev" +description = "py.test plugin to show failures instantly" +name = "pytest-instafail" +optional = false +python-versions = "*" +version = "0.4.0" + +[package.dependencies] +pytest = ">=2.9" + +[[package]] +category = "dev" +description = "pytest-sugar is a plugin for pytest that changes the default look and feel of pytest (e.g. progressbar, show tests that fail instantly)." 
+name = "pytest-sugar" +optional = false +python-versions = "*" +version = "0.9.2" + +[package.dependencies] +packaging = ">=14.1" +pytest = ">=2.9" +termcolor = ">=1.1.0" + +[[package]] +category = "dev" +description = "pytest xdist plugin for distributed testing and loop-on-failing modes" +name = "pytest-xdist" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "1.26.1" + +[package.dependencies] +execnet = ">=1.1" +pytest = ">=3.6.0" +pytest-forked = "*" +six = "*" + +[[package]] +category = "main" +description = "Extensions to the standard Python datetime module" +name = "python-dateutil" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +version = "2.7.5" + +[package.dependencies] +six = ">=1.5" + +[[package]] +category = "main" +description = "Python client for Redis key-value store" +name = "redis" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "3.1.0" + +[[package]] +category = "main" +description = "Python HTTP for Humans." +name = "requests" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "2.21.0" + +[package.dependencies] +certifi = ">=2017.4.17" +chardet = ">=3.0.2,<3.1.0" +idna = ">=2.5,<2.9" +urllib3 = ">=1.21.1,<1.25" + +[[package]] +category = "main" +description = "Python 2 and 3 compatibility utilities" +name = "six" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*" +version = "1.12.0" + +[[package]] +category = "main" +description = "Database Abstraction Library" +name = "sqlalchemy" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "1.3.0b2" + +[[package]] +category = "dev" +description = "ANSII Color formatting for output in terminal." 
+name = "termcolor" +optional = false +python-versions = "*" +version = "1.1.0" + +[[package]] +category = "dev" +description = "a fork of Python 2 and 3 ast modules with type comment support" +marker = "python_version < \"3.7\" and implementation_name == \"cpython\"" +name = "typed-ast" +optional = false +python-versions = "*" +version = "1.3.0" + +[[package]] +category = "main" +description = "HTTP library with thread-safe connection pooling, file post, and more." +name = "urllib3" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, <4" +version = "1.24.1" + +[[package]] +category = "dev" +description = "Module for decorators, wrappers and monkey patching." +name = "wrapt" +optional = false +python-versions = "*" +version = "1.11.1" + +[metadata] +content-hash = "4af8291bb558d197491f5c0dcef27141d04d8b452daad5c65d1d6b2c2246bcb6" +python-versions = "^3.6" + +[metadata.hashes] +apipkg = ["37228cda29411948b422fae072f57e31d3396d2ee1c9783775980ee9c9990af6", "58587dd4dc3daefad0487f6d9ae32b4542b185e1c36db6993290e7c41ca2b47c"] +astroid = ["35b032003d6a863f5dcd7ec11abd5cd5893428beaa31ab164982403bcb311f22", "6a5d668d7dc69110de01cdf7aeec69a679ef486862a0850cc0fd5571505b6b7e"] +atomicwrites = ["03472c30eb2c5d1ba9227e4c2ca66ab8287fbfbbda3888aa93dc2e28fc6811b4", "75a9445bac02d8d058d5e1fe689654ba5a6556a1dfd8ce6ec55a0ed79866cfa6"] +attrs = ["10cbf6e27dbce8c30807caf056c8eb50917e0eaafe86347671b57254006c3e69", "ca4be454458f9dec299268d472aaa5a11f67a4ff70093396e1ceae9c76cf4bbb"] +carto = ["9a54ece9d8f940bc3de3cb742e189c4ea681494d5ec251fec469319a39093dbc"] +certifi = ["47f9c83ef4c0c621eaef743f133f09fa8a74a9b75f037e8624f83bd1b6626cb7", "993f830721089fef441cdfeb4b2c8c9df86f0c63239f06bd025a76a7daddb033"] +chardet = ["84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae", "fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"] +colorama = ["05eed71e2e327246ad6b38c540c4a3117230b19679b875190486ddd2d721422d", 
"f8ac84de7840f5b9c4e3347b3c1eaa50f7e49c2b07596221daec5edaabbd7c48"] +coverage = ["06123b58a1410873e22134ca2d88bd36680479fe354955b3579fb8ff150e4d27", "09e47c529ff77bf042ecfe858fb55c3e3eb97aac2c87f0349ab5a7efd6b3939f", "0a1f9b0eb3aa15c990c328535655847b3420231af299386cfe5efc98f9c250fe", "0cc941b37b8c2ececfed341444a456912e740ecf515d560de58b9a76562d966d", "0d34245f824cc3140150ab7848d08b7e2ba67ada959d77619c986f2062e1f0e8", "10e8af18d1315de936d67775d3a814cc81d0747a1a0312d84e27ae5610e313b0", "1b4276550b86caa60606bd3572b52769860a81a70754a54acc8ba789ce74d607", "1e8a2627c48266c7b813975335cfdea58c706fe36f607c97d9392e61502dc79d", "258b21c5cafb0c3768861a6df3ab0cfb4d8b495eee5ec660e16f928bf7385390", "2b224052bfd801beb7478b03e8a66f3f25ea56ea488922e98903914ac9ac930b", "3ad59c84c502cd134b0088ca9038d100e8fb5081bbd5ccca4863f3804d81f61d", "447c450a093766744ab53bf1e7063ec82866f27bcb4f4c907da25ad293bba7e3", "46101fc20c6f6568561cdd15a54018bb42980954b79aa46da8ae6f008066a30e", "4710dc676bb4b779c4361b54eb308bc84d64a2fa3d78e5f7228921eccce5d815", "510986f9a280cd05189b42eee2b69fecdf5bf9651d4cd315ea21d24a964a3c36", "5535dda5739257effef56e49a1c51c71f1d37a6e5607bb25a5eee507c59580d1", "5a7524042014642b39b1fcae85fb37556c200e64ec90824ae9ecf7b667ccfc14", "5f55028169ef85e1fa8e4b8b1b91c0b3b0fa3297c4fb22990d46ff01d22c2d6c", "6694d5573e7790a0e8d3d177d7a416ca5f5c150742ee703f3c18df76260de794", "6831e1ac20ac52634da606b658b0b2712d26984999c9d93f0c6e59fe62ca741b", "71afc1f5cd72ab97330126b566bbf4e8661aab7449f08895d21a5d08c6b051ff", "7349c27128334f787ae63ab49d90bf6d47c7288c63a0a5dfaa319d4b4541dd2c", "77f0d9fa5e10d03aa4528436e33423bfa3718b86c646615f04616294c935f840", "828ad813c7cdc2e71dcf141912c685bfe4b548c0e6d9540db6418b807c345ddd", "859714036274a75e6e57c7bab0c47a4602d2a8cfaaa33bbdb68c8359b2ed4f5c", "85a06c61598b14b015d4df233d249cd5abfa61084ef5b9f64a48e997fd829a82", "869ef4a19f6e4c6987e18b315721b8b971f7048e6eaea29c066854242b4e98d9", "8cb4febad0f0b26c6f62e1628f2053954ad2c555d67660f28dfb1b0496711952", 
"977e2d9a646773cc7428cdd9a34b069d6ee254fadfb4d09b3f430e95472f3cf3", "99bd767c49c775b79fdcd2eabff405f1063d9d959039c0bdd720527a7738748a", "a5c58664b23b248b16b96253880b2868fb34358911400a7ba39d7f6399935389", "aaa0f296e503cda4bc07566f592cd7a28779d433f3a23c48082af425d6d5a78f", "ab235d9fe64833f12d1334d29b558aacedfbca2356dfb9691f2d0d38a8a7bfb4", "b3b0c8f660fae65eac74fbf003f3103769b90012ae7a460863010539bb7a80da", "bab8e6d510d2ea0f1d14f12642e3f35cefa47a9b2e4c7cea1852b52bc9c49647", "c45297bbdbc8bb79b02cf41417d63352b70bcb76f1bbb1ee7d47b3e89e42f95d", "d19bca47c8a01b92640c614a9147b081a1974f69168ecd494687c827109e8f42", "d64b4340a0c488a9e79b66ec9f9d77d02b99b772c8b8afd46c1294c1d39ca478", "da969da069a82bbb5300b59161d8d7c8d423bc4ccd3b410a9b4d8932aeefc14b", "ed02c7539705696ecb7dc9d476d861f3904a8d2b7e894bd418994920935d36bb", "ee5b8abc35b549012e03a7b1e86c09491457dba6c94112a2482b18589cc2bdb9"] +execnet = ["a7a84d5fa07a089186a329528f127c9d73b9de57f1a1131b82bb5320ee651f6a", "fc155a6b553c66c838d1a22dba1dc9f5f505c43285a878c6f74a79c024750b83"] +future = ["67045236dcfd6816dc439556d009594abf643e5eb48992e36beac09c2ca659b8"] +idna = ["c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407", "ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c"] +isort = ["1153601da39a25b14ddc54955dbbacbb6b2d19135386699e2ad58517953b34af", "b9c40e9750f3d77e6e4d441d8b0266cf555e7cdabdcff33c4fd06366ca761ef8", "ec9ef8f4a9bc6f71eec99e1806bfa2de401650d996c59330782b89a5555c1497"] +lazy-object-proxy = ["0ce34342b419bd8f018e6666bfef729aec3edf62345a53b537a4dcc115746a33", "1b668120716eb7ee21d8a38815e5eb3bb8211117d9a90b0f8e21722c0758cc39", "209615b0fe4624d79e50220ce3310ca1a9445fd8e6d3572a896e7f9146bbf019", "27bf62cb2b1a2068d443ff7097ee33393f8483b570b475db8ebf7e1cba64f088", "27ea6fd1c02dcc78172a82fc37fcc0992a94e4cecf53cb6d73f11749825bd98b", "2c1b21b44ac9beb0fc848d3993924147ba45c4ebc24be19825e57aabbe74a99e", "2df72ab12046a3496a92476020a1a0abf78b2a7db9ff4dc2036b8dd980203ae6", 
"320ffd3de9699d3892048baee45ebfbbf9388a7d65d832d7e580243ade426d2b", "50e3b9a464d5d08cc5227413db0d1c4707b6172e4d4d915c1c70e4de0bbff1f5", "5276db7ff62bb7b52f77f1f51ed58850e315154249aceb42e7f4c611f0f847ff", "61a6cf00dcb1a7f0c773ed4acc509cb636af2d6337a08f362413c76b2b47a8dd", "6ae6c4cb59f199d8827c5a07546b2ab7e85d262acaccaacd49b62f53f7c456f7", "7661d401d60d8bf15bb5da39e4dd72f5d764c5aff5a86ef52a042506e3e970ff", "7bd527f36a605c914efca5d3d014170b2cb184723e423d26b1fb2fd9108e264d", "7cb54db3535c8686ea12e9535eb087d32421184eacc6939ef15ef50f83a5e7e2", "7f3a2d740291f7f2c111d86a1c4851b70fb000a6c8883a59660d95ad57b9df35", "81304b7d8e9c824d058087dcb89144842c8e0dea6d281c031f59f0acf66963d4", "933947e8b4fbe617a51528b09851685138b49d511af0b6c0da2539115d6d4514", "94223d7f060301b3a8c09c9b3bc3294b56b2188e7d8179c762a1cda72c979252", "ab3ca49afcb47058393b0122428358d2fbe0408cf99f1b58b295cfeb4ed39109", "bd6292f565ca46dee4e737ebcc20742e3b5be2b01556dafe169f6c65d088875f", "cb924aa3e4a3fb644d0c463cad5bc2572649a6a3f68a7f8e4fbe44aaa6d77e4c", "d0fc7a286feac9077ec52a927fc9fe8fe2fabab95426722be4c953c9a8bede92", "ddc34786490a6e4ec0a855d401034cbd1242ef186c20d79d2166d6a4bd449577", "e34b155e36fa9da7e1b7c738ed7767fc9491a62ec6af70fe9da4a057759edc2d", "e5b9e8f6bda48460b7b143c3821b21b452cb3a835e6bbd5dd33aa0c8d3f5137d", "e81ebf6c5ee9684be8f2c87563880f93eedd56dd2b6146d8a725b50b7e5adb0f", "eb91be369f945f10d3a49f5f9be8b3d0b93a4c2be8f8a5b83b0571b8123e0a7a", "f460d1ceb0e4a5dcb2a652db0904224f367c9b3c1470d5a7683c0480e582468b"] +mako = ["4e02fde57bd4abb5ec400181e4c314f56ac3e49ba4fb8b0d50bba18cb27d25ae"] +markdown = ["812ec5249f45edc31330b7fb06e52aaf6ab2d83aa27047df7cb6837ef2d269b6", "866f5474c2361de7ccf806b438c0462380b4b90aacb9fdf59dfc4b166fb66389"] +markupsafe = ["048ef924c1623740e70204aa7143ec592504045ae4429b59c30054cb31e3c432", "130f844e7f5bdd8e9f3f42e7102ef1d49b2e6fdf0d7526df3f87281a532d8c8b", "19f637c2ac5ae9da8bfd98cef74d64b7e1bb8a63038a3505cd182c3fac5eb4d9", 
"1b8a7a87ad1b92bd887568ce54b23565f3fd7018c4180136e1cf412b405a47af", "1c25694ca680b6919de53a4bb3bdd0602beafc63ff001fea2f2fc16ec3a11834", "1f19ef5d3908110e1e891deefb5586aae1b49a7440db952454b4e281b41620cd", "1fa6058938190ebe8290e5cae6c351e14e7bb44505c4a7624555ce57fbbeba0d", "31cbb1359e8c25f9f48e156e59e2eaad51cd5242c05ed18a8de6dbe85184e4b7", "3e835d8841ae7863f64e40e19477f7eb398674da6a47f09871673742531e6f4b", "4e97332c9ce444b0c2c38dd22ddc61c743eb208d916e4265a2a3b575bdccb1d3", "525396ee324ee2da82919f2ee9c9e73b012f23e7640131dd1b53a90206a0f09c", "52b07fbc32032c21ad4ab060fec137b76eb804c4b9a1c7c7dc562549306afad2", "52ccb45e77a1085ec5461cde794e1aa037df79f473cbc69b974e73940655c8d7", "5c3fbebd7de20ce93103cb3183b47671f2885307df4a17a0ad56a1dd51273d36", "5e5851969aea17660e55f6a3be00037a25b96a9b44d2083651812c99d53b14d1", "5edfa27b2d3eefa2210fb2f5d539fbed81722b49f083b2c6566455eb7422fd7e", "7d263e5770efddf465a9e31b78362d84d015cc894ca2c131901a4445eaa61ee1", "83381342bfc22b3c8c06f2dd93a505413888694302de25add756254beee8449c", "857eebb2c1dc60e4219ec8e98dfa19553dae33608237e107db9c6078b1167856", "98e439297f78fca3a6169fd330fbe88d78b3bb72f967ad9961bcac0d7fdd1550", "bf54103892a83c64db58125b3f2a43df6d2cb2d28889f14c78519394feb41492", "d9ac82be533394d341b41d78aca7ed0e0f4ba5a2231602e2f05aa87f25c51672", "e982fe07ede9fada6ff6705af70514a52beb1b2c3d25d4e873e82114cf3c5401", "edce2ea7f3dfc981c4ddc97add8a61381d9642dc3273737e756517cc03e84dd6", "efdc45ef1afc238db84cb4963aa689c0408912a0239b0721cb172b4016eb31d6", "f137c02498f8b935892d5c0172560d7ab54bc45039de8805075e19079c639a9c", "f82e347a72f955b7017a39708a3667f106e6ad4d10b25f237396a7115d8ed5fd", "fb7c206e01ad85ce57feeaaa0bf784b97fa3cad0d4a5737bc5295785f5c613a1"] +mccabe = ["ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42", "dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"] +more-itertools = ["38a936c0a6d98a38bcc2d03fdaaedaba9f412879461dd2ceff8d37564d6522e4", 
"c0a5785b1109a6bd7fac76d6837fd1feca158e54e521ccd2ae8bfe393cc9d4fc", "fe7a7cae1ccb57d33952113ff4fa1bc5f879963600ed74918f1236e212ee50b9"] +packaging = ["0c98a5d0be38ed775798ece1b9727178c4469d9c3b4ada66e8e6b7849f8732af", "9e1cbf8c12b1f1ce0bb5344b8d7ecf66a6f8a6e91bcb0c84593ed6d3ab5c4ab3"] +pdoc = ["7835909580d5a14a06bd3de4416cf17f86a146ecb12eeb5cd83d9a93d03e6d27"] +pluggy = ["8ddc32f03971bfdf900a81961a48ccf2fb677cf7715108f85295c67405798616", "980710797ff6a041e9a73a5787804f848996ecaa6f8a1b1e08224a5894f2074a"] +psycopg2-binary = ["19a2d1f3567b30f6c2bb3baea23f74f69d51f0c06c2e2082d0d9c28b0733a4c2", "2b69cf4b0fa2716fd977aa4e1fd39af6110eb47b2bb30b4e5a469d8fbecfc102", "2e952fa17ba48cbc2dc063ddeec37d7dc4ea0ef7db0ac1eda8906365a8543f31", "348b49dd737ff74cfb5e663e18cb069b44c64f77ec0523b5794efafbfa7df0b8", "3d72a5fdc5f00ca85160915eb9a973cf9a0ab8148f6eda40708bf672c55ac1d1", "4957452f7868f43f32c090dadb4188e9c74a4687323c87a882e943c2bd4780c3", "5138cec2ee1e53a671e11cc519505eb08aaaaf390c508f25b09605763d48de4b", "587098ca4fc46c95736459d171102336af12f0d415b3b865972a79c03f06259f", "5b79368bcdb1da4a05f931b62760bea0955ee2c81531d8e84625df2defd3f709", "5cf43807392247d9bc99737160da32d3fa619e0bfd85ba24d1c78db205f472a4", "676d1a80b1eebc0cacae8dd09b2fde24213173bf65650d22b038c5ed4039f392", "6b0211ecda389101a7d1d3df2eba0cf7ffbdd2480ca6f1d2257c7bd739e84110", "79cde4660de6f0bb523c229763bd8ad9a93ac6760b72c369cf1213955c430934", "7aba9786ac32c2a6d5fb446002ed936b47d5e1f10c466ef7e48f66eb9f9ebe3b", "7c8159352244e11bdd422226aa17651110b600d175220c451a9acf795e7414e0", "945f2eedf4fc6b2432697eb90bb98cc467de5147869e57405bfc31fa0b824741", "96b4e902cde37a7fc6ab306b3ac089a3949e6ce3d824eeca5b19dc0bedb9f6e2", "9a7bccb1212e63f309eb9fab47b6eaef796f59850f169a25695b248ca1bf681b", "a3bfcac727538ec11af304b5eccadbac952d4cca1a551a29b8fe554e3ad535dc", "b19e9f1b85c5d6136f5a0549abdc55dcbd63aba18b4f10d0d063eb65ef2c68b4", "b664011bb14ca1f2287c17185e222f2098f7b4c857961dbcf9badb28786dbbf4", 
"bde7959ef012b628868d69c474ec4920252656d0800835ed999ba5e4f57e3e2e", "cb095a0657d792c8de9f7c9a0452385a309dfb1bbbb3357d6b1e216353ade6ca", "d16d42a1b9772152c1fe606f679b2316551f7e1a1ce273e7f808e82a136cdb3d", "d444b1545430ffc1e7a24ce5a9be122ccd3b135a7b7e695c5862c5aff0b11159", "d93ccc7bf409ec0a23f2ac70977507e0b8a8d8c54e5ee46109af2f0ec9e411f3", "df6444f952ca849016902662e1a47abf4fa0678d75f92fd9dd27f20525f809cd", "e63850d8c52ba2b502662bf3c02603175c2397a9acc756090e444ce49508d41e", "ec43358c105794bc2b6fd34c68d27f92bea7102393c01889e93f4b6a70975728", "f4c6926d9c03dadce7a3b378b40d2fea912c1344ef9b29869f984fb3d2a2420b"] +py = ["bf92637198836372b520efcba9e020c330123be8ce527e535d185ed4b6f45694", "e76826342cefe3c3d5f7e8ee4316b80d1dd8a300781612ddbc765c17ba25a6c6"] +pylint = ["689de29ae747642ab230c6d37be2b969bf75663176658851f456619aacf27492", "771467c434d0d9f081741fec1d64dfb011ed26e65e12a28fe06ca2f61c4d556c"] +pyparsing = ["66c9268862641abcac4a96ba74506e594c884e3f57690a696d21ad8210ed667a", "f6c5ef0d7480ad048c054c37632c67fca55299990fff127850181659eea33fc3"] +pyrestcli = ["4e98b5cfba0a300acc78a7a4b7c91826edf56b12b588aa316cae4bff8696c644"] +pytest = ["65aeaa77ae87c7fc95de56285282546cfa9c886dc8e5dc78313db1c25e21bc07", "6ac6d467d9f053e95aaacd79f831dbecfe730f419c6c7022cb316b365cd9199d"] +pytest-cov = ["0ab664b25c6aa9716cbf203b17ddb301932383046082c081b9848a0edf5add33", "230ef817450ab0699c6cc3c9c8f7a829c34674456f2ed8df1fe1d39780f7c87f"] +pytest-forked = ["260d03fbd38d5ce41a657759e8d19bc7c8cfa6d0dcfa36c0bc9742d33bc30742", "8d05c2e6f33cd4422571b2b1bb309720c398b0549cff499e3e4cde661875ab54"] +pytest-instafail = ["162bd7c5c196e3b2fe2a5285b69362ee3d9f768d5451413ef914be38df74e3de"] +pytest-sugar = ["26cf8289fe10880cbbc130bd77398c4e6a8b936d8393b116a5c16121d95ab283", "fcd87a74b2bce5386d244b49ad60549bfbc4602527797fac167da147983f58ab"] +pytest-xdist = ["4a201bb3ee60f5dd6bb40c5209d4e491cecc4d5bafd656cfb10f86178786e568", "d03d1ff1b008458ed04fa73e642d840ac69b4107c168e06b71037c62d7813dd4"] +python-dateutil = 
["063df5763652e21de43de7d9e00ccf239f953a832941e37be541614732cdfc93", "88f9287c0174266bb0d8cedd395cfba9c58e87e5ad86b2ce58859bc11be3cf02"] +redis = ["74c892041cba46078ae1ef845241548baa3bd3634f9a6f0f952f006eb1619c71", "7ba8612bbfd966dea8c62322543fed0095da2834dbd5a7c124afbc617a156aa7"] +requests = ["502a824f31acdacb3a35b6690b5fbf0bc41d63a24a45c4004352b0242707598e", "7bf2a778576d825600030a110f3c0e3e8edc51dfaafe1c146e39a2027784957b"] +six = ["3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c", "d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73"] +sqlalchemy = ["c08cee353acaa05dd4ddf8ae0b0844ae779ed88e0b0784a2c9e0c0f9118eb64c"] +termcolor = ["1d6d69ce66211143803fbc56652b41d73b4a400a2891d7bf7a1cdf4c02de613b"] +typed-ast = ["05b0fa87c5eabbbfaa28727adf18dc758c8afa4df2b7b2bb82ff95f3dad5eb24", "3031f55fc790d8838d08190477fd3536c9d18019ed6ea94455721f4320f8ab31", "3e7fac848e8278daa323f5e496df9c009993a0258861683463a173021cb9ee11", "442c505d2476796780e66140f68fdaf97baa19cf96511a52d41b51581d7f864f", "44afe8609e5ec041dcbe61b391fc5466084f58c4c6554563223f8aff92b04200", "584e9ae9b2aaa59f3535c06c595a3bf0419b0feef3a3511ff42b2b4ee4222f13", "5f403aef76934191cc0d8b0a657e058f9cce418f0851db9a8af092ea5cc79d9c", "6b1ddb53b1a079e4eb206df51e5d5e00bdb0b8e0a7990420c56ae79d3f1baec8", "8fbd7856774b7b65eef67163bb802fbffa18a4ccf548d2c040f3b956084c4951", "ab1739be98c346b4c223a28f4f2beaadd0417044f0e58a3b0b719a7ecb7d0e48", "b57566f67c24c8bde48ba67daa7862ebfef15012780684f688af9734c93f26af", "b980201ee3af335779a716a0d870be94d55385275d7c8fed7e5f3d9daaffb8b4", "bf0ad5d68226cbe11cd91e4e9713a93ad26d6c6de46b94d2a51a8d29f8c1e2b1", "c5f540eb30d52c7048b5fe52511247ab6c0bc80c23339e2b8642584efd3bb556", "daaca1e06690cb612a3b96ff20c9905e9c342de8cb504587a1707b91d2596271", "dacce1b97aaddeae2abbba8ecdb40bc8c3ff688ce295b2849730dcc5e82141c5", "daf72be36ebbeee7b6aa946195e5b59f1add22f5308e855a947fbd26ddecfbf1", "db629063c0ea5fd631924e27ff92a5419beeeb342d2a20b20e52c7c1a3d6b535", 
"e5698741ac0fb21ae5b1d5e92774b5ceda498f98f7a53270e0b4d516d20f07a0"] +urllib3 = ["61bf29cada3fc2fbefad4fdf059ea4bd1b4a86d2b6d15e1c7c0b582b9752fe39", "de9529817c93f27c8ccbfead6985011db27bd0ddfcdb2d86f3f663385c6a9c22"] +wrapt = ["4aea003270831cceb8a90ff27c4031da6ead7ec1886023b80ce0dfe0adf61533"] diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..d0ce8ba --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,40 @@ +[tool.poetry] +name = "longitude" +version = "1.0.0" +description = "" +authors = [ +"Dani Ramirez ", +"Alberto Asuero ", +"Jose María Camacho " +] +license = "MIT" +readme = "README.md" +exclude = [ +"longitude/core/tests/**", +"longitude/core/**/README.md" +] + +[tool.poetry.dependencies] +carto = "1.4" +python = "^3.6" +redis = "3.1" +psycopg2-binary = "2.7.7" +sqlalchemy = "1.3.0b2" + +[tool.poetry.dev-dependencies] +pdoc = "^0.3.2" +pylint = "^2.2" +coverage = "^4.5" +pytest-cov = "^2.6" +pytest-xdist = "^1.26" +pytest-sugar = "^0.9.2" +pytest-instafail = "^0.4.0" + +[tool.flit.metadata.urls] +Company = "https://geographica.gs" +Repository = "https://github.com/GeographicaGS/Longitude" + +[build-system] +requires = ["poetry>=0.12"] +build-backend = "poetry.masonry.api" +