chore: Bump pydantic to ^2.4.2 [TCTC-7241] #1272

Merged
merged 36 commits into master from upgrade-pydantic on Dec 15, 2023
Changes from all commits
36 commits
65a9774
chore: Bump pydantic to ^2.3.0
lukapeschke Sep 11, 2023
e178f74
chore: run bump-pydantic
lukapeschke Sep 11, 2023
51bb064
refactor: replace json_serializers
lukapeschke Sep 11, 2023
102eb89
fix: underscore_attrs_are_private
lukapeschke Sep 11, 2023
78e26f3
refactor: replace schema_extra
lukapeschke Sep 11, 2023
a0915c0
refactor validators with values
lukapeschke Sep 11, 2023
8143eda
chore: lint
lukapeschke Sep 11, 2023
22da7b4
fix: type annotation for oauth2_version
lukapeschke Sep 11, 2023
fdc3164
fix: replace root_validator with model_validator
lukapeschke Sep 11, 2023
6a19887
fix typing
lukapeschke Sep 11, 2023
a121226
refactor: Make data_source_model an attribute of __init_subclass__
lukapeschke Sep 11, 2023
0066455
update tests
lukapeschke Sep 11, 2023
9f0ce87
fix tests
lukapeschke Sep 12, 2023
0426ec6
Merge branch 'master' into upgrade-pydantic
lukapeschke Sep 12, 2023
8fa9d9a
blindly try to fix oracle tests
lukapeschke Sep 12, 2023
717f365
Merge branch 'master' into upgrade-pydantic
lukapeschke Sep 15, 2023
e47c477
adapt recently merged code
lukapeschke Sep 15, 2023
4e37a10
Merge branch 'master' into upgrade-pydantic
lukapeschke Sep 28, 2023
1744922
Update tests/google_sheets_2/test_google_sheets_2.py
lukapeschke Sep 29, 2023
d0dc261
bump pydandic ^2.4.2
lukapeschke Sep 29, 2023
3b3eb12
fix readme formatting
lukapeschke Sep 29, 2023
a9c0377
fix: remove merge conflict artifact
lukapeschke Sep 29, 2023
dcb3c28
chore: format
lukapeschke Sep 29, 2023
46bcf5b
Merge branch 'master' into upgrade-pydantic
lukapeschke Nov 22, 2023
778d2ac
Merge branch 'master' into upgrade-pydantic
lukapeschke Nov 22, 2023
eadde7a
chore: mypy
lukapeschke Nov 22, 2023
a3d0757
Merge branch 'master' into upgrade-pydantic
lukapeschke Nov 23, 2023
48a0560
Merge branch 'master' into upgrade-pydantic
lukapeschke Dec 4, 2023
f28474f
Merge branch 'master' into upgrade-pydantic
lukapeschke Dec 4, 2023
edd2858
fix: allow identifier to be None
lukapeschke Dec 5, 2023
2d6f547
fix: Add garbage code to mongo connector
lukapeschke Dec 6, 2023
6c44091
fix: do not use PlainJsonSecretStr for Oauth2
lukapeschke Dec 8, 2023
b96c321
fix horrible postgres typing
lukapeschke Dec 11, 2023
a7e52df
fix horrible http_api typing
lukapeschke Dec 11, 2023
a219852
fix horrible mysql typing
lukapeschke Dec 11, 2023
41b211d
Merge branch 'master' into upgrade-pydantic
lukapeschke Dec 15, 2023
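Commit fdc3164 replaces pydantic v1's root_validator with v2's model_validator. A minimal sketch of that migration pattern, on an invented model rather than one of the actual connectors:

from pydantic import BaseModel, model_validator


class ExampleConfig(BaseModel):
    user: str
    password: str = ''

    # pydantic v1 spelling, removed by the migration:
    # @root_validator
    # def check_password(cls, values):
    #     if values.get('user') == 'admin' and not values.get('password'):
    #         raise ValueError('admin requires a password')
    #     return values

    @model_validator(mode='after')
    def check_password(self):
        # mode='after' receives the validated instance instead of a raw values dict
        if self.user == 'admin' and not self.password:
            raise ValueError('admin requires a password')
        return self

Validators that need the raw input before coercion can use mode='before' instead, which keeps the values-dict style.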
4 changes: 1 addition & 3 deletions README.md
@@ -155,10 +155,8 @@ class MyTypeDataSource(ToucanDataSource):
query: str


-class MyTypeConnector(ToucanConnector):
+class MyTypeConnector(ToucanConnector, data_source_model=MyTypeDataSource):
"""Model of my connector"""
-data_source_model: MyTypeDataSource
-
host: str
port: int
database: str
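The data_source_model declaration moves from a pydantic field on each connector class to a class keyword argument consumed by ToucanConnector.__init_subclass__ (commit a121226). A sketch of how that pattern can be wired up in pydantic v2; the base classes below are simplified stand-ins, not the real toucan_connectors implementation:

from typing import ClassVar, Optional

from pydantic import BaseModel


class ToucanDataSource(BaseModel):
    domain: str
    name: str


class ToucanConnector(BaseModel):
    # ClassVar keeps this attribute out of the pydantic model fields.
    data_source_model: ClassVar[type]

    def __init_subclass__(cls, *, data_source_model: Optional[type] = None, **kwargs):
        # pydantic v2's metaclass forwards unknown class keyword arguments here.
        super().__init_subclass__(**kwargs)
        if data_source_model is not None:
            cls.data_source_model = data_source_model


class MyTypeDataSource(ToucanDataSource):
    query: str


class MyTypeConnector(ToucanConnector, data_source_model=MyTypeDataSource):
    """Model of my connector"""

    host: str
    port: int
    database: str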
186 changes: 141 additions & 45 deletions poetry.lock

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion pyproject.toml
@@ -23,7 +23,7 @@ Authlib = "^1.0.1"
cached-property = "^1.5.2"
Jinja2 = "^3.0.3"
jq = "^1.2.2"
-pydantic = "^1.9.1"
+pydantic = "^2.4.2"
requests = "^2.28.0"
tenacity = "^8.0.1"
toucan-data-sdk = "^7.6.0"
4 changes: 1 addition & 3 deletions templates/connector.py.m4
@@ -8,9 +8,7 @@ class cap(TYPE)DataSource(ToucanDataSource):
query: str


-class cap(TYPE)Connector(ToucanConnector):
-data_source_model: cap(TYPE)DataSource
-
+class cap(TYPE)Connector(ToucanConnector, data_source_model=cap(TYPE)DataSource):
username: str
password: str

8 changes: 5 additions & 3 deletions tests/anaplan/test_anaplan.py
@@ -140,9 +140,11 @@ def test_get_form(connector):

# Ensure we've only requested a token once
responses.assert_call_count('https://auth.anaplan.com/token/authenticate', 1)
-assert form_schema['definitions']['workspace_id']['enum'] == ['w1 - Workspace One']
-assert form_schema['definitions']['model_id']['enum'] == ['m1 - Model One']
-assert form_schema['definitions']['view_id']['enum'] == ['m1v1 - View One', 'm1v2 - View Two']
+# We have a single values for these, so it's a const
+assert form_schema['$defs']['workspace_id']['const'] == 'w1 - Workspace One'
+assert form_schema['$defs']['model_id']['const'] == 'm1 - Model One'
+# We have several values for this one, so enum
+assert form_schema['$defs']['view_id']['enum'] == ['m1v1 - View One', 'm1v2 - View Two']


@responses.activate
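These assertions change because pydantic v2 targets JSON Schema 2020-12: referenced sub-schemas move from definitions to $defs, and an enum with a single member is rendered as a const. A standalone illustration of that behaviour with invented enum classes (the real test builds its enums dynamically from the Anaplan API responses):

from enum import Enum

from pydantic import BaseModel


class WorkspaceId(str, Enum):
    ONLY = 'w1 - Workspace One'


class ViewId(str, Enum):
    ONE = 'm1v1 - View One'
    TWO = 'm1v2 - View Two'


class AnaplanForm(BaseModel):
    workspace_id: WorkspaceId
    view_id: ViewId


schema = AnaplanForm.model_json_schema()
# A single-member enum collapses to a 'const' schema under '$defs'...
assert schema['$defs']['WorkspaceId']['const'] == 'w1 - Workspace One'
# ...while a multi-member enum keeps an 'enum' listing.
assert schema['$defs']['ViewId']['enum'] == ['m1v1 - View One', 'm1v2 - View Two']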
2 changes: 1 addition & 1 deletion tests/awsathena/test_awsathena.py
@@ -193,7 +193,7 @@ def test_athenadatasource_get_form(
assert result['properties']['domain']['title'] == 'Domain'
assert result['properties']['validation']['title'] == 'Validation'
assert result['required'] == ['domain', 'name', 'database']
-assert result['definitions']['database']['enum'] == ['db1', 'db2']
+assert result['$defs']['database']['enum'] == ['db1', 'db2']


@pytest.mark.usefixtures('data_source', 'mocked_boto_session')
42 changes: 12 additions & 30 deletions tests/clickhouse/test_clickhouse.py
@@ -151,20 +151,14 @@ def test_get_form_query_with_good_database(clickhouse_connector):
"""It should give suggestions of the collections"""
current_config = {'database': 'clickhouse_db'}
form = ClickhouseDataSource.get_form(clickhouse_connector, current_config)
-assert form['properties']['database'] == {'$ref': '#/definitions/database'}
-assert form['definitions']['database'] == {
+assert form['properties']['database'] == {'$ref': '#/$defs/database'}
+assert form['$defs']['database'] == {
'title': 'database',
-'description': 'An enumeration.',
'enum': ['INFORMATION_SCHEMA', 'clickhouse_db', 'default', 'information_schema'],
'type': 'string',
}
-assert form['properties']['table'] == {'$ref': '#/definitions/table'}
-assert form['definitions']['table'] == {
-'title': 'table',
-'description': 'An enumeration.',
-'type': 'string',
-'enum': ['city'],
-}
+assert form['properties']['table'] == {'allOf': [{'$ref': '#/$defs/table'}], 'default': None}
+assert form['$defs']['table'] == {'const': 'city', 'title': 'table', 'type': 'string'}
assert form['required'] == ['domain', 'name', 'database']


@@ -175,7 +169,7 @@ def test_get_form_connection_fails(mocker, clickhouse_connector):
assert 'table' in form['properties']


-def test_schema_extra():
+def test_model_json_schema():
data_source_spec = {
'domain': 'Clickhouse test',
'type': 'external_database',
@@ -184,25 +178,13 @@ def test_schema_extra():
'query': 'SELECT * FROM city WHERE id in %(ids)s',
'parameters': {'ids': [3986, 3958]},
}
-conf = ClickhouseDataSource(**data_source_spec).Config
-schema = {
-'properties': {
-'query': 'bar',
-'parameters': 'bar',
-'table': 'bar',
-'database': 'bar',
-}
-}
-conf.schema_extra(schema, model=ClickhouseDataSource)
-
-assert schema == {
-'properties': {
-'database': 'bar',
-'table': 'bar',
-'query': 'bar',
-'parameters': 'bar',
-}
-}
+ds = ClickhouseDataSource(**data_source_spec)
+assert list(ds.model_json_schema()['properties'].keys())[:4] == [
+'database',
+'table',
+'query',
+'parameters',
+]


def test_create_connections():
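The v1 version of this test exercised Config.schema_extra, which the connectors used to reorder the generated form's properties; the v2 version asserts directly on the key order of model_json_schema(). One way to produce such an ordering in pydantic v2 is a json_schema_extra callable in model_config. A sketch under that assumption, with the field names taken from the test but an invented model (not necessarily how ClickhouseDataSource actually does it):

from typing import Optional

from pydantic import BaseModel, ConfigDict


def order_form_properties(schema: dict) -> None:
    # Re-insert 'properties' in the order the form expects; dicts preserve
    # insertion order, which is what the updated tests assert on.
    wanted = ('database', 'table', 'query', 'parameters')
    props = schema.get('properties', {})
    schema['properties'] = {
        **{key: props[key] for key in wanted if key in props},
        **{key: val for key, val in props.items() if key not in wanted},
    }


class ExampleDataSource(BaseModel):
    model_config = ConfigDict(json_schema_extra=order_form_properties)

    query: str
    parameters: Optional[dict] = None
    table: Optional[str] = None
    database: str = 'default'


assert list(ExampleDataSource.model_json_schema()['properties'].keys())[:4] == [
    'database',
    'table',
    'query',
    'parameters',
]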
2 changes: 1 addition & 1 deletion tests/github/test_github.py
@@ -479,7 +479,7 @@ def test_datasource_get_form_no_secret(gc, remove_secrets):
status=200,
)
res = ds.get_form(connector=gc, current_config={})
-assert 'organization' not in res['definitions'].keys()
+assert 'organization' not in res['$defs'].keys()


def test_get_slice(
44 changes: 16 additions & 28 deletions tests/google_adwords/test_google_adwords.py
@@ -3,6 +3,7 @@
import pandas as pd
import responses
from pytest import fixture
+from pytest_mock import MockerFixture

from toucan_connectors.common import HttpError
from toucan_connectors.google_adwords.google_adwords_connector import (
@@ -126,37 +127,20 @@ def test_retrieve_tokens(mocker, connector):
mock_oauth2_connector.retrieve_tokens.assert_called()


-def test_schema_extra(build_data_service_source):
+def test_model_json_schema(build_data_service_source: GoogleAdwordsDataSource):
"""
Check that schema_extra correctly
structures the Data Source form
"""
-conf = build_data_service_source.Config
-schema = {
-'properties': {
-'domain': 'bar',
-'service': 'foo',
-'columns': 'bababa',
-'from_clause': 'foofoo',
-'parameters': 'barbar',
-'during': 'foobar',
-'orderby': 'barfoo',
-'limit': 100,
-}
-}
-conf.schema_extra(schema, model=GoogleAdwordsDataSource)
-assert schema == {
-'properties': {
-'service': 'foo',
-'columns': 'bababa',
-'from_clause': 'foofoo',
-'parameters': 'barbar',
-'during': 'foobar',
-'orderby': 'barfoo',
-'limit': 100,
-'domain': 'bar',
-}
-}
+assert list(build_data_service_source.model_json_schema()['properties'].keys())[:7] == [
+'service',
+'columns',
+'from_clause',
+'parameters',
+'during',
+'orderby',
+'limit',
+]


def test_get_connectors_secrets_form(connector):
@@ -194,7 +178,11 @@ def test_authenticate_client(connector, mocker):
mocked_refresh.assert_called_once()


-def test_prepare_service_query(connector, mocker, build_data_service_source):
+def test_prepare_service_query(
+connector: GoogleAdwordsConnector,
+mocker: MockerFixture,
+build_data_service_source: GoogleAdwordsDataSource,
+):
"""
Check that prepare_service_query is able to build
& return a service and a built service query
2 changes: 1 addition & 1 deletion tests/google_big_query/test_google_big_query.py
@@ -739,7 +739,7 @@ def mock_available_schs():

mocker.patch(
'toucan_connectors.google_big_query.google_big_query_connector.GoogleBigQueryConnector._available_schs',
-return_value=mock_available_schs,
+new=['ok', 'test'],
)

assert (
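The patch switches from return_value= to new=, presumably because _available_schs is read as an attribute rather than called: with new=, the patch target is replaced by the given object itself instead of by a MagicMock whose return value would only appear when calling it. A standalone illustration of the difference, using unittest.mock and a stand-in class rather than the real connector:

from unittest import mock


class FakeConnector:
    @property
    def _available_schs(self):
        # Stand-in for the real attribute, which would hit BigQuery.
        raise RuntimeError('should not be reached in tests')


with mock.patch.object(FakeConnector, '_available_schs', new=['ok', 'test']):
    # The class attribute itself is replaced, so plain attribute access
    # yields the list directly, with no MagicMock in between.
    assert FakeConnector()._available_schs == ['ok', 'test']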
2 changes: 1 addition & 1 deletion tests/google_sheets/test_google_sheets.py
@@ -300,7 +300,7 @@ def test_get_form(mocker):
current_config={'spreadsheet_id': 'test_spreadsheet_id'},
)
expected_results = ['sample data', 'animals']
-assert schema['definitions']['sheet']['enum'] == expected_results
+assert schema['$defs']['sheet']['enum'] == expected_results


def test_numeric_dateformat_():
13 changes: 8 additions & 5 deletions tests/google_sheets_2/test_google_sheets_2.py
@@ -3,6 +3,7 @@

import pytest
from pytest import fixture
+from pytest_mock import MockerFixture
from pytz import utc

from toucan_connectors.common import HttpError
@@ -82,8 +83,8 @@ async def test_authentified_fetch(mocker, con):
def get_columns_in_schema(schema):
"""Pydantic generates schema slightly differently in python <=3.7 and in python 3.8"""
try:
-if schema.get('definitions'):
-return schema['definitions']['sheet']['enum']
+if defs := schema.get('$defs') or schema.get('definitions'):
+return defs['sheet']['enum']
else:
return schema['properties']['sheet']['enum']
except KeyError:
@@ -267,13 +268,15 @@ def test_get_slice(mocker, con, ds):
assert ds.df.shape == (2, 2)


-def test_get_slice_no_limit(mocker, con, ds):
+def test_get_slice_no_limit(
+mocker: MockerFixture, con: GoogleSheets2Connector, ds: GoogleSheets2DataSource
+):
"""It should return a slice of spreadsheet"""
mocker.patch.object(GoogleSheets2Connector, '_run_fetch', return_value=FAKE_SHEET)

-ds = con.get_slice(ds, limit=None)
+slice = con.get_slice(ds, limit=None)

-assert ds.df.shape == (2, 2)
+assert slice.df.shape == (2, 2)


def test_schema_fields_order(con, ds):
37 changes: 11 additions & 26 deletions tests/http_api/test_http_api.py
@@ -438,7 +438,7 @@ def test_oauth2_oidc_authentication(mocker):
mock_session.assert_called_once()


-def test_schema_extra():
+def test_model_json_schema():
data_source_spec = {
'data': '',
'domain': 'Clickhouse test',
@@ -458,38 +458,23 @@
'validation': {},
'xpath': '',
}
-conf = HttpAPIDataSource(**data_source_spec).Config
-
-schema = {
-'properties': {
-'data': '',
-'proxies': {},
-'filter': '',
-'flatten_column': '',
-'validation': {},
-'xpath': '',
-}
-}
-conf.schema_extra(schema, model=HttpAPIDataSource)
-
-assert schema == {
-'properties': {
-'proxies': {},
-'flatten_column': '',
-'data': '',
-'xpath': '',
-'filter': '',
-'validation': {},
-}
-}
+ds = HttpAPIDataSource(**data_source_spec)
+assert list(ds.model_json_schema()['properties'].keys())[-6:] == [
+'proxies',
+'flatten_column',
+'data',
+'xpath',
+'filter',
+'validation',
+]


def test_get_cache_key(connector, auth, data_source):
data_source.headers = {'name': '%(first_name)s'}
data_source.parameters = {'first_name': 'raphael'}
key = connector.get_cache_key(data_source)

-assert key == 'fa95c942-9b94-3f07-9ed4-24c34abfbdae'
+assert key == 'f24af0b5-f745-3961-8aec-a27d44543fb9'

data_source.headers = {'name': '{{ first_name }}'} # change the templating style
key2 = connector.get_cache_key(data_source)
37 changes: 12 additions & 25 deletions tests/linkedinads/test_linkedinads.py
@@ -106,7 +106,9 @@ def test__retrieve_data(connector, create_datasource):


@responses.activate
-def test__retrieve_data_no_nested_col(connector, create_datasource):
+def test__retrieve_data_no_nested_col(
+connector: LinkedinadsConnector, create_datasource: LinkedinadsDataSource
+):
create_datasource.flatten_column = None
responses.add(
method='GET',
@@ -153,30 +155,15 @@ def test_retrieve_tokens(mocker, connector):
mock_oauth2_connector.retrieve_tokens.assert_called()


-def test_schema_extra(create_datasource):
-conf = create_datasource.Config
-schema = {
-'properties': {
-'time_granularity': 'bar',
-'flatten_column': 'bar',
-'parameters': 'bar',
-'finder_methods': 'bar',
-'start_date': 'bar',
-'end_date': 'bar',
-}
-}
-conf.schema_extra(schema, model=LinkedinadsDataSource)
-
-assert schema == {
-'properties': {
-'finder_methods': 'bar',
-'start_date': 'bar',
-'end_date': 'bar',
-'time_granularity': 'bar',
-'flatten_column': 'bar',
-'parameters': 'bar',
-}
-}
+def test_model_json_schema(create_datasource: LinkedinadsDataSource):
+assert list(create_datasource.model_json_schema()['properties'].keys())[:6] == [
+'finder_methods',
+'start_date',
+'end_date',
+'time_granularity',
+'flatten_column',
+'parameters',
+]


@responses.activate