Fix type hint #194

Merged: 3 commits, Oct 22, 2024
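Context inferred from the diffs below (not stated in the captured PR text): every hunk replaces PEP 604 union annotations (`str | None`, `None | list[str]`) with `typing.Optional` and `typing.Union`, presumably because the `X | Y` union syntax only works in annotations evaluated at definition time on Python 3.10+, while `Optional`/`Union` also work on earlier versions. A minimal sketch of the two spellings (the function name is illustrative, not from this repo):

```python
from typing import List, Optional

# Portable spelling adopted by this PR; also works on Python 3.8/3.9:
def selected_stages() -> Optional[List[str]]:
    return None

# PEP 604 spelling removed by this PR; evaluated annotations like this
# need Python 3.10+ (unless `from __future__ import annotations` is used):
# def selected_stages() -> None | List[str]: ...
```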
12 changes: 6 additions & 6 deletions app_utils/shared_utils.py
@@ -7,7 +7,7 @@
 from datetime import datetime
 from enum import Enum
 from io import StringIO
-from typing import Any, Optional, List
+from typing import Any, Optional, List, Union

 import pandas as pd
 import streamlit as st
@@ -200,7 +200,7 @@ def get_available_stages(schema: str) -> List[str]:
     return fetch_stages_in_schema(get_snowflake_connection(), schema)


-def stage_selector_container() -> None | List[str]:
+def stage_selector_container() -> Optional[List[str]]:
     """
     Common component that encapsulates db/schema/stage selection for the admin app.
     When a db/schema/stage is selected, it is saved to the session state for reading elsewhere.
@@ -1247,23 +1247,23 @@ class AppMetadata:
     """

     @property
-    def user(self) -> str | None:
+    def user(self) -> Optional[str]:
         return os.getenv("SNOWFLAKE_USER")

     @property
-    def stage(self) -> str | None:
+    def stage(self) -> Optional[str]:
         if stage_exists():
             stage = st.session_state.snowflake_stage
             return f"{stage.stage_database}.{stage.stage_schema}.{stage.stage_name}"
         return None

     @property
-    def model(self) -> str | None:
+    def model(self) -> Optional[str]:
         if semantic_model_exists():
             return st.session_state.semantic_model.name  # type: ignore
         return None

-    def to_dict(self) -> dict[str, str | None]:
+    def to_dict(self) -> dict[str, Union[str,None]]:
         return {
             "User": self.user,
             "Stage": self.stage,
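A short usage sketch of the annotations updated in this file; the consumer function below is a hypothetical stand-in, not app code:

```python
from typing import List, Optional, Union

# Optional[X] is shorthand for Union[X, None], so Optional[List[str]] and
# dict[str, Union[str, None]] describe the same values as the old `|` forms.
assert Optional[str] == Union[str, None]

def describe_stages(stages: Optional[List[str]]) -> str:
    # Hypothetical caller of a stage_selector_container()-style return value:
    # narrow away None before using the list.
    if stages is None:
        return "no stage selected"
    return ", ".join(stages)

print(describe_stages(None))                    # no stage selected
print(describe_stages(["DB.SCHEMA.MY_STAGE"]))  # DB.SCHEMA.MY_STAGE
```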
14 changes: 7 additions & 7 deletions partner/cortex.py
@@ -18,11 +18,11 @@ def __init__(self, data: dict[str, Any]):

         self.data: dict[str, Any] = data
         self.name: str = data["name"]
-        self.synonyms: None | list[str] = data.get("synonyms", None)
+        self.synonyms: Optional[list[str]] = data.get("synonyms", None)
         self.data_type: str = data.get("data_type", "TEXT")
         self.expr: str = data["expr"]
-        self.description: None | str = data.get("description", None)
-        self.sample_values: None | list[str] = data.get("sample_values", None)
+        self.description: Optional[str] = data.get("description", None)
+        self.sample_values: Optional[list[str]] = data.get("sample_values", None)
         self.unique: bool = data.get("unique", False)

     def get_name(self) -> str:
@@ -90,13 +90,13 @@ class CortexSemanticTable:
     def __init__(self, data: dict[str, Any]):
         self.data: dict[str, Any] = data
         self.name: str = data["name"]
-        self.description: None | str = data["description"]
+        self.description: Optional[str] = data["description"]
         self.base_table_db: str = data["base_table"]["database"]
         self.base_table_schema: str = data["base_table"]["schema"]
         self.base_table_table: str = data["base_table"]["table"]
-        self.dimensions: None | list[dict[str, Any]] = data["dimensions"]
-        self.time_dimensions: None | list[dict[str, Any]] = data["time_dimensions"]
-        self.measures: None | list[dict[str, Any]] = data["measures"]
+        self.dimensions: Optional[list[dict[str, Any]]] = data["dimensions"]
+        self.time_dimensions: Optional[list[dict[str, Any]]] = data["time_dimensions"]
+        self.measures: Optional[list[dict[str, Any]]] = data["measures"]

     def get_data(self) -> dict[str, Any]:
         return self.data
16 changes: 8 additions & 8 deletions partner/dbt.py
@@ -1,4 +1,4 @@
-from typing import Any, Optional
+from typing import Any, Optional, Union

 import pandas as pd
 import streamlit as st
@@ -80,7 +80,7 @@ def upload_dbt_semantic() -> None:
         key="dbt_files",
     )
     if uploaded_files:
-        partner_semantic: list[None | DBTSemanticModel] = []
+        partner_semantic: list[Union[None,DBTSemanticModel]] = []
         for file in uploaded_files:
             partner_semantic.extend(read_dbt_yaml(file))  # type: ignore

@@ -111,7 +111,7 @@ def __init__(self, entity: dict[str, Any]):
         self.name: str = entity["name"]
         self.type: str = entity.get("type", None)
         self.expr: str = entity.get("expr", self.name)
-        self.description: None | str = entity.get("description", None)
+        self.description: Optional[str] = entity.get("description", None)
         self.cortex_map = {
             "name": self.name,
             "description": self.description,
@@ -153,7 +153,7 @@ class DBTMeasure(DBTEntity):

     def __init__(self, entity: dict[str, Any]):
         super().__init__(entity)
-        self.agg: None | str = entity.get("agg", None)
+        self.agg: Optional[str] = entity.get("agg", None)
         self.cortex_map = {
             "name": self.name,
             "description": self.description,
@@ -195,10 +195,10 @@ class DBTSemanticModel:
     def __init__(self, data: dict[str, Any]):
         self.data: dict[str, Any] = data
         self.name: str = data["name"]
-        self.description: None | str = data.get("description", None)
-        self.entities: None | list[dict[str, Any]] = data["entities"]
-        self.dimensions: None | list[dict[str, Any]] = data["dimensions"]
-        self.measures: None | list[dict[str, Any]] = data["measures"]
+        self.description: Optional[str] = data.get("description", None)
+        self.entities: Optional[list[dict[str, Any]]] = data["entities"]
+        self.dimensions: Optional[list[dict[str, Any]]] = data["dimensions"]
+        self.measures: Optional[list[dict[str, Any]]] = data["measures"]

     def get_data(self) -> dict[str, Any]:
         return self.data
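One note on the `Union[None, ...]` spelling introduced above: at runtime `typing` normalizes it to the same object as `Optional[...]`, so the two forms are interchangeable for type checkers. A standalone check (the class here is a minimal stand-in, not the real `DBTSemanticModel`):

```python
from typing import Optional, Union

class DBTSemanticModel:
    # Minimal stand-in for partner/dbt.py's DBTSemanticModel, just for this check.
    pass

# Union ignores member order and Optional[X] is defined as Union[X, None],
# so both annotations below mean the same thing.
assert Union[None, DBTSemanticModel] == Optional[DBTSemanticModel]

partner_semantic: list[Union[None, DBTSemanticModel]] = [None, DBTSemanticModel()]
partner_semantic_alt: list[Optional[DBTSemanticModel]] = partner_semantic
```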
4 changes: 2 additions & 2 deletions partner/looker.py
@@ -1,5 +1,5 @@
 import os
-from typing import Any, Optional
+from typing import Any, Optional, Union

 import pandas as pd
 import streamlit as st
@@ -523,7 +523,7 @@ def render_looker_explore_as_table(
     target_lag: Optional[int] = 20,
     target_lag_unit: Optional[str] = "minutes",
     warehouse: Optional[str] = None,
-) -> None | dict[str, dict[str, str]]:
+) -> Union[None,dict[str, dict[str, str]]]:
     """
     Creates materialized table corresponding to Looker Explore.
     Args:
2 changes: 1 addition & 1 deletion partner/partner_utils.py
@@ -113,7 +113,7 @@ def __init__(self, row_data: pd.Series) -> None:  # type: ignore
             else {}
         )

-    def render_row(self) -> None | dict[str, Any]:  # type: ignore
+    def render_row(self) -> Union[None, dict[str, Any]]:  # type: ignore
         toggle_options = ["merged", "cortex", "partner", "remove"]
         metadata = {}

@@ -129,7 +129,7 @@ def _get_column_comment(
 def get_table_primary_keys(
     conn: SnowflakeConnection,
     table_fqn: str,
-) -> list[str] | None:
+) -> Optional[list[str]]:
     query = f"show primary keys in table {table_fqn};"
     cursor = conn.cursor()
     cursor.execute(query)
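As an aside, the other common way to keep the `X | None` spelling on older interpreters is postponed evaluation of annotations (PEP 563). That is not what this PR does; the sketch below only illustrates the trade-off, and the helper name is hypothetical:

```python
from __future__ import annotations  # annotations are stored as strings, not evaluated

def table_primary_keys_stub(table_fqn: str) -> list[str] | None:
    # With postponed evaluation this module imports fine on Python 3.8/3.9,
    # but anything that calls typing.get_type_hints() on it at runtime would
    # still fail there, which is why rewriting to Optional[...] is the safer fix.
    return None
```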