
Commit

Add metric return type
NathanFarmer committed Sep 20, 2024
1 parent 3fd6dbd commit 15110d7
Showing 1 changed file with 8 additions and 3 deletions.
great_expectations/expectations/metrics/query_metrics/query_table.py
@@ -2,6 +2,8 @@

from typing import TYPE_CHECKING, Any, Dict, List, Sequence, Union

from typing_extensions import Annotated, TypeAlias

from great_expectations.compatibility.sqlalchemy import (
    sqlalchemy as sa,
)
@@ -22,6 +24,9 @@
    from great_expectations.compatibility import pyspark


QueryTableRecords: TypeAlias = Annotated[List[dict], MAX_IN_MEMORY_RECORDS_ALLOWED]


class QueryTable(QueryMetricProvider):
    metric_name = "query.table"
    value_keys = ("query",)
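
Note (illustrative, not part of the diff): the new QueryTableRecords alias uses Annotated to attach the record cap to the List[dict] return type as metadata, without changing the runtime value. A minimal standalone sketch of how that metadata can be read back with the standard typing helpers; the value 200 for MAX_IN_MEMORY_RECORDS_ALLOWED is a placeholder, not the real constant:

from typing import Annotated, List, get_args

MAX_IN_MEMORY_RECORDS_ALLOWED = 200  # placeholder; the real constant is defined elsewhere in great_expectations

QueryTableRecords = Annotated[List[dict], MAX_IN_MEMORY_RECORDS_ALLOWED]

# get_args() returns the wrapped type followed by any metadata attached via Annotated.
base_type, record_cap = get_args(QueryTableRecords)
print(base_type)   # typing.List[dict]
print(record_cap)  # 200

The diff imports Annotated and TypeAlias from typing_extensions, which backports the same behaviour to older Python versions.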
@@ -35,7 +40,7 @@ def _sqlalchemy(
        metric_value_kwargs: dict,
        metrics: Dict[str, Any],
        runtime_configuration: dict,
    ) -> List[dict]:
    ) -> QueryTableRecords:
        query = cls._get_query_from_metric_value_kwargs(metric_value_kwargs)

        batch_selectable: sa.sql.Selectable
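
Note (illustrative, not part of the diff): the _sqlalchemy implementation returns query rows as a list of dicts, the shape that QueryTableRecords annotates. A rough, self-contained sketch of that pattern against an in-memory SQLite engine, with the row cap applied via fetchmany(); the table, data, and the value of MAX_IN_MEMORY_RECORDS_ALLOWED are invented for illustration:

import sqlalchemy as sa

MAX_IN_MEMORY_RECORDS_ALLOWED = 200  # placeholder for the real constant

engine = sa.create_engine("sqlite://")
with engine.connect() as conn:
    conn.execute(sa.text("CREATE TABLE demo (a INTEGER, b TEXT)"))
    conn.execute(sa.text("INSERT INTO demo VALUES (1, 'x'), (2, 'y')"))
    # Cap how many rows are pulled into memory, then convert each Row to a plain dict.
    rows = conn.execute(sa.text("SELECT * FROM demo")).fetchmany(MAX_IN_MEMORY_RECORDS_ALLOWED)
    records = [dict(row._mapping) for row in rows]  # List[dict], i.e. QueryTableRecords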
@@ -83,7 +88,7 @@ def _spark(
        metric_value_kwargs: dict,
        metrics: Dict[str, Any],
        runtime_configuration: dict,
    ) -> List[dict]:
    ) -> QueryTableRecords:
        query = cls._get_query_from_metric_value_kwargs(metric_value_kwargs)

        df: pyspark.DataFrame
@@ -95,6 +100,6 @@
        query = query.format(batch="tmp_view")

        engine: pyspark.SparkSession = execution_engine.spark
        result: List[pyspark.Row] = engine.sql(query).collect()
        result: List[pyspark.Row] = engine.sql(query).limit(MAX_IN_MEMORY_RECORDS_ALLOWED).collect()

Codecov / codecov/patch check warning on great_expectations/expectations/metrics/query_metrics/query_table.py#L103: Added line #L103 was not covered by tests.

        return [element.asDict() for element in result]
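
Note (illustrative, not part of the diff): a self-contained sketch of the _spark pattern shown above, where the batch DataFrame is registered as a temporary view, the {batch} placeholder in the query is substituted with the view name, and the number of rows collected to the driver is capped. The local session, demo data, and the value of MAX_IN_MEMORY_RECORDS_ALLOWED are assumptions for illustration:

from pyspark.sql import SparkSession

MAX_IN_MEMORY_RECORDS_ALLOWED = 200  # placeholder for the real constant

spark = SparkSession.builder.master("local[1]").getOrCreate()
df = spark.createDataFrame([(1, "x"), (2, "y")], ["a", "b"])
df.createOrReplaceTempView("tmp_view")

query = "SELECT * FROM {batch}".format(batch="tmp_view")
# limit() bounds the result before collect() materializes it on the driver.
result = spark.sql(query).limit(MAX_IN_MEMORY_RECORDS_ALLOWED).collect()
records = [row.asDict() for row in result]  # List[dict], i.e. QueryTableRecords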
