Remove a return type :(
tyler-hoffman committed Sep 11, 2024
1 parent e06f424 commit 158aa20
Showing 2 changed files with 45 additions and 1 deletion.
2 changes: 1 addition & 1 deletion tests/conftest.py
@@ -493,7 +493,7 @@ def spark_session(test_backends) -> pyspark.SparkSession:


 @pytest.fixture
-def spark_connect_session(test_backends) -> pyspark.SparkConnectSession:
+def spark_connect_session(test_backends):
     from great_expectations.compatibility import pyspark
 
     if pyspark.SparkConnectSession:  # type: ignore[truthy-function]
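The remainder of the fixture body is collapsed in the hunk above. For context, the sketch below is a hypothetical reconstruction of the guarded pattern the visible lines imply, not the committed code: the connection string sc://localhost:15002, the pytest.skip branch, and the session.stop() cleanup are all assumptions.

import pytest

from great_expectations.compatibility import pyspark


@pytest.fixture
def spark_connect_session(test_backends):
    if pyspark.SparkConnectSession:  # type: ignore[truthy-function]
        # Assumption: a Spark Connect server is reachable locally.
        session = pyspark.SparkSession.builder.remote("sc://localhost:15002").getOrCreate()
        yield session
        session.stop()
    else:
        # Assumption: skip rather than fail when Spark Connect is unavailable.
        pytest.skip("Spark Connect is not available in this environment")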
44 changes: 44 additions & 0 deletions tests/integration/spark/test_spark_connect.py
@@ -0,0 +1,44 @@
import logging

import pytest
from pyspark.sql import Row

import great_expectations as gx

logger = logging.getLogger(__name__)


pytestmark = pytest.mark.spark


def test_spark_connect(spark_connect_session, ephemeral_context_with_defaults):
context = ephemeral_context_with_defaults
df = spark_connect_session.createDataFrame(
[
Row(column=1),
Row(column=2),
Row(column=5),
]
)

bd = (
context.data_sources.add_spark(name="spark-connect-ds")
.add_dataframe_asset(name="spark-connect-asset")
.add_batch_definition_whole_dataframe(name="spark-connect-bd")
)
suite = context.suites.add(
gx.ExpectationSuite(
name="spark-connect-suite",
expectations=[
gx.expectations.ExpectColumnValuesToBeInSet(column="column", value_set=[1, 2, 5]),
],
)
)

vd = context.validation_definitions.add(
gx.ValidationDefinition(name="spark-connect-vd", suite=suite, data=bd)
)

results = vd.run(batch_parameters={"dataframe": df})

assert results.success
