Replace full_refresh usage in tests
steinitzu committed Mar 7, 2024
1 parent 109c8c9 · commit b448e5b
Showing 24 changed files with 96 additions and 96 deletions.
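
This commit is a mechanical rename: the deprecated full_refresh pipeline argument becomes dev_mode across the test suite (plus one test function renamed to match), with no behavior change intended. Below is a minimal sketch of the pattern being applied, assuming dlt's documented semantics for the flag; the pipeline, destination, and dataset names are illustrative, not taken from the diff:

import dlt

# Deprecated spelling that this commit removes from the tests:
# pipeline = dlt.pipeline(pipeline_name="demo", destination="duckdb", full_refresh=True)

# New spelling: dev_mode is the same switch under its new name. When enabled,
# each pipeline instance writes to a freshly suffixed dataset instead of
# resuming the previous schema and state (suffix format may vary by version).
pipeline = dlt.pipeline(
    pipeline_name="demo",     # illustrative name
    destination="duckdb",     # any destination; duckdb keeps the sketch local
    dataset_name="demo_data",
    dev_mode=True,            # replaces full_refresh=True
)
print(pipeline.dataset_name)  # e.g. demo_data_<suffix> when dev_mode is on

The diffs below all follow this one-line substitution.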
tests/cli/cases/deploy_pipeline/debug_pipeline.py (1 addition, 1 deletion)

@@ -17,7 +17,7 @@ def example_source(api_url=dlt.config.value, api_key=dlt.secrets.value, last_id=
         pipeline_name="debug_pipeline",
         destination="postgres",
         dataset_name="debug_pipeline_data",
-        full_refresh=False,
+        dev_mode=False,
     )
     load_info = p.run(example_source(last_id=819273998))
     print(load_info)
tests/extract/test_sources.py (4 additions, 4 deletions)

@@ -293,7 +293,7 @@ def some_data(param: str):
     # create two resource instances and extract in single ad hoc resource
     data1 = some_data("state1")
     data1._pipe.name = "state1_data"
-    dlt.pipeline(full_refresh=True).extract([data1, some_data("state2")], schema=Schema("default"))
+    dlt.pipeline(dev_mode=True).extract([data1, some_data("state2")], schema=Schema("default"))
     # both should be extracted. what we test here is the combination of binding the resource by calling it that clones the internal pipe
     # and then creating a source with both clones. if we keep same pipe id when cloning on call, a single pipe would be created shared by two resources
     assert all_yields == ["state1", "state2"]

@@ -735,7 +735,7 @@ def test_source(no_resources):

 def test_source_resource_attrs_with_conflicting_attrs() -> None:
     """Resource names that conflict with DltSource attributes do not work with attribute access"""
-    dlt.pipeline(full_refresh=True)  # Create pipeline so state property can be accessed
+    dlt.pipeline(dev_mode=True)  # Create pipeline so state property can be accessed
     names = ["state", "resources", "schema", "name", "clone"]

     @dlt.source

@@ -839,7 +839,7 @@ def test_source(expected_state):
     with pytest.raises(PipelineStateNotAvailable):
         test_source({}).state

-    dlt.pipeline(full_refresh=True)
+    dlt.pipeline(dev_mode=True)
     assert test_source({}).state == {}

     # inject state to see if what we write in state is there

@@ -869,7 +869,7 @@ def test_source():
     with pytest.raises(PipelineStateNotAvailable):
         s.test_resource.state

-    p = dlt.pipeline(full_refresh=True)
+    p = dlt.pipeline(dev_mode=True)
     assert r.state == {}
     assert s.state == {}
     assert s.test_resource.state == {}
tests/helpers/dbt_tests/local/test_runner_destinations.py (1 addition, 1 deletion)

@@ -99,7 +99,7 @@ def test_dbt_test_no_raw_schema(destination_info: DBTDestinationInfo) -> None:
     assert isinstance(prq_ex.value.args[0], DBTProcessingError)


-def test_dbt_run_full_refresh(destination_info: DBTDestinationInfo) -> None:
+def test_dbt_run_dev_mode(destination_info: DBTDestinationInfo) -> None:
     if destination_info.destination_name == "redshift":
         pytest.skip("redshift disabled due to missing fixtures")
     runner = setup_rasa_runner(destination_info.destination_name)
tests/load/athena_iceberg/test_athena_iceberg.py (1 addition, 1 deletion)

@@ -30,7 +30,7 @@ def test_iceberg() -> None:
         pipeline_name="athena-iceberg",
         destination="athena",
         staging="filesystem",
-        full_refresh=True,
+        dev_mode=True,
     )

     def items() -> Iterator[Any]:
tests/load/bigquery/test_bigquery_table_builder.py (14 additions, 14 deletions)

@@ -196,7 +196,7 @@ def test_create_table_with_integer_partition(gcp_client: BigQueryClient) -> None:
     ids=lambda x: x.name,
 )
 def test_bigquery_partition_by_date(destination_config: DestinationTestConfiguration) -> None:
-    pipeline = destination_config.setup_pipeline(f"bigquery_{uniq_id()}", full_refresh=True)
+    pipeline = destination_config.setup_pipeline(f"bigquery_{uniq_id()}", dev_mode=True)

     @dlt.resource(
         write_disposition="merge",

@@ -231,7 +231,7 @@ def demo_source() -> DltResource:
     ids=lambda x: x.name,
 )
 def test_bigquery_no_partition_by_date(destination_config: DestinationTestConfiguration) -> None:
-    pipeline = destination_config.setup_pipeline(f"bigquery_{uniq_id()}", full_refresh=True)
+    pipeline = destination_config.setup_pipeline(f"bigquery_{uniq_id()}", dev_mode=True)

     @dlt.resource(
         write_disposition="merge",

@@ -266,7 +266,7 @@ def demo_source() -> DltResource:
     ids=lambda x: x.name,
 )
 def test_bigquery_partition_by_timestamp(destination_config: DestinationTestConfiguration) -> None:
-    pipeline = destination_config.setup_pipeline(f"bigquery_{uniq_id()}", full_refresh=True)
+    pipeline = destination_config.setup_pipeline(f"bigquery_{uniq_id()}", dev_mode=True)

     @dlt.resource(
         write_disposition="merge",

@@ -305,7 +305,7 @@ def demo_source() -> DltResource:
 def test_bigquery_no_partition_by_timestamp(
     destination_config: DestinationTestConfiguration,
 ) -> None:
-    pipeline = destination_config.setup_pipeline(f"bigquery_{uniq_id()}", full_refresh=True)
+    pipeline = destination_config.setup_pipeline(f"bigquery_{uniq_id()}", dev_mode=True)

     @dlt.resource(
         write_disposition="merge",

@@ -342,7 +342,7 @@ def demo_source() -> DltResource:
     ids=lambda x: x.name,
 )
 def test_bigquery_partition_by_integer(destination_config: DestinationTestConfiguration) -> None:
-    pipeline = destination_config.setup_pipeline(f"bigquery_{uniq_id()}", full_refresh=True)
+    pipeline = destination_config.setup_pipeline(f"bigquery_{uniq_id()}", dev_mode=True)

     @dlt.resource(
         columns={"some_int": {"data_type": "bigint", "partition": True, "nullable": False}},

@@ -375,7 +375,7 @@ def demo_source() -> DltResource:
     ids=lambda x: x.name,
 )
 def test_bigquery_no_partition_by_integer(destination_config: DestinationTestConfiguration) -> None:
-    pipeline = destination_config.setup_pipeline(f"bigquery_{uniq_id()}", full_refresh=True)
+    pipeline = destination_config.setup_pipeline(f"bigquery_{uniq_id()}", dev_mode=True)

     @dlt.resource(
         columns={"some_int": {"data_type": "bigint", "partition": False, "nullable": False}},

@@ -463,7 +463,7 @@ def sources() -> List[DltResource]:

     pipeline = destination_config.setup_pipeline(
         f"bigquery_{uniq_id()}",
-        full_refresh=True,
+        dev_mode=True,
     )

     pipeline.run(sources())

@@ -523,7 +523,7 @@ def sources() -> List[DltResource]:

     pipeline = destination_config.setup_pipeline(
         f"bigquery_{uniq_id()}",
-        full_refresh=True,
+        dev_mode=True,
     )

     pipeline.run(sources())

@@ -583,7 +583,7 @@ def sources() -> List[DltResource]:

     pipeline = destination_config.setup_pipeline(
         f"bigquery_{uniq_id()}",
-        full_refresh=True,
+        dev_mode=True,
     )

     pipeline.run(sources())

@@ -708,7 +708,7 @@ def sources() -> List[DltResource]:

     pipeline = destination_config.setup_pipeline(
         f"bigquery_{uniq_id()}",
-        full_refresh=True,
+        dev_mode=True,
     )

     pipeline.run(sources())

@@ -756,7 +756,7 @@ def sources() -> List[DltResource]:

     pipeline = destination_config.setup_pipeline(
         f"bigquery_{uniq_id()}",
-        full_refresh=True,
+        dev_mode=True,
     )

     pipeline.run(sources())

@@ -844,7 +844,7 @@ def sources() -> List[DltResource]:

     pipeline = destination_config.setup_pipeline(
         f"bigquery_{uniq_id()}",
-        full_refresh=True,
+        dev_mode=True,
     )

     pipeline.run(sources())

@@ -894,7 +894,7 @@ def sources() -> List[DltResource]:

     pipeline = destination_config.setup_pipeline(
         f"bigquery_{uniq_id()}",
-        full_refresh=True,
+        dev_mode=True,
     )

     pipeline.run(sources())

@@ -937,7 +937,7 @@ def hints() -> Iterator[Dict[str, Any]]:

     pipeline = destination_config.setup_pipeline(
         f"bigquery_{uniq_id()}",
-        full_refresh=True,
+        dev_mode=True,
     )

     pipeline.run(hints)
tests/load/pipeline/test_athena.py (3 additions, 3 deletions)

@@ -18,7 +18,7 @@
     ids=lambda x: x.name,
 )
 def test_athena_destinations(destination_config: DestinationTestConfiguration) -> None:
-    pipeline = destination_config.setup_pipeline("athena_" + uniq_id(), full_refresh=True)
+    pipeline = destination_config.setup_pipeline("athena_" + uniq_id(), dev_mode=True)

     @dlt.resource(name="items", write_disposition="append")
     def items():

@@ -76,7 +76,7 @@ def items2():
 def test_athena_all_datatypes_and_timestamps(
     destination_config: DestinationTestConfiguration,
 ) -> None:
-    pipeline = destination_config.setup_pipeline("athena_" + uniq_id(), full_refresh=True)
+    pipeline = destination_config.setup_pipeline("athena_" + uniq_id(), dev_mode=True)

     # TIME is not supported
     column_schemas, data_types = table_update_and_row(exclude_types=["time"])

@@ -164,7 +164,7 @@ def my_source() -> Any:
     ids=lambda x: x.name,
 )
 def test_athena_blocks_time_column(destination_config: DestinationTestConfiguration) -> None:
-    pipeline = destination_config.setup_pipeline("athena_" + uniq_id(), full_refresh=True)
+    pipeline = destination_config.setup_pipeline("athena_" + uniq_id(), dev_mode=True)

     column_schemas, data_types = table_update_and_row()
tests/load/pipeline/test_dbt_helper.py (3 additions, 3 deletions)

@@ -39,7 +39,7 @@ def test_run_jaffle_package(
         pytest.skip(
             "dbt-athena requires database to be created and we don't do it in case of Jaffle"
         )
-    pipeline = destination_config.setup_pipeline("jaffle_jaffle", full_refresh=True)
+    pipeline = destination_config.setup_pipeline("jaffle_jaffle", dev_mode=True)
     # get runner, pass the env from fixture
     dbt = dlt.dbt.package(pipeline, "https://github.com/dbt-labs/jaffle_shop.git", venv=dbt_venv)
     # no default schema

@@ -76,7 +76,7 @@ def test_run_chess_dbt(destination_config: DestinationTestConfiguration, dbt_venv
     os.environ["CHESS_URL"] = "https://api.chess.com/pub/"

     pipeline = destination_config.setup_pipeline(
-        "chess_games", dataset_name="chess_dbt_test", full_refresh=True
+        "chess_games", dataset_name="chess_dbt_test", dev_mode=True
     )
     assert pipeline.default_schema_name is None
     # get the runner for the "dbt_transform" package

@@ -129,7 +129,7 @@ def test_run_chess_dbt_to_other_dataset(
     os.environ["CHESS_URL"] = "https://api.chess.com/pub/"

     pipeline = destination_config.setup_pipeline(
-        "chess_games", dataset_name="chess_dbt_test", full_refresh=True
+        "chess_games", dataset_name="chess_dbt_test", dev_mode=True
     )
     # load each schema in separate dataset
     pipeline.config.use_single_dataset = False
tests/load/pipeline/test_drop.py (10 additions, 10 deletions)

@@ -119,7 +119,7 @@ def test_drop_command_resources_and_state(destination_config: DestinationTestConfiguration) -> None:
     """Test the drop command with resource and state path options and
     verify correct data is deleted from destination and locally"""
     source = droppable_source()
-    pipeline = destination_config.setup_pipeline("drop_test_" + uniq_id(), full_refresh=True)
+    pipeline = destination_config.setup_pipeline("drop_test_" + uniq_id(), dev_mode=True)
     pipeline.run(source)

     attached = _attach(pipeline)

@@ -145,7 +145,7 @@ def test_drop_command_only_state(destination_config: DestinationTestConfiguration) -> None:
     """Test the drop command with resource and state path options and
     verify correct data is deleted from destination and locally"""
     source = droppable_source()
-    pipeline = destination_config.setup_pipeline("drop_test_" + uniq_id(), full_refresh=True)
+    pipeline = destination_config.setup_pipeline("drop_test_" + uniq_id(), dev_mode=True)
     pipeline.run(source)

     attached = _attach(pipeline)

@@ -168,7 +168,7 @@ def test_drop_command_only_state(destination_config: DestinationTestConfiguration) -> None:
 def test_drop_destination_tables_fails(destination_config: DestinationTestConfiguration) -> None:
     """Fail on drop tables. Command runs again."""
     source = droppable_source()
-    pipeline = destination_config.setup_pipeline("drop_test_" + uniq_id(), full_refresh=True)
+    pipeline = destination_config.setup_pipeline("drop_test_" + uniq_id(), dev_mode=True)
     pipeline.run(source)

     attached = _attach(pipeline)

@@ -194,7 +194,7 @@ def test_drop_destination_tables_fails(destination_config: DestinationTestConfiguration) -> None:
 def test_fail_after_drop_tables(destination_config: DestinationTestConfiguration) -> None:
     """Fail directly after drop tables. Command runs again ignoring destination tables missing."""
     source = droppable_source()
-    pipeline = destination_config.setup_pipeline("drop_test_" + uniq_id(), full_refresh=True)
+    pipeline = destination_config.setup_pipeline("drop_test_" + uniq_id(), dev_mode=True)
     pipeline.run(source)

     attached = _attach(pipeline)

@@ -218,7 +218,7 @@ def test_fail_after_drop_tables(destination_config: DestinationTestConfiguration) -> None:
 def test_load_step_fails(destination_config: DestinationTestConfiguration) -> None:
     """Test idempotence. pipeline.load() fails. Command can be run again successfully"""
     source = droppable_source()
-    pipeline = destination_config.setup_pipeline("drop_test_" + uniq_id(), full_refresh=True)
+    pipeline = destination_config.setup_pipeline("drop_test_" + uniq_id(), dev_mode=True)
     pipeline.run(source)

     attached = _attach(pipeline)

@@ -240,7 +240,7 @@ def test_load_step_fails(destination_config: DestinationTestConfiguration) -> None:
 )
 def test_resource_regex(destination_config: DestinationTestConfiguration) -> None:
     source = droppable_source()
-    pipeline = destination_config.setup_pipeline("drop_test_" + uniq_id(), full_refresh=True)
+    pipeline = destination_config.setup_pipeline("drop_test_" + uniq_id(), dev_mode=True)
     pipeline.run(source)

     attached = _attach(pipeline)

@@ -259,7 +259,7 @@ def test_resource_regex(destination_config: DestinationTestConfiguration) -> None:
 def test_drop_nothing(destination_config: DestinationTestConfiguration) -> None:
     """No resources, no state keys. Nothing is changed."""
     source = droppable_source()
-    pipeline = destination_config.setup_pipeline("drop_test_" + uniq_id(), full_refresh=True)
+    pipeline = destination_config.setup_pipeline("drop_test_" + uniq_id(), dev_mode=True)
     pipeline.run(source)

     attached = _attach(pipeline)

@@ -277,7 +277,7 @@ def test_drop_nothing(destination_config: DestinationTestConfiguration) -> None:
 def test_drop_all_flag(destination_config: DestinationTestConfiguration) -> None:
     """Using drop_all flag. Destination dataset and all local state is deleted"""
     source = droppable_source()
-    pipeline = destination_config.setup_pipeline("drop_test_" + uniq_id(), full_refresh=True)
+    pipeline = destination_config.setup_pipeline("drop_test_" + uniq_id(), dev_mode=True)
     pipeline.run(source)
     dlt_tables = [
         t["name"] for t in pipeline.default_schema.dlt_tables()

@@ -303,7 +303,7 @@ def test_drop_all_flag(destination_config: DestinationTestConfiguration) -> None:
 )
 def test_run_pipeline_after_partial_drop(destination_config: DestinationTestConfiguration) -> None:
     """Pipeline can be run again after dropping some resources"""
-    pipeline = destination_config.setup_pipeline("drop_test_" + uniq_id(), full_refresh=True)
+    pipeline = destination_config.setup_pipeline("drop_test_" + uniq_id(), dev_mode=True)
     pipeline.run(droppable_source())

     attached = _attach(pipeline)

@@ -322,7 +322,7 @@ def test_run_pipeline_after_partial_drop(destination_config: DestinationTestConfiguration) -> None:
 )
 def test_drop_state_only(destination_config: DestinationTestConfiguration) -> None:
     """Pipeline can be run again after dropping some resources"""
-    pipeline = destination_config.setup_pipeline("drop_test_" + uniq_id(), full_refresh=True)
+    pipeline = destination_config.setup_pipeline("drop_test_" + uniq_id(), dev_mode=True)
     pipeline.run(droppable_source())

     attached = _attach(pipeline)
(Diff truncated: 8 of 24 changed files shown.)
