From 5520f74002bd3d1dba01161220846f22da4bd146 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Stephan=20He=C3=9Felmann=20=28lgtf/39809=29?=
Date: Fri, 9 Apr 2021 16:21:19 +0200
Subject: [PATCH] Fix tests for dask dataframe and delayed backends

---
 kartothek/io/dask/delayed.py           | 2 ++
 kartothek/io/testing/update.py         | 2 +-
 tests/io/dask/dataframe/test_update.py | 6 ++++++
 3 files changed, 9 insertions(+), 1 deletion(-)

diff --git a/kartothek/io/dask/delayed.py b/kartothek/io/dask/delayed.py
index bbf8c002..e308aa68 100644
--- a/kartothek/io/dask/delayed.py
+++ b/kartothek/io/dask/delayed.py
@@ -264,6 +264,7 @@ def update_dataset_from_delayed(
     sort_partitions_by=None,
     secondary_indices=None,
     factory=None,
+    table_name=SINGLE_TABLE,
 ):
     """
     A dask.delayed graph to add and store a list of dictionaries containing
@@ -304,6 +305,7 @@ def update_dataset_from_delayed(
         df_serializer=df_serializer,
         dataset_uuid=dataset_uuid,
         sort_partitions_by=sort_partitions_by,
+        dataset_table_name=table_name,
     )

     return dask.delayed(update_dataset_from_partitions)(
diff --git a/kartothek/io/testing/update.py b/kartothek/io/testing/update.py
index a1029891..ea9bcd68 100644
--- a/kartothek/io/testing/update.py
+++ b/kartothek/io/testing/update.py
@@ -656,7 +656,7 @@ def test_update_of_dataset_with_non_default_table_name(
         [df_update],
         store=store_factory,
         dataset_uuid=dataset_uuid,
-        table="non-default-name",
+        table_name="non-default-name",
         partition_on=["date"],
     )
     dm = DatasetMetadata.load_from_store(dataset_uuid, store_factory())
diff --git a/tests/io/dask/dataframe/test_update.py b/tests/io/dask/dataframe/test_update.py
index 9a9d66f9..712f8d0d 100644
--- a/tests/io/dask/dataframe/test_update.py
+++ b/tests/io/dask/dataframe/test_update.py
@@ -32,6 +32,12 @@ def _update_dataset(partitions, *args, **kwargs):
     else:
         partitions = None

+    # Replace `table_name` with `table` keyword argument to enable shared test code
+    # via `bound_update_dataset` fixture
+    if "table_name" in kwargs:
+        kwargs["table"] = kwargs["table_name"]
+        del kwargs["table_name"]
+
     ddf = update_dataset_from_ddf(partitions, *args, **kwargs)

     s = pickle.dumps(ddf, pickle.HIGHEST_PROTOCOL)