From 81285424fe8944c6001b787a55b0f7dd1fea57b8 Mon Sep 17 00:00:00 2001 From: JamieDeMaria Date: Fri, 26 Jan 2024 14:48:55 -0500 Subject: [PATCH] remove asset_partition_*_for_output from docs --- .../partitions-schedules-sensors/partitioning-assets.mdx | 4 ++-- docs/content/integrations/bigquery/reference.mdx | 4 ++-- docs/content/integrations/dbt/reference.mdx | 2 +- docs/content/integrations/deltalake/reference.mdx | 4 ++-- docs/content/integrations/duckdb/reference.mdx | 4 ++-- docs/content/integrations/snowflake/reference.mdx | 4 ++-- .../partitions_schedules_sensors/partitioned_asset.py | 2 +- .../partitioned_asset_uses_io_manager.py | 2 +- .../integrations/bigquery/reference/static_partition.py | 2 +- .../integrations/bigquery/reference/time_partition.py | 2 +- .../integrations/deltalake/static_partition.py | 2 +- .../docs_snippets/integrations/deltalake/time_partition.py | 2 +- .../integrations/duckdb/reference/static_partition.py | 2 +- .../integrations/duckdb/reference/time_partition.py | 2 +- .../integrations/snowflake/static_partition.py | 2 +- .../docs_snippets/integrations/snowflake/time_partition.py | 2 +- .../project_fully_featured/assets/core/id_range_for_time.py | 6 +++--- .../project_fully_featured/assets/core/items.py | 4 ++-- .../with_wandb/assets/advanced_partitions_example.py | 6 +++--- .../with_wandb/assets/multi_partitions_example.py | 5 +++-- .../with_wandb/assets/simple_partitions_example.py | 5 +++-- 21 files changed, 35 insertions(+), 33 deletions(-) diff --git a/docs/content/concepts/partitions-schedules-sensors/partitioning-assets.mdx b/docs/content/concepts/partitions-schedules-sensors/partitioning-assets.mdx index c214dbb5663d8..4c49213aff777 100644 --- a/docs/content/concepts/partitions-schedules-sensors/partitioning-assets.mdx +++ b/docs/content/concepts/partitions-schedules-sensors/partitioning-assets.mdx @@ -70,7 +70,7 @@ from dagster import AssetExecutionContext, DailyPartitionsDefinition, asset 
@asset(partitions_def=DailyPartitionsDefinition(start_date="2023-10-01")) def my_daily_partitioned_asset(context: AssetExecutionContext) -> None: - partition_date_str = context.asset_partition_key_for_output() + partition_date_str = context.partition_key url = f"https://api.nasa.gov/planetary/apod?api_key=DEMO_KEY&date={partition_date_str}" target_location = f"nasa/{partition_date_str}.csv" @@ -208,7 +208,7 @@ from dagster import AssetExecutionContext, DailyPartitionsDefinition, asset @asset(partitions_def=DailyPartitionsDefinition(start_date="2022-01-01")) def my_daily_partitioned_asset(context: AssetExecutionContext) -> pd.DataFrame: - partition_date_str = context.asset_partition_key_for_output() + partition_date_str = context.partition_key return pd.read_csv(f"coolweatherwebsite.com/weather_obs&date={partition_date_str}") ``` diff --git a/docs/content/integrations/bigquery/reference.mdx b/docs/content/integrations/bigquery/reference.mdx index 1cc66f55d0f77..b4478d5e7400f 100644 --- a/docs/content/integrations/bigquery/reference.mdx +++ b/docs/content/integrations/bigquery/reference.mdx @@ -110,7 +110,7 @@ from dagster import AssetExecutionContext, StaticPartitionsDefinition, asset metadata={"partition_expr": "SPECIES"}, ) def iris_data_partitioned(context: AssetExecutionContext) -> pd.DataFrame: - species = context.asset_partition_key_for_output() + species = context.partition_key full_df = pd.read_csv( "https://docs.dagster.io/assets/iris.csv", @@ -165,7 +165,7 @@ from dagster import AssetExecutionContext, DailyPartitionsDefinition, asset metadata={"partition_expr": "TIMESTAMP_SECONDS(TIME)"}, ) def iris_data_per_day(context: AssetExecutionContext) -> pd.DataFrame: - partition = context.asset_partition_key_for_output() + partition = context.partition_key # get_iris_data_for_date fetches all of the iris data for a given date, # the returned dataframe contains a column named 'TIME' with that stores diff --git a/docs/content/integrations/dbt/reference.mdx 
b/docs/content/integrations/dbt/reference.mdx index 3fc50a7f1f85d..2ff26592dfaf1 100644 --- a/docs/content/integrations/dbt/reference.mdx +++ b/docs/content/integrations/dbt/reference.mdx @@ -681,7 +681,7 @@ You can define a Dagster 's start and end dates, and these can be passed to dbt's CLI as variables which can be used to filter incremental models. -When a partition definition to passed to the decorator, all assets are defined to operate on the same partitions. With this in mind, we can retrieve any time window from method in order to get the current start and end partitions. +When a partition definition is passed to the decorator, all assets are defined to operate on the same partitions. With this in mind, we can retrieve any time window from property in order to get the current start and end partitions. ```python import json diff --git a/docs/content/integrations/deltalake/reference.mdx b/docs/content/integrations/deltalake/reference.mdx index 4902838797bf1..0f51afab02715 100644 --- a/docs/content/integrations/deltalake/reference.mdx +++ b/docs/content/integrations/deltalake/reference.mdx @@ -81,7 +81,7 @@ from dagster import StaticPartitionsDefinition, asset metadata={"partition_expr": "species"}, ) def iris_dataset_partitioned(context) -> pd.DataFrame: - species = context.asset_partition_key_for_output() + species = context.partition_key full_df = pd.read_csv( "https://docs.dagster.io/assets/iris.csv", @@ -134,7 +134,7 @@ from dagster import DailyPartitionsDefinition, asset metadata={"partition_expr": "time"}, ) def iris_data_per_day(context) -> pd.DataFrame: - partition = context.asset_partition_key_for_output() + partition = context.partition_key # get_iris_data_for_date fetches all of the iris data for a given date, # the returned dataframe contains a column named 'time' with that stores diff --git a/docs/content/integrations/duckdb/reference.mdx b/docs/content/integrations/duckdb/reference.mdx index a6dc79e7c31c9..65d60db545776 ---
a/docs/content/integrations/duckdb/reference.mdx +++ b/docs/content/integrations/duckdb/reference.mdx @@ -113,7 +113,7 @@ from dagster import AssetExecutionContext, StaticPartitionsDefinition, asset metadata={"partition_expr": "SPECIES"}, ) def iris_dataset_partitioned(context: AssetExecutionContext) -> pd.DataFrame: - species = context.asset_partition_key_for_output() + species = context.partition_key full_df = pd.read_csv( "https://docs.dagster.io/assets/iris.csv", @@ -168,7 +168,7 @@ from dagster import AssetExecutionContext, DailyPartitionsDefinition, asset metadata={"partition_expr": "TO_TIMESTAMP(TIME)"}, ) def iris_data_per_day(context: AssetExecutionContext) -> pd.DataFrame: - partition = context.asset_partition_key_for_output() + partition = context.partition_key # get_iris_data_for_date fetches all of the iris data for a given date, # the returned dataframe contains a column named 'time' with that stores diff --git a/docs/content/integrations/snowflake/reference.mdx b/docs/content/integrations/snowflake/reference.mdx index c58345565fa4e..5983350f9627d 100644 --- a/docs/content/integrations/snowflake/reference.mdx +++ b/docs/content/integrations/snowflake/reference.mdx @@ -131,7 +131,7 @@ from dagster import AssetExecutionContext, StaticPartitionsDefinition, asset metadata={"partition_expr": "SPECIES"}, ) def iris_dataset_partitioned(context: AssetExecutionContext) -> pd.DataFrame: - species = context.asset_partition_key_for_output() + species = context.partition_key full_df = pd.read_csv( "https://docs.dagster.io/assets/iris.csv", @@ -186,7 +186,7 @@ from dagster import AssetExecutionContext, DailyPartitionsDefinition, asset metadata={"partition_expr": "TO_TIMESTAMP(TIME::INT)"}, ) def iris_data_per_day(context: AssetExecutionContext) -> pd.DataFrame: - partition = context.asset_partition_key_for_output() + partition = context.partition_key # get_iris_data_for_date fetches all of the iris data for a given date, # the returned dataframe contains a column 
named 'time' with that stores diff --git a/examples/docs_snippets/docs_snippets/concepts/partitions_schedules_sensors/partitioned_asset.py b/examples/docs_snippets/docs_snippets/concepts/partitions_schedules_sensors/partitioned_asset.py index 0d48267f6330d..1464602f3f294 100644 --- a/examples/docs_snippets/docs_snippets/concepts/partitions_schedules_sensors/partitioned_asset.py +++ b/examples/docs_snippets/docs_snippets/concepts/partitions_schedules_sensors/partitioned_asset.py @@ -11,7 +11,7 @@ @asset(partitions_def=DailyPartitionsDefinition(start_date="2023-10-01")) def my_daily_partitioned_asset(context: AssetExecutionContext) -> None: - partition_date_str = context.asset_partition_key_for_output() + partition_date_str = context.partition_key url = f"https://api.nasa.gov/planetary/apod?api_key=DEMO_KEY&date={partition_date_str}" target_location = f"nasa/{partition_date_str}.csv" diff --git a/examples/docs_snippets/docs_snippets/concepts/partitions_schedules_sensors/partitioned_asset_uses_io_manager.py b/examples/docs_snippets/docs_snippets/concepts/partitions_schedules_sensors/partitioned_asset_uses_io_manager.py index b192d3cb8eae3..9142f19489b24 100644 --- a/examples/docs_snippets/docs_snippets/concepts/partitions_schedules_sensors/partitioned_asset_uses_io_manager.py +++ b/examples/docs_snippets/docs_snippets/concepts/partitions_schedules_sensors/partitioned_asset_uses_io_manager.py @@ -5,5 +5,5 @@ @asset(partitions_def=DailyPartitionsDefinition(start_date="2022-01-01")) def my_daily_partitioned_asset(context: AssetExecutionContext) -> pd.DataFrame: - partition_date_str = context.asset_partition_key_for_output() + partition_date_str = context.partition_key return pd.read_csv(f"coolweatherwebsite.com/weather_obs&date={partition_date_str}") diff --git a/examples/docs_snippets/docs_snippets/integrations/bigquery/reference/static_partition.py b/examples/docs_snippets/docs_snippets/integrations/bigquery/reference/static_partition.py index 
95271ef9bd08a..43133df5cfd10 100644 --- a/examples/docs_snippets/docs_snippets/integrations/bigquery/reference/static_partition.py +++ b/examples/docs_snippets/docs_snippets/integrations/bigquery/reference/static_partition.py @@ -12,7 +12,7 @@ metadata={"partition_expr": "SPECIES"}, ) def iris_data_partitioned(context: AssetExecutionContext) -> pd.DataFrame: - species = context.asset_partition_key_for_output() + species = context.partition_key full_df = pd.read_csv( "https://docs.dagster.io/assets/iris.csv", diff --git a/examples/docs_snippets/docs_snippets/integrations/bigquery/reference/time_partition.py b/examples/docs_snippets/docs_snippets/integrations/bigquery/reference/time_partition.py index eb5688c27c97c..8c0f3377a046d 100644 --- a/examples/docs_snippets/docs_snippets/integrations/bigquery/reference/time_partition.py +++ b/examples/docs_snippets/docs_snippets/integrations/bigquery/reference/time_partition.py @@ -14,7 +14,7 @@ def get_iris_data_for_date(*args, **kwargs): metadata={"partition_expr": "TIMESTAMP_SECONDS(TIME)"}, ) def iris_data_per_day(context: AssetExecutionContext) -> pd.DataFrame: - partition = context.asset_partition_key_for_output() + partition = context.partition_key # get_iris_data_for_date fetches all of the iris data for a given date, # the returned dataframe contains a column named 'TIME' with that stores diff --git a/examples/docs_snippets/docs_snippets/integrations/deltalake/static_partition.py b/examples/docs_snippets/docs_snippets/integrations/deltalake/static_partition.py index 3e7243c423bcd..33bcab8d032c5 100644 --- a/examples/docs_snippets/docs_snippets/integrations/deltalake/static_partition.py +++ b/examples/docs_snippets/docs_snippets/integrations/deltalake/static_partition.py @@ -12,7 +12,7 @@ metadata={"partition_expr": "species"}, ) def iris_dataset_partitioned(context) -> pd.DataFrame: - species = context.asset_partition_key_for_output() + species = context.partition_key full_df = pd.read_csv( 
"https://docs.dagster.io/assets/iris.csv", diff --git a/examples/docs_snippets/docs_snippets/integrations/deltalake/time_partition.py b/examples/docs_snippets/docs_snippets/integrations/deltalake/time_partition.py index 7790a7e003d53..9cf1be2be9a39 100644 --- a/examples/docs_snippets/docs_snippets/integrations/deltalake/time_partition.py +++ b/examples/docs_snippets/docs_snippets/integrations/deltalake/time_partition.py @@ -14,7 +14,7 @@ def get_iris_data_for_date(*args, **kwargs): metadata={"partition_expr": "time"}, ) def iris_data_per_day(context) -> pd.DataFrame: - partition = context.asset_partition_key_for_output() + partition = context.partition_key # get_iris_data_for_date fetches all of the iris data for a given date, # the returned dataframe contains a column named 'time' with that stores diff --git a/examples/docs_snippets/docs_snippets/integrations/duckdb/reference/static_partition.py b/examples/docs_snippets/docs_snippets/integrations/duckdb/reference/static_partition.py index 299c4176fa9cb..d6f5506bee8b0 100644 --- a/examples/docs_snippets/docs_snippets/integrations/duckdb/reference/static_partition.py +++ b/examples/docs_snippets/docs_snippets/integrations/duckdb/reference/static_partition.py @@ -12,7 +12,7 @@ metadata={"partition_expr": "SPECIES"}, ) def iris_dataset_partitioned(context: AssetExecutionContext) -> pd.DataFrame: - species = context.asset_partition_key_for_output() + species = context.partition_key full_df = pd.read_csv( "https://docs.dagster.io/assets/iris.csv", diff --git a/examples/docs_snippets/docs_snippets/integrations/duckdb/reference/time_partition.py b/examples/docs_snippets/docs_snippets/integrations/duckdb/reference/time_partition.py index 7a7e2bc065bdc..120dbae8e05f0 100644 --- a/examples/docs_snippets/docs_snippets/integrations/duckdb/reference/time_partition.py +++ b/examples/docs_snippets/docs_snippets/integrations/duckdb/reference/time_partition.py @@ -14,7 +14,7 @@ def get_iris_data_for_date(*args, **kwargs): 
metadata={"partition_expr": "TO_TIMESTAMP(TIME)"}, ) def iris_data_per_day(context: AssetExecutionContext) -> pd.DataFrame: - partition = context.asset_partition_key_for_output() + partition = context.partition_key # get_iris_data_for_date fetches all of the iris data for a given date, # the returned dataframe contains a column named 'time' with that stores diff --git a/examples/docs_snippets/docs_snippets/integrations/snowflake/static_partition.py b/examples/docs_snippets/docs_snippets/integrations/snowflake/static_partition.py index 299c4176fa9cb..d6f5506bee8b0 100644 --- a/examples/docs_snippets/docs_snippets/integrations/snowflake/static_partition.py +++ b/examples/docs_snippets/docs_snippets/integrations/snowflake/static_partition.py @@ -12,7 +12,7 @@ metadata={"partition_expr": "SPECIES"}, ) def iris_dataset_partitioned(context: AssetExecutionContext) -> pd.DataFrame: - species = context.asset_partition_key_for_output() + species = context.partition_key full_df = pd.read_csv( "https://docs.dagster.io/assets/iris.csv", diff --git a/examples/docs_snippets/docs_snippets/integrations/snowflake/time_partition.py b/examples/docs_snippets/docs_snippets/integrations/snowflake/time_partition.py index 0632635fdf799..dd4cc22a03cc9 100644 --- a/examples/docs_snippets/docs_snippets/integrations/snowflake/time_partition.py +++ b/examples/docs_snippets/docs_snippets/integrations/snowflake/time_partition.py @@ -14,7 +14,7 @@ def get_iris_data_for_date(*args, **kwargs): metadata={"partition_expr": "TO_TIMESTAMP(TIME::INT)"}, ) def iris_data_per_day(context: AssetExecutionContext) -> pd.DataFrame: - partition = context.asset_partition_key_for_output() + partition = context.partition_key # get_iris_data_for_date fetches all of the iris data for a given date, # the returned dataframe contains a column named 'time' with that stores diff --git a/examples/project_fully_featured/project_fully_featured/assets/core/id_range_for_time.py 
b/examples/project_fully_featured/project_fully_featured/assets/core/id_range_for_time.py index ed8153e43f27d..64f9d4fc2c1ea 100644 --- a/examples/project_fully_featured/project_fully_featured/assets/core/id_range_for_time.py +++ b/examples/project_fully_featured/project_fully_featured/assets/core/id_range_for_time.py @@ -2,7 +2,7 @@ from typing import Any, Mapping, Tuple from dagster import ( - OpExecutionContext, + AssetExecutionContext, _check as check, ) @@ -89,8 +89,8 @@ def _get_item_timestamp(item_id): def id_range_for_time( - context: OpExecutionContext, hn_client: HNClient + context: AssetExecutionContext, hn_client: HNClient ) -> Tuple[Tuple[int, int], Mapping[str, Any]]: """For the configured time partition, searches for the range of ids that were created in that time.""" - start, end = context.asset_partitions_time_window_for_output() + start, end = context.partition_time_window return _id_range_for_time(int(start.timestamp()), int(end.timestamp()), hn_client) diff --git a/examples/project_fully_featured/project_fully_featured/assets/core/items.py b/examples/project_fully_featured/project_fully_featured/assets/core/items.py index 6b6d201dc51fc..7e782f1985b45 100644 --- a/examples/project_fully_featured/project_fully_featured/assets/core/items.py +++ b/examples/project_fully_featured/project_fully_featured/assets/core/items.py @@ -1,4 +1,4 @@ -from dagster import Output, asset +from dagster import AssetExecutionContext, Output, asset from pandas import DataFrame from pyspark.sql import DataFrame as SparkDF from pyspark.sql.types import ( @@ -39,7 +39,7 @@ partitions_def=hourly_partitions, key_prefix=["s3", "core"], ) -def items(context, hn_client: HNClient) -> Output[DataFrame]: +def items(context: AssetExecutionContext, hn_client: HNClient) -> Output[DataFrame]: """Items from the Hacker News API: each is a story or a comment on a story.""" (start_id, end_id), item_range_metadata = id_range_for_time(context, hn_client) diff --git 
a/examples/with_wandb/with_wandb/assets/advanced_partitions_example.py b/examples/with_wandb/with_wandb/assets/advanced_partitions_example.py index 2f66b4d47e7b5..314839735fae3 100644 --- a/examples/with_wandb/with_wandb/assets/advanced_partitions_example.py +++ b/examples/with_wandb/with_wandb/assets/advanced_partitions_example.py @@ -1,5 +1,5 @@ import wandb -from dagster import AssetIn, StaticPartitionsDefinition, asset +from dagster import AssetExecutionContext, AssetIn, StaticPartitionsDefinition, asset partitions_def = StaticPartitionsDefinition(["red", "orange", "yellow", "blue", "green"]) @@ -17,10 +17,10 @@ } }, ) -def write_advanced_artifact(context): +def write_advanced_artifact(context: AssetExecutionContext): """Example writing an Artifact with partitions and custom metadata.""" artifact = wandb.Artifact(ARTIFACT_NAME, "dataset") - partition_key = context.asset_partition_key_for_output() + partition_key = context.partition_key if partition_key == "red": return "red" diff --git a/examples/with_wandb/with_wandb/assets/multi_partitions_example.py b/examples/with_wandb/with_wandb/assets/multi_partitions_example.py index fe9da541636b0..de987407b40e4 100644 --- a/examples/with_wandb/with_wandb/assets/multi_partitions_example.py +++ b/examples/with_wandb/with_wandb/assets/multi_partitions_example.py @@ -1,5 +1,6 @@ import wandb from dagster import ( + AssetExecutionContext, AssetIn, DailyPartitionsDefinition, MultiPartitionsDefinition, @@ -26,9 +27,9 @@ } }, ) -def create_my_multi_partitioned_asset(context): +def create_my_multi_partitioned_asset(context: AssetExecutionContext): """Example writing an Artifact with mutli partitions and custom metadata.""" - partition_key = context.asset_partition_key_for_output() + partition_key = context.partition_key context.log.info(f"Creating partitioned asset for {partition_key}") if partition_key == "red|2023-01-02": artifact = wandb.Artifact("my_multi_partitioned_asset", "dataset") diff --git 
a/examples/with_wandb/with_wandb/assets/simple_partitions_example.py b/examples/with_wandb/with_wandb/assets/simple_partitions_example.py index 48a9f2349d2bf..da264ea28cee6 100644 --- a/examples/with_wandb/with_wandb/assets/simple_partitions_example.py +++ b/examples/with_wandb/with_wandb/assets/simple_partitions_example.py @@ -1,6 +1,7 @@ import random from dagster import ( + AssetExecutionContext, AssetIn, DailyPartitionsDefinition, TimeWindowPartitionMapping, @@ -21,11 +22,11 @@ } }, ) -def create_my_daily_partitioned_asset(context): +def create_my_daily_partitioned_asset(context: AssetExecutionContext): """Example writing an Artifact with daily partitions and custom metadata.""" # Happens when the asset is materialized in multiple runs (one per partition) if context.has_partition_key: - partition_key = context.asset_partition_key_for_output() + partition_key = context.partition_key context.log.info(f"Creating partitioned asset for {partition_key}") return random.randint(0, 100)