diff --git a/docs/docs-beta/docs/dagster-plus/deployment/deployment-types/serverless/security.md b/docs/docs-beta/docs/dagster-plus/deployment/deployment-types/serverless/security.md
index 1996c5fd07abf..d3dcfc23fc69d 100644
--- a/docs/docs-beta/docs/dagster-plus/deployment/deployment-types/serverless/security.md
+++ b/docs/docs-beta/docs/dagster-plus/deployment/deployment-types/serverless/security.md
@@ -22,11 +22,11 @@ The default I/O manager cannot be used if you are a Serverless user who:
- Are otherwise working with data subject to GDPR or other such regulations
:::
-In Serverless, code that uses the default [I/O manager](/guides/build/configure/io-managers) is automatically adjusted to save data in Dagster+ managed storage. This automatic change is useful because the Serverless filesystem is ephemeral, which means the default I/O manager wouldn't work as expected.
+In Serverless, code that uses the default [I/O manager](/guides/operate/io-managers) is automatically adjusted to save data in Dagster+ managed storage. This automatic change is useful because the Serverless filesystem is ephemeral, which means the default I/O manager wouldn't work as expected.
However, this automatic change also means potentially sensitive data could be **stored** and not just processed or orchestrated by Dagster+.
-To prevent this, you can use [another I/O manager](/guides/build/configure/io-managers#built-in) that stores data in your infrastructure or [adapt your code to avoid using an I/O manager](/guides/build/configure/io-managers#before-you-begin).
+To prevent this, you can use [another I/O manager](/guides/operate/io-managers#built-in) that stores data in your infrastructure or [adapt your code to avoid using an I/O manager](/guides/operate/io-managers#before-you-begin).
:::note
You must have [boto3](https://pypi.org/project/boto3/) or `dagster-cloud[serverless]` installed as a project dependency otherwise the Dagster+ managed storage can fail and silently fall back to using the default I/O manager.
diff --git a/docs/docs-beta/docs/dagster-plus/deployment/management/settings/customizing-agent-settings.md b/docs/docs-beta/docs/dagster-plus/deployment/management/settings/customizing-agent-settings.md
index 726017a31251e..dad9c38061c50 100644
--- a/docs/docs-beta/docs/dagster-plus/deployment/management/settings/customizing-agent-settings.md
+++ b/docs/docs-beta/docs/dagster-plus/deployment/management/settings/customizing-agent-settings.md
@@ -132,4 +132,4 @@ compute_logs:
ServerSideEncryption: "AES256"
show_url_only: true
region: "us-west-1"
-```
\ No newline at end of file
+```
diff --git a/docs/docs-beta/docs/dagster-plus/features/authentication-and-access-control/rbac/user-roles-permissions.md b/docs/docs-beta/docs/dagster-plus/features/authentication-and-access-control/rbac/user-roles-permissions.md
index f1edcbb2d1cd3..2242c7b52d9c4 100644
--- a/docs/docs-beta/docs/dagster-plus/features/authentication-and-access-control/rbac/user-roles-permissions.md
+++ b/docs/docs-beta/docs/dagster-plus/features/authentication-and-access-control/rbac/user-roles-permissions.md
@@ -115,7 +115,7 @@ TODO: add picture previously at "/images/dagster-cloud/user-token-management/cod
| Start and stop [schedules](/guides/automate/schedules) | ❌ | ❌ | ✅ | ✅ | ✅ |
| Start and stop [schedules](/guides/automate/sensors) | ❌ | ❌ | ✅ | ✅ | ✅ |
| Wipe assets | ❌ | ❌ | ✅ | ✅ | ✅ |
-| Launch and cancel [schedules](/guides/build/backfill) | ❌ | ✅ | ✅ | ✅ | ✅ |
+| Launch and cancel [schedules](/guides/automate/schedules) | ❌ | ✅ | ✅ | ✅ | ✅ |
| Add dynamic partitions | ❌ | ❌ | ✅ | ✅ | ✅ |
### Deployments
diff --git a/docs/docs-beta/docs/dagster-plus/features/authentication-and-access-control/scim/okta-scim.md b/docs/docs-beta/docs/dagster-plus/features/authentication-and-access-control/scim/okta-scim.md
index 139740bf5e532..a575f1688d586 100644
--- a/docs/docs-beta/docs/dagster-plus/features/authentication-and-access-control/scim/okta-scim.md
+++ b/docs/docs-beta/docs/dagster-plus/features/authentication-and-access-control/scim/okta-scim.md
@@ -18,7 +18,7 @@ In this guide, we'll walk you through configuring [Okta SCIM provisioning](https
With Dagster+'s Okta SCIM provisioning feature, you can:
- **Create users**. Users that are assigned to the Dagster+ application in the IdP will be automatically added to your Dagster+ organization.
-- **Update user attributes.** Updating a user’s name or email address in the IdP will automatically sync the change to your user list in Dagster+.
+- **Update user attributes.** Updating a user's name or email address in the IdP will automatically sync the change to your user list in Dagster+.
- **Remove users.** Deactivating or unassigning a user from the Dagster+ application in the IdP will remove them from the Dagster+ organization
{/* - **Push user groups.** Groups and their members in the IdP can be pushed to Dagster+ as [Teams](/dagster-plus/account/managing-users/managing-teams). */}
- **Push user groups.** Groups and their members in the IdP can be pushed to Dagster+ as
diff --git a/docs/docs-beta/docs/dagster-plus/features/catalog-views.md b/docs/docs-beta/docs/dagster-plus/features/catalog-views.md
index 201467bd939ad..750ee2781e7d8 100644
--- a/docs/docs-beta/docs/dagster-plus/features/catalog-views.md
+++ b/docs/docs-beta/docs/dagster-plus/features/catalog-views.md
@@ -17,7 +17,7 @@ In this guide, you'll learn how to create, access, and share catalog views with
Prerequisites
- **Organization Admin**, **Admin**, or **Editor** permissions on Dagster+
-- Familiarity with [Assets](/guides/build/assets-concepts/index.mdx and [Asset metadata](/guides/build/create-a-pipeline/metadata)
+- Familiarity with [Assets](/guides/build/create-asset-pipelines/assets-concepts/index.mdx) and [Asset metadata](/guides/build/create-asset-pipelines/metadata)
diff --git a/docs/docs-beta/docs/dagster-plus/features/ci-cd/branch-deployments/change-tracking.md b/docs/docs-beta/docs/dagster-plus/features/ci-cd/branch-deployments/change-tracking.md
index 03a4feae766e8..2e2efbecf4a86 100644
--- a/docs/docs-beta/docs/dagster-plus/features/ci-cd/branch-deployments/change-tracking.md
+++ b/docs/docs-beta/docs/dagster-plus/features/ci-cd/branch-deployments/change-tracking.md
@@ -8,7 +8,7 @@ unlisted: true
This guide is applicable to Dagster+.
:::
-Branch Deployments Change Tracking makes it eaiser for you and your team to identify how changes in a pull request will impact data assets. By the end of this guide, you'll understand how Change Tracking works and what types of asset changes can be detected.
+Branch Deployments Change Tracking makes it easier for you and your team to identify how changes in a pull request will impact data assets. By the end of this guide, you'll understand how Change Tracking works and what types of asset changes can be detected.
## How it works
diff --git a/docs/docs-beta/docs/dagster-plus/features/ci-cd/branch-deployments/testing.md b/docs/docs-beta/docs/dagster-plus/features/ci-cd/branch-deployments/testing.md
index cdc92aeb8bb45..93f96ee47de3e 100644
--- a/docs/docs-beta/docs/dagster-plus/features/ci-cd/branch-deployments/testing.md
+++ b/docs/docs-beta/docs/dagster-plus/features/ci-cd/branch-deployments/testing.md
@@ -8,14 +8,14 @@ unlisted: true
This guide is applicable to Dagster+.
:::
-This guide details a workflow to test Dagster code in your cloud environment without impacting your production data. To highlight this functionality, we’ll leverage Dagster+ branch deployments and a Snowflake database to:
+This guide details a workflow to test Dagster code in your cloud environment without impacting your production data. To highlight this functionality, we'll leverage Dagster+ branch deployments and a Snowflake database to:
- Execute code on a feature branch directly on Dagster+
- Read and write to a unique per-branch clone of our Snowflake data
With these tools, we can merge changes with confidence in the impact on our data platform and with the assurance that our code will execute as intended.
-Here’s an overview of the main concepts we’ll be using:
+Here's an overview of the main concepts we'll be using:
{/* - [Assets](/concepts/assets/software-defined-assets) - We'll define three assets that each persist a table to Snowflake. */}
- [Assets](/todo) - We'll define three assets that each persist a table to Snowflake.
@@ -35,7 +35,7 @@ Here’s an overview of the main concepts we’ll be using:
## Prerequisites
:::note
- This guide is an extension of the Transitioning data pipelines from development to production guide, illustrating a workflow for staging deployments. We’ll use the examples from this guide to build a workflow atop Dagster+’s branch deployment feature.
+ This guide is an extension of the Transitioning data pipelines from development to production guide, illustrating a workflow for staging deployments. We'll use the examples from this guide to build a workflow atop Dagster+'s branch deployment feature.
:::
To complete the steps in this guide, you'll need:
@@ -52,7 +52,7 @@ To complete the steps in this guide, you'll need:
## Overview
-We have a `PRODUCTION` Snowflake database with a schema named `HACKER_NEWS`. In our production cloud environment, we’d like to write tables to Snowflake containing subsets of Hacker News data. These tables will be:
+We have a `PRODUCTION` Snowflake database with a schema named `HACKER_NEWS`. In our production cloud environment, we'd like to write tables to Snowflake containing subsets of Hacker News data. These tables will be:
- `ITEMS` - A table containing the entire dataset
- `COMMENTS` - A table containing data about comments
@@ -128,14 +128,14 @@ As you can see, our assets use an [I/O manager](/todo) named `snowflake_io_manag
## Step 2: Configure our assets for each environment
-At runtime, we’d like to determine which environment our code is running in: branch deployment, or production. This information dictates how our code should execute, specifically with which credentials and with which database.
+At runtime, we'd like to determine which environment our code is running in: branch deployment, or production. This information dictates how our code should execute, specifically with which credentials and with which database.
-To ensure we can't accidentally write to production from within our branch deployment, we’ll use a different set of credentials from production and write to our database clone.
+To ensure we can't accidentally write to production from within our branch deployment, we'll use a different set of credentials from production and write to our database clone.
{/* Dagster automatically sets certain [environment variables](/dagster-plus/managing-deployments/reserved-environment-variables) containing deployment metadata, allowing us to read these environment variables to discern between deployments. We can access the `DAGSTER_CLOUD_IS_BRANCH_DEPLOYMENT` environment variable to determine the currently executing environment. */}
Dagster automatically sets certain [environment variables](/todo) containing deployment metadata, allowing us to read these environment variables to discern between deployments. We can access the `DAGSTER_CLOUD_IS_BRANCH_DEPLOYMENT` environment variable to determine the currently executing environment.
-Because we want to configure our assets to write to Snowflake using a different set of credentials and database in each environment, we’ll configure a separate I/O manager for each environment:
+Because we want to configure our assets to write to Snowflake using a different set of credentials and database in each environment, we'll configure a separate I/O manager for each environment:
```python file=/guides/dagster/development_to_production/branch_deployments/repository_v1.py startafter=start_repository endbefore=end_repository
# definitions.py
@@ -232,7 +232,7 @@ def drop_prod_clone():
drop_database_clone()
```
-We’ve defined `drop_database_clone` and `clone_production_database` to utilize the . The Snowflake resource will use the same configuration as the Snowflake I/O manager to generate a connection to Snowflake. However, while our I/O manager writes outputs to Snowflake, the Snowflake resource executes queries against Snowflake.
+We've defined `drop_database_clone` and `clone_production_database` to utilize the . The Snowflake resource will use the same configuration as the Snowflake I/O manager to generate a connection to Snowflake. However, while our I/O manager writes outputs to Snowflake, the Snowflake resource executes queries against Snowflake.
We now need to define resources that configure our jobs to the current environment. We can modify the resource mapping by environment as follows:
@@ -322,7 +322,7 @@ Opening a pull request for our current branch will automatically kick off a bran
Alternatively, the logs for the branch deployment workflow can be found in the **Actions** tab on the GitHub pull request.
-We can also view our database in Snowflake to confirm that a clone exists for each branch deployment. When we materialize our assets within our branch deployment, we’ll now be writing to our clone of `PRODUCTION`. Within Snowflake, we can run queries against this clone to confirm the validity of our data:
+We can also view our database in Snowflake to confirm that a clone exists for each branch deployment. When we materialize our assets within our branch deployment, we'll now be writing to our clone of `PRODUCTION`. Within Snowflake, we can run queries against this clone to confirm the validity of our data:
![Instance overview](/images/guides/development_to_production/branch_deployments/snowflake.png)
@@ -383,7 +383,7 @@ Opening a merge request for our current branch will automatically kick off a bra
![Instance overview](/images/guides/development_to_production/branch_deployments/instance_overview.png)
-We can also view our database in Snowflake to confirm that a clone exists for each branch deployment. When we materialize our assets within our branch deployment, we’ll now be writing to our clone of `PRODUCTION`. Within Snowflake, we can run queries against this clone to confirm the validity of our data:
+We can also view our database in Snowflake to confirm that a clone exists for each branch deployment. When we materialize our assets within our branch deployment, we'll now be writing to our clone of `PRODUCTION`. Within Snowflake, we can run queries against this clone to confirm the validity of our data:
![Instance overview](/images/guides/development_to_production/branch_deployments/snowflake.png)
@@ -489,4 +489,4 @@ close_branch:
After merging our branch, viewing our Snowflake database will confirm that our branch deployment step has successfully deleted our database clone.
-We’ve now built an elegant workflow that enables future branch deployments to automatically have access to their own clones of our production database that are cleaned up upon merge!
+We've now built an elegant workflow that enables future branch deployments to automatically have access to their own clones of our production database that are cleaned up upon merge!
diff --git a/docs/docs-beta/docs/dagster-plus/features/insights/asset-metadata.md b/docs/docs-beta/docs/dagster-plus/features/insights/asset-metadata.md
index ba05db163f29d..e0aecebc1bef4 100644
--- a/docs/docs-beta/docs/dagster-plus/features/insights/asset-metadata.md
+++ b/docs/docs-beta/docs/dagster-plus/features/insights/asset-metadata.md
@@ -21,7 +21,7 @@ You'll need one or more assets that emit the same metadata key at run time. Insi
are most valuable when you have multiple assets that emit the same kind of metadata, such as
such as the number of rows processed or the size of a file uploaded to object storage.
-Follow [the metadata guide](/guides/build/create-a-pipeline/metadata#runtime-metadata) to add numeric metadata
+Follow [the metadata guide](/guides/build/create-asset-pipelines/metadata#runtime-metadata) to add numeric metadata
to your asset materializations.
## Step 2: Enable viewing your metadata in Dagster+ Insights
diff --git a/docs/docs-beta/docs/dagster-plus/index.md b/docs/docs-beta/docs/dagster-plus/index.md
index fa8e16a0df269..77b8ba4b0511e 100644
--- a/docs/docs-beta/docs/dagster-plus/index.md
+++ b/docs/docs-beta/docs/dagster-plus/index.md
@@ -7,7 +7,7 @@ Dagster+ is a managed orchestration platform built on top of Dagster's open sour
Dagster+ is built to be the most performant, reliable, and cost effective way for data engineering teams to run Dagster in production. Dagster+ is also great for students, researchers, or individuals who want to explore Dagster with minimal overhead.
-Dagster+ comes in two flavors: a fully [Serverless](/dagster-plus/deployment/deployment-types/serverless) offering and a [Hybrid](/dagster-plus/deployment/deployment-types/hybrid) offering. In both cases, Dagster+ does the hard work of managing your data orchestration control plane. Compared to a [Dagster open source deployment](/guides/), Dagster+ manages:
+Dagster+ comes in two flavors: a fully [Serverless](/dagster-plus/deployment/deployment-types/serverless) offering and a [Hybrid](/dagster-plus/deployment/deployment-types/hybrid) offering. In both cases, Dagster+ does the hard work of managing your data orchestration control plane. Compared to a [Dagster open source deployment](/guides/deploy/index.md), Dagster+ manages:
- Dagster's web UI at https://dagster.plus
- Metadata stores for data cataloging and cost insights
diff --git a/docs/docs-beta/docs/getting-started/glossary.md b/docs/docs-beta/docs/getting-started/glossary.md
index 8d0b5a8038b65..f2319cb8b8100 100644
--- a/docs/docs-beta/docs/getting-started/glossary.md
+++ b/docs/docs-beta/docs/getting-started/glossary.md
@@ -1,7 +1,6 @@
---
title: Glossary
sidebar_position: 30
-sidebar_label: Glossary
unlisted: true
---
diff --git a/docs/docs-beta/docs/getting-started/installation.md b/docs/docs-beta/docs/getting-started/installation.md
index e35319282fb79..80b27bcbc49dd 100644
--- a/docs/docs-beta/docs/getting-started/installation.md
+++ b/docs/docs-beta/docs/getting-started/installation.md
@@ -5,8 +5,6 @@ sidebar_position: 20
sidebar_label: Installation
---
-# Installing Dagster
-
To follow the steps in this guide, you'll need:
- To install Python 3.9 or higher. **Python 3.12 is recommended**.
@@ -72,4 +70,4 @@ If you encounter any issues during the installation process:
## Next steps
- Get up and running with your first Dagster project in the [Quickstart](/getting-started/quickstart)
-- Learn to [create data assets in Dagster](/guides/build/create-a-pipeline/data-assets)
+- Learn to [create data assets in Dagster](/guides/build/create-asset-pipelines/data-assets)
diff --git a/docs/docs-beta/docs/getting-started/quickstart.md b/docs/docs-beta/docs/getting-started/quickstart.md
index afb15e582ed45..4cf472297aa99 100644
--- a/docs/docs-beta/docs/getting-started/quickstart.md
+++ b/docs/docs-beta/docs/getting-started/quickstart.md
@@ -1,12 +1,10 @@
---
-title: "Dagster quickstart"
+title: Build your first Dagster project
description: Learn how to quickly get up and running with Dagster
sidebar_position: 30
sidebar_label: "Quickstart"
---
-# Build your first Dagster project
-
Welcome to Dagster! In this guide, you'll use Dagster to create a basic pipeline that:
- Extracts data from a CSV file
@@ -154,4 +152,4 @@ id,name,age,city,age_group
Congratulations! You've just built and run your first pipeline with Dagster. Next, you can:
- Continue with the [ETL pipeline tutorial](/tutorial/tutorial-etl) to learn how to build a more complex ETL pipeline
-- Learn how to [Think in assets](/guides/build/assets-concepts/index.md)
+- Learn how to [Think in assets](/guides/build/create-asset-pipelines/assets-concepts/index.md)
diff --git a/docs/docs-beta/docs/guides/automate/about-automation.md b/docs/docs-beta/docs/guides/automate/about-automation.md
index 70f0f94643e50..5eff71c1d7a29 100644
--- a/docs/docs-beta/docs/guides/automate/about-automation.md
+++ b/docs/docs-beta/docs/guides/automate/about-automation.md
@@ -3,6 +3,8 @@ title: About Automation
unlisted: true
---
+{/* TODO combine with index page and delete this page */}
+
There are several ways to automate the execution of your data pipelines with Dagster.
The first system, and the most basic, is the [Schedule](/guides/automate/schedules), which responds to time.
@@ -24,8 +26,6 @@ as the schedule is processed.
Schedules were one of the first types of automation in Dagster, created before the introduction of Software-Defined Assets.
As such, you may find that many of the examples can seem foreign if you are used to only working within the asset framework.
-For more on how assets and ops inter-relate, read about [Assets and Ops](/guides/build/assets-concepts#assets-and-ops)
-
The `dagster-daemon` process is responsible for submitting runs by checking each schedule at a regular interval to determine
if it's time to execute the underlying job.
diff --git a/docs/docs-beta/docs/guides/automate/asset-sensors.md b/docs/docs-beta/docs/guides/automate/asset-sensors.md
index 2c71bdaca5bd3..2789290ef9d0c 100644
--- a/docs/docs-beta/docs/guides/automate/asset-sensors.md
+++ b/docs/docs-beta/docs/guides/automate/asset-sensors.md
@@ -1,22 +1,17 @@
---
-title: Triggering cross-job dependencies with Asset Sensors
-sidebar_position: 300
-sidebar_label: Cross-job dependencies
+title: Trigger cross-job dependencies with asset sensors
+sidebar_position: 40
---
Asset sensors in Dagster provide a powerful mechanism for monitoring asset materializations and triggering downstream computations or notifications based on those events.
This guide covers the most common use cases for asset sensors, such as defining cross-job and cross-code location dependencies.
-
-Prerequisites
+:::note
-To follow this guide, you'll need:
+This documentation assumes familiarity with [assets](/guides/build/create-asset-pipelines/assets-concepts/index.md) and [ops and jobs](/guides/build/ops-jobs)
-- Familiarity with [Assets](/guides/build/assets-concepts/index.mdx
-- Familiarity with [Ops and Jobs](/guides/build/ops-jobs)
-
-
+:::
## Getting started
@@ -54,7 +49,7 @@ This is an example of an asset sensor that triggers a job when an asset is mater
-## Customize evaluation logic
+## Customizing the evaluation function of an asset sensor
You can customize the evaluation function of an asset sensor to include specific logic for deciding when to trigger a run. This allows for fine-grained control over the conditions under which downstream jobs are executed.
@@ -83,7 +78,7 @@ In the following example, the `@asset_sensor` decorator defines a custom evaluat
-## Trigger a job with configuration
+## Triggering a job with custom configuration
By providing a configuration to the `RunRequest` object, you can trigger a job with a specific configuration. This is useful when you want to trigger a job with custom parameters based on custom logic you define.
@@ -91,7 +86,7 @@ For example, you might use a sensor to trigger a job when an asset is materializ
-## Monitor multiple assets
+## Monitoring multiple assets
When building a pipeline, you may want to monitor multiple assets with a single sensor. This can be accomplished with a multi-asset sensor.
diff --git a/docs/docs-beta/docs/guides/automate/declarative-automation.md b/docs/docs-beta/docs/guides/automate/declarative-automation.md
deleted file mode 100644
index 9a26ff8d2a427..0000000000000
--- a/docs/docs-beta/docs/guides/automate/declarative-automation.md
+++ /dev/null
@@ -1,6 +0,0 @@
----
-title: "Declarative automation"
-sidebar_label: "Declarative automation"
-unlisted: true
-sidebar_position: 400
----
diff --git a/docs/docs-beta/docs/guides/automate/declarative-automation/customizing-automation-conditions/arbitrary-python-automation-conditions.md b/docs/docs-beta/docs/guides/automate/declarative-automation/customizing-automation-conditions/arbitrary-python-automation-conditions.md
new file mode 100644
index 0000000000000..8169a9a425493
--- /dev/null
+++ b/docs/docs-beta/docs/guides/automate/declarative-automation/customizing-automation-conditions/arbitrary-python-automation-conditions.md
@@ -0,0 +1,5 @@
+---
+title: Arbitrary Python automation conditions
+sidebar_position: 500
+unlisted: true
+---
\ No newline at end of file
diff --git a/docs/docs-beta/docs/guides/automate/declarative-automation/customizing-automation-conditions/automation-condition-operands-and-operators.md b/docs/docs-beta/docs/guides/automate/declarative-automation/customizing-automation-conditions/automation-condition-operands-and-operators.md
new file mode 100644
index 0000000000000..b8504ea31ab01
--- /dev/null
+++ b/docs/docs-beta/docs/guides/automate/declarative-automation/customizing-automation-conditions/automation-condition-operands-and-operators.md
@@ -0,0 +1,11 @@
+---
+title: Automation conditions operands and operators
+sidebar_position: 600
+unlisted: true
+---
+
+## Operands
+
+## Operators
+
+## Composite conditions
\ No newline at end of file
diff --git a/docs/docs-beta/docs/guides/automate/declarative-automation/customizing-automation-conditions/describing-conditions-with-labels.md b/docs/docs-beta/docs/guides/automate/declarative-automation/customizing-automation-conditions/describing-conditions-with-labels.md
new file mode 100644
index 0000000000000..f466c42afc228
--- /dev/null
+++ b/docs/docs-beta/docs/guides/automate/declarative-automation/customizing-automation-conditions/describing-conditions-with-labels.md
@@ -0,0 +1,5 @@
+---
+title: Describing conditions with labels
+sidebar_position: 100
+unlisted: true
+---
\ No newline at end of file
diff --git a/docs/docs-beta/docs/guides/automate/declarative-automation/customizing-automation-conditions/ignoring-missing-upstream-data.md b/docs/docs-beta/docs/guides/automate/declarative-automation/customizing-automation-conditions/ignoring-missing-upstream-data.md
new file mode 100644
index 0000000000000..ae56d39609ae3
--- /dev/null
+++ b/docs/docs-beta/docs/guides/automate/declarative-automation/customizing-automation-conditions/ignoring-missing-upstream-data.md
@@ -0,0 +1,5 @@
+---
+title: Ignoring missing upstream data
+sidebar_position: 200
+unlisted: true
+---
\ No newline at end of file
diff --git a/docs/docs-beta/docs/guides/automate/declarative-automation/customizing-automation-conditions/index.md b/docs/docs-beta/docs/guides/automate/declarative-automation/customizing-automation-conditions/index.md
new file mode 100644
index 0000000000000..c49add6466871
--- /dev/null
+++ b/docs/docs-beta/docs/guides/automate/declarative-automation/customizing-automation-conditions/index.md
@@ -0,0 +1,11 @@
+---
+title: Customizing automation conditions
+sidebar_position: 10
+unlisted: true
+---
+
+Each `AutomationCondition` consists of a set of operands and various operators that you can combine to suit your needs. For a full list of operands, operators, and composition conditions, see [Automation condition operands and operators](automation-condition-operands-and-operators).
+
+import DocCardList from '@theme/DocCardList';
+
+
\ No newline at end of file
diff --git a/docs/docs-beta/docs/guides/automate/declarative-automation/customizing-automation-conditions/targeting-upstream-dependencies.md b/docs/docs-beta/docs/guides/automate/declarative-automation/customizing-automation-conditions/targeting-upstream-dependencies.md
new file mode 100644
index 0000000000000..8d40442e6f2de
--- /dev/null
+++ b/docs/docs-beta/docs/guides/automate/declarative-automation/customizing-automation-conditions/targeting-upstream-dependencies.md
@@ -0,0 +1,5 @@
+---
+title: Targeting upstream dependencies
+sidebar_position: 400
+unlisted: true
+---
\ No newline at end of file
diff --git a/docs/docs-beta/docs/guides/automate/declarative-automation/customizing-automation-conditions/updating-older-time-partitions.md b/docs/docs-beta/docs/guides/automate/declarative-automation/customizing-automation-conditions/updating-older-time-partitions.md
new file mode 100644
index 0000000000000..a936e299808a2
--- /dev/null
+++ b/docs/docs-beta/docs/guides/automate/declarative-automation/customizing-automation-conditions/updating-older-time-partitions.md
@@ -0,0 +1,5 @@
+---
+title: Updating older time partitions
+sidebar_position: 300
+unlisted: true
+---
\ No newline at end of file
diff --git a/docs/docs-beta/docs/guides/automate/declarative-automation/index.md b/docs/docs-beta/docs/guides/automate/declarative-automation/index.md
new file mode 100644
index 0000000000000..63ddcd8b761a9
--- /dev/null
+++ b/docs/docs-beta/docs/guides/automate/declarative-automation/index.md
@@ -0,0 +1,13 @@
+---
+title: Declarative automation
+sidebar_position: 20
+unlisted: true
+---
+
+{/* TODO: What is DA, when would you use it */}
+
+## Automation conditions
+
+## Sensors
+
+{/* The fact that you need to enable the automation condition sensor in the Dagster UI needs to be mentioned much earlier--as is, the current page is confusing. */}
\ No newline at end of file
diff --git a/docs/docs-beta/docs/guides/automate/declarative-automation/migrating-to-declarative-automation.md b/docs/docs-beta/docs/guides/automate/declarative-automation/migrating-to-declarative-automation.md
new file mode 100644
index 0000000000000..64832e0030c18
--- /dev/null
+++ b/docs/docs-beta/docs/guides/automate/declarative-automation/migrating-to-declarative-automation.md
@@ -0,0 +1,5 @@
+---
+title: Migrating to Declarative Automation
+sidebar_position: 100
+unlisted: true
+---
\ No newline at end of file
diff --git a/docs/docs-beta/docs/guides/automate/graphql-triggers.md b/docs/docs-beta/docs/guides/automate/graphql-triggers.md
new file mode 100644
index 0000000000000..59b5328225b2e
--- /dev/null
+++ b/docs/docs-beta/docs/guides/automate/graphql-triggers.md
@@ -0,0 +1,5 @@
+---
+title: GraphQL triggers
+sidebar_position: 50
+unlisted: true
+---
\ No newline at end of file
diff --git a/docs/docs-beta/docs/guides/automate/index.md b/docs/docs-beta/docs/guides/automate/index.md
index a10399054e850..2bd067aa64309 100644
--- a/docs/docs-beta/docs/guides/automate/index.md
+++ b/docs/docs-beta/docs/guides/automate/index.md
@@ -1,7 +1,6 @@
---
title: "Automating pipelines"
description: Learn how to automate your data pipelines.
-sidebar_position: 40
sidebar_class_name: hidden
---
@@ -12,7 +11,7 @@ Automation is key to building reliable, efficient data pipelines. This guide pro
Before continuing, you should be familiar with:
-- [Asset definitions](/guides/build/assets-concepts/index.mdx
+- [Asset definitions](/guides/build/create-asset-pipelines/assets-concepts/index.mdx)
- [Jobs](/guides/build/ops-jobs)
diff --git a/docs/docs-beta/docs/guides/automate/schedules.md b/docs/docs-beta/docs/guides/automate/schedules.md
index 3d749758ecb87..2f4a31d0d3941 100644
--- a/docs/docs-beta/docs/guides/automate/schedules.md
+++ b/docs/docs-beta/docs/guides/automate/schedules.md
@@ -1,7 +1,6 @@
---
-title: "Schedule cron-based pipelines"
-sidebar_label: "Schedules"
-sidebar_position: 100
+title: "Automate jobs with schedules"
+sidebar_position: 10
---
Schedules enable automated execution of jobs at specified intervals. These intervals can range from common frequencies like hourly, daily, or weekly, to more intricate patterns defined using cron expressions.
@@ -11,7 +10,7 @@ Schedules enable automated execution of jobs at specified intervals. These inter
To follow the steps in this guide, you'll need:
-- Familiarity with [Assets](/guides/build/assets-concepts/index.mdx
+- Familiarity with [Assets](/guides/build/create-asset-pipelines/assets-concepts/index.mdx)
- Familiarity with [Ops and Jobs](/guides/build/ops-jobs/index.md)
@@ -41,7 +40,7 @@ If using partitions and jobs, you can create a schedule using the partition with
-If you have a [partitioned asset](/guides/build/create-a-pipeline/partitioning) and job:
+If you have a [partitioned asset](/guides/build/create-asset-pipelines/partitioning) and job:
diff --git a/docs/docs-beta/docs/guides/automate/sensors.md b/docs/docs-beta/docs/guides/automate/sensors.md
index 71be08e940220..9158ba523da6f 100644
--- a/docs/docs-beta/docs/guides/automate/sensors.md
+++ b/docs/docs-beta/docs/guides/automate/sensors.md
@@ -1,7 +1,6 @@
---
-title: Creating event-based pipelines with sensors
-sidebar_label: Event triggers
-sidebar_position: 200
+title: Create event-based pipelines with sensors
+sidebar_position: 30
---
Sensors enable you to trigger Dagster runs in response to events from external systems. They run at regular intervals, either triggering a run or explaining why a run was skipped. For example, you can trigger a run when a new file is added to an Amazon S3 bucket or when a database row is updated.
@@ -15,7 +14,7 @@ An alternative to polling with sensors is to push events to Dagster using the [D
To follow the steps in this guide, you'll need:
-- Familiarity with [Assets](/guides/build/assets-concepts/index.mdx
+- Familiarity with [Assets](/guides/build/create-asset-pipelines/assets-concepts/index.mdx)
- Familiarity with [Ops and Jobs](/guides/build/ops-jobs)
diff --git a/docs/docs-beta/docs/guides/build/backfill.md b/docs/docs-beta/docs/guides/build/backfill.md
deleted file mode 100644
index 8dbffbfc8197e..0000000000000
--- a/docs/docs-beta/docs/guides/build/backfill.md
+++ /dev/null
@@ -1,6 +0,0 @@
----
-title: Backfilling data
-sidebar_label: "Backfilling"
-unlisted: true
-sidebar_position: 200
----
diff --git a/docs/docs-beta/docs/guides/build/components.md b/docs/docs-beta/docs/guides/build/components.md
index dbcc57d260ac7..3f380bfc6ed48 100644
--- a/docs/docs-beta/docs/guides/build/components.md
+++ b/docs/docs-beta/docs/guides/build/components.md
@@ -7,14 +7,14 @@ Welcome to Dagster Components.
Dagster Components is a new way to structure your Dagster projects. It aims to provide:
-- An opinionated project layout that supports ongoing scaffolding from “Hello world” to the most advanced projects
+- An opinionated project layout that supports ongoing scaffolding from "Hello world" to the most advanced projects
- A class-based interface for dynamically constructing definitions
- A toolkit to build YAML DSL frontends for components so that components can be constructed in a low-code fashion.
- A format for components to provide their own scaffolding, in order to organize and reference integration-specific artifacts files.
## Project Setup
-First let's install the `dg` command line tool. This lives in the published Python package `dagster-dg`. `dg` is designed to be globally installed and has no dependency on `dagster` itself. We will use the tool feature of Python package manager `uv` to install a globally available `dg`. `dg` will also be use `uv` internally to manage the python enviroment associated with your project.
+First let's install the `dg` command line tool. This lives in the published Python package `dagster-dg`. `dg` is designed to be globally installed and has no dependency on `dagster` itself. We will use the tool feature of Python package manager `uv` to install a globally available `dg`. `dg` will also use `uv` internally to manage the Python environment associated with your project.
```bash
brew install uv && uv tool install -e -e $DAGSTER_GIT_REPO_DIR/python_modules/libraries/dagster-dg/
@@ -67,7 +67,7 @@ You can see that we have a basic project structure with a few non-standard files
## Hello Platform
-We are going to set up a data platform using sling to ingest data, dbt to process the data, and python to do AI.
+We are going to set up a data platform using sling to ingest data, dbt to process the data, and Python to do AI.
### Ingest
@@ -80,7 +80,7 @@ dagster_components.pipes_subprocess_script_collection
Assets that wrap Python scripts executed with Dagster's PipesSubprocessClient.
```
-This is because the basic `dagster-components` package is lightweight and doesn't include copmonents for specific tools. We can get access to a `sling` component by installing the `sling` extra:
+This is because the basic `dagster-components` package is lightweight and doesn't include components for specific tools. We can get access to a `sling` component by installing the `sling` extra:
```bash
uv add 'dagster-components[sling]' dagster-embedded-elt
@@ -137,7 +137,7 @@ params: {}
The `replication.yaml` file is a sling-specific file.
-We want to replicate data on the public internet into duckdb:
+We want to replicate data on the public internet into DuckDB:
```bash
uv run sling conns set DUCKDB type=duckdb instance=/tmp/jaffle_platform.duckdb
@@ -151,7 +151,7 @@ uv run sling conns test DUCKDB
4:55PM INF success!
```
-Now let's download a file locally (sling doesn’t support reading from the public internet):
+Now let's download a file locally (sling doesn't support reading from the public internet):
```bash
curl -O https://raw.githubusercontent.com/dbt-labs/jaffle-shop-classic/refs/heads/main/seeds/raw_customers.csv &&
diff --git a/docs/docs-beta/docs/guides/build/configure/resources.md b/docs/docs-beta/docs/guides/build/configure/resources.md
deleted file mode 100644
index 632cd296f6091..0000000000000
--- a/docs/docs-beta/docs/guides/build/configure/resources.md
+++ /dev/null
@@ -1,8 +0,0 @@
----
-title: Using Resources to manage external systems
-sidebar_label: Resources
-unlisted: true
-sidebar_position: 400
----
-
-Dagster resources are objects that provide access to external systems, databases, or services. Resources are used to manage connections to external systems, and are used by Dagster ops and assets.
diff --git a/docs/docs-beta/docs/guides/build/configure/apis.md b/docs/docs-beta/docs/guides/build/connect-to-external-systems/apis.md
similarity index 94%
rename from docs/docs-beta/docs/guides/build/configure/apis.md
rename to docs/docs-beta/docs/guides/build/connect-to-external-systems/apis.md
index 005a19ae99c50..ef82c10c910ad 100644
--- a/docs/docs-beta/docs/guides/build/configure/apis.md
+++ b/docs/docs-beta/docs/guides/build/connect-to-external-systems/apis.md
@@ -1,7 +1,6 @@
---
-title: Connecting to APIs
+title: Connect to APIs
sidebar_position: 600
-sidebar_label: API connections
---
When building a data pipeline, you'll likely need to connect to several external APIs, each with its own specific configuration and behavior. This guide demonstrates how to standardize your API connections and customize their configuration using Dagster resources.
@@ -11,8 +10,8 @@ When building a data pipeline, you'll likely need to connect to several external
To follow the steps in this guide, you'll need:
-- Familiarity with [Assets](/guides/build/assets-concepts/index.mdx
-- Familiarity with [Resources](/guides/build/configure/resources)
+- Familiarity with [Assets](/guides/build/create-asset-pipelines/assets-concepts/index.mdx)
+- Familiarity with [Resources](/guides/build/connect-to-external-systems/resources)
- To install the `requests` library:
```bash
pip install requests
diff --git a/docs/docs-beta/docs/guides/build/configure/authentication.md b/docs/docs-beta/docs/guides/build/connect-to-external-systems/authentication.md
similarity index 100%
rename from docs/docs-beta/docs/guides/build/configure/authentication.md
rename to docs/docs-beta/docs/guides/build/connect-to-external-systems/authentication.md
diff --git a/docs/docs-beta/docs/guides/build/integrate/cloud-services.md b/docs/docs-beta/docs/guides/build/connect-to-external-systems/cloud-services.md
similarity index 55%
rename from docs/docs-beta/docs/guides/build/integrate/cloud-services.md
rename to docs/docs-beta/docs/guides/build/connect-to-external-systems/cloud-services.md
index 1fdc39f3d75fd..4c3663b72f32f 100644
--- a/docs/docs-beta/docs/guides/build/integrate/cloud-services.md
+++ b/docs/docs-beta/docs/guides/build/connect-to-external-systems/cloud-services.md
@@ -1,5 +1,5 @@
---
-title: Connecting to cloud services
+title: Connect to cloud services
sidebar_position: 600
unlisted: true
---
diff --git a/docs/docs-beta/docs/guides/build/configure/databases.md b/docs/docs-beta/docs/guides/build/connect-to-external-systems/databases.md
similarity index 89%
rename from docs/docs-beta/docs/guides/build/configure/databases.md
rename to docs/docs-beta/docs/guides/build/connect-to-external-systems/databases.md
index 84132316e9c87..2e44ed9466cc6 100644
--- a/docs/docs-beta/docs/guides/build/configure/databases.md
+++ b/docs/docs-beta/docs/guides/build/connect-to-external-systems/databases.md
@@ -1,8 +1,6 @@
---
-title: Connecting to databases
-description: How to configure resources to connect to databases
+title: Connect to a database
sidebar_position: 500
-sidebar_label: Database connections
---
When building a data pipeline, you may need to extract data from or load data into a database. In Dagster, resources can be used to connect to a database by acting as a wrapper around a database client.
@@ -14,7 +12,7 @@ This guide demonstrates how to standardize database connections and customize th
To follow the steps in this guide, you'll need:
-- Familiarity with [Assets](/guides/build/create-a-pipeline/data-assets)
+- Familiarity with [Assets](/guides/build/create-asset-pipelines/data-assets)
To run the examples in this guide, you'll need:
@@ -59,5 +57,5 @@ You can also fetch environment variables using the `os` library. Dagster treats
## Next steps
-- Explore how to use resources for [Connecting to APIs](/guides/build/configure/apis)
-- Go deeper into [Understanding Resources](/guides/build/configure/resources)
\ No newline at end of file
+- Explore how to use resources for [Connecting to APIs](/guides/build/connect-to-external-systems/apis)
+- Go deeper into [Understanding Resources](/guides/build/connect-to-external-systems/resources)
\ No newline at end of file
diff --git a/docs/docs-beta/docs/guides/build/configure/index.mdx b/docs/docs-beta/docs/guides/build/connect-to-external-systems/index.md
similarity index 56%
rename from docs/docs-beta/docs/guides/build/configure/index.mdx
rename to docs/docs-beta/docs/guides/build/connect-to-external-systems/index.md
index 69dae1c64cc29..f9b1e1c94e363 100644
--- a/docs/docs-beta/docs/guides/build/configure/index.mdx
+++ b/docs/docs-beta/docs/guides/build/connect-to-external-systems/index.md
@@ -1,6 +1,5 @@
---
-title: "Configure"
-description: "Configure assets, pipelines, and runs"
+title: "Connect to external systems"
sidebar_position: 20
---
diff --git a/docs/docs-beta/docs/guides/build/connect-to-external-systems/resources.md b/docs/docs-beta/docs/guides/build/connect-to-external-systems/resources.md
new file mode 100644
index 0000000000000..804cb6e9113f6
--- /dev/null
+++ b/docs/docs-beta/docs/guides/build/connect-to-external-systems/resources.md
@@ -0,0 +1,7 @@
+---
+title: Connect to external systems with Resources
+unlisted: true
+sidebar_position: 100
+---
+
+Dagster resources are objects that provide access to external systems, databases, or services. Resources are used to manage connections to external systems, and are used by Dagster assets and ops.
diff --git a/docs/docs-beta/docs/guides/build/assets-concepts/asset-dependencies.md b/docs/docs-beta/docs/guides/build/create-asset-pipelines/assets-concepts/asset-dependencies.md
similarity index 97%
rename from docs/docs-beta/docs/guides/build/assets-concepts/asset-dependencies.md
rename to docs/docs-beta/docs/guides/build/create-asset-pipelines/assets-concepts/asset-dependencies.md
index 20495ba819d7f..f5a387a8c8572 100644
--- a/docs/docs-beta/docs/guides/build/assets-concepts/asset-dependencies.md
+++ b/docs/docs-beta/docs/guides/build/create-asset-pipelines/assets-concepts/asset-dependencies.md
@@ -1,7 +1,7 @@
---
title: Pass data between assets
description: Learn how to pass data between assets in Dagster
-sidebar_position: 200
+sidebar_position: 400
---
In Dagster, assets are the building blocks of your data pipeline and it's common to want to pass data between them. This guide will help you understand how to pass data between assets.
@@ -54,7 +54,7 @@ I/O managers handle:
1. **Input**: Reading data from storage and loading it into memory for use by dependent assets.
2. **Output**: Writing data to the configured storage location.
-For a deeper understanding of I/O managers, check out the [Understanding I/O managers](/guides/build/configure/io-managers) guide.
+For a deeper understanding of I/O managers, check out the [Understanding I/O managers](/guides/operate/io-managers) guide.
@@ -91,7 +91,7 @@ Consider this example:
This example downloads a zip file from Google Drive, unzips it, and loads the data into a Pandas DataFrame. It relies on each asset running on the same file system to perform these operations.
-The assets are modeled as tasks, rather than as data assets. For more information on the difference between tasks and data assets, check out the [assets guide](/guides/build/assets-concepts/index.md).
+The assets are modeled as tasks, rather than as data assets. For more information on the difference between tasks and data assets, check out the [assets guide](/guides/build/create-asset-pipelines/assets-concepts/).
In this refactor, the `download_files`, `unzip_files`, and `load_data` assets are combined into a single asset, `my_dataset`. This asset downloads the files, unzips them, and loads the data into a data warehouse.
diff --git a/docs/docs-beta/docs/guides/build/assets-concepts/asset-factories-with-deps.md b/docs/docs-beta/docs/guides/build/create-asset-pipelines/assets-concepts/asset-factories-with-deps.md
similarity index 87%
rename from docs/docs-beta/docs/guides/build/assets-concepts/asset-factories-with-deps.md
rename to docs/docs-beta/docs/guides/build/create-asset-pipelines/assets-concepts/asset-factories-with-deps.md
index 07de1116baee6..468b48b38fa2b 100644
--- a/docs/docs-beta/docs/guides/build/assets-concepts/asset-factories-with-deps.md
+++ b/docs/docs-beta/docs/guides/build/create-asset-pipelines/assets-concepts/asset-factories-with-deps.md
@@ -1,7 +1,7 @@
---
title: 'Programmatically defining dependencies using asset factories'
-sidebar_position: 400
-sidebar_label: 'Asset Factories (2)'
+sidebar_position: 500
+sidebar_label: 'Asset factories with dependencies'
---
In data engineering, it's often helpful to reuse code to define similar assets. For example, you may want to represent every file in a directory as an asset.
@@ -10,11 +10,11 @@ Additionally, you may be serving stakeholders who aren't familiar with Python or
Using an asset factory reduces complexity and creates a pluggable entry point to define additional assets.
-
- Prerequisites
+:::note
-This guide builds upon the concepts in the [asset factories](/guides/build/configure/asset-factories) tutorial.
-
+This guide builds on the concepts in the [asset factories](asset-factories) documentation.
+
+:::
---
diff --git a/docs/docs-beta/docs/guides/build/configure/asset-factories.md b/docs/docs-beta/docs/guides/build/create-asset-pipelines/assets-concepts/asset-factories.md
similarity index 95%
rename from docs/docs-beta/docs/guides/build/configure/asset-factories.md
rename to docs/docs-beta/docs/guides/build/create-asset-pipelines/assets-concepts/asset-factories.md
index 9f7c3e687cb37..c6a0f87bdc330 100644
--- a/docs/docs-beta/docs/guides/build/configure/asset-factories.md
+++ b/docs/docs-beta/docs/guides/build/create-asset-pipelines/assets-concepts/asset-factories.md
@@ -19,8 +19,8 @@ The asset factory pattern can solve both of these problems.
To follow the steps in this guide, you'll need:
- Familiarity with:
- - [Assets](/guides/build/create-a-pipeline/data-assets)
- - [Resources](/guides/build/configure/resources)
+ - [Assets](/guides/build/create-asset-pipelines/data-assets)
+ - [Resources](/guides/build/connect-to-external-systems/resources)
- SQL, YAML and Amazon Web Services (AWS) S3
- [Pydantic](https://docs.pydantic.dev/latest/) and [Jinja2](https://jinja.palletsprojects.com/en/3.1.x/)
- A Python virtual environment with the following dependencies installed:
diff --git a/docs/docs-beta/docs/guides/build/assets-concepts/asset-materialization.md b/docs/docs-beta/docs/guides/build/create-asset-pipelines/assets-concepts/asset-materialization.md
similarity index 54%
rename from docs/docs-beta/docs/guides/build/assets-concepts/asset-materialization.md
rename to docs/docs-beta/docs/guides/build/create-asset-pipelines/assets-concepts/asset-materialization.md
index 4bb472f5c7884..e33fbcbe51d65 100644
--- a/docs/docs-beta/docs/guides/build/assets-concepts/asset-materialization.md
+++ b/docs/docs-beta/docs/guides/build/create-asset-pipelines/assets-concepts/asset-materialization.md
@@ -1,7 +1,6 @@
---
title: "Asset materialization"
-sidebar_position: 300
+sidebar_position: 200
unlisted: true
---
-# Asset materialization
diff --git a/docs/docs-beta/docs/guides/build/assets-concepts/asset-metadata.md b/docs/docs-beta/docs/guides/build/create-asset-pipelines/assets-concepts/asset-metadata.md
similarity index 55%
rename from docs/docs-beta/docs/guides/build/assets-concepts/asset-metadata.md
rename to docs/docs-beta/docs/guides/build/create-asset-pipelines/assets-concepts/asset-metadata.md
index 276b764359664..022164ac12efd 100644
--- a/docs/docs-beta/docs/guides/build/assets-concepts/asset-metadata.md
+++ b/docs/docs-beta/docs/guides/build/create-asset-pipelines/assets-concepts/asset-metadata.md
@@ -1,7 +1,6 @@
---
title: "Asset metadata"
-sidebar_position: 100
+sidebar_position: 300
unlisted: true
---
-# Asset metadata
diff --git a/docs/docs-beta/docs/guides/build/configure/configuring-assets.md b/docs/docs-beta/docs/guides/build/create-asset-pipelines/assets-concepts/configuring-assets.md
similarity index 84%
rename from docs/docs-beta/docs/guides/build/configure/configuring-assets.md
rename to docs/docs-beta/docs/guides/build/create-asset-pipelines/assets-concepts/configuring-assets.md
index da028f5761cc6..23d425b0574be 100644
--- a/docs/docs-beta/docs/guides/build/configure/configuring-assets.md
+++ b/docs/docs-beta/docs/guides/build/create-asset-pipelines/assets-concepts/configuring-assets.md
@@ -13,7 +13,7 @@ You'll often want to be able to adjust parameters when materializing assets, whi
To follow the steps in this guide, you'll need familiarity with:
-- [Assets](/guides/build/create-a-pipeline/data-assets)
+- [Assets](/guides/build/create-asset-pipelines/data-assets)
- [Pydantic](https://docs.pydantic.dev/latest/)
@@ -44,5 +44,5 @@ This will open the Launchpad, where you can scaffold the config, customize its v
## Next steps
-- Learn more about Dagster [assets](/guides/build/assets-concepts/index.mdx
-- Connect to external [APIs](/guides/build/configure/apis) and [databases](/guides/build/configure/databases) with resources
+- Learn more about Dagster [assets](/guides/build/create-asset-pipelines/assets-concepts/index.mdx)
+- Connect to external [APIs](/guides/build/connect-to-external-systems/apis) and [databases](/guides/build/connect-to-external-systems/databases) with resources
diff --git a/docs/docs-beta/docs/guides/build/assets-concepts/index.md b/docs/docs-beta/docs/guides/build/create-asset-pipelines/assets-concepts/index.md
similarity index 78%
rename from docs/docs-beta/docs/guides/build/assets-concepts/index.md
rename to docs/docs-beta/docs/guides/build/create-asset-pipelines/assets-concepts/index.md
index 9e2f302b468a0..5019d27775ae9 100644
--- a/docs/docs-beta/docs/guides/build/assets-concepts/index.md
+++ b/docs/docs-beta/docs/guides/build/create-asset-pipelines/assets-concepts/index.md
@@ -1,10 +1,8 @@
---
title: "Assets concepts"
-sidebar_position: 40
-sidebar_class_name: hidden
+sidebar_position: 20
---
-TODO
## Assets and ops
diff --git a/docs/docs-beta/docs/guides/build/assets-concepts/selection-syntax.md b/docs/docs-beta/docs/guides/build/create-asset-pipelines/assets-concepts/selection-syntax.md
similarity index 99%
rename from docs/docs-beta/docs/guides/build/assets-concepts/selection-syntax.md
rename to docs/docs-beta/docs/guides/build/create-asset-pipelines/assets-concepts/selection-syntax.md
index 1676ed5af6a80..4c0646ec51446 100644
--- a/docs/docs-beta/docs/guides/build/assets-concepts/selection-syntax.md
+++ b/docs/docs-beta/docs/guides/build/create-asset-pipelines/assets-concepts/selection-syntax.md
@@ -1,6 +1,6 @@
---
title: 'Asset selection syntax'
-sidebar_position: 500
+sidebar_position: 600
sidebar_label: 'Asset selection syntax'
---
diff --git a/docs/docs-beta/docs/guides/build/create-a-pipeline/data-assets.md b/docs/docs-beta/docs/guides/build/create-asset-pipelines/data-assets.md
similarity index 92%
rename from docs/docs-beta/docs/guides/build/create-a-pipeline/data-assets.md
rename to docs/docs-beta/docs/guides/build/create-asset-pipelines/data-assets.md
index 5b37ffaaf1ec0..868b3e4641c40 100644
--- a/docs/docs-beta/docs/guides/build/create-a-pipeline/data-assets.md
+++ b/docs/docs-beta/docs/guides/build/create-asset-pipelines/data-assets.md
@@ -71,6 +71,6 @@ flowchart LR
## Next steps
-- Learn to create [dependencies between assets](/guides/build/assets-concepts/asset-dependencies)
-- Enrich Dagster's built-in data catalog with [asset metadata](/guides/build/create-a-pipeline/metadata)
-- Learn to use a [factory pattern](/guides/build/configure/asset-factories) to create multiple, similar assets
+- Enrich Dagster's built-in data catalog with [asset metadata](/guides/build/create-asset-pipelines/metadata)
+- Learn to create [dependencies between assets](/guides/build/create-asset-pipelines/assets-concepts/asset-dependencies)
+- Learn to use a [factory pattern](/guides/build/create-asset-pipelines/assets-concepts/asset-factories) to create multiple, similar assets
diff --git a/docs/docs-beta/docs/guides/build/create-a-pipeline/external-assets.md b/docs/docs-beta/docs/guides/build/create-asset-pipelines/external-assets.md
similarity index 98%
rename from docs/docs-beta/docs/guides/build/create-a-pipeline/external-assets.md
rename to docs/docs-beta/docs/guides/build/create-asset-pipelines/external-assets.md
index 482539981cc84..f958e17ca4532 100644
--- a/docs/docs-beta/docs/guides/build/create-a-pipeline/external-assets.md
+++ b/docs/docs-beta/docs/guides/build/create-asset-pipelines/external-assets.md
@@ -21,7 +21,7 @@ For example, external assets could be:
To follow the steps in this guide, you'll need:
-- Familiarity with [Assets](/guides/build/create-a-pipeline/data-assets)
+- Familiarity with [Assets](/guides/build/create-asset-pipelines/data-assets)
- Familiarity with [Sensors](/guides/automate/sensors)
diff --git a/docs/docs-beta/docs/guides/build/create-a-pipeline/index.md b/docs/docs-beta/docs/guides/build/create-asset-pipelines/index.md
similarity index 83%
rename from docs/docs-beta/docs/guides/build/create-a-pipeline/index.md
rename to docs/docs-beta/docs/guides/build/create-asset-pipelines/index.md
index db67cd5184f1d..4f14323e37cb0 100644
--- a/docs/docs-beta/docs/guides/build/create-a-pipeline/index.md
+++ b/docs/docs-beta/docs/guides/build/create-asset-pipelines/index.md
@@ -1,5 +1,5 @@
---
-title: "Create a pipeline"
+title: "Create asset pipelines"
description: "Learn how to create data pipelines using Dagster's asset-based approach"
sidebar_position: 10
---
@@ -46,10 +46,10 @@ Most Dagster pipelines follow these steps:
To start building your Dagster pipeline, dive into the following guides:
-- [Define data assets](/guides/build/create-a-pipeline/data-assets)
-- [Create dependencies between assets](/guides/build/assets-concepts/asset-dependencies)
-- [Enrich assets with metadata](/guides/build/create-a-pipeline/metadata)
-- [Partition assets](/guides/build/create-a-pipeline/partitioning)
-- [Represent external data sources](/guides/build/create-a-pipeline/external-assets)
+- [Define data assets](/guides/build/create-asset-pipelines/data-assets)
+- [Create dependencies between assets](/guides/build/create-asset-pipelines/assets-concepts/asset-dependencies)
+- [Enrich assets with metadata](/guides/build/create-asset-pipelines/metadata)
+- [Partition assets](/guides/build/create-asset-pipelines/partitioning)
+- [Represent external data sources](/guides/build/create-asset-pipelines/external-assets)
By following these guides, you'll learn how to create powerful, maintainable data pipelines using Dagster's asset-based approach.
diff --git a/docs/docs-beta/docs/guides/build/create-a-pipeline/metadata.md b/docs/docs-beta/docs/guides/build/create-asset-pipelines/metadata.md
similarity index 96%
rename from docs/docs-beta/docs/guides/build/create-a-pipeline/metadata.md
rename to docs/docs-beta/docs/guides/build/create-asset-pipelines/metadata.md
index 94f420a438669..b6f0222176adf 100644
--- a/docs/docs-beta/docs/guides/build/create-a-pipeline/metadata.md
+++ b/docs/docs-beta/docs/guides/build/create-asset-pipelines/metadata.md
@@ -19,7 +19,7 @@ Using metadata in Dagster, you can:
To follow the steps in this guide, you'll need:
-- Familiarity with [Assets](/guides/build/create-a-pipeline/data-assets)
+- Familiarity with [Assets](/guides/build/create-asset-pipelines/data-assets)
## Adding owners to assets \{#owners}
@@ -62,7 +62,7 @@ Using definition metadata to describe assets can make it easy to provide context
-To learn more about the different types of metadata you can attach, see the [`MetadataValue`](/todo) API docs.
+To learn more about the different types of metadata you can attach, see the [`MetadataValue`](</api/python-api/metadata#class dagster.MetadataValue>) API docs.
Some metadata keys will be given special treatment in the Dagster UI. See the [Standard metadata types](#standard-metadata-types) section for more information.
@@ -147,7 +147,7 @@ Dagster can automatically attach code references to assets during local developm
### Customizing code references \{#custom-references}
-If you want to customize how code references are attached - such as when you are building [domain-specific languages with asset factories](/guides/build/configure/asset-factories) - you can manually add the `dagster/code_references` metadata to asset definitions:
+If you want to customize how code references are attached - such as when you are building [domain-specific languages with asset factories](/guides/build/create-asset-pipelines/assets-concepts/asset-factories) - you can manually add the `dagster/code_references` metadata to asset definitions:
diff --git a/docs/docs-beta/docs/guides/build/create-a-pipeline/partition-dependencies.md b/docs/docs-beta/docs/guides/build/create-asset-pipelines/partition-dependencies.md
similarity index 95%
rename from docs/docs-beta/docs/guides/build/create-a-pipeline/partition-dependencies.md
rename to docs/docs-beta/docs/guides/build/create-asset-pipelines/partition-dependencies.md
index d6ee59ba03478..14be62d3eb393 100644
--- a/docs/docs-beta/docs/guides/build/create-a-pipeline/partition-dependencies.md
+++ b/docs/docs-beta/docs/guides/build/create-asset-pipelines/partition-dependencies.md
@@ -19,8 +19,8 @@ Partitioned assets in Dagster can have dependencies on other partitioned assets,
To follow the steps in this guide, you'll need:
-- Familiarity with [Assets](/guides/build/create-a-pipeline/data-assets)
-- Familiarity with [Partitions](/guides/build/create-a-pipeline/partitioning)
+- Familiarity with [Assets](/guides/build/create-asset-pipelines/data-assets)
+- Familiarity with [Partitions](/guides/build/create-asset-pipelines/partitioning)
diff --git a/docs/docs-beta/docs/guides/build/create-a-pipeline/partitioning.md b/docs/docs-beta/docs/guides/build/create-asset-pipelines/partitioning.md
similarity index 97%
rename from docs/docs-beta/docs/guides/build/create-a-pipeline/partitioning.md
rename to docs/docs-beta/docs/guides/build/create-asset-pipelines/partitioning.md
index e82e16a28c601..b7af703bf186f 100644
--- a/docs/docs-beta/docs/guides/build/create-a-pipeline/partitioning.md
+++ b/docs/docs-beta/docs/guides/build/create-asset-pipelines/partitioning.md
@@ -19,7 +19,7 @@ There are several ways to partition your data in Dagster:
To follow the steps in this guide, you'll need:
-- Familiarity with [Assets](/guides/build/create-a-pipeline/data-assets)
+- Familiarity with [Assets](/guides/build/create-asset-pipelines/data-assets)
diff --git a/docs/docs-beta/docs/guides/build/index.md b/docs/docs-beta/docs/guides/build/index.md
deleted file mode 100644
index 9551dba1946eb..0000000000000
--- a/docs/docs-beta/docs/guides/build/index.md
+++ /dev/null
@@ -1,7 +0,0 @@
----
-title: "Building pipelines"
-sidebar_position: 30
-sidebar_class_name: hidden
----
-
-This section is about building asset pipelines.
\ No newline at end of file
diff --git a/docs/docs-beta/docs/guides/build/index.mdx b/docs/docs-beta/docs/guides/build/index.mdx
new file mode 100644
index 0000000000000..7e2a7a125d660
--- /dev/null
+++ b/docs/docs-beta/docs/guides/build/index.mdx
@@ -0,0 +1,8 @@
+---
+title: "Build pipelines"
+sidebar_class_name: hidden
+---
+
+import DocCardList from '@theme/DocCardList';
+
+
diff --git a/docs/docs-beta/docs/guides/build/integrate/build-your-own.md b/docs/docs-beta/docs/guides/build/integrate-with-external-systems/build-your-own.md
similarity index 100%
rename from docs/docs-beta/docs/guides/build/integrate/build-your-own.md
rename to docs/docs-beta/docs/guides/build/integrate-with-external-systems/build-your-own.md
diff --git a/docs/docs-beta/docs/guides/build/integrate/index.mdx b/docs/docs-beta/docs/guides/build/integrate-with-external-systems/index.md
similarity index 69%
rename from docs/docs-beta/docs/guides/build/integrate/index.mdx
rename to docs/docs-beta/docs/guides/build/integrate-with-external-systems/index.md
index 3aa86ed1f629e..98f5cd0a6c106 100644
--- a/docs/docs-beta/docs/guides/build/integrate/index.mdx
+++ b/docs/docs-beta/docs/guides/build/integrate-with-external-systems/index.md
@@ -1,5 +1,5 @@
---
-title: "Integrate"
+title: "Integrate with external systems"
sidebar_position: 30
---
diff --git a/docs/docs-beta/docs/guides/build/integrate/ingesting-data.md b/docs/docs-beta/docs/guides/build/integrate-with-external-systems/ingesting-data.md
similarity index 95%
rename from docs/docs-beta/docs/guides/build/integrate/ingesting-data.md
rename to docs/docs-beta/docs/guides/build/integrate-with-external-systems/ingesting-data.md
index 7d4d0467ff063..837c4e9c46be3 100644
--- a/docs/docs-beta/docs/guides/build/integrate/ingesting-data.md
+++ b/docs/docs-beta/docs/guides/build/integrate-with-external-systems/ingesting-data.md
@@ -18,7 +18,7 @@ Dagster is often used to orchestrate the ingestion of data into a data warehouse
To follow this guide, you'll need:
-- Familiarity with [Assets](/guides/build/assets-concepts/index.mdx
+- Familiarity with [Assets](/guides/build/create-asset-pipelines/assets-concepts/index.mdx)
## How Dagster supports data ingestion
@@ -48,5 +48,5 @@ For example, imagine there's a CSV file of counties on the internet and you want
## Next steps
-- Transform data using [Dagster's dbt integration](/guides/build/integrate/transform-dbt)
+- Transform data using [Dagster's dbt integration](/guides/build/integrate-with-external-systems/transform-dbt)
- Use asset checks [to test data quality](/guides/test/asset-checks) and [freshness](/guides/test/data-freshness-testing)
\ No newline at end of file
diff --git a/docs/docs-beta/docs/guides/build/integrate-with-external-systems/pipes/aws-ecs-pipeline.md b/docs/docs-beta/docs/guides/build/integrate-with-external-systems/pipes/aws-ecs-pipeline.md
new file mode 100644
index 0000000000000..6f78f00bd50f8
--- /dev/null
+++ b/docs/docs-beta/docs/guides/build/integrate-with-external-systems/pipes/aws-ecs-pipeline.md
@@ -0,0 +1,7 @@
+---
+title: Build pipelines with AWS ECS
+sidebar_position: 200
+unlisted: true
+---
+
+TK
\ No newline at end of file
diff --git a/docs/docs-beta/docs/guides/build/integrate-with-external-systems/pipes/aws-emr-pipeline.md b/docs/docs-beta/docs/guides/build/integrate-with-external-systems/pipes/aws-emr-pipeline.md
new file mode 100644
index 0000000000000..a04ceab320e74
--- /dev/null
+++ b/docs/docs-beta/docs/guides/build/integrate-with-external-systems/pipes/aws-emr-pipeline.md
@@ -0,0 +1,5 @@
+---
+title: Build pipelines with AWS EMR
+sidebar_position: 300
+unlisted: true
+---
\ No newline at end of file
diff --git a/docs/docs-beta/docs/guides/build/integrate-with-external-systems/pipes/aws-emr-serverless-pipeline.md b/docs/docs-beta/docs/guides/build/integrate-with-external-systems/pipes/aws-emr-serverless-pipeline.md
new file mode 100644
index 0000000000000..42d3c063a5f11
--- /dev/null
+++ b/docs/docs-beta/docs/guides/build/integrate-with-external-systems/pipes/aws-emr-serverless-pipeline.md
@@ -0,0 +1,5 @@
+---
+title: Build pipelines with AWS EMR Serverless
+sidebar_position: 300
+unlisted: true
+---
\ No newline at end of file
diff --git a/docs/docs-beta/docs/guides/build/integrate-with-external-systems/pipes/aws-glue-pipeline.md b/docs/docs-beta/docs/guides/build/integrate-with-external-systems/pipes/aws-glue-pipeline.md
new file mode 100644
index 0000000000000..0654a11b6473e
--- /dev/null
+++ b/docs/docs-beta/docs/guides/build/integrate-with-external-systems/pipes/aws-glue-pipeline.md
@@ -0,0 +1,5 @@
+---
+title: Build pipelines with AWS Glue
+sidebar_position: 400
+unlisted: true
+---
\ No newline at end of file
diff --git a/docs/docs-beta/docs/guides/build/integrate-with-external-systems/pipes/aws-lambda-pipeline.md b/docs/docs-beta/docs/guides/build/integrate-with-external-systems/pipes/aws-lambda-pipeline.md
new file mode 100644
index 0000000000000..849167276e3d3
--- /dev/null
+++ b/docs/docs-beta/docs/guides/build/integrate-with-external-systems/pipes/aws-lambda-pipeline.md
@@ -0,0 +1,5 @@
+---
+title: Build pipelines with AWS Lambda
+sidebar_position: 500
+unlisted: true
+---
\ No newline at end of file
diff --git a/docs/docs-beta/docs/guides/build/integrate-with-external-systems/pipes/databricks-pipeline.md b/docs/docs-beta/docs/guides/build/integrate-with-external-systems/pipes/databricks-pipeline.md
new file mode 100644
index 0000000000000..f3ba8d52ab213
--- /dev/null
+++ b/docs/docs-beta/docs/guides/build/integrate-with-external-systems/pipes/databricks-pipeline.md
@@ -0,0 +1,5 @@
+---
+title: Build pipelines with Databricks
+sidebar_position: 600
+unlisted: true
+---
\ No newline at end of file
diff --git a/docs/docs-beta/docs/guides/build/integrate/pipes.md b/docs/docs-beta/docs/guides/build/integrate-with-external-systems/pipes/index.md
similarity index 84%
rename from docs/docs-beta/docs/guides/build/integrate/pipes.md
rename to docs/docs-beta/docs/guides/build/integrate-with-external-systems/pipes/index.md
index 39a6152f256f2..7c254a27b617e 100644
--- a/docs/docs-beta/docs/guides/build/integrate/pipes.md
+++ b/docs/docs-beta/docs/guides/build/integrate-with-external-systems/pipes/index.md
@@ -1,20 +1,17 @@
---
-title: "Executing code outside of Dagster with Pipes"
-sidebar_position: 500
-sidebar_label: "External execution with Pipes"
+title: Dagster Pipes
+sidebar_position: 10
---
-# Executing code outside of Dagster with Pipes
-
-Dagster Pipes provides a powerful mechanism for invoking code outside of Dagster, while providing all the benefits of scheduling, reporting, and observability of native Dagster pipelines.
+Dagster Pipes provides a powerful mechanism for invoking code outside of Dagster, while providing all the benefits of scheduling, reporting, and observability of native Dagster pipelines. While Dagster is written in Python, you can run code in other languages and send information back to Dagster.
In this guide, we'll walk you through how to invoke non-Dagster code through Pipes.
-
-Prerequisites
+:::note
+
+This documentation assumes familiarity with [Dagster assets](/guides/build/create-asset-pipelines/assets-concepts).
-- Familiarity with [Assets](/guides/build/assets-concepts/index.mdx
-
+:::
## Setting up an asset that invokes your external code
diff --git a/docs/docs-beta/docs/guides/build/integrate/non-python.md b/docs/docs-beta/docs/guides/build/integrate-with-external-systems/pipes/javascript-pipeline.md
similarity index 88%
rename from docs/docs-beta/docs/guides/build/integrate/non-python.md
rename to docs/docs-beta/docs/guides/build/integrate-with-external-systems/pipes/javascript-pipeline.md
index 1f2618b62e259..ac97317f2c8cc 100644
--- a/docs/docs-beta/docs/guides/build/integrate/non-python.md
+++ b/docs/docs-beta/docs/guides/build/integrate-with-external-systems/pipes/javascript-pipeline.md
@@ -1,11 +1,8 @@
---
-title: "Using Dagster Pipes to execute non-Python languages"
-sidebar_label: "Dagster Pipes"
-sidebar_position: 300
+title: "Build pipelines in JavaScript"
+sidebar_position: 100
---
-Dagster is written in Python, but that doesn't mean it's that Python is the only language that can be used when materializing assets. With Dagster Pipes, you can run code in other languages and send information back to Dagster.
-
This guide covers how to run JavaScript with Dagster using Pipes, however, the same principle will apply to other languages.
@@ -13,7 +10,7 @@ This guide covers how to run JavaScript with Dagster using Pipes, however, the s
To follow this guide, you'll need:
-- Familiarity with [Assets](/guides/build/assets-concepts/index.mdx
+- Familiarity with [Assets](/guides/build/create-asset-pipelines/assets-concepts/index.mdx)
- A basic understanding of JavaScript and Node.js
To run the examples, you'll need to install:
diff --git a/docs/docs-beta/docs/guides/build/integrate-with-external-systems/pipes/kubernetes-pipeline.md b/docs/docs-beta/docs/guides/build/integrate-with-external-systems/pipes/kubernetes-pipeline.md
new file mode 100644
index 0000000000000..3a188fb039239
--- /dev/null
+++ b/docs/docs-beta/docs/guides/build/integrate-with-external-systems/pipes/kubernetes-pipeline.md
@@ -0,0 +1,5 @@
+---
+title: Build pipelines with Kubernetes
+sidebar_position: 700
+unlisted: true
+---
\ No newline at end of file
diff --git a/docs/docs-beta/docs/guides/build/integrate/transform-dbt.md b/docs/docs-beta/docs/guides/build/integrate-with-external-systems/transform-dbt.md
similarity index 98%
rename from docs/docs-beta/docs/guides/build/integrate/transform-dbt.md
rename to docs/docs-beta/docs/guides/build/integrate-with-external-systems/transform-dbt.md
index 55dcf07232edb..7013085bae880 100644
--- a/docs/docs-beta/docs/guides/build/integrate/transform-dbt.md
+++ b/docs/docs-beta/docs/guides/build/integrate-with-external-systems/transform-dbt.md
@@ -1,7 +1,6 @@
---
title: Transforming data with dbt
sidebar_position: 200
-sidebar_label: Transform data with dbt
---
Dagster orchestrates dbt alongside other technologies, so you can schedule dbt with Spark, Python, etc. in a single data pipeline. Dagster's asset-oriented approach allows Dagster to understand dbt at the level of individual dbt models.
diff --git a/docs/docs-beta/docs/guides/build/ops-jobs/index.md b/docs/docs-beta/docs/guides/build/ops-jobs/index.md
index c03c4a7beccb6..abd1dd71102a2 100644
--- a/docs/docs-beta/docs/guides/build/ops-jobs/index.md
+++ b/docs/docs-beta/docs/guides/build/ops-jobs/index.md
@@ -1,10 +1,9 @@
---
-unlisted: true
+title: Ops and jobs
sidebar_position: 50
+unlisted: true
---
-# Ops and jobs
-
import DocCardList from '@theme/DocCardList';
diff --git a/docs/docs-beta/docs/guides/build/ops-jobs/job-configuration.md b/docs/docs-beta/docs/guides/build/ops-jobs/job-configuration.md
index c15ec31bab52e..5d74230314cd8 100644
--- a/docs/docs-beta/docs/guides/build/ops-jobs/job-configuration.md
+++ b/docs/docs-beta/docs/guides/build/ops-jobs/job-configuration.md
@@ -3,5 +3,3 @@ title: "Job configuration"
sidebar_position: 200
unlisted: true
---
-
-# Job configuration
diff --git a/docs/docs-beta/docs/guides/build/ops-jobs/ops-vs-assets.md b/docs/docs-beta/docs/guides/build/ops-jobs/ops-vs-assets.md
index d39a086dd74f8..e2717cd622e35 100644
--- a/docs/docs-beta/docs/guides/build/ops-jobs/ops-vs-assets.md
+++ b/docs/docs-beta/docs/guides/build/ops-jobs/ops-vs-assets.md
@@ -3,5 +3,3 @@ title: "Ops vs. assets"
sidebar_position: 100
unlisted: true
---
-
-# Ops vs. assets
diff --git a/docs/docs-beta/docs/guides/deploy/execution/run-executors.md b/docs/docs-beta/docs/guides/build/partition-and-backfill-data/backfills.md
similarity index 52%
rename from docs/docs-beta/docs/guides/deploy/execution/run-executors.md
rename to docs/docs-beta/docs/guides/build/partition-and-backfill-data/backfills.md
index 7351a18aefd9d..d793707232c74 100644
--- a/docs/docs-beta/docs/guides/deploy/execution/run-executors.md
+++ b/docs/docs-beta/docs/guides/build/partition-and-backfill-data/backfills.md
@@ -1,7 +1,5 @@
---
-title: "Run executors"
+title: Backfill data
sidebar_position: 300
unlisted: true
---
-
-# Run executors
diff --git a/docs/docs-beta/docs/guides/build/partition-and-backfill-data/index.md b/docs/docs-beta/docs/guides/build/partition-and-backfill-data/index.md
new file mode 100644
index 0000000000000..9f7c421786624
--- /dev/null
+++ b/docs/docs-beta/docs/guides/build/partition-and-backfill-data/index.md
@@ -0,0 +1,9 @@
+---
+title: Partition and backfill data
+sidebar_position: 40
+unlisted: true
+---
+
+import DocCardList from '@theme/DocCardList';
+
+
\ No newline at end of file
diff --git a/docs/docs-beta/docs/guides/build/partition-and-backfill-data/partitions.md b/docs/docs-beta/docs/guides/build/partition-and-backfill-data/partitions.md
new file mode 100644
index 0000000000000..660809e7c362e
--- /dev/null
+++ b/docs/docs-beta/docs/guides/build/partition-and-backfill-data/partitions.md
@@ -0,0 +1,5 @@
+---
+title: Partition data
+sidebar_position: 100
+unlisted: true
+---
\ No newline at end of file
diff --git a/docs/docs-beta/docs/guides/build/partition-and-backfill-data/testing-partitions.md b/docs/docs-beta/docs/guides/build/partition-and-backfill-data/testing-partitions.md
new file mode 100644
index 0000000000000..88884a857288b
--- /dev/null
+++ b/docs/docs-beta/docs/guides/build/partition-and-backfill-data/testing-partitions.md
@@ -0,0 +1,5 @@
+---
+title: Testing partitions
+sidebar_position: 200
+unlisted: true
+---
\ No newline at end of file
diff --git a/docs/docs-beta/docs/guides/build/project-structure.md b/docs/docs-beta/docs/guides/build/project-structure.md
index 6a9d638d05856..2a150985a4bc6 100644
--- a/docs/docs-beta/docs/guides/build/project-structure.md
+++ b/docs/docs-beta/docs/guides/build/project-structure.md
@@ -1,10 +1,8 @@
---
-title: "How to structure your Dagster project"
+title: "Structuring your Dagster project"
sidebar_position: 200
---
-# How to structure your Dagster project
-
:::note
Refer to the project scaffolding tutorial to learn how to create a new Dagster project.
:::
diff --git a/docs/docs-beta/docs/guides/deploy/code-locations/index.mdx b/docs/docs-beta/docs/guides/deploy/code-locations/index.md
similarity index 100%
rename from docs/docs-beta/docs/guides/deploy/code-locations/index.mdx
rename to docs/docs-beta/docs/guides/deploy/code-locations/index.md
diff --git a/docs/docs-beta/docs/guides/deploy/code-locations/managing-code-locations.md b/docs/docs-beta/docs/guides/deploy/code-locations/managing-code-locations.md
index b44a57a85cad4..5a794092e8dfb 100644
--- a/docs/docs-beta/docs/guides/deploy/code-locations/managing-code-locations.md
+++ b/docs/docs-beta/docs/guides/deploy/code-locations/managing-code-locations.md
@@ -1,6 +1,5 @@
---
title: "Managing code locations with Definitions"
+sidebar_position: 100
unlisted: true
---
-
-# Manage code locations
diff --git a/docs/docs-beta/docs/guides/deploy/code-locations/workspace-yaml.md b/docs/docs-beta/docs/guides/deploy/code-locations/workspace-yaml.md
index 4ee95d7f1a0eb..d0d1db821e783 100644
--- a/docs/docs-beta/docs/guides/deploy/code-locations/workspace-yaml.md
+++ b/docs/docs-beta/docs/guides/deploy/code-locations/workspace-yaml.md
@@ -1,18 +1,17 @@
---
title: "workspace.yaml reference"
+sidebar_position: 200
---
-# Workspace file reference
-
:::info
- This reference is only applicable to Dagster OSS. For Dagster Cloud see [the Dagster Cloud Code Locations guide](/dagster-plus/deployment/code-locations)
+ This reference is only applicable to Dagster OSS. For Dagster Cloud, see [the Dagster Cloud Code Locations documentation](/dagster-plus/deployment/code-locations).
:::
The `workspace.yaml` file is used to configure code locations in Dagster. It tells Dagster where to find your code and how to load it.
## Location of workspace.yaml
-Dagster command-line tools (like dagster dev, dagster-webserver, or dagster-daemon run) look for workspace files in the current directory when invoked. This allows you to launch from that directory without the need for command line arguments
+Dagster command-line tools (like `dagster dev`, `dagster-webserver`, or `dagster-daemon run`) look for workspace files in the current directory when invoked. This allows you to launch from that directory without the need for command line arguments.
To load the workspace.yaml file from a different folder, use the -w argument:
diff --git a/docs/docs-beta/docs/guides/deploy/dagster-yaml.md b/docs/docs-beta/docs/guides/deploy/dagster-yaml.md
index d7f628b9f533a..a03eb23289fcd 100644
--- a/docs/docs-beta/docs/guides/deploy/dagster-yaml.md
+++ b/docs/docs-beta/docs/guides/deploy/dagster-yaml.md
@@ -1,9 +1,7 @@
---
-title: "dagster.yaml reference"
+title: 'dagster.yaml reference'
sidebar_position: 200
-----
-
-# dagster.yaml reference
+---
The `dagster.yaml` file is used to configure the Dagster instance. It defines various settings for storage, run execution, logging, and other aspects of a Dagster deployment.
diff --git a/docs/docs-beta/docs/guides/deploy/deployment-options/amazon-web-services.md b/docs/docs-beta/docs/guides/deploy/deployment-options/amazon-web-services.md
index d425e912d4cd3..36968a9c47893 100644
--- a/docs/docs-beta/docs/guides/deploy/deployment-options/amazon-web-services.md
+++ b/docs/docs-beta/docs/guides/deploy/deployment-options/amazon-web-services.md
@@ -1,5 +1,6 @@
---
-title: "Deploy to Amazon Web Services"
+title: Deploying to Amazon Web Services
+sidebar_label: "Amazon Web Services"
unlisted: true
sidebar_position: 500
---
diff --git a/docs/docs-beta/docs/guides/deploy/deployment-options/dagster-service.md b/docs/docs-beta/docs/guides/deploy/deployment-options/dagster-service.md
index fcb65fafa58f9..250e43ae9eff9 100644
--- a/docs/docs-beta/docs/guides/deploy/deployment-options/dagster-service.md
+++ b/docs/docs-beta/docs/guides/deploy/deployment-options/dagster-service.md
@@ -1,5 +1,6 @@
---
-title: 'Deploy Dagster as a service'
+title: 'Deploying Dagster as a service'
+sidebar_label: Dagster as a service
description: 'Learn how to deploy Dagster as a service on a single machine'
sidebar_position: 200
---
@@ -14,8 +15,6 @@ To follow the steps in this guide, you'll need:
-# Deploy Dagster as a service
-
This guide will walk you through deploying Dagster as a service on a single machine. It includes instructions for setting up the Dagster webserver and daemon. This approach is suitable for small-scale deployments or for testing purposes. For production environments, consider using containerized deployments or cloud-based solutions
## Running the Dagster Webserver
diff --git a/docs/docs-beta/docs/guides/deploy/deployment-options/docker.md b/docs/docs-beta/docs/guides/deploy/deployment-options/docker.md
index 171439cd0a159..c0a57e21070bc 100644
--- a/docs/docs-beta/docs/guides/deploy/deployment-options/docker.md
+++ b/docs/docs-beta/docs/guides/deploy/deployment-options/docker.md
@@ -1,5 +1,6 @@
---
-title: "Deploying with Docker Compose"
+title: Deploying Dagster using Docker Compose
+sidebar_label: "Docker Compose"
description: A guide to deploying Dagster with Docker Compose.
sidebar_position: 400
---
diff --git a/docs/docs-beta/docs/guides/deploy/deployment-options/google-cloud-platform.md b/docs/docs-beta/docs/guides/deploy/deployment-options/google-cloud-platform.md
index 0cdd2376cb6b5..14e5b2609b0a3 100644
--- a/docs/docs-beta/docs/guides/deploy/deployment-options/google-cloud-platform.md
+++ b/docs/docs-beta/docs/guides/deploy/deployment-options/google-cloud-platform.md
@@ -1,5 +1,6 @@
---
-title: "Deploy to Google Cloud Platform"
-unlisted: true
+title: "Deploying to Google Cloud Platform"
+sidebar_label: Google Cloud Platform
sidebar_position: 600
+unlisted: true
---
diff --git a/docs/docs-beta/docs/guides/deploy/deployment-options/index.mdx b/docs/docs-beta/docs/guides/deploy/deployment-options/index.md
similarity index 100%
rename from docs/docs-beta/docs/guides/deploy/deployment-options/index.mdx
rename to docs/docs-beta/docs/guides/deploy/deployment-options/index.md
diff --git a/docs/docs-beta/docs/guides/deploy/deployment-options/kubernetes.md b/docs/docs-beta/docs/guides/deploy/deployment-options/kubernetes.md
index f29729c61072b..5a99a221e7547 100644
--- a/docs/docs-beta/docs/guides/deploy/deployment-options/kubernetes.md
+++ b/docs/docs-beta/docs/guides/deploy/deployment-options/kubernetes.md
@@ -1,5 +1,6 @@
---
-title: "Deploy to Kubernetes"
+title: "Deploying to Kubernetes"
+sidebar_label: Kubernetes
sidebar_position: 300
---
diff --git a/docs/docs-beta/docs/guides/deploy/deployment-options/microsoft-azure.md b/docs/docs-beta/docs/guides/deploy/deployment-options/microsoft-azure.md
index a33096ceb12c0..a0e673b149057 100644
--- a/docs/docs-beta/docs/guides/deploy/deployment-options/microsoft-azure.md
+++ b/docs/docs-beta/docs/guides/deploy/deployment-options/microsoft-azure.md
@@ -1,5 +1,6 @@
---
-title: "Deploy to Microsoft Azure"
-unlisted: true
+title: "Deploying to Microsoft Azure"
+sidebar_label: Microsoft Azure
sidebar_position: 700
+unlisted: true
---
diff --git a/docs/docs-beta/docs/guides/deploy/deployment-options/running-local-ui-development.md b/docs/docs-beta/docs/guides/deploy/deployment-options/running-local-ui-development.md
index f6e84c3fafe95..a95353fb083cb 100644
--- a/docs/docs-beta/docs/guides/deploy/deployment-options/running-local-ui-development.md
+++ b/docs/docs-beta/docs/guides/deploy/deployment-options/running-local-ui-development.md
@@ -1,15 +1,14 @@
---
title: Running Dagster locally
+sidebar_label: Local deployment
description: How to run Dagster on your local machine.
sidebar_position: 100
---
-# Running Dagster locally
-
In this guide, we'll walk you through how to run Dagster on your local machine using the `dagster dev` command. The `dagster dev` command launches the Dagster UI and the Dagster daemon, allowing you to start a full deployment of Dagster from the command line.
:::warning
-`dagster dev` is intended for local development _only_. If you want to run Dagster for production use cases, see our other [deployment guides](/guides/deploy/deployment-options/index.mdx).
+`dagster dev` is intended for local development _only_. If you want to run Dagster for production use cases, see our other [deployment guides](/guides/deploy/deployment-options/index.md).
:::
## Locating your code
diff --git a/docs/docs-beta/docs/guides/deploy/execution/dagster-daemon.md b/docs/docs-beta/docs/guides/deploy/execution/dagster-daemon.md
index 85da08fe450d3..bd41b85039b20 100644
--- a/docs/docs-beta/docs/guides/deploy/execution/dagster-daemon.md
+++ b/docs/docs-beta/docs/guides/deploy/execution/dagster-daemon.md
@@ -4,4 +4,3 @@ sidebar_position: 100
unlisted: true
---
-# Dagster daemon
diff --git a/docs/docs-beta/docs/guides/deploy/execution/index.mdx b/docs/docs-beta/docs/guides/deploy/execution/index.md
similarity index 88%
rename from docs/docs-beta/docs/guides/deploy/execution/index.mdx
rename to docs/docs-beta/docs/guides/deploy/execution/index.md
index c1bbbc2b8a5e0..0e89bcc08088e 100644
--- a/docs/docs-beta/docs/guides/deploy/execution/index.mdx
+++ b/docs/docs-beta/docs/guides/deploy/execution/index.md
@@ -1,6 +1,7 @@
---
title: "Execution"
sidebar_position: 20
+unlisted: true
---
import DocCardList from '@theme/DocCardList';
diff --git a/docs/docs-beta/docs/guides/deploy/execution/run-coordinators.md b/docs/docs-beta/docs/guides/deploy/execution/run-coordinators.md
index 48ce80b1690cc..3019bfad6bd2c 100644
--- a/docs/docs-beta/docs/guides/deploy/execution/run-coordinators.md
+++ b/docs/docs-beta/docs/guides/deploy/execution/run-coordinators.md
@@ -3,5 +3,3 @@ title: "Run coordinators"
sidebar_position: 400
unlisted: true
---
-
-# Run coordinators
diff --git a/docs/docs-beta/docs/guides/deploy/execution/run-launchers.md b/docs/docs-beta/docs/guides/deploy/execution/run-launchers.md
index f4982c170a6b3..d67cb820d9ae4 100644
--- a/docs/docs-beta/docs/guides/deploy/execution/run-launchers.md
+++ b/docs/docs-beta/docs/guides/deploy/execution/run-launchers.md
@@ -3,5 +3,3 @@ title: "Run launchers"
sidebar_position: 200
unlisted: true
---
-
-# Run launchers
diff --git a/docs/docs-beta/docs/guides/deploy/index.md b/docs/docs-beta/docs/guides/deploy/index.md
index 3a5ec3fb353d1..6ac717748d2f6 100644
--- a/docs/docs-beta/docs/guides/deploy/index.md
+++ b/docs/docs-beta/docs/guides/deploy/index.md
@@ -1,11 +1,8 @@
---
title: "Deploying Dagster"
-sidebar_position: 70
sidebar_class_name: hidden
---
-# Deploying Dagster
-
This section is about self-hosting Dagster.
:::info
diff --git a/docs/docs-beta/docs/guides/index.md b/docs/docs-beta/docs/guides/index.md
deleted file mode 100644
index cb467c230dadd..0000000000000
--- a/docs/docs-beta/docs/guides/index.md
+++ /dev/null
@@ -1,6 +0,0 @@
----
-title: "Guides"
-unlisted: true
----
-
-# Guides
diff --git a/docs/docs-beta/docs/guides/migrate/airflow-to-dagster/decomission-the-airflow-dag.md b/docs/docs-beta/docs/guides/migrate/airflow-to-dagster/decomission-the-airflow-dag.md
new file mode 100644
index 0000000000000..f5ef62902e267
--- /dev/null
+++ b/docs/docs-beta/docs/guides/migrate/airflow-to-dagster/decomission-the-airflow-dag.md
@@ -0,0 +1,7 @@
+---
+title: 'Decommission the Airflow DAG'
+sidebar_position: 500
+unlisted: true
+---
+
+TK
\ No newline at end of file
diff --git a/docs/docs-beta/docs/guides/migrate/airflow-to-dagster/index.md b/docs/docs-beta/docs/guides/migrate/airflow-to-dagster/index.md
new file mode 100644
index 0000000000000..f9865e5870fc9
--- /dev/null
+++ b/docs/docs-beta/docs/guides/migrate/airflow-to-dagster/index.md
@@ -0,0 +1,8 @@
+---
+title: 'Migrating from Airflow to Dagster'
+unlisted: true
+---
+
+import DocCardList from '@theme/DocCardList';
+
+
\ No newline at end of file
diff --git a/docs/docs-beta/docs/guides/migrate/airflow-to-dagster/migrate-assets.md b/docs/docs-beta/docs/guides/migrate/airflow-to-dagster/migrate-assets.md
new file mode 100644
index 0000000000000..b4dc57583d0a8
--- /dev/null
+++ b/docs/docs-beta/docs/guides/migrate/airflow-to-dagster/migrate-assets.md
@@ -0,0 +1,7 @@
+---
+title: Migrate assets
+sidebar_position: 400
+unlisted: true
+---
+
+TK
\ No newline at end of file
diff --git a/docs/docs-beta/docs/guides/migrate/airflow-to-dagster/observe-assets.md b/docs/docs-beta/docs/guides/migrate/airflow-to-dagster/observe-assets.md
new file mode 100644
index 0000000000000..275f9589e5ee4
--- /dev/null
+++ b/docs/docs-beta/docs/guides/migrate/airflow-to-dagster/observe-assets.md
@@ -0,0 +1,5 @@
+---
+title: 'Observe assets'
+sidebar_position: 300
+unlisted: true
+---
diff --git a/docs/docs-beta/docs/guides/migrate/airflow-to-dagster/peer-to-local-airflow.md b/docs/docs-beta/docs/guides/migrate/airflow-to-dagster/peer-to-local-airflow.md
new file mode 100644
index 0000000000000..71dd324c3b344
--- /dev/null
+++ b/docs/docs-beta/docs/guides/migrate/airflow-to-dagster/peer-to-local-airflow.md
@@ -0,0 +1,5 @@
+---
+title: 'Peer to your local Airflow instance'
+sidebar_position: 200
+unlisted: true
+---
\ No newline at end of file
diff --git a/docs/docs-beta/docs/guides/migrate/airflow-to-dagster/set-up-local-airflow.md b/docs/docs-beta/docs/guides/migrate/airflow-to-dagster/set-up-local-airflow.md
new file mode 100644
index 0000000000000..3a7733495aa0a
--- /dev/null
+++ b/docs/docs-beta/docs/guides/migrate/airflow-to-dagster/set-up-local-airflow.md
@@ -0,0 +1,7 @@
+---
+title: 'Set up a local Airflow instance'
+sidebar_position: 100
+unlisted: true
+---
+
+TK
\ No newline at end of file
diff --git a/docs/docs-beta/docs/guides/migrate/index.md b/docs/docs-beta/docs/guides/migrate/index.md
new file mode 100644
index 0000000000000..18f1339c8360c
--- /dev/null
+++ b/docs/docs-beta/docs/guides/migrate/index.md
@@ -0,0 +1,9 @@
+---
+title: "Migrate"
+description: "Migrate to Dagster from other platforms"
+sidebar_class_name: hidden
+---
+
+import DocCardList from '@theme/DocCardList';
+
+
\ No newline at end of file
diff --git a/docs/docs-beta/docs/guides/monitor/index.md b/docs/docs-beta/docs/guides/monitor/index.md
index aaf06312205dc..ce5477150b9b6 100644
--- a/docs/docs-beta/docs/guides/monitor/index.md
+++ b/docs/docs-beta/docs/guides/monitor/index.md
@@ -1,6 +1,5 @@
---
title: "Monitoring pipelines"
-sidebar_position: 50
sidebar_class_name: hidden
---
diff --git a/docs/docs-beta/docs/guides/monitor/logging/built-in-loggers.md b/docs/docs-beta/docs/guides/monitor/logging/built-in-loggers.md
new file mode 100644
index 0000000000000..43fc3badbfec8
--- /dev/null
+++ b/docs/docs-beta/docs/guides/monitor/logging/built-in-loggers.md
@@ -0,0 +1,6 @@
+---
+title: Built-in loggers
+sidebar_position: 500
+unlisted: true
+---
+
diff --git a/docs/docs-beta/docs/guides/monitor/logging/custom-logging.md b/docs/docs-beta/docs/guides/monitor/logging/custom-logging.md
index c3bf0380c5621..60a8e16ae8d8c 100644
--- a/docs/docs-beta/docs/guides/monitor/logging/custom-logging.md
+++ b/docs/docs-beta/docs/guides/monitor/logging/custom-logging.md
@@ -1,10 +1,8 @@
---
-title: "Setting up custom logging"
-sidebar_position: 100
+title: "Custom loggers"
+sidebar_position: 200
---
-# Custom loggers
-
Custom loggers are used to alter the structure of the logs being produced by your Dagster pipelines. For example, JSON logs can be produced to more easily be processed by log management systems.
@@ -40,7 +38,7 @@ Configuring a ops job to use the custom logger slightly differs from the asset j
### Expected `json_console_logger` output
-The `json_console_logger` will emit an exhaustive single line JSON document containing the full log record, including the dagster metadata fields.
+The `json_console_logger` will emit an exhaustive single line JSON document containing the full log record, including the Dagster metadata fields.
Here's an example of the output for reference, formatted for readability:
diff --git a/docs/docs-beta/docs/guides/monitor/logging/custom-metrics-logs.md b/docs/docs-beta/docs/guides/monitor/logging/custom-metrics-logs.md
index 62476124e059c..add7d01544132 100644
--- a/docs/docs-beta/docs/guides/monitor/logging/custom-metrics-logs.md
+++ b/docs/docs-beta/docs/guides/monitor/logging/custom-metrics-logs.md
@@ -1,5 +1,5 @@
---
-title: "Use custom metrics in logs"
-sidebar_position: 200
+title: "Using custom metrics in logs"
+sidebar_position: 300
unlisted: true
---
diff --git a/docs/docs-beta/docs/guides/monitor/logging/index.mdx b/docs/docs-beta/docs/guides/monitor/logging/index.md
similarity index 100%
rename from docs/docs-beta/docs/guides/monitor/logging/index.mdx
rename to docs/docs-beta/docs/guides/monitor/logging/index.md
diff --git a/docs/docs-beta/docs/guides/monitor/logging/log-types-and-loggers.md b/docs/docs-beta/docs/guides/monitor/logging/log-types-and-loggers.md
new file mode 100644
index 0000000000000..f9b224ab34fbc
--- /dev/null
+++ b/docs/docs-beta/docs/guides/monitor/logging/log-types-and-loggers.md
@@ -0,0 +1,20 @@
+---
+title: Log types and loggers
+sidebar_position: 100
+unlisted: true
+---
+
+## Log types
+
+### Structured event logs
+
+### Raw compute logs
+
+## Loggers
+
+### Built-in loggers
+
+### External loggers
+
+
+{/* TODO copy from https://docs.dagster.io/concepts/logging */}
\ No newline at end of file
diff --git a/docs/docs-beta/docs/guides/monitor/logging/python-logging.md b/docs/docs-beta/docs/guides/monitor/logging/python-logging.md
new file mode 100644
index 0000000000000..db53c8e455d53
--- /dev/null
+++ b/docs/docs-beta/docs/guides/monitor/logging/python-logging.md
@@ -0,0 +1,6 @@
+---
+title: Python logging
+sidebar_position: 400
+unlisted: true
+---
+
diff --git a/docs/docs-beta/docs/guides/operate/index.md b/docs/docs-beta/docs/guides/operate/index.md
new file mode 100644
index 0000000000000..8d1bc9055dc8d
--- /dev/null
+++ b/docs/docs-beta/docs/guides/operate/index.md
@@ -0,0 +1,8 @@
+---
+title: "Operating pipelines"
+sidebar_class_name: hidden
+---
+
+import DocCardList from '@theme/DocCardList';
+
+
\ No newline at end of file
diff --git a/docs/docs-beta/docs/guides/build/configure/io-managers.md b/docs/docs-beta/docs/guides/operate/io-managers.md
similarity index 93%
rename from docs/docs-beta/docs/guides/build/configure/io-managers.md
rename to docs/docs-beta/docs/guides/operate/io-managers.md
index c64f4a909d995..c3f41fa5e7abc 100644
--- a/docs/docs-beta/docs/guides/build/configure/io-managers.md
+++ b/docs/docs-beta/docs/guides/operate/io-managers.md
@@ -1,7 +1,5 @@
---
title: "Managing stored data with I/O managers"
-sidebar_position: 800
-sidebar_label: "I/O managers"
---
I/O managers in Dagster allow you to keep the code for data processing separate from the code for reading and writing data. This reduces repetitive code and makes it easier to change where your data is stored.
@@ -19,8 +17,8 @@ For assets that follow this pattern, an I/O manager can streamline the code that
To follow the steps in this guide, you'll need familiarity with:
-- [Assets](/guides/build/assets-concepts/index.mdx
-- [Resources](/guides/build/configure/resources)
+- [Assets](/guides/build/create-asset-pipelines/assets-concepts/index.mdx)
+- [Resources](/guides/build/connect-to-external-systems/resources)
## Before you begin
@@ -37,7 +35,7 @@ To follow the steps in this guide, you'll need familiarity with:
- Your pipeline manages I/O on its own by using other libraries/tools that write to storage
- Your assets won't fit in memory, such as a database table with billions of rows
-As a general rule, if your pipeline becomes more complicated in order to use I/O managers, it's likely that I/O managers aren't a good fit. In these cases you should use `deps` to [define dependencies](/guides/build/assets-concepts/asset-dependencies).
+As a general rule, if your pipeline becomes more complicated in order to use I/O managers, it's likely that I/O managers aren't a good fit. In these cases you should use `deps` to [define dependencies](/guides/build/create-asset-pipelines/assets-concepts/asset-dependencies).
## Using I/O managers in assets \{#io-in-assets}
@@ -84,5 +82,5 @@ Dagster offers built-in library implementations for I/O managers for popular dat
## Next steps
-- Learn to [connect databases](/guides/build/configure/databases) with resources
-- Learn to [connect APIs](/guides/build/configure/apis) with resources
\ No newline at end of file
+- Learn to [connect databases](/guides/build/connect-to-external-systems/databases) with resources
+- Learn to [connect APIs](/guides/build/connect-to-external-systems/apis) with resources
\ No newline at end of file
diff --git a/docs/docs-beta/docs/guides/build/configure/managing-concurrency.md b/docs/docs-beta/docs/guides/operate/managing-concurrency.md
similarity index 100%
rename from docs/docs-beta/docs/guides/build/configure/managing-concurrency.md
rename to docs/docs-beta/docs/guides/operate/managing-concurrency.md
diff --git a/docs/docs-beta/docs/guides/operate/run-executors.md b/docs/docs-beta/docs/guides/operate/run-executors.md
new file mode 100644
index 0000000000000..ea773a83b5acb
--- /dev/null
+++ b/docs/docs-beta/docs/guides/operate/run-executors.md
@@ -0,0 +1,5 @@
+---
+title: About run executors
+sidebar_position: 100
+unlisted: true
+---
diff --git a/docs/docs-beta/docs/guides/test/asset-checks.md b/docs/docs-beta/docs/guides/test/asset-checks.md
index b28bee6e46e3e..3372c80ecd817 100644
--- a/docs/docs-beta/docs/guides/test/asset-checks.md
+++ b/docs/docs-beta/docs/guides/test/asset-checks.md
@@ -1,5 +1,5 @@
---
-title: "Testing assets with Asset checks"
+title: "Testing assets with asset checks"
sidebar_position: 100
sidebar_label: "Asset checks"
---
@@ -17,7 +17,7 @@ Each asset check should test only a single asset property to keep tests uncompli
To follow this guide, you'll need:
-- Familiarity with [Assets](/guides/build/assets-concepts/index.mdx
+- Familiarity with [Assets](/guides/build/create-asset-pipelines/assets-concepts/index.mdx)
## Getting started
@@ -40,7 +40,7 @@ A asset check is defined using the `@asset_check` decorator.
The following example defines an asset check on an asset that fails if the `order_id` column of the asset contains a null value. The asset check will run after the asset has been materialized.
-
+
## Defining multiple asset checks \{#multiple-checks}
@@ -53,13 +53,13 @@ The following example defines two asset checks using the `@multi_asset_check` de
In this example, both asset checks will run in a single operation after the asset has been materialized.
-
+
## Programmatically generating asset checks \{#factory-pattern}
Defining multiple checks can also be done using a factory pattern. The example below defines the same two asset checks as in the previous example, but this time using a factory pattern and the `@multi_asset_check` decorator.
-
+
## Blocking downstream materialization
@@ -67,7 +67,7 @@ By default, if a parent's asset check fails during a run, the run will continue
In the example bellow, if the `orders_id_has_no_nulls` check fails, the downstream `augmented_orders` asset won't be materialized.
-
+
## Scheduling and monitoring asset checks
@@ -75,10 +75,10 @@ In some cases, running asset checks separately from the job materializing the as
In the example below, two jobs are defined: one for the asset and another for the asset check. Schedules are defined to materialize the asset and execute the asset check independently. A sensor is defined to send an email alert when the asset check job fails.
-
+
## Next steps
-- Learn more about assets in [Understanding Assets](/guides/build/assets-concepts/index.mdx
+- Learn more about assets in [Understanding Assets](/guides/build/create-asset-pipelines/assets-concepts/index.mdx)
- Learn more about asset checks in [Understanding Asset Checks](/guides/test/asset-checks)
- Learn how to use [Great Expectations with Dagster](https://dagster.io/blog/ensuring-data-quality-with-dagster-and-great-expectations)
diff --git a/docs/docs-beta/docs/guides/test/data-freshness-testing.md b/docs/docs-beta/docs/guides/test/data-freshness-testing.md
index a738941c2d8f5..d1d75d1ac3307 100644
--- a/docs/docs-beta/docs/guides/test/data-freshness-testing.md
+++ b/docs/docs-beta/docs/guides/test/data-freshness-testing.md
@@ -18,8 +18,8 @@ Freshness checks can also communicate SLAs for their data freshness. For example
To follow the steps in this guide, you'll need familiarity with:
-- [Assets](/guides/build/create-a-pipeline/data-assets)
-- [External assets](/guides/build/create-a-pipeline/external-assets)
+- [Assets](/guides/build/create-asset-pipelines/data-assets)
+- [External assets](/guides/build/create-asset-pipelines/external-assets)
- [Asset checks](/guides/test/asset-checks)
@@ -45,7 +45,7 @@ The example below defines a freshness check on an asset that fails if the asset'
## External asset freshness \{#external-assets}
-[External assets](/guides/build/create-a-pipeline/external-assets) are assets orchestrated by systems other than Dagster.
+[External assets](/guides/build/create-asset-pipelines/external-assets) are assets orchestrated by systems other than Dagster.
To run freshness checks on external assets, the checks need to know when the external assets were last updated. Emitting these update timestamps as values for the `dagster/last_updated_timestamp` observation metadata key allows Dagster to calculate whether the asset is overdue.
diff --git a/docs/docs-beta/docs/guides/test/index.md b/docs/docs-beta/docs/guides/test/index.md
index 6284fc0b252bc..31ceb625d0813 100644
--- a/docs/docs-beta/docs/guides/test/index.md
+++ b/docs/docs-beta/docs/guides/test/index.md
@@ -1,7 +1,8 @@
---
title: "Testing assets"
-sidebar_position: 60
sidebar_class_name: hidden
---
-This section is about testing assets.
\ No newline at end of file
+import DocCardList from '@theme/DocCardList';
+
+
\ No newline at end of file
diff --git a/docs/docs-beta/docs/guides/test/unit-tests-assets-and-ops.md b/docs/docs-beta/docs/guides/test/unit-tests-assets-and-ops.md
index 057ef145945bd..951e6c1097f31 100644
--- a/docs/docs-beta/docs/guides/test/unit-tests-assets-and-ops.md
+++ b/docs/docs-beta/docs/guides/test/unit-tests-assets-and-ops.md
@@ -15,7 +15,7 @@ This guide covers how to write unit tests for assets and ops with a variety of d
To follow the steps in this guide, you'll need familiarity with:
-- [Assets](/guides/build/assets-concepts/index.mdx
+- [Assets](/guides/build/create-asset-pipelines/assets-concepts/index.mdx
- [Ops and Jobs](/guides/build/ops-jobs)
@@ -106,6 +106,6 @@ If an asset or op has multiple parameters, it's recommended to use keyword argum
## Next steps
-- Learn more about assets in [Understanding Assets](/guides/build/assets-concepts/index.mdx
+- Learn more about assets in [Understanding Assets](/guides/build/create-asset-pipelines/assets-concepts/index.mdx)
- Learn more about ops in [Understanding Ops](/guides/build/ops-jobs)
-- Learn more about resources in [Resources](/guides/build/configure/resources)
\ No newline at end of file
+- Learn more about resources in [Resources](/guides/build/connect-to-external-systems/resources)
\ No newline at end of file
diff --git a/docs/docs-beta/docs/intro.md b/docs/docs-beta/docs/intro.md
index 56b22d535a3c7..cdd230427c2e3 100644
--- a/docs/docs-beta/docs/intro.md
+++ b/docs/docs-beta/docs/intro.md
@@ -29,7 +29,7 @@ Dagster is a data orchestrator built for data engineers, with integrated lineage
Build your first Dagster pipeline in our Quickstart tutorial.
-
+
New to Dagster? Learn about how thinking in assets can help you manage your data better.
diff --git a/docs/docs-beta/sidebars.ts b/docs/docs-beta/sidebars.ts
index 8bea9d55b463a..4ebabf11210e2 100644
--- a/docs/docs-beta/sidebars.ts
+++ b/docs/docs-beta/sidebars.ts
@@ -42,6 +42,18 @@ const sidebars: SidebarsConfig = {
},
],
},
+ {
+ type: 'category',
+ label: 'Operate',
+ collapsed: false,
+ link: {type: 'doc', id: 'guides/operate/index'},
+ items: [
+ {
+ type: 'autogenerated',
+ dirName: 'guides/operate'
+ }
+ ]
+ },
{
type: 'category',
label: 'Monitor',
@@ -78,6 +90,18 @@ const sidebars: SidebarsConfig = {
},
],
},
+ {
+ type: 'category',
+ label: 'Migrate',
+ link: {type: 'doc', id: 'guides/migrate/index'},
+ collapsed: false,
+ items: [
+ {
+ type: 'autogenerated',
+ dirName: 'guides/migrate'
+ }
+ ]
+ },
{
type: 'category',
label: 'About',