From 730f56eed10cdc86fdf54256070adb4bd7510e8c Mon Sep 17 00:00:00 2001
From: Devin Villarosa <102188207+devinvillarosa@users.noreply.github.com>
Date: Tue, 3 Dec 2024 16:15:02 -0800
Subject: [PATCH 01/92] [UI v2] feat: Adds GlobalConcurrencyLimitEmptyState
 component to be used in concurrency route development (#16195)

---
 ...bal-concurrency-limit-empty-state.test.tsx | 18 +++++++++++
 .../global-concurrency-limit-empty-state.tsx  | 30 +++++++++++++++++++
 2 files changed, 48 insertions(+)
 create mode 100644 ui-v2/src/components/concurrency/global-concurrency-limit-empty-state.test.tsx
 create mode 100644 ui-v2/src/components/concurrency/global-concurrency-limit-empty-state.tsx

diff --git a/ui-v2/src/components/concurrency/global-concurrency-limit-empty-state.test.tsx b/ui-v2/src/components/concurrency/global-concurrency-limit-empty-state.test.tsx
new file mode 100644
index 000000000000..d26184237fc3
--- /dev/null
+++ b/ui-v2/src/components/concurrency/global-concurrency-limit-empty-state.test.tsx
@@ -0,0 +1,18 @@
+import { render, screen } from "@testing-library/react";
+import userEvent from "@testing-library/user-event";
+import { describe, expect, it, vi } from "vitest";
+import { GlobalConcurrencyLimitEmptyState } from "./global-concurrency-limit-empty-state";
+
+describe("GlobalConcurrencyLimitEmptyState", () => {
+	it("when adding limit, callback gets fired", async () => {
+		const user = userEvent.setup();
+
+		const mockFn = vi.fn();
+
+		render(<GlobalConcurrencyLimitEmptyState onClick={mockFn} />);
+		await user.click(
+			screen.getByRole("button", { name: /Add Concurrency Limit/i }),
+		);
+		expect(mockFn).toHaveBeenCalledOnce();
+	});
+});
diff --git a/ui-v2/src/components/concurrency/global-concurrency-limit-empty-state.tsx b/ui-v2/src/components/concurrency/global-concurrency-limit-empty-state.tsx
new file mode 100644
index 000000000000..90d6dee1ca9c
--- /dev/null
+++ b/ui-v2/src/components/concurrency/global-concurrency-limit-empty-state.tsx
@@ -0,0 +1,30 @@
+import { Button } from "@/components/ui/button";
+import { DocsLink } from "@/components/ui/docs-link";
+import {
+	EmptyState,
+	EmptyStateActions,
+	EmptyStateDescription,
+	EmptyStateIcon,
+	EmptyStateTitle,
+} from "@/components/ui/empty-state";
+import { PlusIcon } from "lucide-react";
+
+type Props = {
+	onClick: () => void;
+};
+export const GlobalConcurrencyLimitEmptyState = ({ onClick }: Props) => (
+	<EmptyState>
+		<EmptyStateIcon id="Workflow" />
+		<EmptyStateTitle>Add a concurrency limit</EmptyStateTitle>
+		<EmptyStateDescription>
+			Global concurrency limits can be applied to flow runs, task runs and any
+			operation where you want to control concurrency.
+		</EmptyStateDescription>
+		<EmptyStateActions>
+			<Button onClick={onClick}>
+				Add Concurrency Limit <PlusIcon className="h-4 w-4 ml-2" />
+			</Button>
+			<DocsLink id="global-concurrency-guide" />
+		</EmptyStateActions>
+	</EmptyState>
+);
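A minimal sketch of how this component might be wired into the concurrency route it is built for; the route name, state handling, and dialog placeholder below are illustrative and not part of the patch:

    import { useState } from "react";
    import { GlobalConcurrencyLimitEmptyState } from "@/components/concurrency/global-concurrency-limit-empty-state";

    export const GlobalConcurrencyLimitsRoute = () => {
    	const [isAddOpen, setIsAddOpen] = useState(false);

    	// Render the empty state until the user starts adding a limit;
    	// a real route would open a create-limit dialog here instead.
    	return isAddOpen ? (
    		<div>create-limit dialog placeholder</div>
    	) : (
    		<GlobalConcurrencyLimitEmptyState onClick={() => setIsAddOpen(true)} />
    	);
    };
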
From 1cab155444cc32e73582cec4becb8290917d89c8 Mon Sep 17 00:00:00 2001
From: Evan Liu
Date: Thu, 5 Dec 2024 04:01:27 +1300
Subject: [PATCH 02/92] [UI v2] Use cva() on StateBadge (#16209)

---
 ui-v2/src/components/ui/state-badge/index.tsx | 45 ++++++------
 .../ui/state-badge/state-badge.stories.tsx    | 70 ++++++++-----------
 .../ui/state-badge/state-badge.test.tsx       | 12 ++--
 3 files changed, 59 insertions(+), 68 deletions(-)

diff --git a/ui-v2/src/components/ui/state-badge/index.tsx b/ui-v2/src/components/ui/state-badge/index.tsx
index 00874b7693b6..0b7d622b062e 100644
--- a/ui-v2/src/components/ui/state-badge/index.tsx
+++ b/ui-v2/src/components/ui/state-badge/index.tsx
@@ -1,45 +1,47 @@
-import { Badge } from "../badge";
 import type { components } from "@/api/prefect";
+import { cva } from "class-variance-authority";
 import {
+	BanIcon,
+	CheckIcon,
 	ClockIcon,
 	PauseIcon,
-	XIcon,
-	CheckIcon,
-	ServerCrashIcon,
-	BanIcon,
 	PlayIcon,
+	ServerCrashIcon,
+	XIcon,
 } from "lucide-react";
+import { Badge } from "../badge";
 
 const ICONS = {
 	COMPLETED: CheckIcon,
 	FAILED: XIcon,
 	RUNNING: PlayIcon,
 	CANCELLED: BanIcon,
 	CANCELLING: BanIcon,
 	CRASHED: ServerCrashIcon,
 	PAUSED: PauseIcon,
 	PENDING: ClockIcon,
 	SCHEDULED: ClockIcon,
 } as const satisfies Record<
 	components["schemas"]["StateType"],
 	React.ElementType
 >;
 
-const CLASSES = {
-	COMPLETED: "bg-green-50 text-green-600 hover:bg-green-50",
-	FAILED: "bg-red-50 text-red-600 hover:bg-red-50",
-	RUNNING: "bg-blue-100 text-blue-700 hover:bg-blue-100",
-	CANCELLED: "bg-gray-300 text-gray-800 hover:bg-gray-300",
-	CANCELLING: "bg-gray-300 text-gray-800 hover:bg-gray-300",
-	CRASHED: "bg-orange-50 text-orange-600 hover:bg-orange-50",
-	PAUSED: "bg-gray-300 text-gray-800 hover:bg-gray-300",
-	PENDING: "bg-gray-300 text-gray-800 hover:bg-gray-300",
-	SCHEDULED: "bg-yellow-100 text-yellow-700 hover:bg-yellow-100",
-} as const satisfies Record<components["schemas"]["StateType"], string>;
+const stateBadgeVariants = cva("gap-1", {
+	variants: {
+		state: {
+			COMPLETED: "bg-green-50 text-green-600 hover:bg-green-50",
+			FAILED: "bg-red-50 text-red-600 hover:bg-red-50",
+			RUNNING: "bg-blue-100 text-blue-700 hover:bg-blue-100",
+			CANCELLED: "bg-gray-300 text-gray-800 hover:bg-gray-300",
+			CANCELLING: "bg-gray-300 text-gray-800 hover:bg-gray-300",
+			CRASHED: "bg-orange-50 text-orange-600 hover:bg-orange-50",
+			PAUSED: "bg-gray-300 text-gray-800 hover:bg-gray-300",
+			PENDING: "bg-gray-300 text-gray-800 hover:bg-gray-300",
+			SCHEDULED: "bg-yellow-100 text-yellow-700 hover:bg-yellow-100",
+		} satisfies Record<components["schemas"]["StateType"], string>,
+	},
+});
 
 export const StateBadge = ({
 	state,
 }: { state: components["schemas"]["State"] }) => {
 	const Icon = ICONS[state.type];
 	return (
-		<Badge className={CLASSES[state.type]}>
-			<Icon size={16} />
-			{state.name}
-		</Badge>
+		<Badge className={stateBadgeVariants({ state: state.type })}>
+			<Icon size={16} />
+			{state.name}
+		</Badge>
 	);
 };
diff --git a/ui-v2/src/components/ui/state-badge/state-badge.stories.tsx b/ui-v2/src/components/ui/state-badge/state-badge.stories.tsx
index 1f1baf56bd08..ac91f503d788 100644
--- a/ui-v2/src/components/ui/state-badge/state-badge.stories.tsx
+++ b/ui-v2/src/components/ui/state-badge/state-badge.stories.tsx
@@ -1,47 +1,35 @@
+import type { components } from "@/api/prefect.ts";
 import type { Meta, StoryObj } from "@storybook/react";
 import { StateBadge } from ".";
 
-const meta = {
-	title: "UI/StateBadge",
-	component: StateBadge,
-	argTypes: {
-		state: {
-			options: [
-				"COMPLETED",
-				"FAILED",
-				"RUNNING",
-				"PENDING",
-				"PAUSED",
-				"CANCELLED",
-				"CANCELLING",
-				"CRASHED",
-				"SCHEDULED",
-				"LATE",
-			],
-			mapping: {
-				COMPLETED: { type: "COMPLETED", name: "Completed" },
-				FAILED: { type: "FAILED", name: "Failed" },
-				RUNNING: { type: "RUNNING", name: "Running" },
-				PENDING: { type: "PENDING", name: "Pending" },
-				PAUSED: { type: "PAUSED", name: "Paused" },
-				CANCELLED: { type: "CANCELLED", name: "Cancelled" },
-				CANCELLING: { type: "CANCELLING", name: "Cancelling" },
-				CRASHED: { type: "CRASHED", name: "Crashed" },
-				SCHEDULED: { type: "SCHEDULED", name: "Scheduled" },
-				LATE: { type: "SCHEDULED", name: "Late" },
-			},
-		},
-	},
-} satisfies Meta<typeof StateBadge>;
+const badgesByState: Record<components["schemas"]["StateType"], string[]> = {
+	COMPLETED: ["Completed"],
+	FAILED: ["Failed"],
+	RUNNING: ["Running"],
+	PENDING: ["Pending"],
+	PAUSED: ["Paused"],
+	CANCELLED: ["Cancelled"],
+	CANCELLING: ["Cancelling"],
+	CRASHED: ["Crashed"],
+	SCHEDULED: ["Scheduled", "Late"],
+};
 
-export default meta;
-type Story = StoryObj<typeof meta>;
+export const story: StoryObj = { name: "StateBadge" };
 
-export const States: Story = {
-	args: {
-		state: {
-			type: "COMPLETED",
-			name: "Completed",
-		},
+export default {
+	title: "UI/StateBadge",
+	component: function StateBadgeStories() {
+		return (
+			<div className="flex flex-wrap gap-2">
+				{Object.entries(badgesByState).map(([type, names]) =>
+					names.map((name) => (
+						<StateBadge
+							key={name}
+							state={{
+								type: type as components["schemas"]["StateType"],
+								name,
+							}}
+						/>
+					)),
+				)}
+			</div>
+		);
 	},
-};
+} satisfies Meta;
diff --git a/ui-v2/src/components/ui/state-badge/state-badge.test.tsx b/ui-v2/src/components/ui/state-badge/state-badge.test.tsx
index b46aa153ff93..da73b15ec0fd 100644
--- a/ui-v2/src/components/ui/state-badge/state-badge.test.tsx
+++ b/ui-v2/src/components/ui/state-badge/state-badge.test.tsx
@@ -1,15 +1,15 @@
 import { render, screen } from "@testing-library/react";
-import { StateBadge } from "./index";
 import {
+	BanIcon,
+	CheckIcon,
 	ClockIcon,
 	PauseIcon,
-	XIcon,
-	CheckIcon,
-	ServerCrashIcon,
-	BanIcon,
 	PlayIcon,
+	ServerCrashIcon,
+	XIcon,
 } from "lucide-react";
 import { describe, expect, test } from "vitest";
+import { StateBadge } from "./index";
 
 describe("StateBadge", () => {
 	const states = [
@@ -87,7 +87,7 @@ describe("StateBadge", () => {
 			SCHEDULED: "bg-yellow-100 text-yellow-700 hover:bg-yellow-100",
 		}[type];
 
-		expect(badge?.parentElement).toHaveClass(...expectedClasses.split(" "));
+		expect(badge).toHaveClass(...expectedClasses.split(" "));
 	},
 	);
 });
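For context on the pattern adopted above: cva() builds a className generator from a base class string plus named variants, and the generated function is called with a variant key at render time. A self-contained sketch with illustrative names:

    import { cva } from "class-variance-authority";

    // Base classes come first; each variant key maps to additional classes.
    const chipVariants = cva("gap-1 rounded px-2", {
    	variants: {
    		tone: {
    			POSITIVE: "bg-green-50 text-green-600",
    			NEGATIVE: "bg-red-50 text-red-600",
    		},
    	},
    });

    // chipVariants({ tone: "POSITIVE" })
    // => "gap-1 rounded px-2 bg-green-50 text-green-600"
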
From d47081f81676b95d3daea4497c6116afe8bb08d5 Mon Sep 17 00:00:00 2001
From: Devin Villarosa <102188207+devinvillarosa@users.noreply.github.com>
Date: Wed, 4 Dec 2024 07:11:03 -0800
Subject: [PATCH 03/92] [UI v2] feat: Adds EmptyState for Task Run Concurrency
 Limits (#16203)

---
 ...run-concurrency-limit-empty-state.test.tsx | 18 +++++++++++
 ...task-run-concurrency-limit-empty-state.tsx | 32 +++++++++++++++++++
 ui-v2/src/components/ui/docs-link.tsx         |  2 ++
 3 files changed, 52 insertions(+)
 create mode 100644 ui-v2/src/components/concurrency/task-run-concurrency-limit-empty-state.test.tsx
 create mode 100644 ui-v2/src/components/concurrency/task-run-concurrency-limit-empty-state.tsx

diff --git a/ui-v2/src/components/concurrency/task-run-concurrency-limit-empty-state.test.tsx b/ui-v2/src/components/concurrency/task-run-concurrency-limit-empty-state.test.tsx
new file mode 100644
index 000000000000..34b64a9531b6
--- /dev/null
+++ b/ui-v2/src/components/concurrency/task-run-concurrency-limit-empty-state.test.tsx
@@ -0,0 +1,18 @@
+import { render, screen } from "@testing-library/react";
+import userEvent from "@testing-library/user-event";
+import { describe, expect, it, vi } from "vitest";
+import { TaskRunConcurrencyLimitEmptyState } from "./task-run-concurrency-limit-empty-state";
+
+describe("TaskRunConcurrencyLimitEmptyState", () => {
+	it("when adding task run concurrency limit, callback gets fired", async () => {
+		const user = userEvent.setup();
+
+		const mockFn = vi.fn();
+
+		render(<TaskRunConcurrencyLimitEmptyState onClick={mockFn} />);
+		await user.click(
+			screen.getByRole("button", { name: /Add Concurrency Limit/i }),
+		);
+		expect(mockFn).toHaveBeenCalledOnce();
+	});
+});
diff --git a/ui-v2/src/components/concurrency/task-run-concurrency-limit-empty-state.tsx b/ui-v2/src/components/concurrency/task-run-concurrency-limit-empty-state.tsx
new file mode 100644
index 000000000000..7256ff8b81a4
--- /dev/null
+++ b/ui-v2/src/components/concurrency/task-run-concurrency-limit-empty-state.tsx
@@ -0,0 +1,32 @@
+import { Button } from "@/components/ui/button";
+import { DocsLink } from "@/components/ui/docs-link";
+import {
+	EmptyState,
+	EmptyStateActions,
+	EmptyStateDescription,
+	EmptyStateIcon,
+	EmptyStateTitle,
+} from "@/components/ui/empty-state";
+import { PlusIcon } from "lucide-react";
+
+type Props = {
+	onClick: () => void;
+};
+export const TaskRunConcurrencyLimitEmptyState = ({ onClick }: Props) => (
+	<EmptyState>
+		<EmptyStateIcon id="Workflow" />
+		<EmptyStateTitle>
+			Add a concurrency limit for your task runs
+		</EmptyStateTitle>
+		<EmptyStateDescription>
+			Creating a limit allows you to limit the number of tasks running
+			simultaneously <br /> with a given tag.
+		</EmptyStateDescription>
+		<EmptyStateActions>
+			<Button onClick={onClick}>
+				Add Concurrency Limit <PlusIcon className="h-4 w-4 ml-2" />
+			</Button>
+			<DocsLink id="task-concurrency-guide" />
+		</EmptyStateActions>
+	</EmptyState>
+);
diff --git a/ui-v2/src/components/ui/docs-link.tsx b/ui-v2/src/components/ui/docs-link.tsx
index 2669d8bdc5bf..c2dc4ee6ad7a 100644
--- a/ui-v2/src/components/ui/docs-link.tsx
+++ b/ui-v2/src/components/ui/docs-link.tsx
@@ -5,6 +5,8 @@ import { Button } from "./button";
 const DOCS_LINKS = {
 	"global-concurrency-guide":
 		"https://docs.prefect.io/v3/develop/global-concurrency-limits",
+	"task-concurrency-guide":
+		"https://docs.prefect.io/v3/develop/task-run-limits",
 	"variables-guide": "https://docs.prefect.io/latest/guides/variables/",
 } as const;

From 47aa8f2a62d3f1bb741c55f5e11476a4eed5b933 Mon Sep 17 00:00:00 2001
From: Kevin Grismore <146098880+kevingrismore@users.noreply.github.com>
Date: Wed, 4 Dec 2024 09:47:50 -0600
Subject: [PATCH 04/92] Add `from_profiles_yml` to dbt `TargetConfigs` (#16178)

---
 .../prefect_dbt/cli/configs/base.py           |   77 +-
 .../prefect-dbt/prefect_dbt/utilities.py      |   40 +
 .../tests/cli/configs/test_base.py            |  116 ++
 .../prefect-dbt/tests/cloud/test_jobs.py      | 1392 +++++++++--------
 .../prefect-dbt/tests/cloud/test_runs.py      |  290 ++--
 .../prefect-dbt/tests/cloud/test_utils.py     |  108 +-
 .../prefect-dbt/tests/test_utilities.py       |   74 +
 7 files changed, 1214 insertions(+), 883 deletions(-)
 create mode 100644 src/integrations/prefect-dbt/prefect_dbt/utilities.py
 create mode 100644 src/integrations/prefect-dbt/tests/test_utilities.py

diff --git a/src/integrations/prefect-dbt/prefect_dbt/cli/configs/base.py b/src/integrations/prefect-dbt/prefect_dbt/cli/configs/base.py
index 4aff4a005c1c..1104fc0b7a6c 100644
--- a/src/integrations/prefect-dbt/prefect_dbt/cli/configs/base.py
+++ b/src/integrations/prefect-dbt/prefect_dbt/cli/configs/base.py
@@ -2,11 +2,13 @@
 
 import abc
 from pathlib import Path
-from typing import Any, Dict, Optional
+from typing import Any, Dict, Optional, Type
 
 from pydantic import BaseModel, Field
+from typing_extensions import Self
 
 from prefect.blocks.core import Block
+from prefect_dbt.utilities import load_profiles_yml
 
 
 class DbtConfigs(Block, abc.ABC):
@@ -147,6 +149,79 @@ class TargetConfigs(BaseTargetConfigs):
     _logo_url = "https://images.ctfassets.net/gm98wzqotmnx/5zE9lxfzBHjw3tnEup4wWL/9a001902ed43a84c6c96d23b24622e19/dbt-bit_tm.png?h=250"  # noqa
     _documentation_url = "https://docs.prefect.io/integrations/prefect-dbt"  # noqa
 
+    @classmethod
+    def from_profiles_yml(
+        cls: Type[Self],
+        profile_name: Optional[str] = None,
+        target_name: Optional[str] = None,
+        profiles_dir: Optional[str] = None,
+        allow_field_overrides: bool = False,
+    ) -> "TargetConfigs":
+        """
+        Create a TargetConfigs instance from a dbt profiles.yml file.
+
+        Args:
+            profile_name: Name of the profile to use from profiles.yml.
+                If None, uses the first profile.
+            target_name: Name of the target to use from the profile.
+                If None, uses the default target in the selected profile.
+            profiles_dir: Path to the directory containing profiles.yml.
+                If None, uses the default profiles directory.
+            allow_field_overrides: If enabled, fields from dbt target configs
+                will override fields provided in extras and credentials.
+
+        Returns:
+            A TargetConfigs instance populated from the profiles.yml target.
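+
+        Example:
+            A hypothetical call against a local profiles.yml; the profile and
+            target names below are illustrative:
+
+            target_configs = TargetConfigs.from_profiles_yml(
+                profile_name="jaffle_shop",
+                target_name="dev",
+            )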
+ + Raises: + ValueError: If profiles.yml is not found or if profile/target is invalid + """ + profiles = load_profiles_yml(profiles_dir) + + # If no profile specified, use first non-config one + if profile_name is None: + for name in profiles: + if name != "config": + profile_name = name + break + elif profile_name not in profiles: + raise ValueError(f"Profile {profile_name} not found in profiles.yml") + + profile = profiles[profile_name] + if "outputs" not in profile: + raise ValueError(f"No outputs found in profile {profile_name}") + + outputs = profile["outputs"] + + # If no target specified, use default target + if target_name is None: + target_name = profile["target"] + elif target_name not in outputs: + raise ValueError( + f"Target {target_name} not found in profile {profile_name}" + ) + + target_config = outputs[target_name] + + type = target_config.pop("type") + schema = None + possible_keys = ["schema", "path", "dataset", "database"] + for key in possible_keys: + if key in target_config: + schema = target_config.pop(key) + break + + if schema is None: + raise ValueError(f"No schema found. Expected one of: {possible_keys}") + threads = target_config.pop("threads", 4) + return cls( + type=type, + schema=schema, + threads=threads, + extras=target_config or None, + allow_field_overrides=allow_field_overrides, + ) + class GlobalConfigs(DbtConfigs): """ diff --git a/src/integrations/prefect-dbt/prefect_dbt/utilities.py b/src/integrations/prefect-dbt/prefect_dbt/utilities.py new file mode 100644 index 000000000000..9430e869ef0a --- /dev/null +++ b/src/integrations/prefect-dbt/prefect_dbt/utilities.py @@ -0,0 +1,40 @@ +""" +Utility functions for prefect-dbt +""" +import os +from typing import Any, Dict, Optional + +import yaml + + +def get_profiles_dir() -> str: + """Get the dbt profiles directory from environment or default location.""" + profiles_dir = os.getenv("DBT_PROFILES_DIR") + if not profiles_dir: + profiles_dir = os.path.expanduser("~/.dbt") + return profiles_dir + + +def load_profiles_yml(profiles_dir: Optional[str]) -> Dict[str, Any]: + """ + Load and parse the profiles.yml file. + + Args: + profiles_dir: Path to the directory containing profiles.yml. + If None, uses the default profiles directory. 
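+
+    Example:
+        A hypothetical call; the directory path is illustrative:
+
+        profiles = load_profiles_yml("/home/user/.dbt")
+        dev_output = profiles["jaffle_shop"]["outputs"]["dev"]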
+ + Returns: + Dict containing the parsed profiles.yml contents + + Raises: + ValueError: If profiles.yml is not found + """ + if profiles_dir is None: + profiles_dir = get_profiles_dir() + + profiles_path = os.path.join(profiles_dir, "profiles.yml") + if not os.path.exists(profiles_path): + raise ValueError(f"No profiles.yml found at {profiles_path}") + + with open(profiles_path, "r") as f: + return yaml.safe_load(f) diff --git a/src/integrations/prefect-dbt/tests/cli/configs/test_base.py b/src/integrations/prefect-dbt/tests/cli/configs/test_base.py index dafd1aa625c1..3d5dd39a68c5 100644 --- a/src/integrations/prefect-dbt/tests/cli/configs/test_base.py +++ b/src/integrations/prefect-dbt/tests/cli/configs/test_base.py @@ -1,8 +1,48 @@ from pathlib import Path +from unittest.mock import patch import pytest from prefect_dbt.cli.configs.base import GlobalConfigs, TargetConfigs +SAMPLE_PROFILES = { + "jaffle_shop": { + "outputs": { + "dev": { + "type": "duckdb", + "path": "jaffle_shop.duckdb", + "schema": "main", + "threads": 4, + }, + "prod": { + "type": "duckdb", + "path": "/data/prod/jaffle_shop.duckdb", + "schema": "main", + "threads": 8, + }, + }, + "target": "prod", + }, + "other_project": { + "outputs": { + "dev": { + "type": "duckdb", + "path": "other_project.duckdb", + "schema": "analytics", + "threads": 4, + } + }, + "target": "dev", + }, + "config": {"partial_parse": True}, +} + + +@pytest.fixture +def mock_load_profiles(): + with patch("prefect_dbt.cli.configs.base.load_profiles_yml") as mock: + mock.return_value = SAMPLE_PROFILES + yield mock + def test_target_configs_get_configs(): target_configs = TargetConfigs( @@ -41,3 +81,79 @@ def test_global_configs(): global_configs = GlobalConfigs(log_format="json", send_anonymous_usage_stats=False) assert global_configs.log_format == "json" assert global_configs.send_anonymous_usage_stats is False + + +def test_from_profiles_yml_default_profile_target(mock_load_profiles): + target_configs = TargetConfigs.from_profiles_yml() + + assert target_configs.type == "duckdb" + assert target_configs.schema_ == "main" + assert target_configs.threads == 8 + assert target_configs.extras == {"path": "/data/prod/jaffle_shop.duckdb"} + + +def test_from_profiles_yml_explicit_profile_target(mock_load_profiles): + target_configs = TargetConfigs.from_profiles_yml( + profile_name="other_project", target_name="dev" + ) + + assert target_configs.type == "duckdb" + assert target_configs.schema_ == "analytics" + assert target_configs.threads == 4 + assert target_configs.extras == {"path": "other_project.duckdb"} + + +def test_from_profiles_yml_invalid_profile(mock_load_profiles): + with pytest.raises(ValueError, match="Profile invalid_profile not found"): + TargetConfigs.from_profiles_yml(profile_name="invalid_profile") + + +def test_from_profiles_yml_invalid_target(mock_load_profiles): + with pytest.raises(ValueError, match="Target invalid_target not found"): + TargetConfigs.from_profiles_yml( + profile_name="jaffle_shop", target_name="invalid_target" + ) + + +def test_from_profiles_yml_no_outputs(mock_load_profiles): + mock_load_profiles.return_value = {"broken": {"some_other_key": {}}} + with pytest.raises(ValueError, match="No outputs found in profile broken"): + TargetConfigs.from_profiles_yml(profile_name="broken") + + +def test_from_profiles_yml_no_schema(mock_load_profiles): + mock_load_profiles.return_value = { + "test": { + "outputs": { + "dev": { + "type": "postgres", + "threads": 4, + # Missing schema field + "host": "localhost", + } + }, + 
"target": "dev", + } + } + with pytest.raises(ValueError, match="No schema found"): + TargetConfigs.from_profiles_yml(profile_name="test") + + +def test_from_profiles_yml_alternative_schema_keys(mock_load_profiles): + mock_profiles = { + "test": { + "outputs": { + "dev": { + "type": "bigquery", + "threads": 4, + "dataset": "my_dataset", # Alternative to schema + "project": "my_project", + } + }, + "target": "dev", + } + } + mock_load_profiles.return_value = mock_profiles + + target_configs = TargetConfigs.from_profiles_yml(profile_name="test") + assert target_configs.schema_ == "my_dataset" diff --git a/src/integrations/prefect-dbt/tests/cloud/test_jobs.py b/src/integrations/prefect-dbt/tests/cloud/test_jobs.py index 621dd27e260d..28d33bc2ebd0 100644 --- a/src/integrations/prefect-dbt/tests/cloud/test_jobs.py +++ b/src/integrations/prefect-dbt/tests/cloud/test_jobs.py @@ -2,6 +2,7 @@ import os import pytest +import respx from httpx import Response from prefect_dbt.cloud.credentials import DbtCloudCredentials from prefect_dbt.cloud.exceptions import ( @@ -37,12 +38,6 @@ def dbt_cloud_job(dbt_cloud_credentials): return DbtCloudJob(job_id=10000, dbt_cloud_credentials=dbt_cloud_credentials) -@pytest.fixture -def respx_mock_with_pass_through(respx_mock): - respx_mock.route(host="127.0.0.1").pass_through() - return respx_mock - - HEADERS = { "Authorization": "Bearer my_api_key", "x-dbt-partner-source": "prefect", @@ -51,403 +46,390 @@ def respx_mock_with_pass_through(respx_mock): class TestTriggerDbtCloudJobRun: - async def test_get_dbt_cloud_job_info( - self, respx_mock_with_pass_through, dbt_cloud_credentials - ): - respx_mock_with_pass_through.route(host="127.0.0.1").pass_through() - respx_mock_with_pass_through.get( - "https://cloud.getdbt.com/api/v2/accounts/123456789/jobs/12/", - headers=HEADERS, - ).mock(return_value=Response(200, json={"data": {"id": 10000}})) - - response = await get_dbt_cloud_job_info.fn( - dbt_cloud_credentials=dbt_cloud_credentials, - job_id=12, - order_by="id", - ) - - assert response == {"id": 10000} - - async def test_trigger_job_with_no_options( - self, respx_mock_with_pass_through, dbt_cloud_credentials - ): - respx_mock_with_pass_through.post( - "https://cloud.getdbt.com/api/v2/accounts/123456789/jobs/1/run/", - headers=HEADERS, - ).mock( - return_value=Response( - 200, json={"data": {"id": 10000, "project_id": 12345}} + async def test_get_dbt_cloud_job_info(self, dbt_cloud_credentials): + with respx.mock(using="httpx", assert_all_called=False) as respx_mock: + respx_mock.route(host="127.0.0.1").pass_through() + respx_mock.get( + "https://cloud.getdbt.com/api/v2/accounts/123456789/jobs/12/", + headers=HEADERS, + ).mock(return_value=Response(200, json={"data": {"id": 10000}})) + + response = await get_dbt_cloud_job_info.fn( + dbt_cloud_credentials=dbt_cloud_credentials, + job_id=12, + order_by="id", ) - ) - with disable_run_logger(): - result = await trigger_dbt_cloud_job_run.fn( - dbt_cloud_credentials=dbt_cloud_credentials, - job_id=1, + assert response == {"id": 10000} + + async def test_trigger_job_with_no_options(self, dbt_cloud_credentials): + with respx.mock(using="httpx") as respx_mock: + respx_mock.post( + "https://cloud.getdbt.com/api/v2/accounts/123456789/jobs/1/run/", + headers=HEADERS, + ).mock( + return_value=Response( + 200, json={"data": {"id": 10000, "project_id": 12345}} + ) ) - assert result == {"id": 10000, "project_id": 12345} + with disable_run_logger(): + result = await trigger_dbt_cloud_job_run.fn( + 
dbt_cloud_credentials=dbt_cloud_credentials, + job_id=1, + ) - request_body = json.loads( - respx_mock_with_pass_through.calls.last.request.content.decode() - ) - assert "Triggered via Prefect" in request_body["cause"] + assert result == {"id": 10000, "project_id": 12345} - async def test_trigger_with_custom_options( - self, respx_mock_with_pass_through, dbt_cloud_credentials - ): - respx_mock_with_pass_through.route(host="127.0.0.1").pass_through() - respx_mock_with_pass_through.post( - "https://cloud.getdbt.com/api/v2/accounts/123456789/jobs/1/run/", - headers=HEADERS, - json={ - "cause": "This is a custom cause", - "git_branch": "staging", - "schema_override": "dbt_cloud_pr_123", - "dbt_version_override": "0.18.0", - "threads_override": 8, - "target_name_override": "staging", - "generate_docs_override": True, - "timeout_seconds_override": 3000, - "steps_override": [ - "dbt seed", - "dbt run --fail-fast", - "dbt test --fail fast", - ], - }, - ).mock( - return_value=Response( - 200, json={"data": {"id": 10000, "project_id": 12345}} - ) - ) + request_body = json.loads(respx_mock.calls.last.request.content.decode()) + assert "Triggered via Prefect" in request_body["cause"] - @flow - async def test_trigger_with_custom_options(): - return await trigger_dbt_cloud_job_run( - dbt_cloud_credentials=dbt_cloud_credentials, - job_id=1, - options=TriggerJobRunOptions( - cause="This is a custom cause", - git_branch="staging", - schema_override="dbt_cloud_pr_123", - dbt_version_override="0.18.0", - target_name_override="staging", - timeout_seconds_override=3000, - generate_docs_override=True, - threads_override=8, - steps_override=[ + async def test_trigger_with_custom_options(self, dbt_cloud_credentials): + with respx.mock(using="httpx") as respx_mock: + respx_mock.route(host="127.0.0.1").pass_through() + respx_mock.post( + "https://cloud.getdbt.com/api/v2/accounts/123456789/jobs/1/run/", + headers=HEADERS, + json={ + "cause": "This is a custom cause", + "git_branch": "staging", + "schema_override": "dbt_cloud_pr_123", + "dbt_version_override": "0.18.0", + "threads_override": 8, + "target_name_override": "staging", + "generate_docs_override": True, + "timeout_seconds_override": 3000, + "steps_override": [ "dbt seed", "dbt run --fail-fast", "dbt test --fail fast", ], - ), - ) - - result = await test_trigger_with_custom_options() - assert result == {"id": 10000, "project_id": 12345} - - async def test_trigger_nonexistent_job( - self, respx_mock_with_pass_through, dbt_cloud_credentials - ): - respx_mock_with_pass_through.route(host="127.0.0.1").pass_through() - respx_mock_with_pass_through.post( - "https://cloud.getdbt.com/api/v2/accounts/123456789/jobs/1/run/", - headers=HEADERS, - ).mock( - return_value=Response(404, json={"status": {"user_message": "Not found!"}}) - ) - - @flow - async def test_trigger_nonexistent_job(): - task_shorter_retry = trigger_dbt_cloud_job_run.with_options( - retries=1, retry_delay_seconds=1 - ) - await task_shorter_retry( - dbt_cloud_credentials=dbt_cloud_credentials, - job_id=1, - ) - - with pytest.raises(DbtCloudJobRunTriggerFailed, match="Not found!"): - await test_trigger_nonexistent_job() + }, + ).mock( + return_value=Response( + 200, json={"data": {"id": 10000, "project_id": 12345}} + ) + ) + + @flow + async def test_trigger_with_custom_options(): + return await trigger_dbt_cloud_job_run( + dbt_cloud_credentials=dbt_cloud_credentials, + job_id=1, + options=TriggerJobRunOptions( + cause="This is a custom cause", + git_branch="staging", + 
schema_override="dbt_cloud_pr_123", + dbt_version_override="0.18.0", + target_name_override="staging", + timeout_seconds_override=3000, + generate_docs_override=True, + threads_override=8, + steps_override=[ + "dbt seed", + "dbt run --fail-fast", + "dbt test --fail fast", + ], + ), + ) + + result = await test_trigger_with_custom_options() + assert result == {"id": 10000, "project_id": 12345} + + async def test_trigger_nonexistent_job(self, dbt_cloud_credentials): + with respx.mock(using="httpx", assert_all_called=False) as respx_mock: + respx_mock.route(host="127.0.0.1").pass_through() + respx_mock.post( + "https://cloud.getdbt.com/api/v2/accounts/123456789/jobs/1/run/", + headers=HEADERS, + ).mock( + return_value=Response( + 404, json={"status": {"user_message": "Not found!"}} + ) + ) + + @flow + async def test_trigger_nonexistent_job(): + task_shorter_retry = trigger_dbt_cloud_job_run.with_options( + retries=1, retry_delay_seconds=1 + ) + await task_shorter_retry( + dbt_cloud_credentials=dbt_cloud_credentials, + job_id=1, + ) + + with pytest.raises(DbtCloudJobRunTriggerFailed, match="Not found!"): + await test_trigger_nonexistent_job() async def test_trigger_nonexistent_run_id_no_logs( - self, respx_mock_with_pass_through, dbt_cloud_credentials, caplog + self, dbt_cloud_credentials, caplog ): - respx_mock_with_pass_through.route(host="127.0.0.1").pass_through() - respx_mock_with_pass_through.post( - "https://cloud.getdbt.com/api/v2/accounts/123456789/jobs/1/run/", - headers=HEADERS, - ).mock(return_value=Response(200, json={"data": {"project_id": 12345}})) - - @flow - async def trigger_nonexistent_run_id(): - task_shorter_retry = trigger_dbt_cloud_job_run.with_options( - retries=1, retry_delay_seconds=1 - ) - await task_shorter_retry( - dbt_cloud_credentials=dbt_cloud_credentials, - job_id=1, - ) - - await trigger_nonexistent_run_id() + with respx.mock(using="httpx", assert_all_called=False) as respx_mock: + respx_mock.route(host="127.0.0.1").pass_through() + respx_mock.post( + "https://cloud.getdbt.com/api/v2/accounts/123456789/jobs/1/run/", + headers=HEADERS, + ).mock(return_value=Response(200, json={"data": {"project_id": 12345}})) + + @flow + async def trigger_nonexistent_run_id(): + task_shorter_retry = trigger_dbt_cloud_job_run.with_options( + retries=1, retry_delay_seconds=1 + ) + await task_shorter_retry( + dbt_cloud_credentials=dbt_cloud_credentials, + job_id=1, + ) + + await trigger_nonexistent_run_id() class TestTriggerDbtCloudJobRunAndWaitForCompletion: - @pytest.mark.respx(assert_all_called=True) - async def test_run_success( - self, respx_mock_with_pass_through, dbt_cloud_credentials - ): - respx_mock_with_pass_through.route(host="127.0.0.1").pass_through() - respx_mock_with_pass_through.post( - "https://cloud.getdbt.com/api/v2/accounts/123456789/jobs/1/run/", - headers=HEADERS, - ).mock( - return_value=Response( - 200, json={"data": {"id": 10000, "project_id": 12345}} - ) - ) - respx_mock_with_pass_through.get( - "https://cloud.getdbt.com/api/v2/accounts/123456789/runs/10000/", - headers=HEADERS, - ).mock(return_value=Response(200, json={"data": {"id": 10000, "status": 10}})) - respx_mock_with_pass_through.get( - "https://cloud.getdbt.com/api/v2/accounts/123456789/runs/10000/artifacts/", - headers=HEADERS, - ).mock(return_value=Response(200, json={"data": ["manifest.json"]})) - - result = await trigger_dbt_cloud_job_run_and_wait_for_completion( - dbt_cloud_credentials=dbt_cloud_credentials, job_id=1 - ) - assert result == { - "id": 10000, - "status": 10, - 
"artifact_paths": ["manifest.json"], - } - - @pytest.mark.respx(assert_all_called=True) - async def test_run_success_with_wait( - self, respx_mock_with_pass_through, dbt_cloud_credentials - ): - respx_mock_with_pass_through.route(host="127.0.0.1").pass_through() - respx_mock_with_pass_through.post( - "https://cloud.getdbt.com/api/v2/accounts/123456789/jobs/1/run/", - headers=HEADERS, - ).mock( - return_value=Response( - 200, json={"data": {"id": 10000, "project_id": 12345}} - ) - ) - respx_mock_with_pass_through.get( - "https://cloud.getdbt.com/api/v2/accounts/123456789/runs/10000/", - headers=HEADERS, - ).mock( - side_effect=[ - Response(200, json={"data": {"id": 10000, "status": 1}}), - Response(200, json={"data": {"id": 10000, "status": 3}}), - Response(200, json={"data": {"id": 10000, "status": 10}}), - ] - ) - respx_mock_with_pass_through.get( - "https://cloud.getdbt.com/api/v2/accounts/123456789/runs/10000/artifacts/", - headers=HEADERS, - ).mock(return_value=Response(200, json={"data": ["manifest.json"]})) - - result = await trigger_dbt_cloud_job_run_and_wait_for_completion( - dbt_cloud_credentials=dbt_cloud_credentials, - job_id=1, - poll_frequency_seconds=1, - ) - assert result == { - "id": 10000, - "status": 10, - "artifact_paths": ["manifest.json"], - } - - @pytest.mark.respx(assert_all_called=True) - async def test_run_failure_with_wait_and_retry( - self, respx_mock_with_pass_through, dbt_cloud_credentials - ): - respx_mock_with_pass_through.route(host="127.0.0.1").pass_through() - respx_mock_with_pass_through.post( - "https://cloud.getdbt.com/api/v2/accounts/123456789/jobs/1/run/", - headers=HEADERS, - ).mock( - return_value=Response( - 200, json={"data": {"id": 10000, "project_id": 12345}} - ) - ) - respx_mock_with_pass_through.get( - "https://cloud.getdbt.com/api/v2/accounts/123456789/runs/10000/", - headers=HEADERS, - ).mock( - side_effect=[ - Response(200, json={"data": {"id": 10000, "status": 1}}), - Response(200, json={"data": {"id": 10000, "status": 3}}), - Response( - 200, json={"data": {"id": 10000, "status": 20}} - ), # failed status - ] - ) - - with pytest.raises(DbtCloudJobRunFailed): - await trigger_dbt_cloud_job_run_and_wait_for_completion( - dbt_cloud_credentials=dbt_cloud_credentials, - job_id=1, - poll_frequency_seconds=1, - retry_filtered_models_attempts=1, - ) - - @pytest.mark.respx(assert_all_called=True) - async def test_run_with_unexpected_status( - self, respx_mock_with_pass_through, dbt_cloud_credentials - ): - respx_mock_with_pass_through.route(host="127.0.0.1").pass_through() - respx_mock_with_pass_through.post( - "https://cloud.getdbt.com/api/v2/accounts/123456789/jobs/1/run/", - headers=HEADERS, - ).mock( - return_value=Response( - 200, json={"data": {"id": 10000, "project_id": 12345}} - ) - ) - respx_mock_with_pass_through.get( - "https://cloud.getdbt.com/api/v2/accounts/123456789/runs/10000/", - headers=HEADERS, - ).mock( - side_effect=[ - Response(200, json={"data": {"id": 10000, "status": 1}}), - Response(200, json={"data": {"id": 10000, "status": 3}}), - Response( - 200, json={"data": {"id": 10000, "status": 42}} - ), # unknown status - ] - ) - - with pytest.raises(ValueError, match="42 is not a valid DbtCloudJobRunStatus"): - await trigger_dbt_cloud_job_run_and_wait_for_completion( - dbt_cloud_credentials=dbt_cloud_credentials, - job_id=1, - poll_frequency_seconds=1, - retry_filtered_models_attempts=0, - ) - - @pytest.mark.respx(assert_all_called=True) - async def test_run_failure_no_run_id( - self, respx_mock_with_pass_through, 
dbt_cloud_credentials - ): - respx_mock_with_pass_through.route(host="127.0.0.1").pass_through() - respx_mock_with_pass_through.post( - "https://cloud.getdbt.com/api/v2/accounts/123456789/jobs/1/run/", - headers=HEADERS, - ).mock(return_value=Response(200, json={"data": {"project_id": 12345}})) - - with pytest.raises(RuntimeError, match="Unable to determine run ID"): - await trigger_dbt_cloud_job_run_and_wait_for_completion( - dbt_cloud_credentials=dbt_cloud_credentials, - job_id=1, - poll_frequency_seconds=1, - ) - - @pytest.mark.respx(assert_all_called=True) - async def test_run_cancelled_with_wait( - self, respx_mock_with_pass_through, dbt_cloud_credentials - ): - respx_mock_with_pass_through.route(host="127.0.0.1").pass_through() - respx_mock_with_pass_through.post( - "https://cloud.getdbt.com/api/v2/accounts/123456789/jobs/1/run/", - headers=HEADERS, - ).mock( - return_value=Response( - 200, json={"data": {"id": 10000, "project_id": 12345}} - ) - ) - respx_mock_with_pass_through.get( - "https://cloud.getdbt.com/api/v2/accounts/123456789/runs/10000/", - headers=HEADERS, - ).mock( - side_effect=[ - Response(200, json={"data": {"id": 10000, "status": 1}}), - Response(200, json={"data": {"id": 10000, "status": 3}}), - Response(200, json={"data": {"id": 10000, "status": 30}}), - ] - ) - - with pytest.raises(DbtCloudJobRunCancelled): - await trigger_dbt_cloud_job_run_and_wait_for_completion( - dbt_cloud_credentials=dbt_cloud_credentials, - job_id=1, - poll_frequency_seconds=1, - retry_filtered_models_attempts=0, - ) - - @pytest.mark.respx(assert_all_called=True) - async def test_run_timed_out( - self, respx_mock_with_pass_through, dbt_cloud_credentials - ): - respx_mock_with_pass_through.post( - "https://cloud.getdbt.com/api/v2/accounts/123456789/jobs/1/run/", - headers=HEADERS, - ).mock( - return_value=Response( - 200, json={"data": {"id": 10000, "project_id": 12345}} - ) - ) - respx_mock_with_pass_through.get( - "https://cloud.getdbt.com/api/v2/accounts/123456789/runs/10000/", - headers=HEADERS, - ).mock( - side_effect=[ - Response(200, json={"data": {"id": 10000, "status": 1}}), - Response(200, json={"data": {"id": 10000, "status": 3}}), - Response(200, json={"data": {"id": 10000, "status": 3}}), - Response(200, json={"data": {"id": 10000, "status": 3}}), - ] - ) - - with pytest.raises(DbtCloudJobRunTimedOut): - await trigger_dbt_cloud_job_run_and_wait_for_completion( + async def test_run_success(self, dbt_cloud_credentials): + with respx.mock(using="httpx") as respx_mock: + respx_mock.route(host="127.0.0.1").pass_through() + respx_mock.post( + "https://cloud.getdbt.com/api/v2/accounts/123456789/jobs/1/run/", + headers=HEADERS, + ).mock( + return_value=Response( + 200, json={"data": {"id": 10000, "project_id": 12345}} + ) + ) + respx_mock.get( + "https://cloud.getdbt.com/api/v2/accounts/123456789/runs/10000/", + headers=HEADERS, + ).mock( + return_value=Response(200, json={"data": {"id": 10000, "status": 10}}) + ) + respx_mock.get( + "https://cloud.getdbt.com/api/v2/accounts/123456789/runs/10000/artifacts/", + headers=HEADERS, + ).mock(return_value=Response(200, json={"data": ["manifest.json"]})) + + result = await trigger_dbt_cloud_job_run_and_wait_for_completion( + dbt_cloud_credentials=dbt_cloud_credentials, job_id=1 + ) + assert result == { + "id": 10000, + "status": 10, + "artifact_paths": ["manifest.json"], + } + + async def test_run_success_with_wait(self, dbt_cloud_credentials): + with respx.mock(using="httpx") as respx_mock: + respx_mock.route(host="127.0.0.1").pass_through() + 
respx_mock.post( + "https://cloud.getdbt.com/api/v2/accounts/123456789/jobs/1/run/", + headers=HEADERS, + ).mock( + return_value=Response( + 200, json={"data": {"id": 10000, "project_id": 12345}} + ) + ) + respx_mock.get( + "https://cloud.getdbt.com/api/v2/accounts/123456789/runs/10000/", + headers=HEADERS, + ).mock( + side_effect=[ + Response(200, json={"data": {"id": 10000, "status": 1}}), + Response(200, json={"data": {"id": 10000, "status": 3}}), + Response(200, json={"data": {"id": 10000, "status": 10}}), + ] + ) + respx_mock.get( + "https://cloud.getdbt.com/api/v2/accounts/123456789/runs/10000/artifacts/", + headers=HEADERS, + ).mock(return_value=Response(200, json={"data": ["manifest.json"]})) + + result = await trigger_dbt_cloud_job_run_and_wait_for_completion( dbt_cloud_credentials=dbt_cloud_credentials, job_id=1, poll_frequency_seconds=1, - max_wait_seconds=3, - retry_filtered_models_attempts=0, - ) - - @pytest.mark.respx(assert_all_called=True) - async def test_run_success_failed_artifacts( - self, respx_mock_with_pass_through, dbt_cloud_credentials - ): - respx_mock_with_pass_through.post( - "https://cloud.getdbt.com/api/v2/accounts/123456789/jobs/1/run/", - headers=HEADERS, - ).mock( - return_value=Response( - 200, json={"data": {"id": 10000, "project_id": 12345}} - ) - ) - respx_mock_with_pass_through.get( - "https://cloud.getdbt.com/api/v2/accounts/123456789/runs/10000/", - headers=HEADERS, - ).mock(return_value=Response(200, json={"data": {"id": 10000, "status": 10}})) - respx_mock_with_pass_through.get( - "https://cloud.getdbt.com/api/v2/accounts/123456789/runs/10000/artifacts/", - headers=HEADERS, - ).mock( - return_value=Response( - 500, json={"status": {"user_message": "This is what went wrong"}} ) - ) - - result = await trigger_dbt_cloud_job_run_and_wait_for_completion( - dbt_cloud_credentials=dbt_cloud_credentials, job_id=1 - ) - assert result == {"id": 10000, "status": 10} + assert result == { + "id": 10000, + "status": 10, + "artifact_paths": ["manifest.json"], + } + + async def test_run_failure_with_wait_and_retry(self, dbt_cloud_credentials): + with respx.mock(using="httpx") as respx_mock: + respx_mock.route(host="127.0.0.1").pass_through() + respx_mock.post( + "https://cloud.getdbt.com/api/v2/accounts/123456789/jobs/1/run/", + headers=HEADERS, + ).mock( + return_value=Response( + 200, json={"data": {"id": 10000, "project_id": 12345}} + ) + ) + respx_mock.get( + "https://cloud.getdbt.com/api/v2/accounts/123456789/runs/10000/", + headers=HEADERS, + ).mock( + side_effect=[ + Response(200, json={"data": {"id": 10000, "status": 1}}), + Response(200, json={"data": {"id": 10000, "status": 3}}), + Response( + 200, json={"data": {"id": 10000, "status": 20}} + ), # failed status + ] + ) + + with pytest.raises(DbtCloudJobRunFailed): + await trigger_dbt_cloud_job_run_and_wait_for_completion( + dbt_cloud_credentials=dbt_cloud_credentials, + job_id=1, + poll_frequency_seconds=1, + retry_filtered_models_attempts=1, + ) + + async def test_run_with_unexpected_status(self, dbt_cloud_credentials): + with respx.mock(using="httpx") as respx_mock: + respx_mock.route(host="127.0.0.1").pass_through() + respx_mock.post( + "https://cloud.getdbt.com/api/v2/accounts/123456789/jobs/1/run/", + headers=HEADERS, + ).mock( + return_value=Response( + 200, json={"data": {"id": 10000, "project_id": 12345}} + ) + ) + respx_mock.get( + "https://cloud.getdbt.com/api/v2/accounts/123456789/runs/10000/", + headers=HEADERS, + ).mock( + side_effect=[ + Response(200, json={"data": {"id": 10000, "status": 
1}}), + Response(200, json={"data": {"id": 10000, "status": 3}}), + Response( + 200, json={"data": {"id": 10000, "status": 42}} + ), # unknown status + ] + ) + + with pytest.raises( + ValueError, match="42 is not a valid DbtCloudJobRunStatus" + ): + await trigger_dbt_cloud_job_run_and_wait_for_completion( + dbt_cloud_credentials=dbt_cloud_credentials, + job_id=1, + poll_frequency_seconds=1, + retry_filtered_models_attempts=0, + ) + + async def test_run_failure_no_run_id(self, dbt_cloud_credentials): + with respx.mock(using="httpx") as respx_mock: + respx_mock.route(host="127.0.0.1").pass_through() + respx_mock.post( + "https://cloud.getdbt.com/api/v2/accounts/123456789/jobs/1/run/", + headers=HEADERS, + ).mock(return_value=Response(200, json={"data": {"project_id": 12345}})) + + with pytest.raises(RuntimeError, match="Unable to determine run ID"): + await trigger_dbt_cloud_job_run_and_wait_for_completion( + dbt_cloud_credentials=dbt_cloud_credentials, + job_id=1, + poll_frequency_seconds=1, + ) + + async def test_run_cancelled_with_wait(self, dbt_cloud_credentials): + with respx.mock(using="httpx") as respx_mock: + respx_mock.route(host="127.0.0.1").pass_through() + respx_mock.post( + "https://cloud.getdbt.com/api/v2/accounts/123456789/jobs/1/run/", + headers=HEADERS, + ).mock( + return_value=Response( + 200, json={"data": {"id": 10000, "project_id": 12345}} + ) + ) + respx_mock.get( + "https://cloud.getdbt.com/api/v2/accounts/123456789/runs/10000/", + headers=HEADERS, + ).mock( + side_effect=[ + Response(200, json={"data": {"id": 10000, "status": 1}}), + Response(200, json={"data": {"id": 10000, "status": 3}}), + Response(200, json={"data": {"id": 10000, "status": 30}}), + ] + ) + + with pytest.raises(DbtCloudJobRunCancelled): + await trigger_dbt_cloud_job_run_and_wait_for_completion( + dbt_cloud_credentials=dbt_cloud_credentials, + job_id=1, + poll_frequency_seconds=1, + retry_filtered_models_attempts=0, + ) + + async def test_run_timed_out(self, dbt_cloud_credentials): + with respx.mock(using="httpx") as respx_mock: + respx_mock.route(host="127.0.0.1").pass_through() + respx_mock.post( + "https://cloud.getdbt.com/api/v2/accounts/123456789/jobs/1/run/", + headers=HEADERS, + ).mock( + return_value=Response( + 200, json={"data": {"id": 10000, "project_id": 12345}} + ) + ) + respx_mock.get( + "https://cloud.getdbt.com/api/v2/accounts/123456789/runs/10000/", + headers=HEADERS, + ).mock( + side_effect=[ + Response(200, json={"data": {"id": 10000, "status": 1}}), + Response(200, json={"data": {"id": 10000, "status": 3}}), + Response(200, json={"data": {"id": 10000, "status": 3}}), + Response(200, json={"data": {"id": 10000, "status": 3}}), + ] + ) + + with pytest.raises(DbtCloudJobRunTimedOut): + await trigger_dbt_cloud_job_run_and_wait_for_completion( + dbt_cloud_credentials=dbt_cloud_credentials, + job_id=1, + poll_frequency_seconds=1, + max_wait_seconds=3, + retry_filtered_models_attempts=0, + ) + + async def test_run_success_failed_artifacts(self, dbt_cloud_credentials): + with respx.mock(using="httpx") as respx_mock: + respx_mock.route(host="127.0.0.1").pass_through() + respx_mock.post( + "https://cloud.getdbt.com/api/v2/accounts/123456789/jobs/1/run/", + headers=HEADERS, + ).mock( + return_value=Response( + 200, json={"data": {"id": 10000, "project_id": 12345}} + ) + ) + respx_mock.get( + "https://cloud.getdbt.com/api/v2/accounts/123456789/runs/10000/", + headers=HEADERS, + ).mock( + return_value=Response(200, json={"data": {"id": 10000, "status": 10}}) + ) + respx_mock.get( + 
"https://cloud.getdbt.com/api/v2/accounts/123456789/runs/10000/artifacts/", + headers=HEADERS, + ).mock( + return_value=Response( + 500, json={"status": {"user_message": "This is what went wrong"}} + ) + ) + + result = await trigger_dbt_cloud_job_run_and_wait_for_completion( + dbt_cloud_credentials=dbt_cloud_credentials, job_id=1 + ) + assert result == {"id": 10000, "status": 10} class TestRetryDbtCloudRunJobSubsetAndWaitForCompletion: - async def test_run_steps_override_error( - self, respx_mock_with_pass_through, dbt_cloud_credentials - ): + async def test_run_steps_override_error(self, dbt_cloud_credentials): with pytest.raises(ValueError, match="Do not set `steps_override"): await retry_dbt_cloud_job_run_subset_and_wait_for_completion( dbt_cloud_credentials=dbt_cloud_credentials, @@ -467,116 +449,117 @@ async def test_retry_run( self, trigger_job_run_options, exe_command, - respx_mock_with_pass_through, dbt_cloud_credentials, ): - respx_mock_with_pass_through.get( - "https://cloud.getdbt.com/api/v2/accounts/123456789/jobs/1/", - headers=HEADERS, - ).mock( - return_value=Response( - 200, - json={ - "data": { - "id": 10000, - "generate_docs": False, - "generate_sources": False, - } - }, - ) - ) - - # mock get_dbt_cloud_run_info - respx_mock_with_pass_through.get( - "https://cloud.getdbt.com/api/v2/accounts/123456789/runs/10000/", - headers=HEADERS, - ).mock( - return_value=Response( - 200, - json={ - "data": { - "id": 10000, - "status": 20, # failed status - "run_steps": [ - { - "id": 432100123, - "run_id": 10000, - "account_id": 123456789, - "index": 1, - "name": "Clone Git Repository", - "status_humanized": "Success", - }, - { - "id": 432100124, - "run_id": 10000, - "account_id": 123456789, - "index": 2, - "name": "Create Profile from Connection Snowflake ", - "status_humanized": "Success", - }, - { - "id": 432100125, - "run_id": 10000, - "account_id": 123456789, - "index": 3, - "name": "Invoke dbt with `dbt deps`", - "status_humanized": "Success", - }, + with respx.mock(using="httpx", assert_all_called=False) as respx_mock: + respx_mock.route(host="127.0.0.1").pass_through() + respx_mock.get( + "https://cloud.getdbt.com/api/v2/accounts/123456789/jobs/1/", + headers=HEADERS, + ).mock( + return_value=Response( + 200, + json={ + "data": { + "id": 10000, + "generate_docs": False, + "generate_sources": False, + } + }, + ) + ) + + # mock get_dbt_cloud_run_info + respx_mock.get( + "https://cloud.getdbt.com/api/v2/accounts/123456789/runs/10000/", + headers=HEADERS, + ).mock( + return_value=Response( + 200, + json={ + "data": { + "id": 10000, + "status": 20, # failed status + "run_steps": [ + { + "id": 432100123, + "run_id": 10000, + "account_id": 123456789, + "index": 1, + "name": "Clone Git Repository", + "status_humanized": "Success", + }, + { + "id": 432100124, + "run_id": 10000, + "account_id": 123456789, + "index": 2, + "name": "Create Profile from Connection Snowflake ", + "status_humanized": "Success", + }, + { + "id": 432100125, + "run_id": 10000, + "account_id": 123456789, + "index": 3, + "name": "Invoke dbt with `dbt deps`", + "status_humanized": "Success", + }, + { + "run_id": 10000, + "account_id": 123456789, + "index": 4, + "name": f"Invoke dbt with `dbt {exe_command}`", + "status_humanized": "Error", + }, + ], + "job_id": "1", + } + }, + ) + ) + + # mock list_dbt_cloud_run_artifacts + respx_mock.get( + "https://cloud.getdbt.com/api/v2/accounts/123456789/runs/10000/artifacts/", + headers=HEADERS, + ).mock(return_value=Response(200, json={"data": ["run_results.json"]})) + + # mock 
get_dbt_cloud_run_artifact + respx_mock.get( + "https://cloud.getdbt.com/api/v2/accounts/123456789/runs/10000/artifacts/run_results.json", # noqa + headers=HEADERS, + ).mock( + return_value=Response( + 200, + json={ + "metadata": {"env": {"DBT_CLOUD_JOB_ID": "1"}}, + "results": [ { - "run_id": 10000, - "account_id": 123456789, - "index": 4, - "name": f"Invoke dbt with `dbt {exe_command}`", - "status_humanized": "Error", + "status": "fail", + "message": "FAIL 1", + "failures": None, + "unique_id": "model.jaffle_shop.stg_customers", }, ], - "job_id": "1", - } - }, - ) - ) - - # mock list_dbt_cloud_run_artifacts - respx_mock_with_pass_through.get( - "https://cloud.getdbt.com/api/v2/accounts/123456789/runs/10000/artifacts/", - headers=HEADERS, - ).mock(return_value=Response(200, json={"data": ["run_results.json"]})) - - # mock get_dbt_cloud_run_artifact - respx_mock_with_pass_through.get( - "https://cloud.getdbt.com/api/v2/accounts/123456789/runs/10000/artifacts/run_results.json", # noqa - headers=HEADERS, - ).mock( - return_value=Response( - 200, - json={ - "metadata": {"env": {"DBT_CLOUD_JOB_ID": "1"}}, - "results": [ - { - "status": "fail", - "message": "FAIL 1", - "failures": None, - "unique_id": "model.jaffle_shop.stg_customers", - }, - ], - }, + }, + ) ) - ) - respx_mock_with_pass_through.post( - "https://cloud.getdbt.com/api/v2/accounts/123456789/jobs/1/run/", - headers=HEADERS, - ).mock( - return_value=Response( - 200, json={"data": {"id": 10000, "project_id": 12345}} + respx_mock.post( + "https://cloud.getdbt.com/api/v2/accounts/123456789/jobs/1/run/", + headers=HEADERS, + ).mock( + return_value=Response( + 200, json={"data": {"id": 10000, "project_id": 12345}} + ) ) - ) - with pytest.raises(DbtCloudJobRunFailed, match="Triggered job run with"): - await retry_dbt_cloud_job_run_subset_and_wait_for_completion( - dbt_cloud_credentials=dbt_cloud_credentials, - run_id=10000, - trigger_job_run_options=trigger_job_run_options, - ) + with pytest.raises(DbtCloudJobRunFailed, match="Triggered job run with"): + await retry_dbt_cloud_job_run_subset_and_wait_for_completion( + dbt_cloud_credentials=dbt_cloud_credentials, + run_id=10000, + trigger_job_run_options=trigger_job_run_options, + ) @pytest.fixture @@ -627,228 +610,253 @@ def test_fail(self): class TestTriggerWaitRetryDbtCloudJobRun: - @pytest.mark.respx(assert_all_called=True) - async def test_run_success(self, respx_mock_with_pass_through, dbt_cloud_job): - respx_mock_with_pass_through.post( - "https://cloud.getdbt.com/api/v2/accounts/123456789/jobs/10000/run/", - headers=HEADERS, - ).mock( - return_value=Response( - 200, json={"data": {"id": 10000, "project_id": 12345}} - ) - ) - respx_mock_with_pass_through.get( - "https://cloud.getdbt.com/api/v2/accounts/123456789/runs/10000/", - headers=HEADERS, - ).mock(return_value=Response(200, json={"data": {"id": 10000, "status": 10}})) - respx_mock_with_pass_through.get( - "https://cloud.getdbt.com/api/v2/accounts/123456789/runs/10000/artifacts/", - headers=HEADERS, - ).mock(return_value=Response(200, json={"data": ["manifest.json"]})) - - result = await run_dbt_cloud_job(dbt_cloud_job=dbt_cloud_job) - assert result == { - "id": 10000, - "status": 10, - "artifact_paths": ["manifest.json"], - } - - @pytest.mark.respx(assert_all_called=True) - async def test_run_timeout(self, respx_mock_with_pass_through, dbt_cloud_job): - respx_mock_with_pass_through.post( - "https://cloud.getdbt.com/api/v2/accounts/123456789/jobs/10000/run/", - headers=HEADERS, - ).mock( - return_value=Response( - 200, 
json={"data": {"id": 10000, "project_id": 12345}} - ) - ) - respx_mock_with_pass_through.get( - "https://cloud.getdbt.com/api/v2/accounts/123456789/runs/10000/", - headers=HEADERS, - ).mock(return_value=Response(200, json={"data": {"id": 10000, "status": 3}})) - - dbt_cloud_job.timeout_seconds = 1 - with pytest.raises(DbtCloudJobRunTimedOut, match="Max wait time of 1"): - await run_dbt_cloud_job(dbt_cloud_job=dbt_cloud_job) + async def test_run_success(self, dbt_cloud_job): + with respx.mock(using="httpx") as respx_mock: + respx_mock.route(host="127.0.0.1").pass_through() + respx_mock.post( + "https://cloud.getdbt.com/api/v2/accounts/123456789/jobs/10000/run/", + headers=HEADERS, + ).mock( + return_value=Response( + 200, json={"data": {"id": 10000, "project_id": 12345}} + ) + ) + respx_mock.get( + "https://cloud.getdbt.com/api/v2/accounts/123456789/runs/10000/", + headers=HEADERS, + ).mock( + return_value=Response(200, json={"data": {"id": 10000, "status": 10}}) + ) + respx_mock.get( + "https://cloud.getdbt.com/api/v2/accounts/123456789/runs/10000/artifacts/", + headers=HEADERS, + ).mock(return_value=Response(200, json={"data": ["manifest.json"]})) + + result = await run_dbt_cloud_job(dbt_cloud_job=dbt_cloud_job) + assert result == { + "id": 10000, + "status": 10, + "artifact_paths": ["manifest.json"], + } + + async def test_run_timeout(self, dbt_cloud_job): + with respx.mock(using="httpx") as respx_mock: + respx_mock.route(host="127.0.0.1").pass_through() + respx_mock.post( + "https://cloud.getdbt.com/api/v2/accounts/123456789/jobs/10000/run/", + headers=HEADERS, + ).mock( + return_value=Response( + 200, json={"data": {"id": 10000, "project_id": 12345}} + ) + ) + respx_mock.get( + "https://cloud.getdbt.com/api/v2/accounts/123456789/runs/10000/", + headers=HEADERS, + ).mock( + return_value=Response(200, json={"data": {"id": 10000, "status": 3}}) + ) + + dbt_cloud_job.timeout_seconds = 1 + with pytest.raises(DbtCloudJobRunTimedOut, match="Max wait time of 1"): + await run_dbt_cloud_job(dbt_cloud_job=dbt_cloud_job) @pytest.mark.parametrize( "exe_command", ["run", "run-operation"], ) - async def test_fail(self, respx_mock_with_pass_through, dbt_cloud_job, exe_command): - respx_mock_with_pass_through.get( - "https://cloud.getdbt.com/api/v2/accounts/123456789/jobs/10000/", - headers=HEADERS, - ).mock( - return_value=Response( - 200, - json={"data": {"id": 10000, "project_id": 12345, "run_steps": [""]}}, - ) - ) - respx_mock_with_pass_through.post( - "https://cloud.getdbt.com/api/v2/accounts/123456789/jobs/10000/run/", - headers=HEADERS, - ).mock( - return_value=Response( - 200, json={"data": {"id": 10000, "project_id": 12345}} - ) - ) - respx_mock_with_pass_through.get( - "https://cloud.getdbt.com/api/v2/accounts/123456789/runs/10000/", - headers=HEADERS, - ).mock(return_value=Response(200, json={"data": {"id": 10000, "status": 20}})) - - respx_mock_with_pass_through.get( - "https://cloud.getdbt.com/api/v2/accounts/123456789/jobs/100000/", - headers=HEADERS, - ).mock( - return_value=Response( - 200, - json={ - "data": { - "id": 10000, - "generate_docs": False, - "generate_sources": False, - } - }, - ) - ) - - # mock get_dbt_cloud_run_info - respx_mock_with_pass_through.get( - "https://cloud.getdbt.com/api/v2/accounts/123456789/runs/10000/", - headers=HEADERS, - ).mock( - return_value=Response( - 200, - json={ - "data": { - "id": 10000, - "status": 20, # failed status - "run_steps": [ - { - "id": 432100123, - "run_id": 10000, - "account_id": 123456789, - "index": 1, - "name": "Clone Git 
Repository", - "status_humanized": "Success", - }, - { - "id": 432100124, - "run_id": 10000, - "account_id": 123456789, - "index": 2, - "name": "Create Profile from Connection Snowflake ", - "status_humanized": "Success", - }, + async def test_fail(self, dbt_cloud_job, exe_command): + with respx.mock(using="httpx", assert_all_called=False) as respx_mock: + respx_mock.route(host="127.0.0.1").pass_through() + respx_mock.get( + "https://cloud.getdbt.com/api/v2/accounts/123456789/jobs/10000/", + headers=HEADERS, + ).mock( + return_value=Response( + 200, + json={ + "data": {"id": 10000, "project_id": 12345, "run_steps": [""]} + }, + ) + ) + respx_mock.post( + "https://cloud.getdbt.com/api/v2/accounts/123456789/jobs/10000/run/", + headers=HEADERS, + ).mock( + return_value=Response( + 200, json={"data": {"id": 10000, "project_id": 12345}} + ) + ) + respx_mock.get( + "https://cloud.getdbt.com/api/v2/accounts/123456789/runs/10000/", + headers=HEADERS, + ).mock( + return_value=Response(200, json={"data": {"id": 10000, "status": 20}}) + ) + + respx_mock.get( + "https://cloud.getdbt.com/api/v2/accounts/123456789/jobs/100000/", + headers=HEADERS, + ).mock( + return_value=Response( + 200, + json={ + "data": { + "id": 10000, + "generate_docs": False, + "generate_sources": False, + } + }, + ) + ) + + # mock get_dbt_cloud_run_info + respx_mock.get( + "https://cloud.getdbt.com/api/v2/accounts/123456789/runs/10000/", + headers=HEADERS, + ).mock( + return_value=Response( + 200, + json={ + "data": { + "id": 10000, + "status": 20, # failed status + "run_steps": [ + { + "id": 432100123, + "run_id": 10000, + "account_id": 123456789, + "index": 1, + "name": "Clone Git Repository", + "status_humanized": "Success", + }, + { + "id": 432100124, + "run_id": 10000, + "account_id": 123456789, + "index": 2, + "name": "Create Profile from Connection Snowflake ", + "status_humanized": "Success", + }, + { + "id": 432100125, + "run_id": 10000, + "account_id": 123456789, + "index": 3, + "name": "Invoke dbt with `dbt deps`", + "status_humanized": "Success", + }, + { + "run_id": 10000, + "account_id": 123456789, + "index": 4, + "name": f"Invoke dbt with `dbt {exe_command}`", + "status_humanized": "Error", + }, + ], + "job_id": "1", + } + }, + ) + ) + + # mock list_dbt_cloud_run_artifacts + respx_mock.get( + "https://cloud.getdbt.com/api/v2/accounts/123456789/runs/10000/artifacts/", + headers=HEADERS, + ).mock(return_value=Response(200, json={"data": ["run_results.json"]})) + + # mock get_dbt_cloud_run_artifact + respx_mock.get( + "https://cloud.getdbt.com/api/v2/accounts/123456789/runs/10000/artifacts/run_results.json", # noqa + headers=HEADERS, + ).mock( + return_value=Response( + 200, + json={ + "metadata": {"env": {"DBT_CLOUD_JOB_ID": "1"}}, + "results": [ { - "id": 432100125, - "run_id": 10000, - "account_id": 123456789, - "index": 3, - "name": "Invoke dbt with `dbt deps`", - "status_humanized": "Success", - }, - { - "run_id": 10000, - "account_id": 123456789, - "index": 4, - "name": f"Invoke dbt with `dbt {exe_command}`", - "status_humanized": "Error", + "status": "fail", + "message": "FAIL 1", + "failures": None, + "unique_id": "model.jaffle_shop.stg_customers", }, ], - "job_id": "1", - } - }, - ) - ) - - # mock list_dbt_cloud_run_artifacts - respx_mock_with_pass_through.get( - "https://cloud.getdbt.com/api/v2/accounts/123456789/runs/10000/artifacts/", - headers=HEADERS, - ).mock(return_value=Response(200, json={"data": ["run_results.json"]})) - - # mock get_dbt_cloud_run_artifact - respx_mock_with_pass_through.get( - 
"https://cloud.getdbt.com/api/v2/accounts/123456789/runs/10000/artifacts/run_results.json", # noqa - headers=HEADERS, - ).mock( - return_value=Response( - 200, - json={ - "metadata": {"env": {"DBT_CLOUD_JOB_ID": "1"}}, - "results": [ - { - "status": "fail", - "message": "FAIL 1", - "failures": None, - "unique_id": "model.jaffle_shop.stg_customers", - }, - ], - }, - ) - ) - - with pytest.raises(DbtCloudJobRunFailed, match="dbt Cloud job 10000"): - await run_dbt_cloud_job(dbt_cloud_job=dbt_cloud_job) - - @pytest.mark.respx(assert_all_called=True) - async def test_cancel(self, respx_mock_with_pass_through, dbt_cloud_job): - respx_mock_with_pass_through.post( - "https://cloud.getdbt.com/api/v2/accounts/123456789/jobs/10000/run/", - headers=HEADERS, - ).mock( - return_value=Response( - 200, json={"data": {"id": 10000, "project_id": 12345}} - ) - ) - respx_mock_with_pass_through.get( - "https://cloud.getdbt.com/api/v2/accounts/123456789/runs/10000/", - headers=HEADERS, - ).mock(return_value=Response(200, json={"data": {"id": 10000, "status": 30}})) - - with pytest.raises(DbtCloudJobRunCancelled, match="dbt Cloud job 10000"): - await run_dbt_cloud_job(dbt_cloud_job=dbt_cloud_job) - - @pytest.mark.respx(assert_all_called=True) - async def test_fetch_result_running(self, respx_mock, dbt_cloud_job): - respx_mock.post( - "https://cloud.getdbt.com/api/v2/accounts/123456789/jobs/10000/run/", + }, + ) + ) + + with pytest.raises(DbtCloudJobRunFailed, match="dbt Cloud job 10000"): + await run_dbt_cloud_job(dbt_cloud_job=dbt_cloud_job) + + async def test_cancel(self, dbt_cloud_job): + with respx.mock(using="httpx") as respx_mock: + respx_mock.route(host="127.0.0.1").pass_through() + respx_mock.post( + "https://cloud.getdbt.com/api/v2/accounts/123456789/jobs/10000/run/", + headers=HEADERS, + ).mock( + return_value=Response( + 200, json={"data": {"id": 10000, "project_id": 12345}} + ) + ) + respx_mock.get( + "https://cloud.getdbt.com/api/v2/accounts/123456789/runs/10000/", + headers=HEADERS, + ).mock( + return_value=Response(200, json={"data": {"id": 10000, "status": 30}}) + ) + + with pytest.raises(DbtCloudJobRunCancelled, match="dbt Cloud job 10000"): + await run_dbt_cloud_job(dbt_cloud_job=dbt_cloud_job) + + async def test_fetch_result_running(self, dbt_cloud_job): + with respx.mock(using="httpx", assert_all_called=False) as respx_mock: + respx_mock.route(host="127.0.0.1").pass_through() + respx_mock.post( + "https://cloud.getdbt.com/api/v2/accounts/123456789/jobs/10000/run/", + headers=HEADERS, + ).mock( + return_value=Response( + 200, json={"data": {"id": 10000, "project_id": 12345}} + ) + ) + respx_mock.get( + "https://cloud.getdbt.com/api/v2/accounts/123456789/runs/10000/", + headers=HEADERS, + ).mock( + return_value=Response(200, json={"data": {"id": 10000, "status": 3}}) + ) + + with pytest.raises(DbtCloudJobRunIncomplete, match="dbt Cloud job 10000"): + run = await dbt_cloud_job.trigger() + await run.fetch_result() + + async def test_fail_auth(self, dbt_cloud_job): + with respx.mock(using="httpx") as respx_mock: + respx_mock.route(host="127.0.0.1").pass_through() + respx_mock.post( + "https://cloud.getdbt.com/api/v2/accounts/123456789/jobs/10000/run/", + headers=HEADERS, + ).mock( + return_value=Response( + 404, json={"status": {"user_message": "Not found"}} + ) + ) + with pytest.raises(DbtCloudJobRunTriggerFailed, match="Not found"): + await run_dbt_cloud_job(dbt_cloud_job=dbt_cloud_job, targeted_retries=0) + + +def test_get_job(dbt_cloud_job): + with respx.mock(using="httpx", 
assert_all_called=False) as respx_mock: + respx_mock.route(host="127.0.0.1").pass_through() + respx_mock.get( + "https://cloud.getdbt.com/api/v2/accounts/123456789/jobs/10000/", headers=HEADERS, ).mock( return_value=Response( 200, json={"data": {"id": 10000, "project_id": 12345}} ) ) - respx_mock.get( - "https://cloud.getdbt.com/api/v2/accounts/123456789/runs/10000/", - headers=HEADERS, - ).mock(return_value=Response(200, json={"data": {"id": 10000, "status": 3}})) - - with pytest.raises(DbtCloudJobRunIncomplete, match="dbt Cloud job 10000"): - run = await dbt_cloud_job.trigger() - await run.fetch_result() - - @pytest.mark.respx(assert_all_called=True) - async def test_fail_auth(self, respx_mock_with_pass_through, dbt_cloud_job): - respx_mock_with_pass_through.post( - "https://cloud.getdbt.com/api/v2/accounts/123456789/jobs/10000/run/", - headers=HEADERS, - ).mock( - return_value=Response(404, json={"status": {"user_message": "Not found"}}) - ) - with pytest.raises(DbtCloudJobRunTriggerFailed, match="Not found"): - await run_dbt_cloud_job(dbt_cloud_job=dbt_cloud_job, targeted_retries=0) - - -def test_get_job(respx_mock_with_pass_through, dbt_cloud_job): - respx_mock_with_pass_through.get( - "https://cloud.getdbt.com/api/v2/accounts/123456789/jobs/10000/", - headers=HEADERS, - ).mock( - return_value=Response(200, json={"data": {"id": 10000, "project_id": 12345}}) - ) - assert dbt_cloud_job.get_job()["id"] == 10000 + assert dbt_cloud_job.get_job()["id"] == 10000 diff --git a/src/integrations/prefect-dbt/tests/cloud/test_runs.py b/src/integrations/prefect-dbt/tests/cloud/test_runs.py index cbb4fb713d1a..85fe89225cff 100644 --- a/src/integrations/prefect-dbt/tests/cloud/test_runs.py +++ b/src/integrations/prefect-dbt/tests/cloud/test_runs.py @@ -1,4 +1,5 @@ import pytest +import respx from httpx import Response from prefect_dbt.cloud.runs import ( DbtCloudGetRunArtifactFailed, @@ -11,163 +12,176 @@ class TestGetDbtCloudRunInfo: - async def test_get_dbt_cloud_run_info(self, respx_mock, dbt_cloud_credentials): - respx_mock.get( - "https://cloud.getdbt.com/api/v2/accounts/123456789/runs/12/", - headers={"Authorization": "Bearer my_api_key"}, - ).mock(return_value=Response(200, json={"data": {"id": 10000}})) - - response = await get_dbt_cloud_run_info.fn( - dbt_cloud_credentials=dbt_cloud_credentials, - run_id=12, - ) - - assert response == {"id": 10000} - - async def test_get_nonexistent_run(self, respx_mock, dbt_cloud_credentials): - respx_mock.get( - "https://cloud.getdbt.com/api/v2/accounts/123456789/runs/12/", - headers={"Authorization": "Bearer my_api_key"}, - ).mock( - return_value=Response(404, json={"status": {"user_message": "Not found!"}}) - ) - with pytest.raises(DbtCloudGetRunFailed, match="Not found!"): - await get_dbt_cloud_run_info.fn( + async def test_get_dbt_cloud_run_info(self, dbt_cloud_credentials): + with respx.mock(using="httpx") as respx_mock: + respx_mock.get( + "https://cloud.getdbt.com/api/v2/accounts/123456789/runs/12/", + headers={"Authorization": "Bearer my_api_key"}, + ).mock(return_value=Response(200, json={"data": {"id": 10000}})) + + response = await get_dbt_cloud_run_info.fn( dbt_cloud_credentials=dbt_cloud_credentials, run_id=12, ) + assert response == {"id": 10000} + + async def test_get_nonexistent_run(self, dbt_cloud_credentials): + with respx.mock(using="httpx") as respx_mock: + respx_mock.get( + "https://cloud.getdbt.com/api/v2/accounts/123456789/runs/12/", + headers={"Authorization": "Bearer my_api_key"}, + ).mock( + return_value=Response( + 404, 
json={"status": {"user_message": "Not found!"}} + ) + ) + with pytest.raises(DbtCloudGetRunFailed, match="Not found!"): + await get_dbt_cloud_run_info.fn( + dbt_cloud_credentials=dbt_cloud_credentials, + run_id=12, + ) + class TestDbtCloudListRunArtifacts: - async def test_list_artifacts_success(self, respx_mock, dbt_cloud_credentials): - respx_mock.get( - "https://cloud.getdbt.com/api/v2/accounts/123456789/runs/12/artifacts/", - headers={"Authorization": "Bearer my_api_key"}, - ).mock(return_value=Response(200, json={"data": ["manifest.json"]})) - - response = await list_dbt_cloud_run_artifacts.fn( - dbt_cloud_credentials=dbt_cloud_credentials, - run_id=12, - ) - - assert response == ["manifest.json"] - - async def test_list_artifacts_with_step(self, respx_mock, dbt_cloud_credentials): - respx_mock.get( - "https://cloud.getdbt.com/api/v2/accounts/123456789/runs/12/artifacts/?step=1", # noqa - headers={"Authorization": "Bearer my_api_key"}, - ).mock(return_value=Response(200, json={"data": ["manifest.json"]})) - - response = await list_dbt_cloud_run_artifacts.fn( - dbt_cloud_credentials=dbt_cloud_credentials, run_id=12, step=1 - ) - - assert response == ["manifest.json"] - - async def test_list_artifacts_failure(self, respx_mock, dbt_cloud_credentials): - respx_mock.get( - "https://cloud.getdbt.com/api/v2/accounts/123456789/runs/12/artifacts/", - headers={"Authorization": "Bearer my_api_key"}, - ).mock( - return_value=Response( - 500, json={"status": {"user_message": "This is what went wrong"}} - ) - ) - with pytest.raises( - DbtCloudListRunArtifactsFailed, match="This is what went wrong" - ): - await list_dbt_cloud_run_artifacts.fn( + async def test_list_artifacts_success(self, dbt_cloud_credentials): + with respx.mock(using="httpx") as respx_mock: + respx_mock.get( + "https://cloud.getdbt.com/api/v2/accounts/123456789/runs/12/artifacts/", + headers={"Authorization": "Bearer my_api_key"}, + ).mock(return_value=Response(200, json={"data": ["manifest.json"]})) + + response = await list_dbt_cloud_run_artifacts.fn( dbt_cloud_credentials=dbt_cloud_credentials, run_id=12, ) + assert response == ["manifest.json"] + + async def test_list_artifacts_with_step(self, dbt_cloud_credentials): + with respx.mock(using="httpx") as respx_mock: + respx_mock.get( + "https://cloud.getdbt.com/api/v2/accounts/123456789/runs/12/artifacts/?step=1", # noqa + headers={"Authorization": "Bearer my_api_key"}, + ).mock(return_value=Response(200, json={"data": ["manifest.json"]})) + + response = await list_dbt_cloud_run_artifacts.fn( + dbt_cloud_credentials=dbt_cloud_credentials, run_id=12, step=1 + ) + + assert response == ["manifest.json"] + + async def test_list_artifacts_failure(self, dbt_cloud_credentials): + with respx.mock(using="httpx") as respx_mock: + respx_mock.get( + "https://cloud.getdbt.com/api/v2/accounts/123456789/runs/12/artifacts/", + headers={"Authorization": "Bearer my_api_key"}, + ).mock( + return_value=Response( + 500, json={"status": {"user_message": "This is what went wrong"}} + ) + ) + with pytest.raises( + DbtCloudListRunArtifactsFailed, match="This is what went wrong" + ): + await list_dbt_cloud_run_artifacts.fn( + dbt_cloud_credentials=dbt_cloud_credentials, + run_id=12, + ) + class TestDbtCloudGetRunArtifact: - async def test_get_artifact_success(self, respx_mock, dbt_cloud_credentials): - respx_mock.get( - "https://cloud.getdbt.com/api/v2/accounts/123456789/runs/12/artifacts/manifest.json", # noqa - headers={"Authorization": "Bearer my_api_key"}, - ).mock( - return_value=Response( - 200, - 
json={ - "metadata": { - "dbt_schema_version": "https://schemas.getdbt.com/dbt/catalog/v1.json", # noqa - "dbt_version": "1.1.1", - } - }, + async def test_get_artifact_success(self, dbt_cloud_credentials): + with respx.mock(using="httpx") as respx_mock: + respx_mock.get( + "https://cloud.getdbt.com/api/v2/accounts/123456789/runs/12/artifacts/manifest.json", # noqa + headers={"Authorization": "Bearer my_api_key"}, + ).mock( + return_value=Response( + 200, + json={ + "metadata": { + "dbt_schema_version": "https://schemas.getdbt.com/dbt/catalog/v1.json", # noqa + "dbt_version": "1.1.1", + } + }, + ) ) - ) - response = await get_dbt_cloud_run_artifact.fn( - dbt_cloud_credentials=dbt_cloud_credentials, run_id=12, path="manifest.json" - ) + response = await get_dbt_cloud_run_artifact.fn( + dbt_cloud_credentials=dbt_cloud_credentials, + run_id=12, + path="manifest.json", + ) - assert response == { - "metadata": { - "dbt_schema_version": "https://schemas.getdbt.com/dbt/catalog/v1.json", - "dbt_version": "1.1.1", + assert response == { + "metadata": { + "dbt_schema_version": "https://schemas.getdbt.com/dbt/catalog/v1.json", + "dbt_version": "1.1.1", + } } - } - - async def test_get_non_json_artifact(self, respx_mock, dbt_cloud_credentials): - respx_mock.get( - "https://cloud.getdbt.com/api/v2/accounts/123456789/runs/12/artifacts/compiled/dbt_artifacts/models/dim_dbt__current_models.sql", # noqa - headers={"Authorization": "Bearer my_api_key"}, - ).mock(return_value=Response(200, text="Hi! I'm some SQL!")) - - response = await get_dbt_cloud_run_artifact.fn( - dbt_cloud_credentials=dbt_cloud_credentials, - run_id=12, - path="compiled/dbt_artifacts/models/dim_dbt__current_models.sql", - ) - - assert response == "Hi! I'm some SQL!" - - async def test_get_artifact_with_step(self, respx_mock, dbt_cloud_credentials): - respx_mock.get( - "https://cloud.getdbt.com/api/v2/accounts/123456789/runs/12/artifacts/manifest.json?step=1", # noqa - headers={"Authorization": "Bearer my_api_key"}, - ).mock( - return_value=Response( - 200, - json={ - "metadata": { - "dbt_schema_version": "https://schemas.getdbt.com/dbt/catalog/v1.json", # noqa - "dbt_version": "1.1.1", - } - }, + + async def test_get_non_json_artifact(self, dbt_cloud_credentials): + with respx.mock(using="httpx") as respx_mock: + respx_mock.get( + "https://cloud.getdbt.com/api/v2/accounts/123456789/runs/12/artifacts/compiled/dbt_artifacts/models/dim_dbt__current_models.sql", # noqa + headers={"Authorization": "Bearer my_api_key"}, + ).mock(return_value=Response(200, text="Hi! I'm some SQL!")) + + response = await get_dbt_cloud_run_artifact.fn( + dbt_cloud_credentials=dbt_cloud_credentials, + run_id=12, + path="compiled/dbt_artifacts/models/dim_dbt__current_models.sql", ) - ) - - response = await get_dbt_cloud_run_artifact.fn( - dbt_cloud_credentials=dbt_cloud_credentials, - run_id=12, - path="manifest.json", - step=1, - ) - - assert response == { - "metadata": { - "dbt_schema_version": "https://schemas.getdbt.com/dbt/catalog/v1.json", - "dbt_version": "1.1.1", - } - } - - async def test_get_artifact_failure(self, respx_mock, dbt_cloud_credentials): - respx_mock.get( - "https://cloud.getdbt.com/api/v2/accounts/123456789/runs/12/artifacts/manifest.json", # noqa - headers={"Authorization": "Bearer my_api_key"}, - ).mock( - return_value=Response( - 500, json={"status": {"user_message": "This is what went wrong"}} + + assert response == "Hi! I'm some SQL!" 
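+            # For orientation, the shape these rewritten tests share — a router
+            # entered per test, with an explicit localhost pass-through so calls
+            # to the ephemeral Prefect test API are never intercepted — reduces
+            # to roughly the self-contained sketch below; the example URL and
+            # payload are illustrative, not taken from this suite.
+            #
+            #     import httpx
+            #     import respx
+            #     from httpx import Response
+            #
+            #     def test_mock_router_sketch():
+            #         # assert_all_called=False because the pass-through route
+            #         # is only exercised when a local server is contacted.
+            #         with respx.mock(
+            #             using="httpx", assert_all_called=False
+            #         ) as respx_mock:
+            #             respx_mock.route(host="127.0.0.1").pass_through()
+            #             respx_mock.get("https://api.example.com/runs/1/").mock(
+            #                 return_value=Response(200, json={"data": {"id": 1}})
+            #             )
+            #             assert httpx.get(
+            #                 "https://api.example.com/runs/1/"
+            #             ).json() == {"data": {"id": 1}}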
+ + async def test_get_artifact_with_step(self, dbt_cloud_credentials): + with respx.mock(using="httpx") as respx_mock: + respx_mock.get( + "https://cloud.getdbt.com/api/v2/accounts/123456789/runs/12/artifacts/manifest.json?step=1", # noqa + headers={"Authorization": "Bearer my_api_key"}, + ).mock( + return_value=Response( + 200, + json={ + "metadata": { + "dbt_schema_version": "https://schemas.getdbt.com/dbt/catalog/v1.json", # noqa + "dbt_version": "1.1.1", + } + }, + ) ) - ) - with pytest.raises( - DbtCloudGetRunArtifactFailed, match="This is what went wrong" - ): - await get_dbt_cloud_run_artifact.fn( + + response = await get_dbt_cloud_run_artifact.fn( dbt_cloud_credentials=dbt_cloud_credentials, run_id=12, path="manifest.json", + step=1, + ) + + assert response == { + "metadata": { + "dbt_schema_version": "https://schemas.getdbt.com/dbt/catalog/v1.json", + "dbt_version": "1.1.1", + } + } + + async def test_get_artifact_failure(self, dbt_cloud_credentials): + with respx.mock(using="httpx") as respx_mock: + respx_mock.get( + "https://cloud.getdbt.com/api/v2/accounts/123456789/runs/12/artifacts/manifest.json", # noqa + headers={"Authorization": "Bearer my_api_key"}, + ).mock( + return_value=Response( + 500, json={"status": {"user_message": "This is what went wrong"}} + ) ) + with pytest.raises( + DbtCloudGetRunArtifactFailed, match="This is what went wrong" + ): + await get_dbt_cloud_run_artifact.fn( + dbt_cloud_credentials=dbt_cloud_credentials, + run_id=12, + path="manifest.json", + ) diff --git a/src/integrations/prefect-dbt/tests/cloud/test_utils.py b/src/integrations/prefect-dbt/tests/cloud/test_utils.py index b3fee6f13661..7f7f3561a867 100644 --- a/src/integrations/prefect-dbt/tests/cloud/test_utils.py +++ b/src/integrations/prefect-dbt/tests/cloud/test_utils.py @@ -1,4 +1,5 @@ import pytest +import respx from httpx import Response from prefect_dbt.cloud.utils import ( DbtCloudAdministrativeApiCallFailed, @@ -7,64 +8,67 @@ class TestCallDbtCloudAdministrativeApiEndpoint: - async def test_endpoint_returns_json(self, dbt_cloud_credentials, respx_mock): - respx_mock.get( - "https://cloud.getdbt.com/api/v2/accounts/123456789/projects/", - headers={"Authorization": "Bearer my_api_key"}, - ).mock( - return_value=Response( - 200, - json={ - "status": { - "code": 200, - "is_success": True, - "user_message": "Success!", - "developer_message": "", + async def test_endpoint_returns_json(self, dbt_cloud_credentials): + with respx.mock(using="httpx") as respx_mock: + respx_mock.get( + "https://cloud.getdbt.com/api/v2/accounts/123456789/projects/", + headers={"Authorization": "Bearer my_api_key"}, + ).mock( + return_value=Response( + 200, + json={ + "status": { + "code": 200, + "is_success": True, + "user_message": "Success!", + "developer_message": "", + }, + "data": [], }, - "data": [], - }, + ) ) - ) - - result = await call_dbt_cloud_administrative_api_endpoint.fn( - dbt_cloud_credentials=dbt_cloud_credentials, - path="/projects/", - http_method="GET", - ) - - assert result == { - "status": { - "code": 200, - "is_success": True, - "user_message": "Success!", - "developer_message": "", - }, - "data": [], - } - - async def test_endpoint_returns_text(self, dbt_cloud_credentials, respx_mock): - respx_mock.get( - "https://cloud.getdbt.com/api/v2/accounts/123456789/runs/12/artifacts/compiled/dbt_artifacts/models/dim_dbt__current_models.sql", # noqa - headers={"Authorization": "Bearer my_api_key"}, - ).mock(return_value=Response(200, text="Hi! 
I'm some SQL!")) - result = await call_dbt_cloud_administrative_api_endpoint.fn( - dbt_cloud_credentials=dbt_cloud_credentials, - path="/runs/12/artifacts/compiled/dbt_artifacts/models/dim_dbt__current_models.sql", # noqa - http_method="GET", - ) + result = await call_dbt_cloud_administrative_api_endpoint.fn( + dbt_cloud_credentials=dbt_cloud_credentials, + path="/projects/", + http_method="GET", + ) - assert result == "Hi! I'm some SQL!" + assert result == { + "status": { + "code": 200, + "is_success": True, + "user_message": "Success!", + "developer_message": "", + }, + "data": [], + } - async def test_failure(self, dbt_cloud_credentials, respx_mock): - respx_mock.get( - "https://cloud.getdbt.com/api/v2/accounts/123456789/projects/", - headers={"Authorization": "Bearer my_api_key"}, - ).mock(return_value=Response(500, json={})) + async def test_endpoint_returns_text(self, dbt_cloud_credentials): + with respx.mock(using="httpx") as respx_mock: + respx_mock.get( + "https://cloud.getdbt.com/api/v2/accounts/123456789/runs/12/artifacts/compiled/dbt_artifacts/models/dim_dbt__current_models.sql", # noqa + headers={"Authorization": "Bearer my_api_key"}, + ).mock(return_value=Response(200, text="Hi! I'm some SQL!")) - with pytest.raises(DbtCloudAdministrativeApiCallFailed): - await call_dbt_cloud_administrative_api_endpoint.fn( + result = await call_dbt_cloud_administrative_api_endpoint.fn( dbt_cloud_credentials=dbt_cloud_credentials, - path="/projects/", + path="/runs/12/artifacts/compiled/dbt_artifacts/models/dim_dbt__current_models.sql", # noqa http_method="GET", ) + + assert result == "Hi! I'm some SQL!" + + async def test_failure(self, dbt_cloud_credentials): + with respx.mock(using="httpx") as respx_mock: + respx_mock.get( + "https://cloud.getdbt.com/api/v2/accounts/123456789/projects/", + headers={"Authorization": "Bearer my_api_key"}, + ).mock(return_value=Response(500, json={})) + + with pytest.raises(DbtCloudAdministrativeApiCallFailed): + await call_dbt_cloud_administrative_api_endpoint.fn( + dbt_cloud_credentials=dbt_cloud_credentials, + path="/projects/", + http_method="GET", + ) diff --git a/src/integrations/prefect-dbt/tests/test_utilities.py b/src/integrations/prefect-dbt/tests/test_utilities.py new file mode 100644 index 000000000000..736c04386cac --- /dev/null +++ b/src/integrations/prefect-dbt/tests/test_utilities.py @@ -0,0 +1,74 @@ +import os +from pathlib import Path + +import pytest +import yaml +from prefect_dbt.utilities import get_profiles_dir, load_profiles_yml + +SAMPLE_PROFILES = { + "jaffle_shop": { + "outputs": { + "dev": { + "type": "duckdb", + "path": "jaffle_shop.duckdb", + "schema": "main", + "threads": 4, + } + } + } +} + + +@pytest.fixture +def temp_profiles_dir(tmp_path): + profiles_dir = tmp_path / ".dbt" + profiles_dir.mkdir() + + profiles_path = profiles_dir / "profiles.yml" + with open(profiles_path, "w") as f: + yaml.dump(SAMPLE_PROFILES, f) + + return str(profiles_dir) + + +def test_get_profiles_dir_default(): + if "DBT_PROFILES_DIR" in os.environ: + del os.environ["DBT_PROFILES_DIR"] + + expected = os.path.expanduser("~/.dbt") + assert get_profiles_dir() == expected + + +def test_get_profiles_dir_from_env(monkeypatch): + test_path = "/custom/path" + monkeypatch.setenv("DBT_PROFILES_DIR", test_path) + assert get_profiles_dir() == test_path + + +def test_load_profiles_yml_success(temp_profiles_dir): + profiles = load_profiles_yml(temp_profiles_dir) + assert profiles == SAMPLE_PROFILES + + +def test_load_profiles_yml_default_dir(monkeypatch, 
temp_profiles_dir): + monkeypatch.setenv("DBT_PROFILES_DIR", temp_profiles_dir) + profiles = load_profiles_yml(None) + assert profiles == SAMPLE_PROFILES + + +def test_load_profiles_yml_file_not_found(): + nonexistent_dir = "/path/that/does/not/exist" + with pytest.raises( + ValueError, + match=f"No profiles.yml found at {os.path.join(nonexistent_dir, 'profiles.yml')}", + ): + load_profiles_yml(nonexistent_dir) + + +def test_load_profiles_yml_invalid_yaml(temp_profiles_dir): + profiles_path = Path(temp_profiles_dir) / "profiles.yml" + with open(profiles_path, "w") as f: + f.write("invalid: yaml: content:\nindentation error") + + with pytest.raises(yaml.YAMLError): + load_profiles_yml(temp_profiles_dir) From 5746a9851a1cd4f965376980069ddee70fdd3da8 Mon Sep 17 00:00:00 2001 From: Alexander Streed Date: Wed, 4 Dec 2024 09:52:50 -0600 Subject: [PATCH 05/92] Add type completeness check to CI (#16194) --- .github/workflows/static-analysis.yaml | 49 ++++++++++++++++++++++++++ 1 file changed, 49 insertions(+) diff --git a/.github/workflows/static-analysis.yaml b/.github/workflows/static-analysis.yaml index aba7bf3b6b25..0e31c210c498 100644 --- a/.github/workflows/static-analysis.yaml +++ b/.github/workflows/static-analysis.yaml @@ -67,3 +67,52 @@ jobs: - name: Run pre-commit run: | pre-commit run --show-diff-on-failure --color=always --all-files + + type-completeness-check: + name: Type completeness check + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + with: + persist-credentials: false + fetch-depth: 0 + + - name: Set up uv + uses: astral-sh/setup-uv@v4 + with: + python-version: "3.12" + + - name: Calculate type completeness score + id: calculate_current_score + run: | + # `pyright` will exit with a non-zero status code if it finds any issues, + # so we need to explicitly ignore the exit code with `|| true`. + uv tool run --with . pyright --verifytypes prefect --ignoreexternal --outputjson > prefect-analysis.json || true + SCORE=$(jq -r '.typeCompleteness.completenessScore' prefect-analysis.json) + echo "current_score=$SCORE" >> $GITHUB_OUTPUT + + - name: Checkout base branch + run: | + git checkout ${{ github.base_ref }} + + - name: Calculate base branch score + id: calculate_base_score + run: | + uv tool run --with . pyright --verifytypes prefect --ignoreexternal --outputjson > prefect-analysis-base.json || true + BASE_SCORE=$(jq -r '.typeCompleteness.completenessScore' prefect-analysis-base.json) + echo "base_score=$BASE_SCORE" >> $GITHUB_OUTPUT + + - name: Compare scores + run: | + CURRENT_SCORE=$(echo ${{ steps.calculate_current_score.outputs.current_score }}) + BASE_SCORE=$(echo ${{ steps.calculate_base_score.outputs.base_score }}) + + if (( $(echo "$BASE_SCORE > $CURRENT_SCORE" | bc -l) )); then + echo "❌ Type completeness score has decreased from $BASE_SCORE to $CURRENT_SCORE" >> $GITHUB_STEP_SUMMARY + echo "Please add type annotations to your code to increase the type completeness score." 
>> $GITHUB_STEP_SUMMARY + exit 1 + elif (( $(echo "$BASE_SCORE < $CURRENT_SCORE" | bc -l) )); then + echo "✅ Type completeness score has increased from $BASE_SCORE to $CURRENT_SCORE" >> $GITHUB_STEP_SUMMARY + else + echo "✅ Type completeness score remained unchanged at $BASE_SCORE" >> $GITHUB_STEP_SUMMARY + fi From a6e76f962b9cb510af9ad5e676d8d3405e108b17 Mon Sep 17 00:00:00 2001 From: nate nowack Date: Wed, 4 Dec 2024 11:07:14 -0600 Subject: [PATCH 06/92] bump required minor versions of core (#16192) --- src/integrations/prefect-azure/pyproject.toml | 2 +- src/integrations/prefect-docker/pyproject.toml | 2 +- src/integrations/prefect-gcp/pyproject.toml | 2 +- src/integrations/prefect-kubernetes/pyproject.toml | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/integrations/prefect-azure/pyproject.toml b/src/integrations/prefect-azure/pyproject.toml index 60bb213454e3..84d0cb3deb87 100644 --- a/src/integrations/prefect-azure/pyproject.toml +++ b/src/integrations/prefect-azure/pyproject.toml @@ -27,7 +27,7 @@ dependencies = [ "azure_identity>=1.10", "azure_mgmt_containerinstance>=10.0", "azure-mgmt-resource>=21.2", - "prefect>=3.0.0", + "prefect>=3.1.1", "setuptools", #required in 3.12 to get pkg_resources (used by azureml.core) ] dynamic = ["version"] diff --git a/src/integrations/prefect-docker/pyproject.toml b/src/integrations/prefect-docker/pyproject.toml index e947a46f52f9..3afb5cb89440 100644 --- a/src/integrations/prefect-docker/pyproject.toml +++ b/src/integrations/prefect-docker/pyproject.toml @@ -22,7 +22,7 @@ classifiers = [ "Programming Language :: Python :: 3.12", "Topic :: Software Development :: Libraries", ] -dependencies = ["prefect>=3.0.0", "docker>=6.1.1", "exceptiongroup"] +dependencies = ["prefect>=3.1.1", "docker>=6.1.1", "exceptiongroup"] dynamic = ["version"] [project.optional-dependencies] diff --git a/src/integrations/prefect-gcp/pyproject.toml b/src/integrations/prefect-gcp/pyproject.toml index 21c9e5b1e855..2e8fdbb79b08 100644 --- a/src/integrations/prefect-gcp/pyproject.toml +++ b/src/integrations/prefect-gcp/pyproject.toml @@ -23,7 +23,7 @@ classifiers = [ "Topic :: Software Development :: Libraries", ] dependencies = [ - "prefect>=3.0.0", + "prefect>=3.1.1", "google-api-python-client>=2.20.0", "google-cloud-storage>=2.0.0", "tenacity>=8.0.0", diff --git a/src/integrations/prefect-kubernetes/pyproject.toml b/src/integrations/prefect-kubernetes/pyproject.toml index 427f4c839aea..e6ac3b037af4 100644 --- a/src/integrations/prefect-kubernetes/pyproject.toml +++ b/src/integrations/prefect-kubernetes/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] dependencies = [ - "prefect>=3.1.0", + "prefect>=3.1.1", "kubernetes-asyncio>=29.0.0", "tenacity>=8.2.3", "exceptiongroup", From 56dc4635bbda884921325da4274c7992984b9882 Mon Sep 17 00:00:00 2001 From: Adam Azzam <33043305+aaazzam@users.noreply.github.com> Date: Wed, 4 Dec 2024 12:37:05 -0500 Subject: [PATCH 07/92] improve typing of variables.py (#16216) --- src/prefect/variables.py | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/src/prefect/variables.py b/src/prefect/variables.py index 1e785baa2166..a16d3fe81750 100644 --- a/src/prefect/variables.py +++ b/src/prefect/variables.py @@ -73,17 +73,18 @@ def my_flow(): raise ValueError( f"Variable {name!r} already exists. Use `overwrite=True` to update it." 
) - await client.update_variable(variable=VariableUpdate(**var_dict)) + await client.update_variable( + variable=VariableUpdate.model_validate(var_dict) + ) variable = await client.read_variable_by_name(name) - var_dict = { - "name": variable.name, - "value": variable.value, - "tags": variable.tags or [], - } + for key in var_dict.keys(): + var_dict.update({key: getattr(variable, key)}) else: - await client.create_variable(variable=VariableCreate(**var_dict)) + await client.create_variable( + variable=VariableCreate.model_validate(var_dict) + ) - return cls(**var_dict) + return cls.model_validate(var_dict) @classmethod @sync_compatible From 8635065f4900718fdc73b431b4782a4ea4dc8a94 Mon Sep 17 00:00:00 2001 From: Jean Luciano Date: Wed, 4 Dec 2024 11:39:27 -0600 Subject: [PATCH 08/92] Instrument task runs (#15955) --- src/prefect/task_engine.py | 71 ++++++-- src/prefect/telemetry/run_telemetry.py | 77 ++++++++ tests/telemetry/test_instrumentation.py | 228 +++++++++++++++++++++++- 3 files changed, 360 insertions(+), 16 deletions(-) create mode 100644 src/prefect/telemetry/run_telemetry.py diff --git a/src/prefect/task_engine.py b/src/prefect/task_engine.py index 5accda613f34..fa879d4265d9 100644 --- a/src/prefect/task_engine.py +++ b/src/prefect/task_engine.py @@ -29,6 +29,7 @@ import anyio import pendulum +from opentelemetry import trace from typing_extensions import ParamSpec from prefect import Task @@ -79,6 +80,7 @@ exception_to_failed_state, return_value_to_state, ) +from prefect.telemetry.run_telemetry import RunTelemetry from prefect.transactions import IsolationLevel, Transaction, transaction from prefect.utilities.annotations import NotSet from prefect.utilities.asyncutils import run_coro_as_sync @@ -120,6 +122,7 @@ class BaseTaskRunEngine(Generic[P, R]): _is_started: bool = False _task_name_set: bool = False _last_event: Optional[PrefectEvent] = None + _telemetry: RunTelemetry = field(default_factory=RunTelemetry) def __post_init__(self): if self.parameters is None: @@ -465,7 +468,7 @@ def set_state(self, state: State, force: bool = False) -> State: validated_state=self.task_run.state, follows=self._last_event, ) - + self._telemetry.update_state(new_state) return new_state def result(self, raise_on_failure: bool = True) -> "Union[R, State, None]": @@ -519,6 +522,8 @@ def handle_success(self, result: R, transaction: Transaction) -> R: self.record_terminal_state_timing(terminal_state) self.set_state(terminal_state) self._return_value = result + + self._telemetry.end_span_on_success(terminal_state.message) return result def handle_retry(self, exc: Exception) -> bool: @@ -567,6 +572,7 @@ def handle_retry(self, exc: Exception) -> bool: def handle_exception(self, exc: Exception) -> None: # If the task fails, and we have retries left, set the task to retrying. + self._telemetry.record_exception(exc) if not self.handle_retry(exc): # If the task has no retries left, or the retry condition is not met, set the task to failed. 
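             # Retries are exhausted (or the retry condition was not met): build
             # the terminal Failed state from the exception, record its timing,
             # set the state, and keep the exception on `self._raised` for later
             # re-raising.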
state = run_coro_as_sync( @@ -580,6 +586,7 @@ def handle_exception(self, exc: Exception) -> None: self.record_terminal_state_timing(state) self.set_state(state) self._raised = exc + self._telemetry.end_span_on_failure(state.message) def handle_timeout(self, exc: TimeoutError) -> None: if not self.handle_retry(exc): @@ -603,6 +610,8 @@ def handle_crash(self, exc: BaseException) -> None: self.record_terminal_state_timing(state) self.set_state(state, force=True) self._raised = exc + self._telemetry.record_exception(exc) + self._telemetry.end_span_on_failure(state.message) @contextmanager def setup_run_context(self, client: Optional[SyncPrefectClient] = None): @@ -660,14 +669,17 @@ def initialize_run( with SyncClientContext.get_or_create() as client_ctx: self._client = client_ctx.client self._is_started = True + flow_run_context = FlowRunContext.get() + parent_task_run_context = TaskRunContext.get() + try: if not self.task_run: self.task_run = run_coro_as_sync( self.task.create_local_run( id=task_run_id, parameters=self.parameters, - flow_run_context=FlowRunContext.get(), - parent_task_run_context=TaskRunContext.get(), + flow_run_context=flow_run_context, + parent_task_run_context=parent_task_run_context, wait_for=self.wait_for, extra_task_inputs=dependencies, ) @@ -684,6 +696,13 @@ def initialize_run( self.logger.debug( f"Created task run {self.task_run.name!r} for task {self.task.name!r}" ) + labels = ( + flow_run_context.flow_run.labels if flow_run_context else {} + ) + self._telemetry.start_span( + self.task_run, self.parameters, labels + ) + yield self except TerminationSignal as exc: @@ -735,11 +754,12 @@ def start( dependencies: Optional[Dict[str, Set[TaskRunInput]]] = None, ) -> Generator[None, None, None]: with self.initialize_run(task_run_id=task_run_id, dependencies=dependencies): - self.begin_run() - try: - yield - finally: - self.call_hooks() + with trace.use_span(self._telemetry._span): + self.begin_run() + try: + yield + finally: + self.call_hooks() @contextmanager def transaction_context(self) -> Generator[Transaction, None, None]: @@ -987,6 +1007,7 @@ async def set_state(self, state: State, force: bool = False) -> State: follows=self._last_event, ) + self._telemetry.update_state(new_state) return new_state async def result(self, raise_on_failure: bool = True) -> "Union[R, State, None]": @@ -1035,6 +1056,9 @@ async def handle_success(self, result: R, transaction: Transaction) -> R: self.record_terminal_state_timing(terminal_state) await self.set_state(terminal_state) self._return_value = result + + self._telemetry.end_span_on_success(terminal_state.message) + return result async def handle_retry(self, exc: Exception) -> bool: @@ -1083,6 +1107,7 @@ async def handle_retry(self, exc: Exception) -> bool: async def handle_exception(self, exc: Exception) -> None: # If the task fails, and we have retries left, set the task to retrying. + self._telemetry.record_exception(exc) if not await self.handle_retry(exc): # If the task has no retries left, or the retry condition is not met, set the task to failed. 
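             # Async twin of the sync branch above: the terminal Failed state is
             # built from the exception, and the telemetry span opened in
             # initialize_run is closed via end_span_on_failure once it is set.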
state = await exception_to_failed_state( @@ -1094,7 +1119,10 @@ async def handle_exception(self, exc: Exception) -> None: await self.set_state(state) self._raised = exc + self._telemetry.end_span_on_failure(state.message) + async def handle_timeout(self, exc: TimeoutError) -> None: + self._telemetry.record_exception(exc) if not await self.handle_retry(exc): if isinstance(exc, TaskRunTimeoutError): message = f"Task run exceeded timeout of {self.task.timeout_seconds} second(s)" @@ -1108,6 +1136,7 @@ async def handle_timeout(self, exc: TimeoutError) -> None: ) await self.set_state(state) self._raised = exc + self._telemetry.end_span_on_failure(state.message) async def handle_crash(self, exc: BaseException) -> None: state = await exception_to_crashed_state(exc) @@ -1117,6 +1146,9 @@ async def handle_crash(self, exc: BaseException) -> None: await self.set_state(state, force=True) self._raised = exc + self._telemetry.record_exception(exc) + self._telemetry.end_span_on_failure(state.message) + @asynccontextmanager async def setup_run_context(self, client: Optional[PrefectClient] = None): from prefect.utilities.engine import ( @@ -1172,12 +1204,14 @@ async def initialize_run( async with AsyncClientContext.get_or_create(): self._client = get_client() self._is_started = True + flow_run_context = FlowRunContext.get() + try: if not self.task_run: self.task_run = await self.task.create_local_run( id=task_run_id, parameters=self.parameters, - flow_run_context=FlowRunContext.get(), + flow_run_context=flow_run_context, parent_task_run_context=TaskRunContext.get(), wait_for=self.wait_for, extra_task_inputs=dependencies, @@ -1194,6 +1228,14 @@ async def initialize_run( self.logger.debug( f"Created task run {self.task_run.name!r} for task {self.task.name!r}" ) + + labels = ( + flow_run_context.flow_run.labels if flow_run_context else {} + ) + self._telemetry.start_span( + self.task_run, self.parameters, labels + ) + yield self except TerminationSignal as exc: @@ -1247,11 +1289,12 @@ async def start( async with self.initialize_run( task_run_id=task_run_id, dependencies=dependencies ): - await self.begin_run() - try: - yield - finally: - await self.call_hooks() + with trace.use_span(self._telemetry._span): + await self.begin_run() + try: + yield + finally: + await self.call_hooks() @asynccontextmanager async def transaction_context(self) -> AsyncGenerator[Transaction, None]: diff --git a/src/prefect/telemetry/run_telemetry.py b/src/prefect/telemetry/run_telemetry.py new file mode 100644 index 000000000000..bc2c36fc68ab --- /dev/null +++ b/src/prefect/telemetry/run_telemetry.py @@ -0,0 +1,77 @@ +import time +from dataclasses import dataclass, field +from typing import TYPE_CHECKING, Any, Dict, Optional + +from opentelemetry.trace import ( + Status, + StatusCode, + get_tracer, +) + +import prefect +from prefect.client.schemas import TaskRun +from prefect.client.schemas.objects import State + +if TYPE_CHECKING: + from opentelemetry.sdk.trace import Tracer + + +@dataclass +class RunTelemetry: + _tracer: "Tracer" = field( + default_factory=lambda: get_tracer("prefect", prefect.__version__) + ) + _span = None + + def start_span( + self, + task_run: TaskRun, + parameters: Optional[Dict[str, Any]] = None, + labels: Optional[Dict[str, Any]] = None, + ): + if parameters is None: + parameters = {} + if labels is None: + labels = {} + parameter_attributes = { + f"prefect.run.parameter.{k}": type(v).__name__ + for k, v in parameters.items() + } + self._span = self._tracer.start_span( + name=task_run.name, + attributes={ + 
"prefect.run.type": "task", + "prefect.run.id": str(task_run.id), + "prefect.tags": task_run.tags, + **parameter_attributes, + **labels, + }, + ) + + def end_span_on_success(self, terminal_message: str): + if self._span: + self._span.set_status(Status(StatusCode.OK), terminal_message) + self._span.end(time.time_ns()) + self._span = None + + def end_span_on_failure(self, terminal_message: str): + if self._span: + self._span.set_status(Status(StatusCode.ERROR, terminal_message)) + self._span.end(time.time_ns()) + self._span = None + + def record_exception(self, exc: Exception): + if self._span: + self._span.record_exception(exc) + + def update_state(self, new_state: State): + if self._span: + self._span.add_event( + new_state.name, + { + "prefect.state.message": new_state.message or "", + "prefect.state.type": new_state.type, + "prefect.state.name": new_state.name or new_state.type, + "prefect.state.id": str(new_state.id), + }, + ) diff --git a/tests/telemetry/test_instrumentation.py b/tests/telemetry/test_instrumentation.py index a86e4f725b9e..1ea74ff55872 100644 --- a/tests/telemetry/test_instrumentation.py +++ b/tests/telemetry/test_instrumentation.py @@ -1,5 +1,5 @@ import os -from uuid import UUID +from uuid import UUID, uuid4 import pytest from opentelemetry import metrics, trace @@ -11,9 +11,17 @@ from opentelemetry.sdk.metrics import MeterProvider from opentelemetry.sdk.metrics.export import PeriodicExportingMetricReader from opentelemetry.sdk.trace import TracerProvider +from tests.telemetry.instrumentation_tester import InstrumentationTester +from prefect import flow, task +from prefect.task_engine import ( + run_task_async, + run_task_sync, +) from prefect.telemetry.bootstrap import setup_telemetry -from prefect.telemetry.instrumentation import extract_account_and_workspace_id +from prefect.telemetry.instrumentation import ( + extract_account_and_workspace_id, +) from prefect.telemetry.logging import get_log_handler from prefect.telemetry.processors import InFlightSpanProcessor @@ -160,3 +168,219 @@ def test_logger_provider( log_handler = get_log_handler() assert isinstance(log_handler, LoggingHandler) assert log_handler._logger_provider == logger_provider + + +class TestTaskRunInstrumentation: + @pytest.fixture(params=["async", "sync"]) + async def engine_type(self, request): + return request.param + + async def run_task(self, task, task_run_id, parameters, engine_type): + if engine_type == "async": + return await run_task_async( + task, task_run_id=task_run_id, parameters=parameters + ) + else: + return run_task_sync(task, task_run_id=task_run_id, parameters=parameters) + + async def test_span_creation( + self, engine_type, instrumentation: InstrumentationTester + ): + @task + async def async_task(x: int, y: int): + return x + y + + @task + def sync_task(x: int, y: int): + return x + y + + task_fn = async_task if engine_type == "async" else sync_task + task_run_id = uuid4() + + await self.run_task( + task_fn, + task_run_id=task_run_id, + parameters={"x": 1, "y": 2}, + engine_type=engine_type, + ) + + spans = instrumentation.get_finished_spans() + assert len(spans) == 1 + span = spans[0] + + instrumentation.assert_has_attributes( + span, {"prefect.run.id": str(task_run_id), "prefect.run.type": "task"} + ) + assert spans[0].name == task_fn.__name__ + + async def test_span_attributes(self, engine_type, instrumentation): + @task + async def async_task(x: int, y: int): + return x + y + + @task + def sync_task(x: int, y: int): + return x + y + + task_fn = async_task if engine_type == 
"async" else sync_task + task_run_id = uuid4() + + await self.run_task( + task_fn, + task_run_id=task_run_id, + parameters={"x": 1, "y": 2}, + engine_type=engine_type, + ) + + spans = instrumentation.get_finished_spans() + assert len(spans) == 1 + instrumentation.assert_has_attributes( + spans[0], + { + "prefect.run.id": str(task_run_id), + "prefect.run.type": "task", + "prefect.run.parameter.x": "int", + "prefect.run.parameter.y": "int", + }, + ) + assert spans[0].name == task_fn.__name__ + + async def test_span_events(self, engine_type, instrumentation): + @task + async def async_task(x: int, y: int): + return x + y + + @task + def sync_task(x: int, y: int): + return x + y + + task_fn = async_task if engine_type == "async" else sync_task + task_run_id = uuid4() + + await self.run_task( + task_fn, + task_run_id=task_run_id, + parameters={"x": 1, "y": 2}, + engine_type=engine_type, + ) + + spans = instrumentation.get_finished_spans() + events = spans[0].events + assert len(events) == 2 + assert events[0].name == "Running" + assert events[1].name == "Completed" + + async def test_span_status_on_success(self, engine_type, instrumentation): + @task + async def async_task(x: int, y: int): + return x + y + + @task + def sync_task(x: int, y: int): + return x + y + + task_fn = async_task if engine_type == "async" else sync_task + task_run_id = uuid4() + + await self.run_task( + task_fn, + task_run_id=task_run_id, + parameters={"x": 1, "y": 2}, + engine_type=engine_type, + ) + + spans = instrumentation.get_finished_spans() + assert len(spans) == 1 + assert spans[0].status.status_code == trace.StatusCode.OK + + async def test_span_status_on_failure(self, engine_type, instrumentation): + @task + async def async_task(x: int, y: int): + raise ValueError("Test error") + + @task + def sync_task(x: int, y: int): + raise ValueError("Test error") + + task_fn = async_task if engine_type == "async" else sync_task + task_run_id = uuid4() + + with pytest.raises(ValueError, match="Test error"): + await self.run_task( + task_fn, + task_run_id=task_run_id, + parameters={"x": 1, "y": 2}, + engine_type=engine_type, + ) + + spans = instrumentation.get_finished_spans() + assert len(spans) == 1 + assert spans[0].status.status_code == trace.StatusCode.ERROR + assert "Test error" in spans[0].status.description + + async def test_span_exception_recording(self, engine_type, instrumentation): + @task + async def async_task(x: int, y: int): + raise Exception("Test error") + + @task + def sync_task(x: int, y: int): + raise Exception("Test error") + + task_fn = async_task if engine_type == "async" else sync_task + task_run_id = uuid4() + + with pytest.raises(Exception, match="Test error"): + await self.run_task( + task_fn, + task_run_id=task_run_id, + parameters={"x": 1, "y": 2}, + engine_type=engine_type, + ) + + spans = instrumentation.get_finished_spans() + assert len(spans) == 1 + + events = spans[0].events + assert any(event.name == "exception" for event in events) + exception_event = next(event for event in events if event.name == "exception") + assert exception_event.attributes["exception.type"] == "Exception" + assert exception_event.attributes["exception.message"] == "Test error" + + async def test_flow_labels(self, engine_type, instrumentation, sync_prefect_client): + """Test that parent flow ID gets propagated to task spans""" + + @task + async def async_child_task(): + return 1 + + @task + def sync_child_task(): + return 1 + + @flow + async def async_parent_flow(): + return await async_child_task() + + @flow + def 
sync_parent_flow(): + return sync_child_task() + + if engine_type == "async": + state = await async_parent_flow(return_state=True) + else: + state = sync_parent_flow(return_state=True) + + spans = instrumentation.get_finished_spans() + task_spans = [ + span for span in spans if span.attributes.get("prefect.run.type") == "task" + ] + assert len(task_spans) == 1 + + assert state.state_details.flow_run_id is not None + flow_run = sync_prefect_client.read_flow_run(state.state_details.flow_run_id) + + # Verify the task span has the parent flow's ID + instrumentation.assert_has_attributes( + task_spans[0], {**flow_run.labels, "prefect.run.type": "task"} + ) From 93d59f35c36afa3281893fd648c08623f35a80a3 Mon Sep 17 00:00:00 2001 From: Alexander Streed Date: Wed, 4 Dec 2024 12:10:47 -0600 Subject: [PATCH 09/92] Rename `prefect.server.database.migrations` to `prefect.server.database._migrations` (#16214) --- .github/workflows/static-analysis.yaml | 4 ++-- MANIFEST.in | 6 +++--- scripts/generate_sdk_docs.py | 2 +- .../database/{migrations => _migrations}/MIGRATION-NOTES.md | 0 .../server/database/{migrations => _migrations}/env.py | 0 .../database/{migrations => _migrations}/script.py.mako | 0 .../2021_01_20_122127_25f4b90a7a42_initial_migration.py | 0 .../postgresql/2022_02_13_125213_5f376def75c3_block_data.py | 0 .../2022_02_13_125213_679e695af6ba_add_configurations.py | 0 ...2_02_17_140821_5bff7878e700_add_agents_and_work_queue.py | 0 ...022_02_19_205543_d9d98a9ebb6f_rename_block_data_table.py | 0 .../2022_02_20_103844_4799f657a6a1_add_block_spec_table.py | 0 ...21_111050_d115556a8ab6_index_flowrun_flow_runner_type.py | 0 ...02_21_150017_b68b3cad6b8a_add_block_spec_id_to_blocks.py | 0 .../2022_03_10_102713_2e7e1428ffce_index_flow_created.py | 0 ...022_04_20_113011_605ebb4e9155_add_flow_run_state_name.py | 0 .../2022_04_21_095519_14dc68cc5853_backfill_state_name.py | 0 ...3_132803_d38c5e6a9115_rename_block_to_blockbasis_and_.py | 0 ...45956_1c9390e2f9c6_replace_version_with_checksum_and_.py | 0 .../2022_05_12_202952_dc7a3c6fd3e9_add_flow_run_alerts.py | 0 ..._26_135743_724e6dcc6b5d_add_block_schema_capabilities.py | 0 ..._2fe6fe6ca16e_adds_block_schema_references_and_block_.py | 0 ...9_cdcb4018dd0e_rename_run_alerts_to_run_notifications.py | 0 ...535_d60c18774a5d_add_indexes_for_partial_name_matches.py | 0 ...53_3a7c41d3b464_adds_description_and_code_example_to_.py | 0 ...61c76ee09e02_add_anonymous_column_for_block_documents.py | 0 ...22_06_17_204409_d335ad57d5ba_add_block_schema_indexes.py | 0 ...921_7296741dff68_add_protected_column_for_block_types.py | 0 ..._093732_29ad9bef6147_adds_indexes_for_block_filtering.py | 0 ...9_135432_813ddf14e2de_add_descriptions_to_deployments.py | 0 ...219_2f46fc3f3beb_remove_name_column_for_notification_.py | 0 ...4cdc2ba709a4_migrates_block_schemas_with_new_secrets_.py | 0 ...09_e905fd199258_removes_debugprintnotification_block_.py | 0 ...70700_112c68143fc3_add_infrastructure_document_id_to_.py | 0 ..._0f27d462bf6d_removing_default_storage_block_document.py | 0 ...7_19_160058_bb4dc90d3e29_renames_existing_block_types.py | 0 .../2022_07_21_133134_e085c9cbf8ce_remove_flow_runners.py | 0 ...2022_07_21_205820_0cf7311d6ea6_add_crashed_state_type.py | 0 .../2022_07_25_214717_4ff2f2bf81f4_adds_block_type_slug.py | 0 ...3637_add97ce1937d_update_deployments_to_include_more_.py | 0 ...13_fa985d474982_add_index_to_flow_run_infrastructure_.py | 0 ...2022_08_01_211251_97e212ea6545_add_deployment_version.py | 0 ...60e428f92a75_expand_deployment_schema_for_improved_ux.py | 0 
...08_07_134410_77eb737fc759_add_work_queue_name_to_runs.py | 0 ...550_7737221bf8a4_fix_concurrency_limit_tag_index_name.py | 0 ...2_08_18_102804_2d5e000696f1_adds_block_schema_version.py | 0 .../2022_10_12_102048_22b7cb02e593_add_state_timestamp.py | 0 ...22_10_14_172612_ad4b1b4d1e9d_index_deployment_created.py | 0 .../2022_10_19_093902_6d548701edef_add_created_by.py | 0 ...2022_10_19_165110_8ea825da948d_track_retries_restarts.py | 0 .../2022_10_20_101423_3ced59d8806b_add_last_polled.py | 0 .../versions/postgresql/2022_10_31_161719_41e5ed9e1034_.py | 0 ...0555_54c1876c68ae_add_index_for_scheduled_deployments.py | 0 ..._171740_8caf7c1fd82c_add_coalesced_start_time_indices.py | 0 ...18_161056_5d526270ddb4_add_flowrun_infrastructure_pid.py | 0 .../2022_11_23_092449_5e4f924ff96c_add_paused_state_type.py | 0 .../2022_11_24_143620_f7587d6c5776_add_worker_tables.py | 0 ...180142_d481d5058a19_rename_worker_pools_to_work_pools.py | 0 ...164028_9326a6aee18b_add_cancelling_to_state_type_enum.py | 0 ...23_01_26_045500_2882cd2df463_implement_artifact_table.py | 0 ...2023_01_26_045501_2882cd2df464_create_migration_index.py | 0 .../2023_01_26_045501_2882cd2df465_migrate_artifact_data.py | 0 ..._01_26_045502_2882cd2df466_cleanup_artifact_migration.py | 0 ...023_01_26_152801_0a1250a5aa25_expand_work_queue_table.py | 0 ...3_01_31_110543_f98ae6d8e2cc_work_queue_data_migration.py | 0 ..._31_133052_2a88656f4a23_clean_up_work_queue_migration.py | 0 .../2023_02_08_151958_cfdfec5d7557_remove_artifact_fk.py | 0 ...3_01_154651_7d918a392297_remove_flowrun_deployment_fk.py | 0 ...3_15_153039_4a1a0e4f89de_add_artifact_description_col.py | 0 .../2023_03_20_175243_aa84ac237ce8_remove_artifact_uq.py | 0 ..._20_185238_d20618ce678e_add_artifact_collection_table.py | 0 .../2023_03_20_185610_46bd82c6279a_add_index_on_artifact.py | 0 .../2023_04_04_132534_3bf47e3ce2dd_add_index_on_log.py | 0 ...4_172310_6a1eb3d442e4_add_cols_to_artifact_collection.py | 0 ...0406_43c94d4c7aa3_add_pull_steps_column_to_deployment.py | 0 .../2023_04_05_134520_310dda75f561_add_variables.py | 0 .../2023_04_06_122716_15f5083c16bd_migrate_artifact_data.py | 0 ...133838_5f623ddbf7fe_create_concurrency_limit_v2_table.py | 0 .../versions/postgresql/2023_09_06_085747_50f8c182c3ca_.py | 0 ...db0eb3973a54_adds_enforce_parameter_schema_column_to_.py | 0 ..._4e9a6f93eb6c_make_slot_decay_per_second_not_nullable.py | 0 ...6f882b1d_remove_flow_run_id_requirement_from_task_run.py | 0 ..._12_224511_bfe653bbf62e_add_last_polled_to_deployment.py | 0 ...26_cef24af2ec34_add_block_type_name_to_block_document.py | 0 ...08_9c493c02ca6d_add_trgm_index_to_block_document_name.py | 0 ...12_07_095320_733ca1903976_create_flow_run_input_table.py | 0 ...7c453555d3a5_make_flowruninput_flow_run_id_a_foreign_.py | 0 ..._01_05_101034_6b63c51c31b4_add_sender_to_flowruninput.py | 0 ...0615_8cf4d4933848_create_deployment_schedule_and_add_.py | 0 ...28_121699507574_add_job_variables_column_to_flow_runs.py | 0 ...024_03_13_111215_7a653837d9ba_create_csrf_token_toble.py | 0 .../2024_04_03_112409_aeea5ee6f070_automations_models.py | 0 ...94418_bd6efa529f03_add_deployment_version_to_flow_run.py | 0 ...4_04_09_125658_916718e8330f_automation_event_follower.py | 0 .../2024_04_09_132036_954db7517015_trigger_in_index.py | 0 ...42_15768c2ec702_add_events_and_event_resources_tables.py | 0 .../2024_04_23_094748_7ae9e431e67a_work_status_fields.py | 0 .../2024_04_25_155240_8905262ec07f_worker_status_field.py | 0 ...b23c83a12cb4_add_catchup_fields_to_deploymentschedule.py | 0 
.../2024_05_21_101457_94622c1663e8_json_variables.py | 0 ...4_07_15_145240_7495a5013e7e_adding_scope_to_followers.py | 0 ..._150111_97429116795e_add_deployment_concurrency_limit.py | 0 ...024_09_11_090317_555ed31b284d_add_concurrency_options.py | 0 ...51_eaec5004771f_add_deployment_to_global_concurrency_.py | 0 ...50706_68a44144428d_add_labels_column_to_flow_flowrun_.py | 0 .../2022_01_20_115236_9725c1cbee35_initial_migration.py | 0 .../sqlite/2022_02_04_093838_619bea85701a_block_data.py | 0 .../2022_02_15_211737_28ae48128c75_add_configurations.py | 0 ..._02_17_151416_7c91cb86dc4e_add_agents_and_work_queues.py | 0 ...022_02_19_210255_4c4a6a138053_rename_block_data_table.py | 0 .../2022_02_20_103610_e1ff4973a9eb_add_block_spec_table.py | 0 ...21_111238_f327e877e423_index_flowrun_flow_runner_type.py | 0 ...02_21_145916_c8ff35f94028_add_block_spec_id_to_blocks.py | 0 .../2022_03_10_102500_71a57ec351d1_index_flow_created.py | 0 ...022_04_19_181604_7f5f335cace3_add_flow_run_state_name.py | 0 .../2022_04_21_113057_db6bde582447_backfill_state_name.py | 0 ...3_114831_fd966d4ad99c_rename_block_to_blockbasis_and_.py | 0 ..._25_135207_b75d279ba985_replace_version_with_checksum.py | 0 .../2022_05_12_203158_888a0bb0df7b_add_flow_run_alerts.py | 0 ..._19_165808_33439667aeea_add_block_schema_capabilities.py | 0 ..._e73c6f1fe752_adds_block_schema_referecnes_and_block_.py | 0 ...5_d76326ed0d06_rename_run_alerts_to_run_notifications.py | 0 ...048_f65b6ad0b869_add_indexes_for_partial_name_matches.py | 0 ...02_84892301571a_adds_description_and_code_example_to_.py | 0 ...2d900af9cd07_add_anonymous_column_for_block_documents.py | 0 ...22_06_17_204530_9e2a1c08c6f1_add_block_schema_indexes.py | 0 ...823_dff8da7a6c2c_add_protected_column_for_block_types.py | 0 ..._093640_a205b458d997_adds_indexes_for_block_filtering.py | 0 ...9_133432_3bd87ecdac38_add_descriptions_to_deployments.py | 0 ...832_42762c37b7bc_remove_name_column_for_notification_.py | 0 ...e2dae764a603_migrates_block_schemas_with_new_secrets_.py | 0 ...08_061c7e518b40_removes_debugprintnotification_block_.py | 0 ...1_113314_638cbcc2a158_add_infrastructure_block_id_to_.py | 0 ..._56be24fdb383_removing_default_storage_block_document.py | 0 ...7_19_153432_628a873f0d1a_renames_existing_block_types.py | 0 .../2022_07_20_113451_2fe8ef6a6514_remove_flow_runners.py | 0 .../2022_07_25_142515_f335f9633eec_adds_block_type_slug.py | 0 ...1028_88c2112b668f_update_deployments_to_include_more_.py | 0 ...11_905134444e17_add_index_to_flow_run_infrastructure_.py | 0 ...2022_08_01_211039_24bb2e4a195c_add_deployment_version.py | 0 ...296e2665785f_expand_deployment_schema_for_improved_ux.py | 0 ...08_07_134138_575634b7acd4_add_work_queue_name_to_runs.py | 0 ...19_53c19b31aa09_fix_name_on_concurrency_limit_tag_idx.py | 0 ...2_08_18_102527_e757138e954a_adds_block_schema_version.py | 0 .../2022_10_12_102048_22b7cb02e593_add_state_timestamp.py | 0 ...22_10_14_172612_ad4b1b4d1e9d_index_deployment_created.py | 0 .../sqlite/2022_10_19_093542_fa319f214160_add_created_by.py | 0 ...2022_10_19_155810_af52717cf201_track_retries_restarts.py | 0 .../2022_10_20_101423_3ced59d8806b_add_last_polled.py | 0 ...0619_a0284438370e_add_index_for_scheduled_deployments.py | 0 ..._165921_4f90ad6349bd_add_coalesced_start_time_indices.py | 0 ...18_161332_7201de756d85_add_flowrun_infrastructure_pid.py | 0 .../2022_11_24_143302_fe77ad0dda06_add_worker_tables.py | 0 ...175327_bb38729c471a_rename_worker_pools_to_work_pools.py | 0 ...23_01_12_000042_f92143d30c24_implement_artifact_table.py | 0 
...2023_01_12_000043_f92143d30c25_create_migration_index.py | 0 .../2023_01_12_000043_f92143d30c26_migrate_artifact_data.py | 0 ..._01_12_000044_f92143d30c27_cleanup_artifact_migration.py | 0 ...023_01_25_114348_b9bda9f142f1_expand_work_queue_table.py | 0 ...3_01_31_105442_1678f2fb8b33_work_queue_data_migration.py | 0 ..._31_132409_bfe42b7090d6_clean_up_work_queue_migration.py | 0 .../2023_02_08_152028_8d148e44e669_remove_artifact_fk.py | 0 ...3_01_165551_f3df94dca3cc_remove_flowrun_deployment_fk.py | 0 ...3_15_123850_cf1159bd0d3c_add_artifact_description_col.py | 0 ..._20_153925_1d7441c031d0_remove_uq_from_artifact_table.py | 0 ..._20_184534_b9aafc3ab936_add_artifact_collection_table.py | 0 .../2023_03_20_194204_422f8ba9541d_add_artifact_idx.py | 0 .../2023_04_04_115150_553920ec20e9_add_index_on_log.py | 0 ...4_172555_3e1eb8281d5e_add_cols_to_artifact_collection.py | 0 ...340f457b315f_add_column_to_deployments_for_pull_steps.py | 0 .../sqlite/2023_04_05_134301_3d46e23593d6_add_variables.py | 0 .../2023_04_06_122659_2dbcec43c857_migrate_artifact_data.py | 0 ...113813_5b0bd3b41a23_create_concurrency_limit_v2_table.py | 0 .../versions/sqlite/2023_09_06_084729_c2d001b7dd06_.py | 0 ...ef674d598dd3_adds_enforce_parameter_schema_column_to_.py | 0 ..._8167af8df781_make_slot_decay_per_second_not_nullable.py | 0 ...af8df781_remove_flow_run_id_requirement_from_task_run.py | 0 ..._12_175815_f3165ae0a213_add_last_polled_to_deployment.py | 0 ...26_cef24af2ec34_add_block_type_name_to_block_document.py | 0 ...03720_22ef3915ccd8_index_and_backfill_block_type_name.py | 0 ...08_9c493c02ca6d_add_trgm_index_to_block_document_name.py | 0 ...12_07_095112_a299308852a7_create_flow_run_input_table.py | 0 ...35659cc49969_make_flowruninput_flow_run_id_a_foreign_.py | 0 ..._01_05_101041_c63a0a6dc787_add_sender_to_flowruninput.py | 0 ...0214_265eb1a2da4c_create_deployment_schedule_and_add_.py | 0 ...58_342220764f0b_add_job_variables_column_to_flow_runs.py | 0 ...024_03_13_111316_bacc60edce16_create_csrf_token_toble.py | 0 .../2024_04_03_111618_07ed05dfd4ec_automations_models.py | 0 ...14538_8644a9595a08_add_deployment_version_to_flow_run.py | 0 ...4_04_09_125712_cc510aec4689_automation_event_follower.py | 0 .../2024_04_09_131832_2b6c2b548f95_trigger_in_index.py | 0 .../2024_04_10_104304_824e9edafa60_adds_events_tables.py | 0 .../2024_04_23_094701_75c8f17b8b51_work_status_fields.py | 0 .../2024_04_25_155120_a8e62d4c72cf_worker_status_field.py | 0 ...20fbd53b3cef_add_catchup_fields_to_deploymentschedule.py | 0 .../sqlite/2024_05_21_123101_2ac65f1758c2_json_variables.py | 0 ...4_07_15_145350_354f1ede7e9f_adding_scope_to_followers.py | 0 ..._145052_f93e1439f022_add_deployment_concurrency_limit.py | 0 ...024_09_11_090106_7d6350aea855_add_concurrency_options.py | 0 ...19_4ad4658cbefe_add_deployment_to_global_concurrency_.py | 0 ...51042_5952a5498b51_add_labels_column_to_flow_flowrun_.py | 0 src/prefect/server/database/alembic.ini | 2 +- src/prefect/server/database/alembic_commands.py | 2 +- src/prefect/server/database/orm_models.py | 4 ++-- 204 files changed, 10 insertions(+), 10 deletions(-) rename src/prefect/server/database/{migrations => _migrations}/MIGRATION-NOTES.md (100%) rename src/prefect/server/database/{migrations => _migrations}/env.py (100%) rename src/prefect/server/database/{migrations => _migrations}/script.py.mako (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2021_01_20_122127_25f4b90a7a42_initial_migration.py (100%) rename src/prefect/server/database/{migrations 
=> _migrations}/versions/postgresql/2022_02_13_125213_5f376def75c3_block_data.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2022_02_13_125213_679e695af6ba_add_configurations.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2022_02_17_140821_5bff7878e700_add_agents_and_work_queue.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2022_02_19_205543_d9d98a9ebb6f_rename_block_data_table.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2022_02_20_103844_4799f657a6a1_add_block_spec_table.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2022_02_21_111050_d115556a8ab6_index_flowrun_flow_runner_type.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2022_02_21_150017_b68b3cad6b8a_add_block_spec_id_to_blocks.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2022_03_10_102713_2e7e1428ffce_index_flow_created.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2022_04_20_113011_605ebb4e9155_add_flow_run_state_name.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2022_04_21_095519_14dc68cc5853_backfill_state_name.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2022_04_23_132803_d38c5e6a9115_rename_block_to_blockbasis_and_.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2022_05_10_145956_1c9390e2f9c6_replace_version_with_checksum_and_.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2022_05_12_202952_dc7a3c6fd3e9_add_flow_run_alerts.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2022_05_26_135743_724e6dcc6b5d_add_block_schema_capabilities.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2022_05_28_081821_2fe6fe6ca16e_adds_block_schema_references_and_block_.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2022_05_30_112549_cdcb4018dd0e_rename_run_alerts_to_run_notifications.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2022_06_04_133535_d60c18774a5d_add_indexes_for_partial_name_matches.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2022_06_08_121753_3a7c41d3b464_adds_description_and_code_example_to_.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2022_06_13_104234_61c76ee09e02_add_anonymous_column_for_block_documents.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2022_06_17_204409_d335ad57d5ba_add_block_schema_indexes.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2022_06_20_123921_7296741dff68_add_protected_column_for_block_types.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2022_06_21_093732_29ad9bef6147_adds_indexes_for_block_filtering.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2022_06_29_135432_813ddf14e2de_add_descriptions_to_deployments.py (100%) rename src/prefect/server/database/{migrations => 
_migrations}/versions/postgresql/2022_06_29_152219_2f46fc3f3beb_remove_name_column_for_notification_.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2022_07_06_152528_4cdc2ba709a4_migrates_block_schemas_with_new_secrets_.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2022_07_07_112809_e905fd199258_removes_debugprintnotification_block_.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2022_07_11_170700_112c68143fc3_add_infrastructure_document_id_to_.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2022_07_14_114039_0f27d462bf6d_removing_default_storage_block_document.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2022_07_19_160058_bb4dc90d3e29_renames_existing_block_types.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2022_07_21_133134_e085c9cbf8ce_remove_flow_runners.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2022_07_21_205820_0cf7311d6ea6_add_crashed_state_type.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2022_07_25_214717_4ff2f2bf81f4_adds_block_type_slug.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2022_07_25_233637_add97ce1937d_update_deployments_to_include_more_.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2022_07_29_181713_fa985d474982_add_index_to_flow_run_infrastructure_.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2022_08_01_211251_97e212ea6545_add_deployment_version.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2022_08_06_145817_60e428f92a75_expand_deployment_schema_for_improved_ux.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2022_08_07_134410_77eb737fc759_add_work_queue_name_to_runs.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2022_08_07_154550_7737221bf8a4_fix_concurrency_limit_tag_index_name.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2022_08_18_102804_2d5e000696f1_adds_block_schema_version.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2022_10_12_102048_22b7cb02e593_add_state_timestamp.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2022_10_14_172612_ad4b1b4d1e9d_index_deployment_created.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2022_10_19_093902_6d548701edef_add_created_by.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2022_10_19_165110_8ea825da948d_track_retries_restarts.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2022_10_20_101423_3ced59d8806b_add_last_polled.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2022_10_31_161719_41e5ed9e1034_.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2022_11_05_180555_54c1876c68ae_add_index_for_scheduled_deployments.py (100%) rename src/prefect/server/database/{migrations => 
_migrations}/versions/postgresql/2022_11_10_171740_8caf7c1fd82c_add_coalesced_start_time_indices.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2022_11_18_161056_5d526270ddb4_add_flowrun_infrastructure_pid.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2022_11_23_092449_5e4f924ff96c_add_paused_state_type.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2022_11_24_143620_f7587d6c5776_add_worker_tables.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2023_01_08_180142_d481d5058a19_rename_worker_pools_to_work_pools.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2023_01_25_164028_9326a6aee18b_add_cancelling_to_state_type_enum.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2023_01_26_045500_2882cd2df463_implement_artifact_table.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2023_01_26_045501_2882cd2df464_create_migration_index.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2023_01_26_045501_2882cd2df465_migrate_artifact_data.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2023_01_26_045502_2882cd2df466_cleanup_artifact_migration.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2023_01_26_152801_0a1250a5aa25_expand_work_queue_table.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2023_01_31_110543_f98ae6d8e2cc_work_queue_data_migration.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2023_01_31_133052_2a88656f4a23_clean_up_work_queue_migration.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2023_02_08_151958_cfdfec5d7557_remove_artifact_fk.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2023_03_01_154651_7d918a392297_remove_flowrun_deployment_fk.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2023_03_15_153039_4a1a0e4f89de_add_artifact_description_col.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2023_03_20_175243_aa84ac237ce8_remove_artifact_uq.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2023_03_20_185238_d20618ce678e_add_artifact_collection_table.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2023_03_20_185610_46bd82c6279a_add_index_on_artifact.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2023_04_04_132534_3bf47e3ce2dd_add_index_on_log.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2023_04_04_172310_6a1eb3d442e4_add_cols_to_artifact_collection.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2023_04_05_130406_43c94d4c7aa3_add_pull_steps_column_to_deployment.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2023_04_05_134520_310dda75f561_add_variables.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2023_04_06_122716_15f5083c16bd_migrate_artifact_data.py (100%) rename 
src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2023_08_02_133838_5f623ddbf7fe_create_concurrency_limit_v2_table.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2023_09_06_085747_50f8c182c3ca_.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2023_09_20_134544_db0eb3973a54_adds_enforce_parameter_schema_column_to_.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2023_09_21_130125_4e9a6f93eb6c_make_slot_decay_per_second_not_nullable.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2023_09_25_121806_05ea6f882b1d_remove_flow_run_id_requirement_from_task_run.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2023_10_12_224511_bfe653bbf62e_add_last_polled_to_deployment.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2023_10_30_075026_cef24af2ec34_add_block_type_name_to_block_document.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2023_11_20_084708_9c493c02ca6d_add_trgm_index_to_block_document_name.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2023_12_07_095320_733ca1903976_create_flow_run_input_table.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2023_12_07_121416_7c453555d3a5_make_flowruninput_flow_run_id_a_foreign_.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2024_01_05_101034_6b63c51c31b4_add_sender_to_flowruninput.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2024_01_22_120615_8cf4d4933848_create_deployment_schedule_and_add_.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2024_03_05_122228_121699507574_add_job_variables_column_to_flow_runs.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2024_03_13_111215_7a653837d9ba_create_csrf_token_toble.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2024_04_03_112409_aeea5ee6f070_automations_models.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2024_04_04_094418_bd6efa529f03_add_deployment_version_to_flow_run.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2024_04_09_125658_916718e8330f_automation_event_follower.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2024_04_09_132036_954db7517015_trigger_in_index.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2024_04_10_194742_15768c2ec702_add_events_and_event_resources_tables.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2024_04_23_094748_7ae9e431e67a_work_status_fields.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2024_04_25_155240_8905262ec07f_worker_status_field.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2024_05_01_105401_b23c83a12cb4_add_catchup_fields_to_deploymentschedule.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2024_05_21_101457_94622c1663e8_json_variables.py (100%) rename 
src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2024_07_15_145240_7495a5013e7e_adding_scope_to_followers.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2024_08_14_150111_97429116795e_add_deployment_concurrency_limit.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2024_09_11_090317_555ed31b284d_add_concurrency_options.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2024_09_16_152051_eaec5004771f_add_deployment_to_global_concurrency_.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/postgresql/2024_11_15_150706_68a44144428d_add_labels_column_to_flow_flowrun_.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2022_01_20_115236_9725c1cbee35_initial_migration.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2022_02_04_093838_619bea85701a_block_data.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2022_02_15_211737_28ae48128c75_add_configurations.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2022_02_17_151416_7c91cb86dc4e_add_agents_and_work_queues.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2022_02_19_210255_4c4a6a138053_rename_block_data_table.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2022_02_20_103610_e1ff4973a9eb_add_block_spec_table.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2022_02_21_111238_f327e877e423_index_flowrun_flow_runner_type.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2022_02_21_145916_c8ff35f94028_add_block_spec_id_to_blocks.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2022_03_10_102500_71a57ec351d1_index_flow_created.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2022_04_19_181604_7f5f335cace3_add_flow_run_state_name.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2022_04_21_113057_db6bde582447_backfill_state_name.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2022_04_23_114831_fd966d4ad99c_rename_block_to_blockbasis_and_.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2022_04_25_135207_b75d279ba985_replace_version_with_checksum.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2022_05_12_203158_888a0bb0df7b_add_flow_run_alerts.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2022_05_19_165808_33439667aeea_add_block_schema_capabilities.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2022_05_28_081650_e73c6f1fe752_adds_block_schema_referecnes_and_block_.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2022_05_30_100855_d76326ed0d06_rename_run_alerts_to_run_notifications.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2022_06_04_104048_f65b6ad0b869_add_indexes_for_partial_name_matches.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2022_06_08_121702_84892301571a_adds_description_and_code_example_to_.py 
(100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2022_06_13_103943_2d900af9cd07_add_anonymous_column_for_block_documents.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2022_06_17_204530_9e2a1c08c6f1_add_block_schema_indexes.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2022_06_20_123823_dff8da7a6c2c_add_protected_column_for_block_types.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2022_06_21_093640_a205b458d997_adds_indexes_for_block_filtering.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2022_06_29_133432_3bd87ecdac38_add_descriptions_to_deployments.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2022_06_29_151832_42762c37b7bc_remove_name_column_for_notification_.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2022_07_06_142824_e2dae764a603_migrates_block_schemas_with_new_secrets_.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2022_07_07_111208_061c7e518b40_removes_debugprintnotification_block_.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2022_07_11_113314_638cbcc2a158_add_infrastructure_block_id_to_.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2022_07_14_113138_56be24fdb383_removing_default_storage_block_document.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2022_07_19_153432_628a873f0d1a_renames_existing_block_types.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2022_07_20_113451_2fe8ef6a6514_remove_flow_runners.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2022_07_25_142515_f335f9633eec_adds_block_type_slug.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2022_07_25_151028_88c2112b668f_update_deployments_to_include_more_.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2022_07_29_181111_905134444e17_add_index_to_flow_run_infrastructure_.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2022_08_01_211039_24bb2e4a195c_add_deployment_version.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2022_08_06_130009_296e2665785f_expand_deployment_schema_for_improved_ux.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2022_08_07_134138_575634b7acd4_add_work_queue_name_to_runs.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2022_08_07_154319_53c19b31aa09_fix_name_on_concurrency_limit_tag_idx.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2022_08_18_102527_e757138e954a_adds_block_schema_version.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2022_10_12_102048_22b7cb02e593_add_state_timestamp.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2022_10_14_172612_ad4b1b4d1e9d_index_deployment_created.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2022_10_19_093542_fa319f214160_add_created_by.py (100%) rename src/prefect/server/database/{migrations => 
_migrations}/versions/sqlite/2022_10_19_155810_af52717cf201_track_retries_restarts.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2022_10_20_101423_3ced59d8806b_add_last_polled.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2022_11_05_180619_a0284438370e_add_index_for_scheduled_deployments.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2022_11_10_165921_4f90ad6349bd_add_coalesced_start_time_indices.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2022_11_18_161332_7201de756d85_add_flowrun_infrastructure_pid.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2022_11_24_143302_fe77ad0dda06_add_worker_tables.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2023_01_08_175327_bb38729c471a_rename_worker_pools_to_work_pools.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2023_01_12_000042_f92143d30c24_implement_artifact_table.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2023_01_12_000043_f92143d30c25_create_migration_index.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2023_01_12_000043_f92143d30c26_migrate_artifact_data.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2023_01_12_000044_f92143d30c27_cleanup_artifact_migration.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2023_01_25_114348_b9bda9f142f1_expand_work_queue_table.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2023_01_31_105442_1678f2fb8b33_work_queue_data_migration.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2023_01_31_132409_bfe42b7090d6_clean_up_work_queue_migration.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2023_02_08_152028_8d148e44e669_remove_artifact_fk.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2023_03_01_165551_f3df94dca3cc_remove_flowrun_deployment_fk.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2023_03_15_123850_cf1159bd0d3c_add_artifact_description_col.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2023_03_20_153925_1d7441c031d0_remove_uq_from_artifact_table.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2023_03_20_184534_b9aafc3ab936_add_artifact_collection_table.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2023_03_20_194204_422f8ba9541d_add_artifact_idx.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2023_04_04_115150_553920ec20e9_add_index_on_log.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2023_04_04_172555_3e1eb8281d5e_add_cols_to_artifact_collection.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2023_04_05_120713_340f457b315f_add_column_to_deployments_for_pull_steps.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2023_04_05_134301_3d46e23593d6_add_variables.py (100%) rename src/prefect/server/database/{migrations => 
_migrations}/versions/sqlite/2023_04_06_122659_2dbcec43c857_migrate_artifact_data.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2023_08_02_113813_5b0bd3b41a23_create_concurrency_limit_v2_table.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2023_09_06_084729_c2d001b7dd06_.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2023_09_20_134145_ef674d598dd3_adds_enforce_parameter_schema_column_to_.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2023_09_21_121806_8167af8df781_make_slot_decay_per_second_not_nullable.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2023_09_25_121806_8167af8df781_remove_flow_run_id_requirement_from_task_run.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2023_10_12_175815_f3165ae0a213_add_last_polled_to_deployment.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2023_10_30_075026_cef24af2ec34_add_block_type_name_to_block_document.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2023_10_30_103720_22ef3915ccd8_index_and_backfill_block_type_name.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2023_11_20_084708_9c493c02ca6d_add_trgm_index_to_block_document_name.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2023_12_07_095112_a299308852a7_create_flow_run_input_table.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2023_12_07_121624_35659cc49969_make_flowruninput_flow_run_id_a_foreign_.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2024_01_05_101041_c63a0a6dc787_add_sender_to_flowruninput.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2024_01_22_120214_265eb1a2da4c_create_deployment_schedule_and_add_.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2024_03_05_115258_342220764f0b_add_job_variables_column_to_flow_runs.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2024_03_13_111316_bacc60edce16_create_csrf_token_toble.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2024_04_03_111618_07ed05dfd4ec_automations_models.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2024_04_04_114538_8644a9595a08_add_deployment_version_to_flow_run.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2024_04_09_125712_cc510aec4689_automation_event_follower.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2024_04_09_131832_2b6c2b548f95_trigger_in_index.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2024_04_10_104304_824e9edafa60_adds_events_tables.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2024_04_23_094701_75c8f17b8b51_work_status_fields.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2024_04_25_155120_a8e62d4c72cf_worker_status_field.py (100%) rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2024_05_01_103824_20fbd53b3cef_add_catchup_fields_to_deploymentschedule.py (100%) rename 
src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2024_05_21_123101_2ac65f1758c2_json_variables.py (100%)
 rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2024_07_15_145350_354f1ede7e9f_adding_scope_to_followers.py (100%)
 rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2024_08_14_145052_f93e1439f022_add_deployment_concurrency_limit.py (100%)
 rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2024_09_11_090106_7d6350aea855_add_concurrency_options.py (100%)
 rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2024_09_16_162719_4ad4658cbefe_add_deployment_to_global_concurrency_.py (100%)
 rename src/prefect/server/database/{migrations => _migrations}/versions/sqlite/2024_11_15_151042_5952a5498b51_add_labels_column_to_flow_flowrun_.py (100%)
diff --git a/.github/workflows/static-analysis.yaml b/.github/workflows/static-analysis.yaml
index 0e31c210c498..2df6ed87ae3c 100644
--- a/.github/workflows/static-analysis.yaml
+++ b/.github/workflows/static-analysis.yaml
@@ -87,7 +87,7 @@ jobs:
         run: |
           # `pyright` will exit with a non-zero status code if it finds any issues,
           # so we need to explicitly ignore the exit code with `|| true`.
-          uv tool run --with . pyright --verifytypes prefect --ignoreexternal --outputjson > prefect-analysis.json || true
+          uv tool run --with-editable . pyright --verifytypes prefect --ignoreexternal --outputjson > prefect-analysis.json || true
           SCORE=$(jq -r '.typeCompleteness.completenessScore' prefect-analysis.json)
           echo "current_score=$SCORE" >> $GITHUB_OUTPUT
 
@@ -98,7 +98,7 @@
       - name: Calculate base branch score
        id: calculate_base_score
        run: |
-          uv tool run --with . pyright --verifytypes prefect --ignoreexternal --outputjson > prefect-analysis-base.json || true
+          uv tool run --with-editable . pyright --verifytypes prefect --ignoreexternal --outputjson > prefect-analysis-base.json || true
           BASE_SCORE=$(jq -r '.typeCompleteness.completenessScore' prefect-analysis-base.json)
           echo "base_score=$BASE_SCORE" >> $GITHUB_OUTPUT
 
diff --git a/MANIFEST.in b/MANIFEST.in
index 53e36053eb31..fd224ca8a6c8 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -25,9 +25,9 @@ include src/prefect/server/api/collections_data/views/*.json
 
 # Migrations
 include src/prefect/server/database/alembic.ini
-include src/prefect/server/database/migrations/*
-include src/prefect/server/database/migrations/versions/*
-include src/prefect/server/database/migrations/versions/*/*
+include src/prefect/server/database/_migrations/*
+include src/prefect/server/database/_migrations/versions/*
+include src/prefect/server/database/_migrations/versions/*/*
 
 # SQL templates
 graft src/prefect/server/database/sql
diff --git a/scripts/generate_sdk_docs.py b/scripts/generate_sdk_docs.py
index 4fd21871ef39..7a9847356650 100644
--- a/scripts/generate_sdk_docs.py
+++ b/scripts/generate_sdk_docs.py
@@ -15,7 +15,7 @@ def docs_path() -> Path:
 
 
 SKIPPED = [
     "prefect._internal",
-    "prefect.server.database.migrations",
+    "prefect.server.database._migrations",
 ]
 
diff --git a/src/prefect/server/database/migrations/MIGRATION-NOTES.md b/src/prefect/server/database/_migrations/MIGRATION-NOTES.md
similarity index 100%
rename from src/prefect/server/database/migrations/MIGRATION-NOTES.md
rename to src/prefect/server/database/_migrations/MIGRATION-NOTES.md
diff --git a/src/prefect/server/database/migrations/env.py b/src/prefect/server/database/_migrations/env.py
similarity index 100%
rename from src/prefect/server/database/migrations/env.py
rename to src/prefect/server/database/_migrations/env.py
diff --git a/src/prefect/server/database/migrations/script.py.mako b/src/prefect/server/database/_migrations/script.py.mako
similarity index 100%
rename from src/prefect/server/database/migrations/script.py.mako
rename to src/prefect/server/database/_migrations/script.py.mako
diff --git a/src/prefect/server/database/migrations/versions/postgresql/2021_01_20_122127_25f4b90a7a42_initial_migration.py b/src/prefect/server/database/_migrations/versions/postgresql/2021_01_20_122127_25f4b90a7a42_initial_migration.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/postgresql/2021_01_20_122127_25f4b90a7a42_initial_migration.py
rename to src/prefect/server/database/_migrations/versions/postgresql/2021_01_20_122127_25f4b90a7a42_initial_migration.py
diff --git a/src/prefect/server/database/migrations/versions/postgresql/2022_02_13_125213_5f376def75c3_block_data.py b/src/prefect/server/database/_migrations/versions/postgresql/2022_02_13_125213_5f376def75c3_block_data.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/postgresql/2022_02_13_125213_5f376def75c3_block_data.py
rename to src/prefect/server/database/_migrations/versions/postgresql/2022_02_13_125213_5f376def75c3_block_data.py
diff --git a/src/prefect/server/database/migrations/versions/postgresql/2022_02_13_125213_679e695af6ba_add_configurations.py b/src/prefect/server/database/_migrations/versions/postgresql/2022_02_13_125213_679e695af6ba_add_configurations.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/postgresql/2022_02_13_125213_679e695af6ba_add_configurations.py
rename to src/prefect/server/database/_migrations/versions/postgresql/2022_02_13_125213_679e695af6ba_add_configurations.py
diff --git
a/src/prefect/server/database/migrations/versions/postgresql/2022_02_17_140821_5bff7878e700_add_agents_and_work_queue.py b/src/prefect/server/database/_migrations/versions/postgresql/2022_02_17_140821_5bff7878e700_add_agents_and_work_queue.py similarity index 100% rename from src/prefect/server/database/migrations/versions/postgresql/2022_02_17_140821_5bff7878e700_add_agents_and_work_queue.py rename to src/prefect/server/database/_migrations/versions/postgresql/2022_02_17_140821_5bff7878e700_add_agents_and_work_queue.py diff --git a/src/prefect/server/database/migrations/versions/postgresql/2022_02_19_205543_d9d98a9ebb6f_rename_block_data_table.py b/src/prefect/server/database/_migrations/versions/postgresql/2022_02_19_205543_d9d98a9ebb6f_rename_block_data_table.py similarity index 100% rename from src/prefect/server/database/migrations/versions/postgresql/2022_02_19_205543_d9d98a9ebb6f_rename_block_data_table.py rename to src/prefect/server/database/_migrations/versions/postgresql/2022_02_19_205543_d9d98a9ebb6f_rename_block_data_table.py diff --git a/src/prefect/server/database/migrations/versions/postgresql/2022_02_20_103844_4799f657a6a1_add_block_spec_table.py b/src/prefect/server/database/_migrations/versions/postgresql/2022_02_20_103844_4799f657a6a1_add_block_spec_table.py similarity index 100% rename from src/prefect/server/database/migrations/versions/postgresql/2022_02_20_103844_4799f657a6a1_add_block_spec_table.py rename to src/prefect/server/database/_migrations/versions/postgresql/2022_02_20_103844_4799f657a6a1_add_block_spec_table.py diff --git a/src/prefect/server/database/migrations/versions/postgresql/2022_02_21_111050_d115556a8ab6_index_flowrun_flow_runner_type.py b/src/prefect/server/database/_migrations/versions/postgresql/2022_02_21_111050_d115556a8ab6_index_flowrun_flow_runner_type.py similarity index 100% rename from src/prefect/server/database/migrations/versions/postgresql/2022_02_21_111050_d115556a8ab6_index_flowrun_flow_runner_type.py rename to src/prefect/server/database/_migrations/versions/postgresql/2022_02_21_111050_d115556a8ab6_index_flowrun_flow_runner_type.py diff --git a/src/prefect/server/database/migrations/versions/postgresql/2022_02_21_150017_b68b3cad6b8a_add_block_spec_id_to_blocks.py b/src/prefect/server/database/_migrations/versions/postgresql/2022_02_21_150017_b68b3cad6b8a_add_block_spec_id_to_blocks.py similarity index 100% rename from src/prefect/server/database/migrations/versions/postgresql/2022_02_21_150017_b68b3cad6b8a_add_block_spec_id_to_blocks.py rename to src/prefect/server/database/_migrations/versions/postgresql/2022_02_21_150017_b68b3cad6b8a_add_block_spec_id_to_blocks.py diff --git a/src/prefect/server/database/migrations/versions/postgresql/2022_03_10_102713_2e7e1428ffce_index_flow_created.py b/src/prefect/server/database/_migrations/versions/postgresql/2022_03_10_102713_2e7e1428ffce_index_flow_created.py similarity index 100% rename from src/prefect/server/database/migrations/versions/postgresql/2022_03_10_102713_2e7e1428ffce_index_flow_created.py rename to src/prefect/server/database/_migrations/versions/postgresql/2022_03_10_102713_2e7e1428ffce_index_flow_created.py diff --git a/src/prefect/server/database/migrations/versions/postgresql/2022_04_20_113011_605ebb4e9155_add_flow_run_state_name.py b/src/prefect/server/database/_migrations/versions/postgresql/2022_04_20_113011_605ebb4e9155_add_flow_run_state_name.py similarity index 100% rename from 
src/prefect/server/database/migrations/versions/postgresql/2022_04_20_113011_605ebb4e9155_add_flow_run_state_name.py rename to src/prefect/server/database/_migrations/versions/postgresql/2022_04_20_113011_605ebb4e9155_add_flow_run_state_name.py diff --git a/src/prefect/server/database/migrations/versions/postgresql/2022_04_21_095519_14dc68cc5853_backfill_state_name.py b/src/prefect/server/database/_migrations/versions/postgresql/2022_04_21_095519_14dc68cc5853_backfill_state_name.py similarity index 100% rename from src/prefect/server/database/migrations/versions/postgresql/2022_04_21_095519_14dc68cc5853_backfill_state_name.py rename to src/prefect/server/database/_migrations/versions/postgresql/2022_04_21_095519_14dc68cc5853_backfill_state_name.py diff --git a/src/prefect/server/database/migrations/versions/postgresql/2022_04_23_132803_d38c5e6a9115_rename_block_to_blockbasis_and_.py b/src/prefect/server/database/_migrations/versions/postgresql/2022_04_23_132803_d38c5e6a9115_rename_block_to_blockbasis_and_.py similarity index 100% rename from src/prefect/server/database/migrations/versions/postgresql/2022_04_23_132803_d38c5e6a9115_rename_block_to_blockbasis_and_.py rename to src/prefect/server/database/_migrations/versions/postgresql/2022_04_23_132803_d38c5e6a9115_rename_block_to_blockbasis_and_.py diff --git a/src/prefect/server/database/migrations/versions/postgresql/2022_05_10_145956_1c9390e2f9c6_replace_version_with_checksum_and_.py b/src/prefect/server/database/_migrations/versions/postgresql/2022_05_10_145956_1c9390e2f9c6_replace_version_with_checksum_and_.py similarity index 100% rename from src/prefect/server/database/migrations/versions/postgresql/2022_05_10_145956_1c9390e2f9c6_replace_version_with_checksum_and_.py rename to src/prefect/server/database/_migrations/versions/postgresql/2022_05_10_145956_1c9390e2f9c6_replace_version_with_checksum_and_.py diff --git a/src/prefect/server/database/migrations/versions/postgresql/2022_05_12_202952_dc7a3c6fd3e9_add_flow_run_alerts.py b/src/prefect/server/database/_migrations/versions/postgresql/2022_05_12_202952_dc7a3c6fd3e9_add_flow_run_alerts.py similarity index 100% rename from src/prefect/server/database/migrations/versions/postgresql/2022_05_12_202952_dc7a3c6fd3e9_add_flow_run_alerts.py rename to src/prefect/server/database/_migrations/versions/postgresql/2022_05_12_202952_dc7a3c6fd3e9_add_flow_run_alerts.py diff --git a/src/prefect/server/database/migrations/versions/postgresql/2022_05_26_135743_724e6dcc6b5d_add_block_schema_capabilities.py b/src/prefect/server/database/_migrations/versions/postgresql/2022_05_26_135743_724e6dcc6b5d_add_block_schema_capabilities.py similarity index 100% rename from src/prefect/server/database/migrations/versions/postgresql/2022_05_26_135743_724e6dcc6b5d_add_block_schema_capabilities.py rename to src/prefect/server/database/_migrations/versions/postgresql/2022_05_26_135743_724e6dcc6b5d_add_block_schema_capabilities.py diff --git a/src/prefect/server/database/migrations/versions/postgresql/2022_05_28_081821_2fe6fe6ca16e_adds_block_schema_references_and_block_.py b/src/prefect/server/database/_migrations/versions/postgresql/2022_05_28_081821_2fe6fe6ca16e_adds_block_schema_references_and_block_.py similarity index 100% rename from src/prefect/server/database/migrations/versions/postgresql/2022_05_28_081821_2fe6fe6ca16e_adds_block_schema_references_and_block_.py rename to src/prefect/server/database/_migrations/versions/postgresql/2022_05_28_081821_2fe6fe6ca16e_adds_block_schema_references_and_block_.py 
diff --git a/src/prefect/server/database/migrations/versions/postgresql/2022_05_30_112549_cdcb4018dd0e_rename_run_alerts_to_run_notifications.py b/src/prefect/server/database/_migrations/versions/postgresql/2022_05_30_112549_cdcb4018dd0e_rename_run_alerts_to_run_notifications.py similarity index 100% rename from src/prefect/server/database/migrations/versions/postgresql/2022_05_30_112549_cdcb4018dd0e_rename_run_alerts_to_run_notifications.py rename to src/prefect/server/database/_migrations/versions/postgresql/2022_05_30_112549_cdcb4018dd0e_rename_run_alerts_to_run_notifications.py diff --git a/src/prefect/server/database/migrations/versions/postgresql/2022_06_04_133535_d60c18774a5d_add_indexes_for_partial_name_matches.py b/src/prefect/server/database/_migrations/versions/postgresql/2022_06_04_133535_d60c18774a5d_add_indexes_for_partial_name_matches.py similarity index 100% rename from src/prefect/server/database/migrations/versions/postgresql/2022_06_04_133535_d60c18774a5d_add_indexes_for_partial_name_matches.py rename to src/prefect/server/database/_migrations/versions/postgresql/2022_06_04_133535_d60c18774a5d_add_indexes_for_partial_name_matches.py diff --git a/src/prefect/server/database/migrations/versions/postgresql/2022_06_08_121753_3a7c41d3b464_adds_description_and_code_example_to_.py b/src/prefect/server/database/_migrations/versions/postgresql/2022_06_08_121753_3a7c41d3b464_adds_description_and_code_example_to_.py similarity index 100% rename from src/prefect/server/database/migrations/versions/postgresql/2022_06_08_121753_3a7c41d3b464_adds_description_and_code_example_to_.py rename to src/prefect/server/database/_migrations/versions/postgresql/2022_06_08_121753_3a7c41d3b464_adds_description_and_code_example_to_.py diff --git a/src/prefect/server/database/migrations/versions/postgresql/2022_06_13_104234_61c76ee09e02_add_anonymous_column_for_block_documents.py b/src/prefect/server/database/_migrations/versions/postgresql/2022_06_13_104234_61c76ee09e02_add_anonymous_column_for_block_documents.py similarity index 100% rename from src/prefect/server/database/migrations/versions/postgresql/2022_06_13_104234_61c76ee09e02_add_anonymous_column_for_block_documents.py rename to src/prefect/server/database/_migrations/versions/postgresql/2022_06_13_104234_61c76ee09e02_add_anonymous_column_for_block_documents.py diff --git a/src/prefect/server/database/migrations/versions/postgresql/2022_06_17_204409_d335ad57d5ba_add_block_schema_indexes.py b/src/prefect/server/database/_migrations/versions/postgresql/2022_06_17_204409_d335ad57d5ba_add_block_schema_indexes.py similarity index 100% rename from src/prefect/server/database/migrations/versions/postgresql/2022_06_17_204409_d335ad57d5ba_add_block_schema_indexes.py rename to src/prefect/server/database/_migrations/versions/postgresql/2022_06_17_204409_d335ad57d5ba_add_block_schema_indexes.py diff --git a/src/prefect/server/database/migrations/versions/postgresql/2022_06_20_123921_7296741dff68_add_protected_column_for_block_types.py b/src/prefect/server/database/_migrations/versions/postgresql/2022_06_20_123921_7296741dff68_add_protected_column_for_block_types.py similarity index 100% rename from src/prefect/server/database/migrations/versions/postgresql/2022_06_20_123921_7296741dff68_add_protected_column_for_block_types.py rename to src/prefect/server/database/_migrations/versions/postgresql/2022_06_20_123921_7296741dff68_add_protected_column_for_block_types.py diff --git 
a/src/prefect/server/database/migrations/versions/postgresql/2022_06_21_093732_29ad9bef6147_adds_indexes_for_block_filtering.py b/src/prefect/server/database/_migrations/versions/postgresql/2022_06_21_093732_29ad9bef6147_adds_indexes_for_block_filtering.py similarity index 100% rename from src/prefect/server/database/migrations/versions/postgresql/2022_06_21_093732_29ad9bef6147_adds_indexes_for_block_filtering.py rename to src/prefect/server/database/_migrations/versions/postgresql/2022_06_21_093732_29ad9bef6147_adds_indexes_for_block_filtering.py diff --git a/src/prefect/server/database/migrations/versions/postgresql/2022_06_29_135432_813ddf14e2de_add_descriptions_to_deployments.py b/src/prefect/server/database/_migrations/versions/postgresql/2022_06_29_135432_813ddf14e2de_add_descriptions_to_deployments.py similarity index 100% rename from src/prefect/server/database/migrations/versions/postgresql/2022_06_29_135432_813ddf14e2de_add_descriptions_to_deployments.py rename to src/prefect/server/database/_migrations/versions/postgresql/2022_06_29_135432_813ddf14e2de_add_descriptions_to_deployments.py diff --git a/src/prefect/server/database/migrations/versions/postgresql/2022_06_29_152219_2f46fc3f3beb_remove_name_column_for_notification_.py b/src/prefect/server/database/_migrations/versions/postgresql/2022_06_29_152219_2f46fc3f3beb_remove_name_column_for_notification_.py similarity index 100% rename from src/prefect/server/database/migrations/versions/postgresql/2022_06_29_152219_2f46fc3f3beb_remove_name_column_for_notification_.py rename to src/prefect/server/database/_migrations/versions/postgresql/2022_06_29_152219_2f46fc3f3beb_remove_name_column_for_notification_.py diff --git a/src/prefect/server/database/migrations/versions/postgresql/2022_07_06_152528_4cdc2ba709a4_migrates_block_schemas_with_new_secrets_.py b/src/prefect/server/database/_migrations/versions/postgresql/2022_07_06_152528_4cdc2ba709a4_migrates_block_schemas_with_new_secrets_.py similarity index 100% rename from src/prefect/server/database/migrations/versions/postgresql/2022_07_06_152528_4cdc2ba709a4_migrates_block_schemas_with_new_secrets_.py rename to src/prefect/server/database/_migrations/versions/postgresql/2022_07_06_152528_4cdc2ba709a4_migrates_block_schemas_with_new_secrets_.py diff --git a/src/prefect/server/database/migrations/versions/postgresql/2022_07_07_112809_e905fd199258_removes_debugprintnotification_block_.py b/src/prefect/server/database/_migrations/versions/postgresql/2022_07_07_112809_e905fd199258_removes_debugprintnotification_block_.py similarity index 100% rename from src/prefect/server/database/migrations/versions/postgresql/2022_07_07_112809_e905fd199258_removes_debugprintnotification_block_.py rename to src/prefect/server/database/_migrations/versions/postgresql/2022_07_07_112809_e905fd199258_removes_debugprintnotification_block_.py diff --git a/src/prefect/server/database/migrations/versions/postgresql/2022_07_11_170700_112c68143fc3_add_infrastructure_document_id_to_.py b/src/prefect/server/database/_migrations/versions/postgresql/2022_07_11_170700_112c68143fc3_add_infrastructure_document_id_to_.py similarity index 100% rename from src/prefect/server/database/migrations/versions/postgresql/2022_07_11_170700_112c68143fc3_add_infrastructure_document_id_to_.py rename to src/prefect/server/database/_migrations/versions/postgresql/2022_07_11_170700_112c68143fc3_add_infrastructure_document_id_to_.py diff --git 
a/src/prefect/server/database/migrations/versions/postgresql/2022_07_14_114039_0f27d462bf6d_removing_default_storage_block_document.py b/src/prefect/server/database/_migrations/versions/postgresql/2022_07_14_114039_0f27d462bf6d_removing_default_storage_block_document.py similarity index 100% rename from src/prefect/server/database/migrations/versions/postgresql/2022_07_14_114039_0f27d462bf6d_removing_default_storage_block_document.py rename to src/prefect/server/database/_migrations/versions/postgresql/2022_07_14_114039_0f27d462bf6d_removing_default_storage_block_document.py diff --git a/src/prefect/server/database/migrations/versions/postgresql/2022_07_19_160058_bb4dc90d3e29_renames_existing_block_types.py b/src/prefect/server/database/_migrations/versions/postgresql/2022_07_19_160058_bb4dc90d3e29_renames_existing_block_types.py similarity index 100% rename from src/prefect/server/database/migrations/versions/postgresql/2022_07_19_160058_bb4dc90d3e29_renames_existing_block_types.py rename to src/prefect/server/database/_migrations/versions/postgresql/2022_07_19_160058_bb4dc90d3e29_renames_existing_block_types.py diff --git a/src/prefect/server/database/migrations/versions/postgresql/2022_07_21_133134_e085c9cbf8ce_remove_flow_runners.py b/src/prefect/server/database/_migrations/versions/postgresql/2022_07_21_133134_e085c9cbf8ce_remove_flow_runners.py similarity index 100% rename from src/prefect/server/database/migrations/versions/postgresql/2022_07_21_133134_e085c9cbf8ce_remove_flow_runners.py rename to src/prefect/server/database/_migrations/versions/postgresql/2022_07_21_133134_e085c9cbf8ce_remove_flow_runners.py diff --git a/src/prefect/server/database/migrations/versions/postgresql/2022_07_21_205820_0cf7311d6ea6_add_crashed_state_type.py b/src/prefect/server/database/_migrations/versions/postgresql/2022_07_21_205820_0cf7311d6ea6_add_crashed_state_type.py similarity index 100% rename from src/prefect/server/database/migrations/versions/postgresql/2022_07_21_205820_0cf7311d6ea6_add_crashed_state_type.py rename to src/prefect/server/database/_migrations/versions/postgresql/2022_07_21_205820_0cf7311d6ea6_add_crashed_state_type.py diff --git a/src/prefect/server/database/migrations/versions/postgresql/2022_07_25_214717_4ff2f2bf81f4_adds_block_type_slug.py b/src/prefect/server/database/_migrations/versions/postgresql/2022_07_25_214717_4ff2f2bf81f4_adds_block_type_slug.py similarity index 100% rename from src/prefect/server/database/migrations/versions/postgresql/2022_07_25_214717_4ff2f2bf81f4_adds_block_type_slug.py rename to src/prefect/server/database/_migrations/versions/postgresql/2022_07_25_214717_4ff2f2bf81f4_adds_block_type_slug.py diff --git a/src/prefect/server/database/migrations/versions/postgresql/2022_07_25_233637_add97ce1937d_update_deployments_to_include_more_.py b/src/prefect/server/database/_migrations/versions/postgresql/2022_07_25_233637_add97ce1937d_update_deployments_to_include_more_.py similarity index 100% rename from src/prefect/server/database/migrations/versions/postgresql/2022_07_25_233637_add97ce1937d_update_deployments_to_include_more_.py rename to src/prefect/server/database/_migrations/versions/postgresql/2022_07_25_233637_add97ce1937d_update_deployments_to_include_more_.py diff --git a/src/prefect/server/database/migrations/versions/postgresql/2022_07_29_181713_fa985d474982_add_index_to_flow_run_infrastructure_.py b/src/prefect/server/database/_migrations/versions/postgresql/2022_07_29_181713_fa985d474982_add_index_to_flow_run_infrastructure_.py similarity 
index 100%
rename from src/prefect/server/database/migrations/versions/postgresql/2022_07_29_181713_fa985d474982_add_index_to_flow_run_infrastructure_.py
rename to src/prefect/server/database/_migrations/versions/postgresql/2022_07_29_181713_fa985d474982_add_index_to_flow_run_infrastructure_.py
diff --git a/src/prefect/server/database/migrations/versions/postgresql/2022_08_01_211251_97e212ea6545_add_deployment_version.py b/src/prefect/server/database/_migrations/versions/postgresql/2022_08_01_211251_97e212ea6545_add_deployment_version.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/postgresql/2022_08_01_211251_97e212ea6545_add_deployment_version.py
rename to src/prefect/server/database/_migrations/versions/postgresql/2022_08_01_211251_97e212ea6545_add_deployment_version.py
diff --git a/src/prefect/server/database/migrations/versions/postgresql/2022_08_06_145817_60e428f92a75_expand_deployment_schema_for_improved_ux.py b/src/prefect/server/database/_migrations/versions/postgresql/2022_08_06_145817_60e428f92a75_expand_deployment_schema_for_improved_ux.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/postgresql/2022_08_06_145817_60e428f92a75_expand_deployment_schema_for_improved_ux.py
rename to src/prefect/server/database/_migrations/versions/postgresql/2022_08_06_145817_60e428f92a75_expand_deployment_schema_for_improved_ux.py
diff --git a/src/prefect/server/database/migrations/versions/postgresql/2022_08_07_134410_77eb737fc759_add_work_queue_name_to_runs.py b/src/prefect/server/database/_migrations/versions/postgresql/2022_08_07_134410_77eb737fc759_add_work_queue_name_to_runs.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/postgresql/2022_08_07_134410_77eb737fc759_add_work_queue_name_to_runs.py
rename to src/prefect/server/database/_migrations/versions/postgresql/2022_08_07_134410_77eb737fc759_add_work_queue_name_to_runs.py
diff --git a/src/prefect/server/database/migrations/versions/postgresql/2022_08_07_154550_7737221bf8a4_fix_concurrency_limit_tag_index_name.py b/src/prefect/server/database/_migrations/versions/postgresql/2022_08_07_154550_7737221bf8a4_fix_concurrency_limit_tag_index_name.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/postgresql/2022_08_07_154550_7737221bf8a4_fix_concurrency_limit_tag_index_name.py
rename to src/prefect/server/database/_migrations/versions/postgresql/2022_08_07_154550_7737221bf8a4_fix_concurrency_limit_tag_index_name.py
diff --git a/src/prefect/server/database/migrations/versions/postgresql/2022_08_18_102804_2d5e000696f1_adds_block_schema_version.py b/src/prefect/server/database/_migrations/versions/postgresql/2022_08_18_102804_2d5e000696f1_adds_block_schema_version.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/postgresql/2022_08_18_102804_2d5e000696f1_adds_block_schema_version.py
rename to src/prefect/server/database/_migrations/versions/postgresql/2022_08_18_102804_2d5e000696f1_adds_block_schema_version.py
diff --git a/src/prefect/server/database/migrations/versions/postgresql/2022_10_12_102048_22b7cb02e593_add_state_timestamp.py b/src/prefect/server/database/_migrations/versions/postgresql/2022_10_12_102048_22b7cb02e593_add_state_timestamp.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/postgresql/2022_10_12_102048_22b7cb02e593_add_state_timestamp.py
rename to src/prefect/server/database/_migrations/versions/postgresql/2022_10_12_102048_22b7cb02e593_add_state_timestamp.py
diff --git a/src/prefect/server/database/migrations/versions/postgresql/2022_10_14_172612_ad4b1b4d1e9d_index_deployment_created.py b/src/prefect/server/database/_migrations/versions/postgresql/2022_10_14_172612_ad4b1b4d1e9d_index_deployment_created.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/postgresql/2022_10_14_172612_ad4b1b4d1e9d_index_deployment_created.py
rename to src/prefect/server/database/_migrations/versions/postgresql/2022_10_14_172612_ad4b1b4d1e9d_index_deployment_created.py
diff --git a/src/prefect/server/database/migrations/versions/postgresql/2022_10_19_093902_6d548701edef_add_created_by.py b/src/prefect/server/database/_migrations/versions/postgresql/2022_10_19_093902_6d548701edef_add_created_by.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/postgresql/2022_10_19_093902_6d548701edef_add_created_by.py
rename to src/prefect/server/database/_migrations/versions/postgresql/2022_10_19_093902_6d548701edef_add_created_by.py
diff --git a/src/prefect/server/database/migrations/versions/postgresql/2022_10_19_165110_8ea825da948d_track_retries_restarts.py b/src/prefect/server/database/_migrations/versions/postgresql/2022_10_19_165110_8ea825da948d_track_retries_restarts.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/postgresql/2022_10_19_165110_8ea825da948d_track_retries_restarts.py
rename to src/prefect/server/database/_migrations/versions/postgresql/2022_10_19_165110_8ea825da948d_track_retries_restarts.py
diff --git a/src/prefect/server/database/migrations/versions/postgresql/2022_10_20_101423_3ced59d8806b_add_last_polled.py b/src/prefect/server/database/_migrations/versions/postgresql/2022_10_20_101423_3ced59d8806b_add_last_polled.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/postgresql/2022_10_20_101423_3ced59d8806b_add_last_polled.py
rename to src/prefect/server/database/_migrations/versions/postgresql/2022_10_20_101423_3ced59d8806b_add_last_polled.py
diff --git a/src/prefect/server/database/migrations/versions/postgresql/2022_10_31_161719_41e5ed9e1034_.py b/src/prefect/server/database/_migrations/versions/postgresql/2022_10_31_161719_41e5ed9e1034_.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/postgresql/2022_10_31_161719_41e5ed9e1034_.py
rename to src/prefect/server/database/_migrations/versions/postgresql/2022_10_31_161719_41e5ed9e1034_.py
diff --git a/src/prefect/server/database/migrations/versions/postgresql/2022_11_05_180555_54c1876c68ae_add_index_for_scheduled_deployments.py b/src/prefect/server/database/_migrations/versions/postgresql/2022_11_05_180555_54c1876c68ae_add_index_for_scheduled_deployments.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/postgresql/2022_11_05_180555_54c1876c68ae_add_index_for_scheduled_deployments.py
rename to src/prefect/server/database/_migrations/versions/postgresql/2022_11_05_180555_54c1876c68ae_add_index_for_scheduled_deployments.py
diff --git a/src/prefect/server/database/migrations/versions/postgresql/2022_11_10_171740_8caf7c1fd82c_add_coalesced_start_time_indices.py b/src/prefect/server/database/_migrations/versions/postgresql/2022_11_10_171740_8caf7c1fd82c_add_coalesced_start_time_indices.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/postgresql/2022_11_10_171740_8caf7c1fd82c_add_coalesced_start_time_indices.py
rename to src/prefect/server/database/_migrations/versions/postgresql/2022_11_10_171740_8caf7c1fd82c_add_coalesced_start_time_indices.py
diff --git a/src/prefect/server/database/migrations/versions/postgresql/2022_11_18_161056_5d526270ddb4_add_flowrun_infrastructure_pid.py b/src/prefect/server/database/_migrations/versions/postgresql/2022_11_18_161056_5d526270ddb4_add_flowrun_infrastructure_pid.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/postgresql/2022_11_18_161056_5d526270ddb4_add_flowrun_infrastructure_pid.py
rename to src/prefect/server/database/_migrations/versions/postgresql/2022_11_18_161056_5d526270ddb4_add_flowrun_infrastructure_pid.py
diff --git a/src/prefect/server/database/migrations/versions/postgresql/2022_11_23_092449_5e4f924ff96c_add_paused_state_type.py b/src/prefect/server/database/_migrations/versions/postgresql/2022_11_23_092449_5e4f924ff96c_add_paused_state_type.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/postgresql/2022_11_23_092449_5e4f924ff96c_add_paused_state_type.py
rename to src/prefect/server/database/_migrations/versions/postgresql/2022_11_23_092449_5e4f924ff96c_add_paused_state_type.py
diff --git a/src/prefect/server/database/migrations/versions/postgresql/2022_11_24_143620_f7587d6c5776_add_worker_tables.py b/src/prefect/server/database/_migrations/versions/postgresql/2022_11_24_143620_f7587d6c5776_add_worker_tables.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/postgresql/2022_11_24_143620_f7587d6c5776_add_worker_tables.py
rename to src/prefect/server/database/_migrations/versions/postgresql/2022_11_24_143620_f7587d6c5776_add_worker_tables.py
diff --git a/src/prefect/server/database/migrations/versions/postgresql/2023_01_08_180142_d481d5058a19_rename_worker_pools_to_work_pools.py b/src/prefect/server/database/_migrations/versions/postgresql/2023_01_08_180142_d481d5058a19_rename_worker_pools_to_work_pools.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/postgresql/2023_01_08_180142_d481d5058a19_rename_worker_pools_to_work_pools.py
rename to src/prefect/server/database/_migrations/versions/postgresql/2023_01_08_180142_d481d5058a19_rename_worker_pools_to_work_pools.py
diff --git a/src/prefect/server/database/migrations/versions/postgresql/2023_01_25_164028_9326a6aee18b_add_cancelling_to_state_type_enum.py b/src/prefect/server/database/_migrations/versions/postgresql/2023_01_25_164028_9326a6aee18b_add_cancelling_to_state_type_enum.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/postgresql/2023_01_25_164028_9326a6aee18b_add_cancelling_to_state_type_enum.py
rename to src/prefect/server/database/_migrations/versions/postgresql/2023_01_25_164028_9326a6aee18b_add_cancelling_to_state_type_enum.py
diff --git a/src/prefect/server/database/migrations/versions/postgresql/2023_01_26_045500_2882cd2df463_implement_artifact_table.py b/src/prefect/server/database/_migrations/versions/postgresql/2023_01_26_045500_2882cd2df463_implement_artifact_table.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/postgresql/2023_01_26_045500_2882cd2df463_implement_artifact_table.py
rename to src/prefect/server/database/_migrations/versions/postgresql/2023_01_26_045500_2882cd2df463_implement_artifact_table.py
diff --git a/src/prefect/server/database/migrations/versions/postgresql/2023_01_26_045501_2882cd2df464_create_migration_index.py b/src/prefect/server/database/_migrations/versions/postgresql/2023_01_26_045501_2882cd2df464_create_migration_index.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/postgresql/2023_01_26_045501_2882cd2df464_create_migration_index.py
rename to src/prefect/server/database/_migrations/versions/postgresql/2023_01_26_045501_2882cd2df464_create_migration_index.py
diff --git a/src/prefect/server/database/migrations/versions/postgresql/2023_01_26_045501_2882cd2df465_migrate_artifact_data.py b/src/prefect/server/database/_migrations/versions/postgresql/2023_01_26_045501_2882cd2df465_migrate_artifact_data.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/postgresql/2023_01_26_045501_2882cd2df465_migrate_artifact_data.py
rename to src/prefect/server/database/_migrations/versions/postgresql/2023_01_26_045501_2882cd2df465_migrate_artifact_data.py
diff --git a/src/prefect/server/database/migrations/versions/postgresql/2023_01_26_045502_2882cd2df466_cleanup_artifact_migration.py b/src/prefect/server/database/_migrations/versions/postgresql/2023_01_26_045502_2882cd2df466_cleanup_artifact_migration.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/postgresql/2023_01_26_045502_2882cd2df466_cleanup_artifact_migration.py
rename to src/prefect/server/database/_migrations/versions/postgresql/2023_01_26_045502_2882cd2df466_cleanup_artifact_migration.py
diff --git a/src/prefect/server/database/migrations/versions/postgresql/2023_01_26_152801_0a1250a5aa25_expand_work_queue_table.py b/src/prefect/server/database/_migrations/versions/postgresql/2023_01_26_152801_0a1250a5aa25_expand_work_queue_table.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/postgresql/2023_01_26_152801_0a1250a5aa25_expand_work_queue_table.py
rename to src/prefect/server/database/_migrations/versions/postgresql/2023_01_26_152801_0a1250a5aa25_expand_work_queue_table.py
diff --git a/src/prefect/server/database/migrations/versions/postgresql/2023_01_31_110543_f98ae6d8e2cc_work_queue_data_migration.py b/src/prefect/server/database/_migrations/versions/postgresql/2023_01_31_110543_f98ae6d8e2cc_work_queue_data_migration.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/postgresql/2023_01_31_110543_f98ae6d8e2cc_work_queue_data_migration.py
rename to src/prefect/server/database/_migrations/versions/postgresql/2023_01_31_110543_f98ae6d8e2cc_work_queue_data_migration.py
diff --git a/src/prefect/server/database/migrations/versions/postgresql/2023_01_31_133052_2a88656f4a23_clean_up_work_queue_migration.py b/src/prefect/server/database/_migrations/versions/postgresql/2023_01_31_133052_2a88656f4a23_clean_up_work_queue_migration.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/postgresql/2023_01_31_133052_2a88656f4a23_clean_up_work_queue_migration.py
rename to src/prefect/server/database/_migrations/versions/postgresql/2023_01_31_133052_2a88656f4a23_clean_up_work_queue_migration.py
diff --git a/src/prefect/server/database/migrations/versions/postgresql/2023_02_08_151958_cfdfec5d7557_remove_artifact_fk.py b/src/prefect/server/database/_migrations/versions/postgresql/2023_02_08_151958_cfdfec5d7557_remove_artifact_fk.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/postgresql/2023_02_08_151958_cfdfec5d7557_remove_artifact_fk.py
rename to src/prefect/server/database/_migrations/versions/postgresql/2023_02_08_151958_cfdfec5d7557_remove_artifact_fk.py
diff --git a/src/prefect/server/database/migrations/versions/postgresql/2023_03_01_154651_7d918a392297_remove_flowrun_deployment_fk.py b/src/prefect/server/database/_migrations/versions/postgresql/2023_03_01_154651_7d918a392297_remove_flowrun_deployment_fk.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/postgresql/2023_03_01_154651_7d918a392297_remove_flowrun_deployment_fk.py
rename to src/prefect/server/database/_migrations/versions/postgresql/2023_03_01_154651_7d918a392297_remove_flowrun_deployment_fk.py
diff --git a/src/prefect/server/database/migrations/versions/postgresql/2023_03_15_153039_4a1a0e4f89de_add_artifact_description_col.py b/src/prefect/server/database/_migrations/versions/postgresql/2023_03_15_153039_4a1a0e4f89de_add_artifact_description_col.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/postgresql/2023_03_15_153039_4a1a0e4f89de_add_artifact_description_col.py
rename to src/prefect/server/database/_migrations/versions/postgresql/2023_03_15_153039_4a1a0e4f89de_add_artifact_description_col.py
diff --git a/src/prefect/server/database/migrations/versions/postgresql/2023_03_20_175243_aa84ac237ce8_remove_artifact_uq.py b/src/prefect/server/database/_migrations/versions/postgresql/2023_03_20_175243_aa84ac237ce8_remove_artifact_uq.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/postgresql/2023_03_20_175243_aa84ac237ce8_remove_artifact_uq.py
rename to src/prefect/server/database/_migrations/versions/postgresql/2023_03_20_175243_aa84ac237ce8_remove_artifact_uq.py
diff --git a/src/prefect/server/database/migrations/versions/postgresql/2023_03_20_185238_d20618ce678e_add_artifact_collection_table.py b/src/prefect/server/database/_migrations/versions/postgresql/2023_03_20_185238_d20618ce678e_add_artifact_collection_table.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/postgresql/2023_03_20_185238_d20618ce678e_add_artifact_collection_table.py
rename to src/prefect/server/database/_migrations/versions/postgresql/2023_03_20_185238_d20618ce678e_add_artifact_collection_table.py
diff --git a/src/prefect/server/database/migrations/versions/postgresql/2023_03_20_185610_46bd82c6279a_add_index_on_artifact.py b/src/prefect/server/database/_migrations/versions/postgresql/2023_03_20_185610_46bd82c6279a_add_index_on_artifact.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/postgresql/2023_03_20_185610_46bd82c6279a_add_index_on_artifact.py
rename to src/prefect/server/database/_migrations/versions/postgresql/2023_03_20_185610_46bd82c6279a_add_index_on_artifact.py
diff --git a/src/prefect/server/database/migrations/versions/postgresql/2023_04_04_132534_3bf47e3ce2dd_add_index_on_log.py b/src/prefect/server/database/_migrations/versions/postgresql/2023_04_04_132534_3bf47e3ce2dd_add_index_on_log.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/postgresql/2023_04_04_132534_3bf47e3ce2dd_add_index_on_log.py
rename to src/prefect/server/database/_migrations/versions/postgresql/2023_04_04_132534_3bf47e3ce2dd_add_index_on_log.py
diff --git a/src/prefect/server/database/migrations/versions/postgresql/2023_04_04_172310_6a1eb3d442e4_add_cols_to_artifact_collection.py b/src/prefect/server/database/_migrations/versions/postgresql/2023_04_04_172310_6a1eb3d442e4_add_cols_to_artifact_collection.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/postgresql/2023_04_04_172310_6a1eb3d442e4_add_cols_to_artifact_collection.py
rename to src/prefect/server/database/_migrations/versions/postgresql/2023_04_04_172310_6a1eb3d442e4_add_cols_to_artifact_collection.py
diff --git a/src/prefect/server/database/migrations/versions/postgresql/2023_04_05_130406_43c94d4c7aa3_add_pull_steps_column_to_deployment.py b/src/prefect/server/database/_migrations/versions/postgresql/2023_04_05_130406_43c94d4c7aa3_add_pull_steps_column_to_deployment.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/postgresql/2023_04_05_130406_43c94d4c7aa3_add_pull_steps_column_to_deployment.py
rename to src/prefect/server/database/_migrations/versions/postgresql/2023_04_05_130406_43c94d4c7aa3_add_pull_steps_column_to_deployment.py
diff --git a/src/prefect/server/database/migrations/versions/postgresql/2023_04_05_134520_310dda75f561_add_variables.py b/src/prefect/server/database/_migrations/versions/postgresql/2023_04_05_134520_310dda75f561_add_variables.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/postgresql/2023_04_05_134520_310dda75f561_add_variables.py
rename to src/prefect/server/database/_migrations/versions/postgresql/2023_04_05_134520_310dda75f561_add_variables.py
diff --git a/src/prefect/server/database/migrations/versions/postgresql/2023_04_06_122716_15f5083c16bd_migrate_artifact_data.py b/src/prefect/server/database/_migrations/versions/postgresql/2023_04_06_122716_15f5083c16bd_migrate_artifact_data.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/postgresql/2023_04_06_122716_15f5083c16bd_migrate_artifact_data.py
rename to src/prefect/server/database/_migrations/versions/postgresql/2023_04_06_122716_15f5083c16bd_migrate_artifact_data.py
diff --git a/src/prefect/server/database/migrations/versions/postgresql/2023_08_02_133838_5f623ddbf7fe_create_concurrency_limit_v2_table.py b/src/prefect/server/database/_migrations/versions/postgresql/2023_08_02_133838_5f623ddbf7fe_create_concurrency_limit_v2_table.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/postgresql/2023_08_02_133838_5f623ddbf7fe_create_concurrency_limit_v2_table.py
rename to src/prefect/server/database/_migrations/versions/postgresql/2023_08_02_133838_5f623ddbf7fe_create_concurrency_limit_v2_table.py
diff --git a/src/prefect/server/database/migrations/versions/postgresql/2023_09_06_085747_50f8c182c3ca_.py b/src/prefect/server/database/_migrations/versions/postgresql/2023_09_06_085747_50f8c182c3ca_.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/postgresql/2023_09_06_085747_50f8c182c3ca_.py
rename to src/prefect/server/database/_migrations/versions/postgresql/2023_09_06_085747_50f8c182c3ca_.py
diff --git a/src/prefect/server/database/migrations/versions/postgresql/2023_09_20_134544_db0eb3973a54_adds_enforce_parameter_schema_column_to_.py b/src/prefect/server/database/_migrations/versions/postgresql/2023_09_20_134544_db0eb3973a54_adds_enforce_parameter_schema_column_to_.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/postgresql/2023_09_20_134544_db0eb3973a54_adds_enforce_parameter_schema_column_to_.py
rename to src/prefect/server/database/_migrations/versions/postgresql/2023_09_20_134544_db0eb3973a54_adds_enforce_parameter_schema_column_to_.py
diff --git a/src/prefect/server/database/migrations/versions/postgresql/2023_09_21_130125_4e9a6f93eb6c_make_slot_decay_per_second_not_nullable.py b/src/prefect/server/database/_migrations/versions/postgresql/2023_09_21_130125_4e9a6f93eb6c_make_slot_decay_per_second_not_nullable.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/postgresql/2023_09_21_130125_4e9a6f93eb6c_make_slot_decay_per_second_not_nullable.py
rename to src/prefect/server/database/_migrations/versions/postgresql/2023_09_21_130125_4e9a6f93eb6c_make_slot_decay_per_second_not_nullable.py
diff --git a/src/prefect/server/database/migrations/versions/postgresql/2023_09_25_121806_05ea6f882b1d_remove_flow_run_id_requirement_from_task_run.py b/src/prefect/server/database/_migrations/versions/postgresql/2023_09_25_121806_05ea6f882b1d_remove_flow_run_id_requirement_from_task_run.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/postgresql/2023_09_25_121806_05ea6f882b1d_remove_flow_run_id_requirement_from_task_run.py
rename to src/prefect/server/database/_migrations/versions/postgresql/2023_09_25_121806_05ea6f882b1d_remove_flow_run_id_requirement_from_task_run.py
diff --git a/src/prefect/server/database/migrations/versions/postgresql/2023_10_12_224511_bfe653bbf62e_add_last_polled_to_deployment.py b/src/prefect/server/database/_migrations/versions/postgresql/2023_10_12_224511_bfe653bbf62e_add_last_polled_to_deployment.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/postgresql/2023_10_12_224511_bfe653bbf62e_add_last_polled_to_deployment.py
rename to src/prefect/server/database/_migrations/versions/postgresql/2023_10_12_224511_bfe653bbf62e_add_last_polled_to_deployment.py
diff --git a/src/prefect/server/database/migrations/versions/postgresql/2023_10_30_075026_cef24af2ec34_add_block_type_name_to_block_document.py b/src/prefect/server/database/_migrations/versions/postgresql/2023_10_30_075026_cef24af2ec34_add_block_type_name_to_block_document.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/postgresql/2023_10_30_075026_cef24af2ec34_add_block_type_name_to_block_document.py
rename to src/prefect/server/database/_migrations/versions/postgresql/2023_10_30_075026_cef24af2ec34_add_block_type_name_to_block_document.py
diff --git a/src/prefect/server/database/migrations/versions/postgresql/2023_11_20_084708_9c493c02ca6d_add_trgm_index_to_block_document_name.py b/src/prefect/server/database/_migrations/versions/postgresql/2023_11_20_084708_9c493c02ca6d_add_trgm_index_to_block_document_name.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/postgresql/2023_11_20_084708_9c493c02ca6d_add_trgm_index_to_block_document_name.py
rename to src/prefect/server/database/_migrations/versions/postgresql/2023_11_20_084708_9c493c02ca6d_add_trgm_index_to_block_document_name.py
diff --git a/src/prefect/server/database/migrations/versions/postgresql/2023_12_07_095320_733ca1903976_create_flow_run_input_table.py b/src/prefect/server/database/_migrations/versions/postgresql/2023_12_07_095320_733ca1903976_create_flow_run_input_table.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/postgresql/2023_12_07_095320_733ca1903976_create_flow_run_input_table.py
rename to src/prefect/server/database/_migrations/versions/postgresql/2023_12_07_095320_733ca1903976_create_flow_run_input_table.py
diff --git a/src/prefect/server/database/migrations/versions/postgresql/2023_12_07_121416_7c453555d3a5_make_flowruninput_flow_run_id_a_foreign_.py b/src/prefect/server/database/_migrations/versions/postgresql/2023_12_07_121416_7c453555d3a5_make_flowruninput_flow_run_id_a_foreign_.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/postgresql/2023_12_07_121416_7c453555d3a5_make_flowruninput_flow_run_id_a_foreign_.py
rename to src/prefect/server/database/_migrations/versions/postgresql/2023_12_07_121416_7c453555d3a5_make_flowruninput_flow_run_id_a_foreign_.py
diff --git a/src/prefect/server/database/migrations/versions/postgresql/2024_01_05_101034_6b63c51c31b4_add_sender_to_flowruninput.py b/src/prefect/server/database/_migrations/versions/postgresql/2024_01_05_101034_6b63c51c31b4_add_sender_to_flowruninput.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/postgresql/2024_01_05_101034_6b63c51c31b4_add_sender_to_flowruninput.py
rename to src/prefect/server/database/_migrations/versions/postgresql/2024_01_05_101034_6b63c51c31b4_add_sender_to_flowruninput.py
diff --git a/src/prefect/server/database/migrations/versions/postgresql/2024_01_22_120615_8cf4d4933848_create_deployment_schedule_and_add_.py b/src/prefect/server/database/_migrations/versions/postgresql/2024_01_22_120615_8cf4d4933848_create_deployment_schedule_and_add_.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/postgresql/2024_01_22_120615_8cf4d4933848_create_deployment_schedule_and_add_.py
rename to src/prefect/server/database/_migrations/versions/postgresql/2024_01_22_120615_8cf4d4933848_create_deployment_schedule_and_add_.py
diff --git a/src/prefect/server/database/migrations/versions/postgresql/2024_03_05_122228_121699507574_add_job_variables_column_to_flow_runs.py b/src/prefect/server/database/_migrations/versions/postgresql/2024_03_05_122228_121699507574_add_job_variables_column_to_flow_runs.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/postgresql/2024_03_05_122228_121699507574_add_job_variables_column_to_flow_runs.py
rename to src/prefect/server/database/_migrations/versions/postgresql/2024_03_05_122228_121699507574_add_job_variables_column_to_flow_runs.py
diff --git a/src/prefect/server/database/migrations/versions/postgresql/2024_03_13_111215_7a653837d9ba_create_csrf_token_toble.py b/src/prefect/server/database/_migrations/versions/postgresql/2024_03_13_111215_7a653837d9ba_create_csrf_token_toble.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/postgresql/2024_03_13_111215_7a653837d9ba_create_csrf_token_toble.py
rename to src/prefect/server/database/_migrations/versions/postgresql/2024_03_13_111215_7a653837d9ba_create_csrf_token_toble.py
diff --git a/src/prefect/server/database/migrations/versions/postgresql/2024_04_03_112409_aeea5ee6f070_automations_models.py b/src/prefect/server/database/_migrations/versions/postgresql/2024_04_03_112409_aeea5ee6f070_automations_models.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/postgresql/2024_04_03_112409_aeea5ee6f070_automations_models.py
rename to src/prefect/server/database/_migrations/versions/postgresql/2024_04_03_112409_aeea5ee6f070_automations_models.py
diff --git a/src/prefect/server/database/migrations/versions/postgresql/2024_04_04_094418_bd6efa529f03_add_deployment_version_to_flow_run.py b/src/prefect/server/database/_migrations/versions/postgresql/2024_04_04_094418_bd6efa529f03_add_deployment_version_to_flow_run.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/postgresql/2024_04_04_094418_bd6efa529f03_add_deployment_version_to_flow_run.py
rename to src/prefect/server/database/_migrations/versions/postgresql/2024_04_04_094418_bd6efa529f03_add_deployment_version_to_flow_run.py
diff --git a/src/prefect/server/database/migrations/versions/postgresql/2024_04_09_125658_916718e8330f_automation_event_follower.py b/src/prefect/server/database/_migrations/versions/postgresql/2024_04_09_125658_916718e8330f_automation_event_follower.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/postgresql/2024_04_09_125658_916718e8330f_automation_event_follower.py
rename to src/prefect/server/database/_migrations/versions/postgresql/2024_04_09_125658_916718e8330f_automation_event_follower.py
diff --git a/src/prefect/server/database/migrations/versions/postgresql/2024_04_09_132036_954db7517015_trigger_in_index.py b/src/prefect/server/database/_migrations/versions/postgresql/2024_04_09_132036_954db7517015_trigger_in_index.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/postgresql/2024_04_09_132036_954db7517015_trigger_in_index.py
rename to src/prefect/server/database/_migrations/versions/postgresql/2024_04_09_132036_954db7517015_trigger_in_index.py
diff --git a/src/prefect/server/database/migrations/versions/postgresql/2024_04_10_194742_15768c2ec702_add_events_and_event_resources_tables.py b/src/prefect/server/database/_migrations/versions/postgresql/2024_04_10_194742_15768c2ec702_add_events_and_event_resources_tables.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/postgresql/2024_04_10_194742_15768c2ec702_add_events_and_event_resources_tables.py
rename to src/prefect/server/database/_migrations/versions/postgresql/2024_04_10_194742_15768c2ec702_add_events_and_event_resources_tables.py
diff --git a/src/prefect/server/database/migrations/versions/postgresql/2024_04_23_094748_7ae9e431e67a_work_status_fields.py b/src/prefect/server/database/_migrations/versions/postgresql/2024_04_23_094748_7ae9e431e67a_work_status_fields.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/postgresql/2024_04_23_094748_7ae9e431e67a_work_status_fields.py
rename to src/prefect/server/database/_migrations/versions/postgresql/2024_04_23_094748_7ae9e431e67a_work_status_fields.py
diff --git a/src/prefect/server/database/migrations/versions/postgresql/2024_04_25_155240_8905262ec07f_worker_status_field.py b/src/prefect/server/database/_migrations/versions/postgresql/2024_04_25_155240_8905262ec07f_worker_status_field.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/postgresql/2024_04_25_155240_8905262ec07f_worker_status_field.py
rename to src/prefect/server/database/_migrations/versions/postgresql/2024_04_25_155240_8905262ec07f_worker_status_field.py
diff --git a/src/prefect/server/database/migrations/versions/postgresql/2024_05_01_105401_b23c83a12cb4_add_catchup_fields_to_deploymentschedule.py b/src/prefect/server/database/_migrations/versions/postgresql/2024_05_01_105401_b23c83a12cb4_add_catchup_fields_to_deploymentschedule.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/postgresql/2024_05_01_105401_b23c83a12cb4_add_catchup_fields_to_deploymentschedule.py
rename to src/prefect/server/database/_migrations/versions/postgresql/2024_05_01_105401_b23c83a12cb4_add_catchup_fields_to_deploymentschedule.py
diff --git a/src/prefect/server/database/migrations/versions/postgresql/2024_05_21_101457_94622c1663e8_json_variables.py b/src/prefect/server/database/_migrations/versions/postgresql/2024_05_21_101457_94622c1663e8_json_variables.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/postgresql/2024_05_21_101457_94622c1663e8_json_variables.py
rename to src/prefect/server/database/_migrations/versions/postgresql/2024_05_21_101457_94622c1663e8_json_variables.py
diff --git a/src/prefect/server/database/migrations/versions/postgresql/2024_07_15_145240_7495a5013e7e_adding_scope_to_followers.py b/src/prefect/server/database/_migrations/versions/postgresql/2024_07_15_145240_7495a5013e7e_adding_scope_to_followers.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/postgresql/2024_07_15_145240_7495a5013e7e_adding_scope_to_followers.py
rename to src/prefect/server/database/_migrations/versions/postgresql/2024_07_15_145240_7495a5013e7e_adding_scope_to_followers.py
diff --git a/src/prefect/server/database/migrations/versions/postgresql/2024_08_14_150111_97429116795e_add_deployment_concurrency_limit.py b/src/prefect/server/database/_migrations/versions/postgresql/2024_08_14_150111_97429116795e_add_deployment_concurrency_limit.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/postgresql/2024_08_14_150111_97429116795e_add_deployment_concurrency_limit.py
rename to src/prefect/server/database/_migrations/versions/postgresql/2024_08_14_150111_97429116795e_add_deployment_concurrency_limit.py
diff --git a/src/prefect/server/database/migrations/versions/postgresql/2024_09_11_090317_555ed31b284d_add_concurrency_options.py b/src/prefect/server/database/_migrations/versions/postgresql/2024_09_11_090317_555ed31b284d_add_concurrency_options.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/postgresql/2024_09_11_090317_555ed31b284d_add_concurrency_options.py
rename to src/prefect/server/database/_migrations/versions/postgresql/2024_09_11_090317_555ed31b284d_add_concurrency_options.py
diff --git a/src/prefect/server/database/migrations/versions/postgresql/2024_09_16_152051_eaec5004771f_add_deployment_to_global_concurrency_.py b/src/prefect/server/database/_migrations/versions/postgresql/2024_09_16_152051_eaec5004771f_add_deployment_to_global_concurrency_.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/postgresql/2024_09_16_152051_eaec5004771f_add_deployment_to_global_concurrency_.py
rename to src/prefect/server/database/_migrations/versions/postgresql/2024_09_16_152051_eaec5004771f_add_deployment_to_global_concurrency_.py
diff --git a/src/prefect/server/database/migrations/versions/postgresql/2024_11_15_150706_68a44144428d_add_labels_column_to_flow_flowrun_.py b/src/prefect/server/database/_migrations/versions/postgresql/2024_11_15_150706_68a44144428d_add_labels_column_to_flow_flowrun_.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/postgresql/2024_11_15_150706_68a44144428d_add_labels_column_to_flow_flowrun_.py
rename to src/prefect/server/database/_migrations/versions/postgresql/2024_11_15_150706_68a44144428d_add_labels_column_to_flow_flowrun_.py
diff --git a/src/prefect/server/database/migrations/versions/sqlite/2022_01_20_115236_9725c1cbee35_initial_migration.py b/src/prefect/server/database/_migrations/versions/sqlite/2022_01_20_115236_9725c1cbee35_initial_migration.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/sqlite/2022_01_20_115236_9725c1cbee35_initial_migration.py
rename to src/prefect/server/database/_migrations/versions/sqlite/2022_01_20_115236_9725c1cbee35_initial_migration.py
diff --git a/src/prefect/server/database/migrations/versions/sqlite/2022_02_04_093838_619bea85701a_block_data.py b/src/prefect/server/database/_migrations/versions/sqlite/2022_02_04_093838_619bea85701a_block_data.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/sqlite/2022_02_04_093838_619bea85701a_block_data.py
rename to src/prefect/server/database/_migrations/versions/sqlite/2022_02_04_093838_619bea85701a_block_data.py
diff --git a/src/prefect/server/database/migrations/versions/sqlite/2022_02_15_211737_28ae48128c75_add_configurations.py b/src/prefect/server/database/_migrations/versions/sqlite/2022_02_15_211737_28ae48128c75_add_configurations.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/sqlite/2022_02_15_211737_28ae48128c75_add_configurations.py
rename to src/prefect/server/database/_migrations/versions/sqlite/2022_02_15_211737_28ae48128c75_add_configurations.py
diff --git a/src/prefect/server/database/migrations/versions/sqlite/2022_02_17_151416_7c91cb86dc4e_add_agents_and_work_queues.py b/src/prefect/server/database/_migrations/versions/sqlite/2022_02_17_151416_7c91cb86dc4e_add_agents_and_work_queues.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/sqlite/2022_02_17_151416_7c91cb86dc4e_add_agents_and_work_queues.py
rename to src/prefect/server/database/_migrations/versions/sqlite/2022_02_17_151416_7c91cb86dc4e_add_agents_and_work_queues.py
diff --git a/src/prefect/server/database/migrations/versions/sqlite/2022_02_19_210255_4c4a6a138053_rename_block_data_table.py b/src/prefect/server/database/_migrations/versions/sqlite/2022_02_19_210255_4c4a6a138053_rename_block_data_table.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/sqlite/2022_02_19_210255_4c4a6a138053_rename_block_data_table.py
rename to src/prefect/server/database/_migrations/versions/sqlite/2022_02_19_210255_4c4a6a138053_rename_block_data_table.py
diff --git a/src/prefect/server/database/migrations/versions/sqlite/2022_02_20_103610_e1ff4973a9eb_add_block_spec_table.py b/src/prefect/server/database/_migrations/versions/sqlite/2022_02_20_103610_e1ff4973a9eb_add_block_spec_table.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/sqlite/2022_02_20_103610_e1ff4973a9eb_add_block_spec_table.py
rename to src/prefect/server/database/_migrations/versions/sqlite/2022_02_20_103610_e1ff4973a9eb_add_block_spec_table.py
diff --git a/src/prefect/server/database/migrations/versions/sqlite/2022_02_21_111238_f327e877e423_index_flowrun_flow_runner_type.py b/src/prefect/server/database/_migrations/versions/sqlite/2022_02_21_111238_f327e877e423_index_flowrun_flow_runner_type.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/sqlite/2022_02_21_111238_f327e877e423_index_flowrun_flow_runner_type.py
rename to src/prefect/server/database/_migrations/versions/sqlite/2022_02_21_111238_f327e877e423_index_flowrun_flow_runner_type.py
diff --git a/src/prefect/server/database/migrations/versions/sqlite/2022_02_21_145916_c8ff35f94028_add_block_spec_id_to_blocks.py b/src/prefect/server/database/_migrations/versions/sqlite/2022_02_21_145916_c8ff35f94028_add_block_spec_id_to_blocks.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/sqlite/2022_02_21_145916_c8ff35f94028_add_block_spec_id_to_blocks.py
rename to src/prefect/server/database/_migrations/versions/sqlite/2022_02_21_145916_c8ff35f94028_add_block_spec_id_to_blocks.py
diff --git a/src/prefect/server/database/migrations/versions/sqlite/2022_03_10_102500_71a57ec351d1_index_flow_created.py b/src/prefect/server/database/_migrations/versions/sqlite/2022_03_10_102500_71a57ec351d1_index_flow_created.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/sqlite/2022_03_10_102500_71a57ec351d1_index_flow_created.py
rename to src/prefect/server/database/_migrations/versions/sqlite/2022_03_10_102500_71a57ec351d1_index_flow_created.py
diff --git a/src/prefect/server/database/migrations/versions/sqlite/2022_04_19_181604_7f5f335cace3_add_flow_run_state_name.py b/src/prefect/server/database/_migrations/versions/sqlite/2022_04_19_181604_7f5f335cace3_add_flow_run_state_name.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/sqlite/2022_04_19_181604_7f5f335cace3_add_flow_run_state_name.py
rename to src/prefect/server/database/_migrations/versions/sqlite/2022_04_19_181604_7f5f335cace3_add_flow_run_state_name.py
diff --git a/src/prefect/server/database/migrations/versions/sqlite/2022_04_21_113057_db6bde582447_backfill_state_name.py b/src/prefect/server/database/_migrations/versions/sqlite/2022_04_21_113057_db6bde582447_backfill_state_name.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/sqlite/2022_04_21_113057_db6bde582447_backfill_state_name.py
rename to src/prefect/server/database/_migrations/versions/sqlite/2022_04_21_113057_db6bde582447_backfill_state_name.py
diff --git a/src/prefect/server/database/migrations/versions/sqlite/2022_04_23_114831_fd966d4ad99c_rename_block_to_blockbasis_and_.py b/src/prefect/server/database/_migrations/versions/sqlite/2022_04_23_114831_fd966d4ad99c_rename_block_to_blockbasis_and_.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/sqlite/2022_04_23_114831_fd966d4ad99c_rename_block_to_blockbasis_and_.py
rename to src/prefect/server/database/_migrations/versions/sqlite/2022_04_23_114831_fd966d4ad99c_rename_block_to_blockbasis_and_.py
diff --git a/src/prefect/server/database/migrations/versions/sqlite/2022_04_25_135207_b75d279ba985_replace_version_with_checksum.py b/src/prefect/server/database/_migrations/versions/sqlite/2022_04_25_135207_b75d279ba985_replace_version_with_checksum.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/sqlite/2022_04_25_135207_b75d279ba985_replace_version_with_checksum.py
rename to src/prefect/server/database/_migrations/versions/sqlite/2022_04_25_135207_b75d279ba985_replace_version_with_checksum.py
diff --git a/src/prefect/server/database/migrations/versions/sqlite/2022_05_12_203158_888a0bb0df7b_add_flow_run_alerts.py b/src/prefect/server/database/_migrations/versions/sqlite/2022_05_12_203158_888a0bb0df7b_add_flow_run_alerts.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/sqlite/2022_05_12_203158_888a0bb0df7b_add_flow_run_alerts.py
rename to src/prefect/server/database/_migrations/versions/sqlite/2022_05_12_203158_888a0bb0df7b_add_flow_run_alerts.py
diff --git a/src/prefect/server/database/migrations/versions/sqlite/2022_05_19_165808_33439667aeea_add_block_schema_capabilities.py b/src/prefect/server/database/_migrations/versions/sqlite/2022_05_19_165808_33439667aeea_add_block_schema_capabilities.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/sqlite/2022_05_19_165808_33439667aeea_add_block_schema_capabilities.py
rename to src/prefect/server/database/_migrations/versions/sqlite/2022_05_19_165808_33439667aeea_add_block_schema_capabilities.py
diff --git a/src/prefect/server/database/migrations/versions/sqlite/2022_05_28_081650_e73c6f1fe752_adds_block_schema_referecnes_and_block_.py b/src/prefect/server/database/_migrations/versions/sqlite/2022_05_28_081650_e73c6f1fe752_adds_block_schema_referecnes_and_block_.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/sqlite/2022_05_28_081650_e73c6f1fe752_adds_block_schema_referecnes_and_block_.py
rename to src/prefect/server/database/_migrations/versions/sqlite/2022_05_28_081650_e73c6f1fe752_adds_block_schema_referecnes_and_block_.py
diff --git a/src/prefect/server/database/migrations/versions/sqlite/2022_05_30_100855_d76326ed0d06_rename_run_alerts_to_run_notifications.py b/src/prefect/server/database/_migrations/versions/sqlite/2022_05_30_100855_d76326ed0d06_rename_run_alerts_to_run_notifications.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/sqlite/2022_05_30_100855_d76326ed0d06_rename_run_alerts_to_run_notifications.py
rename to src/prefect/server/database/_migrations/versions/sqlite/2022_05_30_100855_d76326ed0d06_rename_run_alerts_to_run_notifications.py
diff --git a/src/prefect/server/database/migrations/versions/sqlite/2022_06_04_104048_f65b6ad0b869_add_indexes_for_partial_name_matches.py b/src/prefect/server/database/_migrations/versions/sqlite/2022_06_04_104048_f65b6ad0b869_add_indexes_for_partial_name_matches.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/sqlite/2022_06_04_104048_f65b6ad0b869_add_indexes_for_partial_name_matches.py
rename to src/prefect/server/database/_migrations/versions/sqlite/2022_06_04_104048_f65b6ad0b869_add_indexes_for_partial_name_matches.py
diff --git a/src/prefect/server/database/migrations/versions/sqlite/2022_06_08_121702_84892301571a_adds_description_and_code_example_to_.py b/src/prefect/server/database/_migrations/versions/sqlite/2022_06_08_121702_84892301571a_adds_description_and_code_example_to_.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/sqlite/2022_06_08_121702_84892301571a_adds_description_and_code_example_to_.py
rename to src/prefect/server/database/_migrations/versions/sqlite/2022_06_08_121702_84892301571a_adds_description_and_code_example_to_.py
diff --git a/src/prefect/server/database/migrations/versions/sqlite/2022_06_13_103943_2d900af9cd07_add_anonymous_column_for_block_documents.py b/src/prefect/server/database/_migrations/versions/sqlite/2022_06_13_103943_2d900af9cd07_add_anonymous_column_for_block_documents.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/sqlite/2022_06_13_103943_2d900af9cd07_add_anonymous_column_for_block_documents.py
rename to src/prefect/server/database/_migrations/versions/sqlite/2022_06_13_103943_2d900af9cd07_add_anonymous_column_for_block_documents.py
diff --git a/src/prefect/server/database/migrations/versions/sqlite/2022_06_17_204530_9e2a1c08c6f1_add_block_schema_indexes.py b/src/prefect/server/database/_migrations/versions/sqlite/2022_06_17_204530_9e2a1c08c6f1_add_block_schema_indexes.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/sqlite/2022_06_17_204530_9e2a1c08c6f1_add_block_schema_indexes.py
rename to src/prefect/server/database/_migrations/versions/sqlite/2022_06_17_204530_9e2a1c08c6f1_add_block_schema_indexes.py
diff --git a/src/prefect/server/database/migrations/versions/sqlite/2022_06_20_123823_dff8da7a6c2c_add_protected_column_for_block_types.py b/src/prefect/server/database/_migrations/versions/sqlite/2022_06_20_123823_dff8da7a6c2c_add_protected_column_for_block_types.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/sqlite/2022_06_20_123823_dff8da7a6c2c_add_protected_column_for_block_types.py
rename to src/prefect/server/database/_migrations/versions/sqlite/2022_06_20_123823_dff8da7a6c2c_add_protected_column_for_block_types.py
diff --git a/src/prefect/server/database/migrations/versions/sqlite/2022_06_21_093640_a205b458d997_adds_indexes_for_block_filtering.py b/src/prefect/server/database/_migrations/versions/sqlite/2022_06_21_093640_a205b458d997_adds_indexes_for_block_filtering.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/sqlite/2022_06_21_093640_a205b458d997_adds_indexes_for_block_filtering.py
rename to src/prefect/server/database/_migrations/versions/sqlite/2022_06_21_093640_a205b458d997_adds_indexes_for_block_filtering.py
diff --git a/src/prefect/server/database/migrations/versions/sqlite/2022_06_29_133432_3bd87ecdac38_add_descriptions_to_deployments.py b/src/prefect/server/database/_migrations/versions/sqlite/2022_06_29_133432_3bd87ecdac38_add_descriptions_to_deployments.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/sqlite/2022_06_29_133432_3bd87ecdac38_add_descriptions_to_deployments.py
rename to src/prefect/server/database/_migrations/versions/sqlite/2022_06_29_133432_3bd87ecdac38_add_descriptions_to_deployments.py
diff --git a/src/prefect/server/database/migrations/versions/sqlite/2022_06_29_151832_42762c37b7bc_remove_name_column_for_notification_.py b/src/prefect/server/database/_migrations/versions/sqlite/2022_06_29_151832_42762c37b7bc_remove_name_column_for_notification_.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/sqlite/2022_06_29_151832_42762c37b7bc_remove_name_column_for_notification_.py
rename to src/prefect/server/database/_migrations/versions/sqlite/2022_06_29_151832_42762c37b7bc_remove_name_column_for_notification_.py
diff --git a/src/prefect/server/database/migrations/versions/sqlite/2022_07_06_142824_e2dae764a603_migrates_block_schemas_with_new_secrets_.py b/src/prefect/server/database/_migrations/versions/sqlite/2022_07_06_142824_e2dae764a603_migrates_block_schemas_with_new_secrets_.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/sqlite/2022_07_06_142824_e2dae764a603_migrates_block_schemas_with_new_secrets_.py
rename to src/prefect/server/database/_migrations/versions/sqlite/2022_07_06_142824_e2dae764a603_migrates_block_schemas_with_new_secrets_.py
diff --git a/src/prefect/server/database/migrations/versions/sqlite/2022_07_07_111208_061c7e518b40_removes_debugprintnotification_block_.py b/src/prefect/server/database/_migrations/versions/sqlite/2022_07_07_111208_061c7e518b40_removes_debugprintnotification_block_.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/sqlite/2022_07_07_111208_061c7e518b40_removes_debugprintnotification_block_.py
rename to src/prefect/server/database/_migrations/versions/sqlite/2022_07_07_111208_061c7e518b40_removes_debugprintnotification_block_.py
diff --git a/src/prefect/server/database/migrations/versions/sqlite/2022_07_11_113314_638cbcc2a158_add_infrastructure_block_id_to_.py b/src/prefect/server/database/_migrations/versions/sqlite/2022_07_11_113314_638cbcc2a158_add_infrastructure_block_id_to_.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/sqlite/2022_07_11_113314_638cbcc2a158_add_infrastructure_block_id_to_.py
rename to src/prefect/server/database/_migrations/versions/sqlite/2022_07_11_113314_638cbcc2a158_add_infrastructure_block_id_to_.py
diff --git a/src/prefect/server/database/migrations/versions/sqlite/2022_07_14_113138_56be24fdb383_removing_default_storage_block_document.py b/src/prefect/server/database/_migrations/versions/sqlite/2022_07_14_113138_56be24fdb383_removing_default_storage_block_document.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/sqlite/2022_07_14_113138_56be24fdb383_removing_default_storage_block_document.py
rename to src/prefect/server/database/_migrations/versions/sqlite/2022_07_14_113138_56be24fdb383_removing_default_storage_block_document.py
diff --git a/src/prefect/server/database/migrations/versions/sqlite/2022_07_19_153432_628a873f0d1a_renames_existing_block_types.py b/src/prefect/server/database/_migrations/versions/sqlite/2022_07_19_153432_628a873f0d1a_renames_existing_block_types.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/sqlite/2022_07_19_153432_628a873f0d1a_renames_existing_block_types.py
rename to src/prefect/server/database/_migrations/versions/sqlite/2022_07_19_153432_628a873f0d1a_renames_existing_block_types.py
diff --git a/src/prefect/server/database/migrations/versions/sqlite/2022_07_20_113451_2fe8ef6a6514_remove_flow_runners.py b/src/prefect/server/database/_migrations/versions/sqlite/2022_07_20_113451_2fe8ef6a6514_remove_flow_runners.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/sqlite/2022_07_20_113451_2fe8ef6a6514_remove_flow_runners.py
rename to src/prefect/server/database/_migrations/versions/sqlite/2022_07_20_113451_2fe8ef6a6514_remove_flow_runners.py
diff --git a/src/prefect/server/database/migrations/versions/sqlite/2022_07_25_142515_f335f9633eec_adds_block_type_slug.py b/src/prefect/server/database/_migrations/versions/sqlite/2022_07_25_142515_f335f9633eec_adds_block_type_slug.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/sqlite/2022_07_25_142515_f335f9633eec_adds_block_type_slug.py
rename to src/prefect/server/database/_migrations/versions/sqlite/2022_07_25_142515_f335f9633eec_adds_block_type_slug.py
diff --git a/src/prefect/server/database/migrations/versions/sqlite/2022_07_25_151028_88c2112b668f_update_deployments_to_include_more_.py b/src/prefect/server/database/_migrations/versions/sqlite/2022_07_25_151028_88c2112b668f_update_deployments_to_include_more_.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/sqlite/2022_07_25_151028_88c2112b668f_update_deployments_to_include_more_.py
rename to src/prefect/server/database/_migrations/versions/sqlite/2022_07_25_151028_88c2112b668f_update_deployments_to_include_more_.py
diff --git a/src/prefect/server/database/migrations/versions/sqlite/2022_07_29_181111_905134444e17_add_index_to_flow_run_infrastructure_.py b/src/prefect/server/database/_migrations/versions/sqlite/2022_07_29_181111_905134444e17_add_index_to_flow_run_infrastructure_.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/sqlite/2022_07_29_181111_905134444e17_add_index_to_flow_run_infrastructure_.py
rename to src/prefect/server/database/_migrations/versions/sqlite/2022_07_29_181111_905134444e17_add_index_to_flow_run_infrastructure_.py
diff --git a/src/prefect/server/database/migrations/versions/sqlite/2022_08_01_211039_24bb2e4a195c_add_deployment_version.py b/src/prefect/server/database/_migrations/versions/sqlite/2022_08_01_211039_24bb2e4a195c_add_deployment_version.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/sqlite/2022_08_01_211039_24bb2e4a195c_add_deployment_version.py
rename to src/prefect/server/database/_migrations/versions/sqlite/2022_08_01_211039_24bb2e4a195c_add_deployment_version.py
diff --git a/src/prefect/server/database/migrations/versions/sqlite/2022_08_06_130009_296e2665785f_expand_deployment_schema_for_improved_ux.py b/src/prefect/server/database/_migrations/versions/sqlite/2022_08_06_130009_296e2665785f_expand_deployment_schema_for_improved_ux.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/sqlite/2022_08_06_130009_296e2665785f_expand_deployment_schema_for_improved_ux.py
rename to src/prefect/server/database/_migrations/versions/sqlite/2022_08_06_130009_296e2665785f_expand_deployment_schema_for_improved_ux.py
diff --git a/src/prefect/server/database/migrations/versions/sqlite/2022_08_07_134138_575634b7acd4_add_work_queue_name_to_runs.py b/src/prefect/server/database/_migrations/versions/sqlite/2022_08_07_134138_575634b7acd4_add_work_queue_name_to_runs.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/sqlite/2022_08_07_134138_575634b7acd4_add_work_queue_name_to_runs.py
rename to src/prefect/server/database/_migrations/versions/sqlite/2022_08_07_134138_575634b7acd4_add_work_queue_name_to_runs.py
diff --git a/src/prefect/server/database/migrations/versions/sqlite/2022_08_07_154319_53c19b31aa09_fix_name_on_concurrency_limit_tag_idx.py b/src/prefect/server/database/_migrations/versions/sqlite/2022_08_07_154319_53c19b31aa09_fix_name_on_concurrency_limit_tag_idx.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/sqlite/2022_08_07_154319_53c19b31aa09_fix_name_on_concurrency_limit_tag_idx.py
rename to src/prefect/server/database/_migrations/versions/sqlite/2022_08_07_154319_53c19b31aa09_fix_name_on_concurrency_limit_tag_idx.py
diff --git a/src/prefect/server/database/migrations/versions/sqlite/2022_08_18_102527_e757138e954a_adds_block_schema_version.py b/src/prefect/server/database/_migrations/versions/sqlite/2022_08_18_102527_e757138e954a_adds_block_schema_version.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/sqlite/2022_08_18_102527_e757138e954a_adds_block_schema_version.py
rename to src/prefect/server/database/_migrations/versions/sqlite/2022_08_18_102527_e757138e954a_adds_block_schema_version.py
diff --git a/src/prefect/server/database/migrations/versions/sqlite/2022_10_12_102048_22b7cb02e593_add_state_timestamp.py b/src/prefect/server/database/_migrations/versions/sqlite/2022_10_12_102048_22b7cb02e593_add_state_timestamp.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/sqlite/2022_10_12_102048_22b7cb02e593_add_state_timestamp.py
rename to src/prefect/server/database/_migrations/versions/sqlite/2022_10_12_102048_22b7cb02e593_add_state_timestamp.py
diff --git a/src/prefect/server/database/migrations/versions/sqlite/2022_10_14_172612_ad4b1b4d1e9d_index_deployment_created.py b/src/prefect/server/database/_migrations/versions/sqlite/2022_10_14_172612_ad4b1b4d1e9d_index_deployment_created.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/sqlite/2022_10_14_172612_ad4b1b4d1e9d_index_deployment_created.py
rename to src/prefect/server/database/_migrations/versions/sqlite/2022_10_14_172612_ad4b1b4d1e9d_index_deployment_created.py
diff --git a/src/prefect/server/database/migrations/versions/sqlite/2022_10_19_093542_fa319f214160_add_created_by.py b/src/prefect/server/database/_migrations/versions/sqlite/2022_10_19_093542_fa319f214160_add_created_by.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/sqlite/2022_10_19_093542_fa319f214160_add_created_by.py
rename to src/prefect/server/database/_migrations/versions/sqlite/2022_10_19_093542_fa319f214160_add_created_by.py
diff --git a/src/prefect/server/database/migrations/versions/sqlite/2022_10_19_155810_af52717cf201_track_retries_restarts.py b/src/prefect/server/database/_migrations/versions/sqlite/2022_10_19_155810_af52717cf201_track_retries_restarts.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/sqlite/2022_10_19_155810_af52717cf201_track_retries_restarts.py
rename to src/prefect/server/database/_migrations/versions/sqlite/2022_10_19_155810_af52717cf201_track_retries_restarts.py
diff --git a/src/prefect/server/database/migrations/versions/sqlite/2022_10_20_101423_3ced59d8806b_add_last_polled.py b/src/prefect/server/database/_migrations/versions/sqlite/2022_10_20_101423_3ced59d8806b_add_last_polled.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/sqlite/2022_10_20_101423_3ced59d8806b_add_last_polled.py
rename to src/prefect/server/database/_migrations/versions/sqlite/2022_10_20_101423_3ced59d8806b_add_last_polled.py
diff --git a/src/prefect/server/database/migrations/versions/sqlite/2022_11_05_180619_a0284438370e_add_index_for_scheduled_deployments.py b/src/prefect/server/database/_migrations/versions/sqlite/2022_11_05_180619_a0284438370e_add_index_for_scheduled_deployments.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/sqlite/2022_11_05_180619_a0284438370e_add_index_for_scheduled_deployments.py
rename to src/prefect/server/database/_migrations/versions/sqlite/2022_11_05_180619_a0284438370e_add_index_for_scheduled_deployments.py
diff --git a/src/prefect/server/database/migrations/versions/sqlite/2022_11_10_165921_4f90ad6349bd_add_coalesced_start_time_indices.py b/src/prefect/server/database/_migrations/versions/sqlite/2022_11_10_165921_4f90ad6349bd_add_coalesced_start_time_indices.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/sqlite/2022_11_10_165921_4f90ad6349bd_add_coalesced_start_time_indices.py
rename to src/prefect/server/database/_migrations/versions/sqlite/2022_11_10_165921_4f90ad6349bd_add_coalesced_start_time_indices.py
diff --git a/src/prefect/server/database/migrations/versions/sqlite/2022_11_18_161332_7201de756d85_add_flowrun_infrastructure_pid.py b/src/prefect/server/database/_migrations/versions/sqlite/2022_11_18_161332_7201de756d85_add_flowrun_infrastructure_pid.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/sqlite/2022_11_18_161332_7201de756d85_add_flowrun_infrastructure_pid.py
rename to src/prefect/server/database/_migrations/versions/sqlite/2022_11_18_161332_7201de756d85_add_flowrun_infrastructure_pid.py
diff --git a/src/prefect/server/database/migrations/versions/sqlite/2022_11_24_143302_fe77ad0dda06_add_worker_tables.py b/src/prefect/server/database/_migrations/versions/sqlite/2022_11_24_143302_fe77ad0dda06_add_worker_tables.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/sqlite/2022_11_24_143302_fe77ad0dda06_add_worker_tables.py
rename to src/prefect/server/database/_migrations/versions/sqlite/2022_11_24_143302_fe77ad0dda06_add_worker_tables.py
diff --git a/src/prefect/server/database/migrations/versions/sqlite/2023_01_08_175327_bb38729c471a_rename_worker_pools_to_work_pools.py b/src/prefect/server/database/_migrations/versions/sqlite/2023_01_08_175327_bb38729c471a_rename_worker_pools_to_work_pools.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/sqlite/2023_01_08_175327_bb38729c471a_rename_worker_pools_to_work_pools.py
rename to src/prefect/server/database/_migrations/versions/sqlite/2023_01_08_175327_bb38729c471a_rename_worker_pools_to_work_pools.py
diff --git a/src/prefect/server/database/migrations/versions/sqlite/2023_01_12_000042_f92143d30c24_implement_artifact_table.py b/src/prefect/server/database/_migrations/versions/sqlite/2023_01_12_000042_f92143d30c24_implement_artifact_table.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/sqlite/2023_01_12_000042_f92143d30c24_implement_artifact_table.py
rename to src/prefect/server/database/_migrations/versions/sqlite/2023_01_12_000042_f92143d30c24_implement_artifact_table.py
diff --git a/src/prefect/server/database/migrations/versions/sqlite/2023_01_12_000043_f92143d30c25_create_migration_index.py b/src/prefect/server/database/_migrations/versions/sqlite/2023_01_12_000043_f92143d30c25_create_migration_index.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/sqlite/2023_01_12_000043_f92143d30c25_create_migration_index.py
rename to src/prefect/server/database/_migrations/versions/sqlite/2023_01_12_000043_f92143d30c25_create_migration_index.py
diff --git a/src/prefect/server/database/migrations/versions/sqlite/2023_01_12_000043_f92143d30c26_migrate_artifact_data.py b/src/prefect/server/database/_migrations/versions/sqlite/2023_01_12_000043_f92143d30c26_migrate_artifact_data.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/sqlite/2023_01_12_000043_f92143d30c26_migrate_artifact_data.py
rename to src/prefect/server/database/_migrations/versions/sqlite/2023_01_12_000043_f92143d30c26_migrate_artifact_data.py
diff --git a/src/prefect/server/database/migrations/versions/sqlite/2023_01_12_000044_f92143d30c27_cleanup_artifact_migration.py b/src/prefect/server/database/_migrations/versions/sqlite/2023_01_12_000044_f92143d30c27_cleanup_artifact_migration.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/sqlite/2023_01_12_000044_f92143d30c27_cleanup_artifact_migration.py
rename to src/prefect/server/database/_migrations/versions/sqlite/2023_01_12_000044_f92143d30c27_cleanup_artifact_migration.py
diff --git a/src/prefect/server/database/migrations/versions/sqlite/2023_01_25_114348_b9bda9f142f1_expand_work_queue_table.py b/src/prefect/server/database/_migrations/versions/sqlite/2023_01_25_114348_b9bda9f142f1_expand_work_queue_table.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/sqlite/2023_01_25_114348_b9bda9f142f1_expand_work_queue_table.py
rename to src/prefect/server/database/_migrations/versions/sqlite/2023_01_25_114348_b9bda9f142f1_expand_work_queue_table.py
diff --git a/src/prefect/server/database/migrations/versions/sqlite/2023_01_31_105442_1678f2fb8b33_work_queue_data_migration.py b/src/prefect/server/database/_migrations/versions/sqlite/2023_01_31_105442_1678f2fb8b33_work_queue_data_migration.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/sqlite/2023_01_31_105442_1678f2fb8b33_work_queue_data_migration.py
rename to src/prefect/server/database/_migrations/versions/sqlite/2023_01_31_105442_1678f2fb8b33_work_queue_data_migration.py
diff --git a/src/prefect/server/database/migrations/versions/sqlite/2023_01_31_132409_bfe42b7090d6_clean_up_work_queue_migration.py b/src/prefect/server/database/_migrations/versions/sqlite/2023_01_31_132409_bfe42b7090d6_clean_up_work_queue_migration.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/sqlite/2023_01_31_132409_bfe42b7090d6_clean_up_work_queue_migration.py
rename to src/prefect/server/database/_migrations/versions/sqlite/2023_01_31_132409_bfe42b7090d6_clean_up_work_queue_migration.py
diff --git a/src/prefect/server/database/migrations/versions/sqlite/2023_02_08_152028_8d148e44e669_remove_artifact_fk.py b/src/prefect/server/database/_migrations/versions/sqlite/2023_02_08_152028_8d148e44e669_remove_artifact_fk.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/sqlite/2023_02_08_152028_8d148e44e669_remove_artifact_fk.py
rename to src/prefect/server/database/_migrations/versions/sqlite/2023_02_08_152028_8d148e44e669_remove_artifact_fk.py
diff --git a/src/prefect/server/database/migrations/versions/sqlite/2023_03_01_165551_f3df94dca3cc_remove_flowrun_deployment_fk.py b/src/prefect/server/database/_migrations/versions/sqlite/2023_03_01_165551_f3df94dca3cc_remove_flowrun_deployment_fk.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/sqlite/2023_03_01_165551_f3df94dca3cc_remove_flowrun_deployment_fk.py
rename to src/prefect/server/database/_migrations/versions/sqlite/2023_03_01_165551_f3df94dca3cc_remove_flowrun_deployment_fk.py
diff --git a/src/prefect/server/database/migrations/versions/sqlite/2023_03_15_123850_cf1159bd0d3c_add_artifact_description_col.py b/src/prefect/server/database/_migrations/versions/sqlite/2023_03_15_123850_cf1159bd0d3c_add_artifact_description_col.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/sqlite/2023_03_15_123850_cf1159bd0d3c_add_artifact_description_col.py
rename to src/prefect/server/database/_migrations/versions/sqlite/2023_03_15_123850_cf1159bd0d3c_add_artifact_description_col.py
diff --git a/src/prefect/server/database/migrations/versions/sqlite/2023_03_20_153925_1d7441c031d0_remove_uq_from_artifact_table.py b/src/prefect/server/database/_migrations/versions/sqlite/2023_03_20_153925_1d7441c031d0_remove_uq_from_artifact_table.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/sqlite/2023_03_20_153925_1d7441c031d0_remove_uq_from_artifact_table.py
rename to src/prefect/server/database/_migrations/versions/sqlite/2023_03_20_153925_1d7441c031d0_remove_uq_from_artifact_table.py
diff --git a/src/prefect/server/database/migrations/versions/sqlite/2023_03_20_184534_b9aafc3ab936_add_artifact_collection_table.py b/src/prefect/server/database/_migrations/versions/sqlite/2023_03_20_184534_b9aafc3ab936_add_artifact_collection_table.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/sqlite/2023_03_20_184534_b9aafc3ab936_add_artifact_collection_table.py
rename to src/prefect/server/database/_migrations/versions/sqlite/2023_03_20_184534_b9aafc3ab936_add_artifact_collection_table.py
diff --git a/src/prefect/server/database/migrations/versions/sqlite/2023_03_20_194204_422f8ba9541d_add_artifact_idx.py b/src/prefect/server/database/_migrations/versions/sqlite/2023_03_20_194204_422f8ba9541d_add_artifact_idx.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/sqlite/2023_03_20_194204_422f8ba9541d_add_artifact_idx.py
rename to src/prefect/server/database/_migrations/versions/sqlite/2023_03_20_194204_422f8ba9541d_add_artifact_idx.py
diff --git a/src/prefect/server/database/migrations/versions/sqlite/2023_04_04_115150_553920ec20e9_add_index_on_log.py b/src/prefect/server/database/_migrations/versions/sqlite/2023_04_04_115150_553920ec20e9_add_index_on_log.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/sqlite/2023_04_04_115150_553920ec20e9_add_index_on_log.py
rename to src/prefect/server/database/_migrations/versions/sqlite/2023_04_04_115150_553920ec20e9_add_index_on_log.py
diff --git a/src/prefect/server/database/migrations/versions/sqlite/2023_04_04_172555_3e1eb8281d5e_add_cols_to_artifact_collection.py b/src/prefect/server/database/_migrations/versions/sqlite/2023_04_04_172555_3e1eb8281d5e_add_cols_to_artifact_collection.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/sqlite/2023_04_04_172555_3e1eb8281d5e_add_cols_to_artifact_collection.py
rename to src/prefect/server/database/_migrations/versions/sqlite/2023_04_04_172555_3e1eb8281d5e_add_cols_to_artifact_collection.py
diff --git a/src/prefect/server/database/migrations/versions/sqlite/2023_04_05_120713_340f457b315f_add_column_to_deployments_for_pull_steps.py b/src/prefect/server/database/_migrations/versions/sqlite/2023_04_05_120713_340f457b315f_add_column_to_deployments_for_pull_steps.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/sqlite/2023_04_05_120713_340f457b315f_add_column_to_deployments_for_pull_steps.py
rename to src/prefect/server/database/_migrations/versions/sqlite/2023_04_05_120713_340f457b315f_add_column_to_deployments_for_pull_steps.py
diff --git a/src/prefect/server/database/migrations/versions/sqlite/2023_04_05_134301_3d46e23593d6_add_variables.py b/src/prefect/server/database/_migrations/versions/sqlite/2023_04_05_134301_3d46e23593d6_add_variables.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/sqlite/2023_04_05_134301_3d46e23593d6_add_variables.py
rename to src/prefect/server/database/_migrations/versions/sqlite/2023_04_05_134301_3d46e23593d6_add_variables.py
diff --git a/src/prefect/server/database/migrations/versions/sqlite/2023_04_06_122659_2dbcec43c857_migrate_artifact_data.py b/src/prefect/server/database/_migrations/versions/sqlite/2023_04_06_122659_2dbcec43c857_migrate_artifact_data.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/sqlite/2023_04_06_122659_2dbcec43c857_migrate_artifact_data.py
rename to src/prefect/server/database/_migrations/versions/sqlite/2023_04_06_122659_2dbcec43c857_migrate_artifact_data.py
diff --git a/src/prefect/server/database/migrations/versions/sqlite/2023_08_02_113813_5b0bd3b41a23_create_concurrency_limit_v2_table.py b/src/prefect/server/database/_migrations/versions/sqlite/2023_08_02_113813_5b0bd3b41a23_create_concurrency_limit_v2_table.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/sqlite/2023_08_02_113813_5b0bd3b41a23_create_concurrency_limit_v2_table.py
rename to src/prefect/server/database/_migrations/versions/sqlite/2023_08_02_113813_5b0bd3b41a23_create_concurrency_limit_v2_table.py
diff --git a/src/prefect/server/database/migrations/versions/sqlite/2023_09_06_084729_c2d001b7dd06_.py b/src/prefect/server/database/_migrations/versions/sqlite/2023_09_06_084729_c2d001b7dd06_.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/sqlite/2023_09_06_084729_c2d001b7dd06_.py
rename to src/prefect/server/database/_migrations/versions/sqlite/2023_09_06_084729_c2d001b7dd06_.py
diff --git a/src/prefect/server/database/migrations/versions/sqlite/2023_09_20_134145_ef674d598dd3_adds_enforce_parameter_schema_column_to_.py b/src/prefect/server/database/_migrations/versions/sqlite/2023_09_20_134145_ef674d598dd3_adds_enforce_parameter_schema_column_to_.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/sqlite/2023_09_20_134145_ef674d598dd3_adds_enforce_parameter_schema_column_to_.py
rename to src/prefect/server/database/_migrations/versions/sqlite/2023_09_20_134145_ef674d598dd3_adds_enforce_parameter_schema_column_to_.py
diff --git a/src/prefect/server/database/migrations/versions/sqlite/2023_09_21_121806_8167af8df781_make_slot_decay_per_second_not_nullable.py b/src/prefect/server/database/_migrations/versions/sqlite/2023_09_21_121806_8167af8df781_make_slot_decay_per_second_not_nullable.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/sqlite/2023_09_21_121806_8167af8df781_make_slot_decay_per_second_not_nullable.py
rename to src/prefect/server/database/_migrations/versions/sqlite/2023_09_21_121806_8167af8df781_make_slot_decay_per_second_not_nullable.py
diff --git a/src/prefect/server/database/migrations/versions/sqlite/2023_09_25_121806_8167af8df781_remove_flow_run_id_requirement_from_task_run.py b/src/prefect/server/database/_migrations/versions/sqlite/2023_09_25_121806_8167af8df781_remove_flow_run_id_requirement_from_task_run.py
similarity index 100%
rename from src/prefect/server/database/migrations/versions/sqlite/2023_09_25_121806_8167af8df781_remove_flow_run_id_requirement_from_task_run.py
rename to src/prefect/server/database/_migrations/versions/sqlite/2023_09_25_121806_8167af8df781_remove_flow_run_id_requirement_from_task_run.py
diff --git a/src/prefect/server/database/migrations/versions/sqlite/2023_10_12_175815_f3165ae0a213_add_last_polled_to_deployment.py b/src/prefect/server/database/_migrations/versions/sqlite/2023_10_12_175815_f3165ae0a213_add_last_polled_to_deployment.py
similarity index 100%
rename from
src/prefect/server/database/migrations/versions/sqlite/2023_10_12_175815_f3165ae0a213_add_last_polled_to_deployment.py rename to src/prefect/server/database/_migrations/versions/sqlite/2023_10_12_175815_f3165ae0a213_add_last_polled_to_deployment.py diff --git a/src/prefect/server/database/migrations/versions/sqlite/2023_10_30_075026_cef24af2ec34_add_block_type_name_to_block_document.py b/src/prefect/server/database/_migrations/versions/sqlite/2023_10_30_075026_cef24af2ec34_add_block_type_name_to_block_document.py similarity index 100% rename from src/prefect/server/database/migrations/versions/sqlite/2023_10_30_075026_cef24af2ec34_add_block_type_name_to_block_document.py rename to src/prefect/server/database/_migrations/versions/sqlite/2023_10_30_075026_cef24af2ec34_add_block_type_name_to_block_document.py diff --git a/src/prefect/server/database/migrations/versions/sqlite/2023_10_30_103720_22ef3915ccd8_index_and_backfill_block_type_name.py b/src/prefect/server/database/_migrations/versions/sqlite/2023_10_30_103720_22ef3915ccd8_index_and_backfill_block_type_name.py similarity index 100% rename from src/prefect/server/database/migrations/versions/sqlite/2023_10_30_103720_22ef3915ccd8_index_and_backfill_block_type_name.py rename to src/prefect/server/database/_migrations/versions/sqlite/2023_10_30_103720_22ef3915ccd8_index_and_backfill_block_type_name.py diff --git a/src/prefect/server/database/migrations/versions/sqlite/2023_11_20_084708_9c493c02ca6d_add_trgm_index_to_block_document_name.py b/src/prefect/server/database/_migrations/versions/sqlite/2023_11_20_084708_9c493c02ca6d_add_trgm_index_to_block_document_name.py similarity index 100% rename from src/prefect/server/database/migrations/versions/sqlite/2023_11_20_084708_9c493c02ca6d_add_trgm_index_to_block_document_name.py rename to src/prefect/server/database/_migrations/versions/sqlite/2023_11_20_084708_9c493c02ca6d_add_trgm_index_to_block_document_name.py diff --git a/src/prefect/server/database/migrations/versions/sqlite/2023_12_07_095112_a299308852a7_create_flow_run_input_table.py b/src/prefect/server/database/_migrations/versions/sqlite/2023_12_07_095112_a299308852a7_create_flow_run_input_table.py similarity index 100% rename from src/prefect/server/database/migrations/versions/sqlite/2023_12_07_095112_a299308852a7_create_flow_run_input_table.py rename to src/prefect/server/database/_migrations/versions/sqlite/2023_12_07_095112_a299308852a7_create_flow_run_input_table.py diff --git a/src/prefect/server/database/migrations/versions/sqlite/2023_12_07_121624_35659cc49969_make_flowruninput_flow_run_id_a_foreign_.py b/src/prefect/server/database/_migrations/versions/sqlite/2023_12_07_121624_35659cc49969_make_flowruninput_flow_run_id_a_foreign_.py similarity index 100% rename from src/prefect/server/database/migrations/versions/sqlite/2023_12_07_121624_35659cc49969_make_flowruninput_flow_run_id_a_foreign_.py rename to src/prefect/server/database/_migrations/versions/sqlite/2023_12_07_121624_35659cc49969_make_flowruninput_flow_run_id_a_foreign_.py diff --git a/src/prefect/server/database/migrations/versions/sqlite/2024_01_05_101041_c63a0a6dc787_add_sender_to_flowruninput.py b/src/prefect/server/database/_migrations/versions/sqlite/2024_01_05_101041_c63a0a6dc787_add_sender_to_flowruninput.py similarity index 100% rename from src/prefect/server/database/migrations/versions/sqlite/2024_01_05_101041_c63a0a6dc787_add_sender_to_flowruninput.py rename to 
src/prefect/server/database/_migrations/versions/sqlite/2024_01_05_101041_c63a0a6dc787_add_sender_to_flowruninput.py diff --git a/src/prefect/server/database/migrations/versions/sqlite/2024_01_22_120214_265eb1a2da4c_create_deployment_schedule_and_add_.py b/src/prefect/server/database/_migrations/versions/sqlite/2024_01_22_120214_265eb1a2da4c_create_deployment_schedule_and_add_.py similarity index 100% rename from src/prefect/server/database/migrations/versions/sqlite/2024_01_22_120214_265eb1a2da4c_create_deployment_schedule_and_add_.py rename to src/prefect/server/database/_migrations/versions/sqlite/2024_01_22_120214_265eb1a2da4c_create_deployment_schedule_and_add_.py diff --git a/src/prefect/server/database/migrations/versions/sqlite/2024_03_05_115258_342220764f0b_add_job_variables_column_to_flow_runs.py b/src/prefect/server/database/_migrations/versions/sqlite/2024_03_05_115258_342220764f0b_add_job_variables_column_to_flow_runs.py similarity index 100% rename from src/prefect/server/database/migrations/versions/sqlite/2024_03_05_115258_342220764f0b_add_job_variables_column_to_flow_runs.py rename to src/prefect/server/database/_migrations/versions/sqlite/2024_03_05_115258_342220764f0b_add_job_variables_column_to_flow_runs.py diff --git a/src/prefect/server/database/migrations/versions/sqlite/2024_03_13_111316_bacc60edce16_create_csrf_token_toble.py b/src/prefect/server/database/_migrations/versions/sqlite/2024_03_13_111316_bacc60edce16_create_csrf_token_toble.py similarity index 100% rename from src/prefect/server/database/migrations/versions/sqlite/2024_03_13_111316_bacc60edce16_create_csrf_token_toble.py rename to src/prefect/server/database/_migrations/versions/sqlite/2024_03_13_111316_bacc60edce16_create_csrf_token_toble.py diff --git a/src/prefect/server/database/migrations/versions/sqlite/2024_04_03_111618_07ed05dfd4ec_automations_models.py b/src/prefect/server/database/_migrations/versions/sqlite/2024_04_03_111618_07ed05dfd4ec_automations_models.py similarity index 100% rename from src/prefect/server/database/migrations/versions/sqlite/2024_04_03_111618_07ed05dfd4ec_automations_models.py rename to src/prefect/server/database/_migrations/versions/sqlite/2024_04_03_111618_07ed05dfd4ec_automations_models.py diff --git a/src/prefect/server/database/migrations/versions/sqlite/2024_04_04_114538_8644a9595a08_add_deployment_version_to_flow_run.py b/src/prefect/server/database/_migrations/versions/sqlite/2024_04_04_114538_8644a9595a08_add_deployment_version_to_flow_run.py similarity index 100% rename from src/prefect/server/database/migrations/versions/sqlite/2024_04_04_114538_8644a9595a08_add_deployment_version_to_flow_run.py rename to src/prefect/server/database/_migrations/versions/sqlite/2024_04_04_114538_8644a9595a08_add_deployment_version_to_flow_run.py diff --git a/src/prefect/server/database/migrations/versions/sqlite/2024_04_09_125712_cc510aec4689_automation_event_follower.py b/src/prefect/server/database/_migrations/versions/sqlite/2024_04_09_125712_cc510aec4689_automation_event_follower.py similarity index 100% rename from src/prefect/server/database/migrations/versions/sqlite/2024_04_09_125712_cc510aec4689_automation_event_follower.py rename to src/prefect/server/database/_migrations/versions/sqlite/2024_04_09_125712_cc510aec4689_automation_event_follower.py diff --git a/src/prefect/server/database/migrations/versions/sqlite/2024_04_09_131832_2b6c2b548f95_trigger_in_index.py 
b/src/prefect/server/database/_migrations/versions/sqlite/2024_04_09_131832_2b6c2b548f95_trigger_in_index.py similarity index 100% rename from src/prefect/server/database/migrations/versions/sqlite/2024_04_09_131832_2b6c2b548f95_trigger_in_index.py rename to src/prefect/server/database/_migrations/versions/sqlite/2024_04_09_131832_2b6c2b548f95_trigger_in_index.py diff --git a/src/prefect/server/database/migrations/versions/sqlite/2024_04_10_104304_824e9edafa60_adds_events_tables.py b/src/prefect/server/database/_migrations/versions/sqlite/2024_04_10_104304_824e9edafa60_adds_events_tables.py similarity index 100% rename from src/prefect/server/database/migrations/versions/sqlite/2024_04_10_104304_824e9edafa60_adds_events_tables.py rename to src/prefect/server/database/_migrations/versions/sqlite/2024_04_10_104304_824e9edafa60_adds_events_tables.py diff --git a/src/prefect/server/database/migrations/versions/sqlite/2024_04_23_094701_75c8f17b8b51_work_status_fields.py b/src/prefect/server/database/_migrations/versions/sqlite/2024_04_23_094701_75c8f17b8b51_work_status_fields.py similarity index 100% rename from src/prefect/server/database/migrations/versions/sqlite/2024_04_23_094701_75c8f17b8b51_work_status_fields.py rename to src/prefect/server/database/_migrations/versions/sqlite/2024_04_23_094701_75c8f17b8b51_work_status_fields.py diff --git a/src/prefect/server/database/migrations/versions/sqlite/2024_04_25_155120_a8e62d4c72cf_worker_status_field.py b/src/prefect/server/database/_migrations/versions/sqlite/2024_04_25_155120_a8e62d4c72cf_worker_status_field.py similarity index 100% rename from src/prefect/server/database/migrations/versions/sqlite/2024_04_25_155120_a8e62d4c72cf_worker_status_field.py rename to src/prefect/server/database/_migrations/versions/sqlite/2024_04_25_155120_a8e62d4c72cf_worker_status_field.py diff --git a/src/prefect/server/database/migrations/versions/sqlite/2024_05_01_103824_20fbd53b3cef_add_catchup_fields_to_deploymentschedule.py b/src/prefect/server/database/_migrations/versions/sqlite/2024_05_01_103824_20fbd53b3cef_add_catchup_fields_to_deploymentschedule.py similarity index 100% rename from src/prefect/server/database/migrations/versions/sqlite/2024_05_01_103824_20fbd53b3cef_add_catchup_fields_to_deploymentschedule.py rename to src/prefect/server/database/_migrations/versions/sqlite/2024_05_01_103824_20fbd53b3cef_add_catchup_fields_to_deploymentschedule.py diff --git a/src/prefect/server/database/migrations/versions/sqlite/2024_05_21_123101_2ac65f1758c2_json_variables.py b/src/prefect/server/database/_migrations/versions/sqlite/2024_05_21_123101_2ac65f1758c2_json_variables.py similarity index 100% rename from src/prefect/server/database/migrations/versions/sqlite/2024_05_21_123101_2ac65f1758c2_json_variables.py rename to src/prefect/server/database/_migrations/versions/sqlite/2024_05_21_123101_2ac65f1758c2_json_variables.py diff --git a/src/prefect/server/database/migrations/versions/sqlite/2024_07_15_145350_354f1ede7e9f_adding_scope_to_followers.py b/src/prefect/server/database/_migrations/versions/sqlite/2024_07_15_145350_354f1ede7e9f_adding_scope_to_followers.py similarity index 100% rename from src/prefect/server/database/migrations/versions/sqlite/2024_07_15_145350_354f1ede7e9f_adding_scope_to_followers.py rename to src/prefect/server/database/_migrations/versions/sqlite/2024_07_15_145350_354f1ede7e9f_adding_scope_to_followers.py diff --git 
a/src/prefect/server/database/migrations/versions/sqlite/2024_08_14_145052_f93e1439f022_add_deployment_concurrency_limit.py b/src/prefect/server/database/_migrations/versions/sqlite/2024_08_14_145052_f93e1439f022_add_deployment_concurrency_limit.py similarity index 100% rename from src/prefect/server/database/migrations/versions/sqlite/2024_08_14_145052_f93e1439f022_add_deployment_concurrency_limit.py rename to src/prefect/server/database/_migrations/versions/sqlite/2024_08_14_145052_f93e1439f022_add_deployment_concurrency_limit.py diff --git a/src/prefect/server/database/migrations/versions/sqlite/2024_09_11_090106_7d6350aea855_add_concurrency_options.py b/src/prefect/server/database/_migrations/versions/sqlite/2024_09_11_090106_7d6350aea855_add_concurrency_options.py similarity index 100% rename from src/prefect/server/database/migrations/versions/sqlite/2024_09_11_090106_7d6350aea855_add_concurrency_options.py rename to src/prefect/server/database/_migrations/versions/sqlite/2024_09_11_090106_7d6350aea855_add_concurrency_options.py diff --git a/src/prefect/server/database/migrations/versions/sqlite/2024_09_16_162719_4ad4658cbefe_add_deployment_to_global_concurrency_.py b/src/prefect/server/database/_migrations/versions/sqlite/2024_09_16_162719_4ad4658cbefe_add_deployment_to_global_concurrency_.py similarity index 100% rename from src/prefect/server/database/migrations/versions/sqlite/2024_09_16_162719_4ad4658cbefe_add_deployment_to_global_concurrency_.py rename to src/prefect/server/database/_migrations/versions/sqlite/2024_09_16_162719_4ad4658cbefe_add_deployment_to_global_concurrency_.py diff --git a/src/prefect/server/database/migrations/versions/sqlite/2024_11_15_151042_5952a5498b51_add_labels_column_to_flow_flowrun_.py b/src/prefect/server/database/_migrations/versions/sqlite/2024_11_15_151042_5952a5498b51_add_labels_column_to_flow_flowrun_.py similarity index 100% rename from src/prefect/server/database/migrations/versions/sqlite/2024_11_15_151042_5952a5498b51_add_labels_column_to_flow_flowrun_.py rename to src/prefect/server/database/_migrations/versions/sqlite/2024_11_15_151042_5952a5498b51_add_labels_column_to_flow_flowrun_.py diff --git a/src/prefect/server/database/alembic.ini b/src/prefect/server/database/alembic.ini index 5d8eb592d893..a834d0348775 100644 --- a/src/prefect/server/database/alembic.ini +++ b/src/prefect/server/database/alembic.ini @@ -1,5 +1,5 @@ [alembic] -script_location = prefect:server:database:migrations +script_location = prefect:server:database:_migrations prepend_sys_path = . 
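After this patch, the Alembic config and the ORM both point at the private `_migrations` package. A minimal sketch of the path resolution these changes imply, assuming a local install with the patch applied (the variable name is illustrative, not part of the patch):

```python
# Sketch: resolve the SQLite migrations directory the same way the updated
# `versions_dir` property does after the rename.
from pathlib import Path

import prefect.server.database

sqlite_versions_dir = (
    Path(prefect.server.database.__file__).parent
    / "_migrations"
    / "versions"
    / "sqlite"
)
# Each .py file in this directory is one Alembic revision.
print(sqlite_versions_dir.is_dir())
```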
From ae3ae3ea69e971c9880f42c36e2d0c3c2cd9d9b7 Mon Sep 17 00:00:00 2001
From: Alexander Streed
Date: Wed, 4 Dec 2024 12:56:12 -0600
Subject: [PATCH 10/92] Improve typing completeness of internal base models
 (#16217)

---
 src/prefect/_internal/schemas/bases.py | 16 +++++++++-------
 1 file changed, 9 insertions(+), 7 deletions(-)

diff --git a/src/prefect/_internal/schemas/bases.py b/src/prefect/_internal/schemas/bases.py
index 1c41ad033e0d..f85907f51af7 100644
--- a/src/prefect/_internal/schemas/bases.py
+++ b/src/prefect/_internal/schemas/bases.py
@@ -4,7 +4,7 @@
 
 import datetime
 import os
-from typing import Any, ClassVar, Optional, Set, TypeVar
+from typing import Any, ClassVar, Generator, Optional, Set, TypeVar, cast
 from uuid import UUID, uuid4
 
 import pendulum
@@ -33,7 +33,7 @@ class PrefectBaseModel(BaseModel):
 
     _reset_fields: ClassVar[Set[str]] = set()
 
-    model_config = ConfigDict(
+    model_config: ClassVar[ConfigDict] = ConfigDict(
         ser_json_timedelta="float",
         defer_build=True,
         extra=(
@@ -58,7 +58,7 @@ def __eq__(self, other: Any) -> bool:
         else:
             return copy_dict == other
 
-    def __rich_repr__(self):
+    def __rich_repr__(self) -> Generator[tuple[str, Any, Any], None, None]:
         # Display all of the fields in the model if they differ from the default value
         for name, field in self.model_fields.items():
             value = getattr(self, name)
@@ -71,9 +71,11 @@ def __rich_repr__(self):
                 and name == "timestamp"
                 and value
             ):
-                value = pendulum.instance(value).isoformat()
+                value = cast(pendulum.DateTime, pendulum.instance(value)).isoformat()
             elif isinstance(field.annotation, datetime.datetime) and value:
-                value = pendulum.instance(value).diff_for_humans()
+                value = cast(
+                    pendulum.DateTime, pendulum.instance(value)
+                ).diff_for_humans()
 
             yield name, value, field.get_default()
 
@@ -113,11 +115,11 @@ class ObjectBaseModel(IDBaseModel):
     """
 
     _reset_fields: ClassVar[Set[str]] = {"id", "created", "updated"}
-    model_config = ConfigDict(from_attributes=True)
+    model_config: ClassVar[ConfigDict] = ConfigDict(from_attributes=True)
 
     created: Optional[DateTime] = Field(default=None, repr=False)
     updated: Optional[DateTime] = Field(default=None, repr=False)
 
 
 class ActionBaseModel(PrefectBaseModel):
-    model_config: ConfigDict = ConfigDict(extra="forbid")
+    model_config: ClassVar[ConfigDict] = ConfigDict(extra="forbid")
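The `ClassVar[ConfigDict]` annotations above match Pydantic v2's own declaration of `model_config` as a class-level setting rather than an instance field, which is what makes the type checker happy here. A minimal sketch of the pattern, assuming Pydantic v2 is installed (model names are illustrative):

```python
from typing import ClassVar

from pydantic import BaseModel, ConfigDict


class StrictModel(BaseModel):
    # Annotating with ClassVar mirrors Pydantic's own declaration and stops
    # type checkers from inferring a per-instance attribute.
    model_config: ClassVar[ConfigDict] = ConfigDict(extra="forbid")

    name: str


class LenientModel(StrictModel):
    # Subclasses may override the class-level config.
    model_config: ClassVar[ConfigDict] = ConfigDict(extra="ignore")


# Extras are silently dropped here; StrictModel would raise instead.
print(LenientModel(name="x", extra_field=1).name)
```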
From 5a93993bce64ab05d9e75f0bca8a95bc46f9588c Mon Sep 17 00:00:00 2001
From: nate nowack
Date: Wed, 4 Dec 2024 13:01:00 -0600
Subject: [PATCH 11/92] update typing in `blocks.abstract` and `concurrency`
 (#16212)

---
 src/prefect/blocks/abstract.py                | 22 +++++-----
 src/prefect/blocks/core.py                    |  4 +-
 src/prefect/concurrency/asyncio.py            | 15 +++----
 src/prefect/concurrency/sync.py               | 40 ++++++++++++++---
 .../test_acquire_concurrency_slots.py         |  8 ++--
 tests/concurrency/test_concurrency_asyncio.py | 44 +++++++++----------
 tests/concurrency/test_concurrency_sync.py    |  9 ++--
 .../test_release_concurrency_slots.py         |  8 ++--
 .../v1/test_increment_concurrency_limits.py   |  8 ++--
 9 files changed, 97 insertions(+), 61 deletions(-)

diff --git a/src/prefect/blocks/abstract.py b/src/prefect/blocks/abstract.py
index c35a8bf5a462..cf30d546a1d1 100644
--- a/src/prefect/blocks/abstract.py
+++ b/src/prefect/blocks/abstract.py
@@ -1,6 +1,6 @@
 from abc import ABC, abstractmethod
 from contextlib import contextmanager
-from logging import Logger
+from logging import Logger, LoggerAdapter
 from pathlib import Path
 from typing import (
     Any,
@@ -15,7 +15,7 @@
     Union,
 )
 
-from typing_extensions import Self
+from typing_extensions import Self, TypeAlias
 
 from prefect.blocks.core import Block
 from prefect.exceptions import MissingContextError
@@ -23,6 +23,8 @@
 
 T = TypeVar("T")
 
+LoggerOrAdapter: TypeAlias = Union[Logger, LoggerAdapter]
+
 
 class CredentialsBlock(Block, ABC):
     """
@@ -34,7 +36,7 @@ class CredentialsBlock(Block, ABC):
     """
 
     @property
-    def logger(self) -> Logger:
+    def logger(self) -> LoggerOrAdapter:
         """
         Returns a logger based on whether the CredentialsBlock
         is called from within a flow or task run context.
@@ -73,10 +75,10 @@ class NotificationBlock(Block, ABC):
     """
 
     _block_schema_capabilities = ["notify"]
-    _events_excluded_methods = Block._events_excluded_methods.default + ["notify"]
+    _events_excluded_methods = Block._events_excluded_methods + ["notify"]
 
     @property
-    def logger(self) -> Logger:
+    def logger(self) -> LoggerOrAdapter:
         """
         Returns a logger based on whether the NotificationBlock
         is called from within a flow or task run context.
@@ -123,7 +125,7 @@ class JobRun(ABC, Generic[T]):  # not a block
     """
 
     @property
-    def logger(self) -> Logger:
+    def logger(self) -> LoggerOrAdapter:
         """
         Returns a logger based on whether the JobRun
         is called from within a flow or task run context.
@@ -158,7 +160,7 @@ class JobBlock(Block, ABC):
     """
 
     @property
-    def logger(self) -> Logger:
+    def logger(self) -> LoggerOrAdapter:
         """
         Returns a logger based on whether the JobBlock
         is called from within a flow or task run context.
@@ -202,7 +204,7 @@ class DatabaseBlock(Block, ABC):
     """
 
     @property
-    def logger(self) -> Logger:
+    def logger(self) -> LoggerOrAdapter:
         """
         Returns a logger based on whether the DatabaseBlock
         is called from within a flow or task run context.
@@ -337,7 +339,7 @@ class ObjectStorageBlock(Block, ABC):
     """
 
     @property
-    def logger(self) -> Logger:
+    def logger(self) -> LoggerOrAdapter:
         """
         Returns a logger based on whether the ObjectStorageBlock
         is called from within a flow or task run context.
@@ -469,7 +471,7 @@ class SecretBlock(Block, ABC):
     """
 
     @property
-    def logger(self) -> Logger:
+    def logger(self) -> LoggerOrAdapter:
         """
         Returns a logger based on whether the SecretBlock
         is called from within a flow or task run context.
diff --git a/src/prefect/blocks/core.py b/src/prefect/blocks/core.py
index 5b0b6388bf8c..cdd403bb767c 100644
--- a/src/prefect/blocks/core.py
+++ b/src/prefect/blocks/core.py
@@ -326,7 +326,9 @@ def block_initialization(self) -> None:
 
     # Exclude `save` as it uses the `sync_compatible` decorator and needs to be
     # decorated directly.
-    _events_excluded_methods = ["block_initialization", "save", "dict"]
+    _events_excluded_methods: ClassVar[List[str]] = PrivateAttr(
+        default=["block_initialization", "save", "dict"]
+    )
 
     @classmethod
     def __dispatch_key__(cls):
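The new `LoggerOrAdapter` alias reflects what these `logger` properties can actually return: a run-aware `LoggerAdapter` inside a flow or task run, and a plain `Logger` outside of one. A rough sketch of the pattern the blocks follow, assuming `get_run_logger` raises `MissingContextError` outside a run (the function name below is illustrative):

```python
from logging import Logger, LoggerAdapter, getLogger
from typing import Union

from typing_extensions import TypeAlias

from prefect.exceptions import MissingContextError
from prefect.logging.loggers import get_run_logger

LoggerOrAdapter: TypeAlias = Union[Logger, LoggerAdapter]


def block_logger(fallback_name: str) -> LoggerOrAdapter:
    # Inside a flow or task run, return the run-aware adapter; otherwise
    # fall back to a plain module-level logger.
    try:
        return get_run_logger()
    except MissingContextError:
        return getLogger(fallback_name)
```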
diff --git a/src/prefect/concurrency/asyncio.py b/src/prefect/concurrency/asyncio.py
index cb8a751f8aa1..5d419a6c079f 100644
--- a/src/prefect/concurrency/asyncio.py
+++ b/src/prefect/concurrency/asyncio.py
@@ -17,7 +17,6 @@
 from prefect.client.orchestration import get_client
 from prefect.client.schemas.responses import MinimalConcurrencyLimitResponse
 from prefect.logging.loggers import get_run_logger
-from prefect.utilities.asyncutils import sync_compatible
 
 from .context import ConcurrencyContext
 from .events import (
@@ -79,7 +78,7 @@ async def main():
 
     names = names if isinstance(names, list) else [names]
 
-    limits = await _acquire_concurrency_slots(
+    limits = await _aacquire_concurrency_slots(
         names,
         occupy,
         timeout_seconds=timeout_seconds,
@@ -95,7 +94,7 @@ async def main():
     finally:
         occupancy_period = cast(Interval, (pendulum.now("UTC") - acquisition_time))
         try:
-            await _release_concurrency_slots(
+            await _arelease_concurrency_slots(
                 names, occupy, occupancy_period.total_seconds()
             )
         except anyio.get_cancelled_exc_class():
@@ -138,7 +137,7 @@ async def rate_limit(
 
     names = names if isinstance(names, list) else [names]
 
-    limits = await _acquire_concurrency_slots(
+    limits = await _aacquire_concurrency_slots(
         names,
         occupy,
         mode="rate_limit",
@@ -149,7 +148,6 @@ async def rate_limit(
     _emit_concurrency_acquisition_events(limits, occupy)
 
 
-@sync_compatible
 @deprecated_parameter(
     name="create_if_missing",
     start_date="Sep 2024",
@@ -157,10 +155,10 @@
     when=lambda x: x is not None,
     help="Limits must be explicitly created before acquiring concurrency slots; see `strict` if you want to enforce this behavior.",
 )
-async def _acquire_concurrency_slots(
+async def _aacquire_concurrency_slots(
     names: List[str],
     slots: int,
-    mode: Union[Literal["concurrency"], Literal["rate_limit"]] = "concurrency",
+    mode: Literal["concurrency", "rate_limit"] = "concurrency",
     timeout_seconds: Optional[float] = None,
     create_if_missing: Optional[bool] = None,
     max_retries: Optional[int] = None,
@@ -199,8 +197,7 @@
     return retval
 
 
-@sync_compatible
-async def _release_concurrency_slots(
+async def _arelease_concurrency_slots(
     names: List[str], slots: int, occupancy_seconds: float
 ) -> List[MinimalConcurrencyLimitResponse]:
     async with get_client() as client:
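With `sync_compatible` dropped, these helpers are plain coroutines and the public async API is awaited directly. A short usage sketch, assuming a `database` global concurrency limit already exists on the server (the limit names are placeholders):

```python
import asyncio

from prefect.concurrency.asyncio import concurrency, rate_limit


async def main() -> None:
    # Occupies one slot on the `database` limit for the duration of the
    # block; the slot and its occupancy period are released on exit.
    async with concurrency("database", occupy=1):
        await asyncio.sleep(1)

    # Rate limits only acquire slots; server-side slot decay frees them.
    await rate_limit("outbound-api")


if __name__ == "__main__":
    asyncio.run(main())
```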
concurrency slots") + return result + + +def _acquire_concurrency_slots( + names: List[str], + slots: int, + mode: Literal["concurrency", "rate_limit"] = "concurrency", + timeout_seconds: Optional[float] = None, + create_if_missing: Optional[bool] = None, + max_retries: Optional[int] = None, + strict: bool = False, +) -> List[MinimalConcurrencyLimitResponse]: + result = run_coro_as_sync( + _aacquire_concurrency_slots( + names, slots, mode, timeout_seconds, create_if_missing, max_retries, strict + ) + ) + if result is None: + raise RuntimeError("Failed to acquire concurrency slots") + return result + + @contextmanager def concurrency( names: Union[str, List[str]], @@ -81,7 +114,6 @@ def main(): create_if_missing=create_if_missing, strict=strict, max_retries=max_retries, - _sync=True, ) acquisition_time = pendulum.now("UTC") emitted_events = _emit_concurrency_acquisition_events(limits, occupy) @@ -94,7 +126,6 @@ def main(): names, occupy, occupancy_period.total_seconds(), - _sync=True, ) _emit_concurrency_release_events(limits, occupy, emitted_events) @@ -134,6 +165,5 @@ def rate_limit( timeout_seconds=timeout_seconds, create_if_missing=create_if_missing, strict=strict, - _sync=True, ) _emit_concurrency_acquisition_events(limits, occupy) diff --git a/tests/concurrency/test_acquire_concurrency_slots.py b/tests/concurrency/test_acquire_concurrency_slots.py index c8a267f6cf78..a6d1817051a7 100644 --- a/tests/concurrency/test_acquire_concurrency_slots.py +++ b/tests/concurrency/test_acquire_concurrency_slots.py @@ -4,7 +4,9 @@ from httpx import Response from prefect.client.schemas.responses import MinimalConcurrencyLimitResponse -from prefect.concurrency.asyncio import _acquire_concurrency_slots +from prefect.concurrency.asyncio import ( + _aacquire_concurrency_slots, +) async def test_calls_increment_client_method(): @@ -21,7 +23,7 @@ async def test_calls_increment_client_method(): ) increment_concurrency_slots.return_value = response - await _acquire_concurrency_slots( + await _aacquire_concurrency_slots( names=["test-1", "test-2"], slots=1, mode="concurrency" ) increment_concurrency_slots.assert_called_once_with( @@ -46,5 +48,5 @@ async def test_returns_minimal_concurrency_limit(): ) increment_concurrency_slots.return_value = response - result = await _acquire_concurrency_slots(["test-1", "test-2"], 1) + result = await _aacquire_concurrency_slots(["test-1", "test-2"], 1) assert result == limits diff --git a/tests/concurrency/test_concurrency_asyncio.py b/tests/concurrency/test_concurrency_asyncio.py index 56bd4af523aa..ff1306e95e01 100644 --- a/tests/concurrency/test_concurrency_asyncio.py +++ b/tests/concurrency/test_concurrency_asyncio.py @@ -7,8 +7,8 @@ from prefect import flow, task from prefect.concurrency.asyncio import ( ConcurrencySlotAcquisitionError, - _acquire_concurrency_slots, - _release_concurrency_slots, + _aacquire_concurrency_slots, + _arelease_concurrency_slots, concurrency, rate_limit, ) @@ -28,12 +28,12 @@ async def resource_heavy(): assert not executed with mock.patch( - "prefect.concurrency.asyncio._acquire_concurrency_slots", - wraps=_acquire_concurrency_slots, + "prefect.concurrency.asyncio._aacquire_concurrency_slots", + wraps=_aacquire_concurrency_slots, ) as acquire_spy: with mock.patch( - "prefect.concurrency.asyncio._release_concurrency_slots", - wraps=_release_concurrency_slots, + "prefect.concurrency.asyncio._arelease_concurrency_slots", + wraps=_arelease_concurrency_slots, ) as release_spy: await resource_heavy() @@ -93,7 +93,7 @@ async def my_flow(): state = 
diff --git a/tests/concurrency/test_acquire_concurrency_slots.py b/tests/concurrency/test_acquire_concurrency_slots.py
index c8a267f6cf78..a6d1817051a7 100644
--- a/tests/concurrency/test_acquire_concurrency_slots.py
+++ b/tests/concurrency/test_acquire_concurrency_slots.py
@@ -4,7 +4,9 @@
 from httpx import Response
 
 from prefect.client.schemas.responses import MinimalConcurrencyLimitResponse
-from prefect.concurrency.asyncio import _acquire_concurrency_slots
+from prefect.concurrency.asyncio import (
+    _aacquire_concurrency_slots,
+)
 
 
 async def test_calls_increment_client_method():
@@ -21,7 +23,7 @@ async def test_calls_increment_client_method():
     )
     increment_concurrency_slots.return_value = response
 
-    await _acquire_concurrency_slots(
+    await _aacquire_concurrency_slots(
         names=["test-1", "test-2"], slots=1, mode="concurrency"
     )
     increment_concurrency_slots.assert_called_once_with(
@@ -46,5 +48,5 @@ async def test_returns_minimal_concurrency_limit():
     )
     increment_concurrency_slots.return_value = response
 
-    result = await _acquire_concurrency_slots(["test-1", "test-2"], 1)
+    result = await _aacquire_concurrency_slots(["test-1", "test-2"], 1)
     assert result == limits
diff --git a/tests/concurrency/test_concurrency_asyncio.py b/tests/concurrency/test_concurrency_asyncio.py
index 56bd4af523aa..ff1306e95e01 100644
--- a/tests/concurrency/test_concurrency_asyncio.py
+++ b/tests/concurrency/test_concurrency_asyncio.py
@@ -7,8 +7,8 @@
 from prefect import flow, task
 from prefect.concurrency.asyncio import (
     ConcurrencySlotAcquisitionError,
-    _acquire_concurrency_slots,
-    _release_concurrency_slots,
+    _aacquire_concurrency_slots,
+    _arelease_concurrency_slots,
     concurrency,
     rate_limit,
 )
@@ -28,12 +28,12 @@ async def resource_heavy():
     assert not executed
 
     with mock.patch(
-        "prefect.concurrency.asyncio._acquire_concurrency_slots",
-        wraps=_acquire_concurrency_slots,
+        "prefect.concurrency.asyncio._aacquire_concurrency_slots",
+        wraps=_aacquire_concurrency_slots,
     ) as acquire_spy:
         with mock.patch(
-            "prefect.concurrency.asyncio._release_concurrency_slots",
-            wraps=_release_concurrency_slots,
+            "prefect.concurrency.asyncio._arelease_concurrency_slots",
+            wraps=_arelease_concurrency_slots,
         ) as release_spy:
             await resource_heavy()
@@ -93,7 +93,7 @@ async def my_flow():
     state = await my_flow(return_state=True)
     assert state.is_failed()
     with pytest.raises(ConcurrencySlotAcquisitionError):
-        await state.result()
+        await state.result()  # type: ignore[reportGeneralTypeIssues]
 
 
 async def test_concurrency_emits_events(
@@ -112,7 +112,7 @@ async def resource_heavy():
 
     await resource_heavy()
 
-    await asserting_events_worker.drain()
+    await asserting_events_worker.drain()  # type: ignore[reportGeneralTypeIssues]
     assert isinstance(asserting_events_worker._client, AssertingEventsClient)
     assert len(asserting_events_worker._client.events) == 4  # 2 acquire, 2 release
@@ -221,12 +221,12 @@ async def resource_heavy():
     assert not executed
 
     with mock.patch(
-        "prefect.concurrency.asyncio._acquire_concurrency_slots",
-        wraps=_acquire_concurrency_slots,
+        "prefect.concurrency.asyncio._aacquire_concurrency_slots",
+        wraps=_aacquire_concurrency_slots,
     ) as acquire_spy:
         with mock.patch(
-            "prefect.concurrency.asyncio._release_concurrency_slots",
-            wraps=_release_concurrency_slots,
+            "prefect.concurrency.asyncio._arelease_concurrency_slots",
+            wraps=_arelease_concurrency_slots,
         ) as release_spy:
             await resource_heavy()
@@ -281,7 +281,7 @@ async def my_flow():
     state = await my_flow(return_state=True)
     assert state.is_failed()
     with pytest.raises(ConcurrencySlotAcquisitionError):
-        await state.result()
+        await state.result()  # type: ignore[reportGeneralTypeIssues]
 
 
 async def test_rate_limit_emits_events(
@@ -296,7 +296,7 @@ async def resource_heavy():
 
     await resource_heavy()
 
-    await asserting_events_worker.drain()
+    await asserting_events_worker.drain()  # type: ignore[reportGeneralTypeIssues]
     assert isinstance(asserting_events_worker._client, AssertingEventsClient)
     assert len(asserting_events_worker._client.events) == 2
@@ -373,11 +373,11 @@ async def resource_heavy():
     assert not executed
 
     with mock.patch(
-        "prefect.concurrency.asyncio._acquire_concurrency_slots",
+        "prefect.concurrency.sync._acquire_concurrency_slots",
         wraps=lambda *args, **kwargs: None,
     ) as acquire_spy:
         with mock.patch(
-            "prefect.concurrency.asyncio._release_concurrency_slots",
+            "prefect.concurrency.sync._arelease_concurrency_slots",
             wraps=lambda *args, **kwargs: None,
         ) as release_spy:
             await resource_heavy()
@@ -401,12 +401,12 @@ async def resource_heavy():
     assert not executed
 
     with mock.patch(
-        "prefect.concurrency.asyncio._acquire_concurrency_slots",
-        wraps=_acquire_concurrency_slots,
+        "prefect.concurrency.asyncio._aacquire_concurrency_slots",
+        wraps=_aacquire_concurrency_slots,
    ) as acquire_spy:
         with mock.patch(
-            "prefect.concurrency.asyncio._release_concurrency_slots",
-            wraps=_release_concurrency_slots,
+            "prefect.concurrency.asyncio._arelease_concurrency_slots",
+            wraps=_arelease_concurrency_slots,
         ) as release_spy:
             await resource_heavy()
@@ -443,11 +443,11 @@ async def resource_heavy():
     assert not executed
 
     with mock.patch(
-        "prefect.concurrency.asyncio._acquire_concurrency_slots",
+        "prefect.concurrency.sync._acquire_concurrency_slots",
         wraps=lambda *args, **kwargs: None,
     ) as acquire_spy:
         with mock.patch(
-            "prefect.concurrency.asyncio._release_concurrency_slots",
+            "prefect.concurrency.sync._arelease_concurrency_slots",
             wraps=lambda *args, **kwargs: None,
         ) as release_spy:
             await resource_heavy()
diff --git a/tests/concurrency/test_concurrency_sync.py b/tests/concurrency/test_concurrency_sync.py
index 8be01fba5ed8..13724a097214 100644
--- a/tests/concurrency/test_concurrency_sync.py
+++ b/tests/concurrency/test_concurrency_sync.py
@@ -5,12 +5,13 @@
 from starlette import status
 
 from prefect import flow, task
-from prefect.concurrency.asyncio import (
-    ConcurrencySlotAcquisitionError,
+from prefect.concurrency.asyncio import ConcurrencySlotAcquisitionError
+from prefect.concurrency.sync import (
     _acquire_concurrency_slots,
     _release_concurrency_slots,
+    concurrency,
+    rate_limit,
 )
-from prefect.concurrency.sync import concurrency, rate_limit
 from prefect.events.clients import AssertingEventsClient
 from prefect.events.worker import EventsWorker
 from prefect.server.schemas.core import ConcurrencyLimitV2
@@ -43,7 +44,6 @@ def resource_heavy():
         create_if_missing=None,
         max_retries=None,
         strict=False,
-        _sync=True,
     )
 
     # On release we calculate how many seconds the slots were occupied
@@ -275,7 +275,6 @@ def resource_heavy():
         timeout_seconds=None,
         create_if_missing=None,
         strict=False,
-        _sync=True,
     )
 
     # When used as a rate limit concurrency slots are not explicitly
diff --git a/tests/concurrency/test_release_concurrency_slots.py b/tests/concurrency/test_release_concurrency_slots.py
index a5f66f2d1a81..98d477f724f3 100644
--- a/tests/concurrency/test_release_concurrency_slots.py
+++ b/tests/concurrency/test_release_concurrency_slots.py
@@ -4,7 +4,9 @@
 from httpx import Response
 
 from prefect.client.schemas.responses import MinimalConcurrencyLimitResponse
-from prefect.concurrency.asyncio import _release_concurrency_slots
+from prefect.concurrency.asyncio import (
+    _arelease_concurrency_slots,
+)
 
 
 async def test_calls_release_client_method():
@@ -21,7 +23,7 @@ async def test_calls_release_client_method():
     )
     client_release_concurrency_slots.return_value = response
 
-    await _release_concurrency_slots(
+    await _arelease_concurrency_slots(
         names=["test-1", "test-2"], slots=1, occupancy_seconds=1.0
     )
     client_release_concurrency_slots.assert_called_once_with(
@@ -45,5 +47,5 @@ async def test_returns_minimal_concurrency_limit():
     )
     client_release_concurrency_slots.return_value = response
 
-    result = await _release_concurrency_slots(["test-1", "test-2"], 1, 1.0)
+    result = await _arelease_concurrency_slots(["test-1", "test-2"], 1, 1.0)
     assert result == limits
diff --git a/tests/concurrency/v1/test_increment_concurrency_limits.py b/tests/concurrency/v1/test_increment_concurrency_limits.py
index 364403410c19..1ee3f0894349 100644
--- a/tests/concurrency/v1/test_increment_concurrency_limits.py
+++ b/tests/concurrency/v1/test_increment_concurrency_limits.py
@@ -4,7 +4,9 @@
 from httpx import Response
 
 from prefect.client.schemas.responses import MinimalConcurrencyLimitResponse
-from prefect.concurrency.asyncio import _acquire_concurrency_slots
+from prefect.concurrency.asyncio import (
+    _aacquire_concurrency_slots,
+)
 
 
 async def test_calls_increment_client_method():
@@ -25,7 +27,7 @@ async def test_calls_increment_client_method():
     )
     increment_concurrency_slots.return_value = response
 
-    await _acquire_concurrency_slots(
+    await _aacquire_concurrency_slots(
         names=["test-1", "test-2"], slots=1, mode="concurrency"
     )
     increment_concurrency_slots.assert_called_once_with(
@@ -54,5 +56,5 @@ async def test_returns_minimal_concurrency_limit():
     )
     increment_concurrency_slots.return_value = response
 
-    result = await _acquire_concurrency_slots(["test-1", "test-2"], 1)
+    result = await _aacquire_concurrency_slots(["test-1", "test-2"], 1)
     assert result == limits
From bd195546c8baa020b173133b968f1f9d460143c0 Mon Sep 17 00:00:00 2001
From: Devin Villarosa <102188207+devinvillarosa@users.noreply.github.com>
Date: Wed, 4 Dec 2024 11:09:46 -0800
Subject: [PATCH 12/92] [UI v2] feat: Adding Icon component to consolidate
 available icons for the UI (#16208)

---
 .../global-concurrency-limit-empty-state.tsx  |  4 +-
 ...task-run-concurrency-limit-empty-state.tsx |  4 +-
 ui-v2/src/components/flows/cells.tsx          |  5 +-
 ui-v2/src/components/flows/data-table.tsx     | 12 +++--
 .../flows/detail/deployment-columns.tsx       |  4 +-
 ui-v2/src/components/flows/detail/index.tsx   |  7 +--
 ui-v2/src/components/ui/button/button.tsx     |  8 ++-
 ui-v2/src/components/ui/docs-link.tsx         |  4 +-
 .../src/components/ui/empty-state.stories.tsx |  4 +-
 ui-v2/src/components/ui/empty-state.tsx       |  7 ++-
 ui-v2/src/components/ui/icons/README.md       | 22 ++++++++
 ui-v2/src/components/ui/icons/constants.ts    | 51 +++++++++++++++++++
 ui-v2/src/components/ui/icons/icon.tsx        | 11 ++++
 .../src/components/ui/icons/icons.stories.tsx | 50 ++++++++++++++++++
 ui-v2/src/components/ui/icons/index.ts        |  2 +
 ui-v2/src/components/ui/input.tsx             |  8 +--
 ui-v2/src/components/ui/pagination.tsx        | 22 ++++----
 ui-v2/src/components/ui/sidebar/sidebar.tsx   | 17 +++----
 ui-v2/src/components/ui/state-badge/index.tsx | 31 +++++------
 .../ui/state-badge/state-badge.test.tsx       | 32 +++++-------
 ui-v2/src/components/ui/tag-badge.tsx         |  4 +-
 .../components/variables/data-table/cells.tsx | 16 +++---
 .../variables/data-table/search.tsx           |  8 +--
 .../src/components/variables/empty-state.tsx  |  4 +-
 ui-v2/src/components/variables/layout.tsx     |  4 +-
 25 files changed, 233 insertions(+), 108 deletions(-)
 create mode 100644 ui-v2/src/components/ui/icons/README.md
 create mode 100644 ui-v2/src/components/ui/icons/constants.ts
 create mode 100644 ui-v2/src/components/ui/icons/icon.tsx
 create mode 100644 ui-v2/src/components/ui/icons/icons.stories.tsx
 create mode 100644 ui-v2/src/components/ui/icons/index.ts

diff --git a/ui-v2/src/components/concurrency/global-concurrency-limit-empty-state.tsx b/ui-v2/src/components/concurrency/global-concurrency-limit-empty-state.tsx
index 90d6dee1ca9c..1759a711e465 100644
--- a/ui-v2/src/components/concurrency/global-concurrency-limit-empty-state.tsx
+++ b/ui-v2/src/components/concurrency/global-concurrency-limit-empty-state.tsx
@@ -7,7 +7,7 @@ import {
   EmptyStateIcon,
   EmptyStateTitle,
 } from "@/components/ui/empty-state";
-import { PlusIcon } from "lucide-react";
+import { Icon } from "@/components/ui/icons";
 
 type Props = {
   onClick: () => void;
@@ -22,7 +22,7 @@ export const GlobalConcurrencyLimitEmptyState = ({ onClick }: Props) => (
diff --git a/ui-v2/src/components/concurrency/task-run-concurrency-limit-empty-state.tsx b/ui-v2/src/components/concurrency/task-run-concurrency-limit-empty-state.tsx
index 7256ff8b81a4..7d16f4977196 100644
--- a/ui-v2/src/components/concurrency/task-run-concurrency-limit-empty-state.tsx
+++ b/ui-v2/src/components/concurrency/task-run-concurrency-limit-empty-state.tsx
@@ -7,7 +7,7 @@ import {
   EmptyStateIcon,
   EmptyStateTitle,
 } from "@/components/ui/empty-state";
-import { PlusIcon } from "lucide-react";
+import { Icon } from "@/components/ui/icons";
 
 type Props = {
   onClick: () => void;
@@ -24,7 +24,7 @@ export const TaskRunConcurrencyLimitEmptyState = ({ onClick }: Props) => (
diff --git a/ui-v2/src/components/flows/cells.tsx b/ui-v2/src/components/flows/cells.tsx
index 451a13172d6b..842083426968 100644
--- a/ui-v2/src/components/flows/cells.tsx
+++ b/ui-v2/src/components/flows/cells.tsx
@@ -8,11 +8,12 @@ import {
   DropdownMenuSeparator,
   DropdownMenuTrigger,
 } from "@/components/ui/dropdown-menu";
+import { Icon } from "@/components/ui/icons";
 import { cn } from "@/lib/utils";
 import { useQuery } from "@tanstack/react-query";
 import { Link } from "@tanstack/react-router";
 import { format, parseISO } from "date-fns";
-import { MoreVerticalIcon } from "lucide-react";
+
 import {
   deploymentsCountQueryParams,
   getLatestFlowRunsQueryParams,
@@ -85,7 +86,7 @@ export const FlowActionMenu = ({ row }: { row: { original: Flow } }) => {
diff --git a/ui-v2/src/components/flows/data-table.tsx b/ui-v2/src/components/flows/data-table.tsx
index 4464f0c4d303..e5501d868d97 100644
--- a/ui-v2/src/components/flows/data-table.tsx
+++ b/ui-v2/src/components/flows/data-table.tsx
@@ -7,6 +7,7 @@ import {
   DropdownMenuItem,
   DropdownMenuTrigger,
 } from "@/components/ui/dropdown-menu";
+import { Icon } from "@/components/ui/icons";
 import { Input } from "@/components/ui/input";
 import { useNavigate } from "@tanstack/react-router";
 import {
@@ -14,10 +15,10 @@ import {
   getPaginationRowModel,
   useReactTable,
 } from "@tanstack/react-table";
-import { ChevronDownIcon, SearchIcon } from "lucide-react";
+
+import { useSet } from "@/hooks/use-set";
 import { useState } from "react";
 import { columns } from "./columns";
-import { useSet } from "@/hooks/use-set";
 
 const SearchComponent = () => {
   const navigate = useNavigate();
@@ -34,7 +35,8 @@ const SearchComponent = () => {
       })
     }
   />
-  
+  
@@ -56,7 +58,7 @@ const FilterComponent = () => {
@@ -114,7 +116,7 @@ const SortComponent = () => {
diff --git a/ui-v2/src/components/flows/detail/deployment-columns.tsx b/ui-v2/src/components/flows/detail/deployment-columns.tsx
index 5966c1cba7f5..ad3dbecedabe 100644
--- a/ui-v2/src/components/flows/detail/deployment-columns.tsx
+++ b/ui-v2/src/components/flows/detail/deployment-columns.tsx
@@ -6,8 +6,8 @@ import {
   DropdownMenuItem,
   DropdownMenuTrigger,
 } from "@/components/ui/dropdown-menu";
+import { Icon } from "@/components/ui/icons";
 import { ColumnDef } from "@tanstack/react-table";
-import { MoreHorizontal } from "lucide-react";
 
 type Deployment = components["schemas"]["DeploymentResponse"];
 
@@ -98,7 +98,7 @@
diff --git a/ui-v2/src/components/flows/detail/index.tsx b/ui-v2/src/components/flows/detail/index.tsx
index a5581850f2d7..89ffdd99ba1b 100644
--- a/ui-v2/src/components/flows/detail/index.tsx
+++ b/ui-v2/src/components/flows/detail/index.tsx
@@ -16,13 +16,13 @@ import {
   DropdownMenuItem,
   DropdownMenuTrigger,
 } from "@/components/ui/dropdown-menu";
+import { Icon } from "@/components/ui/icons";
 import { Input } from "@/components/ui/input";
 import {
   getCoreRowModel,
   getPaginationRowModel,
   useReactTable,
 } from "@tanstack/react-table";
-import { ChevronDownIcon, SearchIcon } from "lucide-react";
 
 const SearchComponent = () => {
   const navigate = useNavigate();
@@ -42,7 +42,8 @@
       })
     }
   />
-  
+  
@@ -57,7 +58,7 @@ const SortComponent = () => {
diff --git a/ui-v2/src/components/ui/button/button.tsx b/ui-v2/src/components/ui/button/button.tsx
index 76ae8a7d8b05..5c74dfb80696 100644
--- a/ui-v2/src/components/ui/button/button.tsx
+++ b/ui-v2/src/components/ui/button/button.tsx
@@ -2,9 +2,9 @@
 import { Slot } from "@radix-ui/react-slot";
 import type { VariantProps } from "class-variance-authority";
 import * as React from "react";
 
+import { Icon } from "@/components/ui/icons";
 import { cn } from "@/lib/utils";
 import { buttonVariants } from "./styles";
-import { Loader2 } from "lucide-react";
 
 export interface ButtonProps
   extends React.ButtonHTMLAttributes,
     VariantProps {
@@ -35,7 +35,11 @@
         disabled={loading}
         {...props}
       >
-        {loading ?  : children}
+        {loading ? (
+          
+        ) : (
+          children
+        )}
       );
   },
diff --git a/ui-v2/src/components/ui/docs-link.tsx b/ui-v2/src/components/ui/docs-link.tsx
index c2dc4ee6ad7a..48cb63576219 100644
--- a/ui-v2/src/components/ui/docs-link.tsx
+++ b/ui-v2/src/components/ui/docs-link.tsx
@@ -1,4 +1,4 @@
-import { ExternalLinkIcon } from "lucide-react";
+import { Icon } from "./icons";
 
 import { Button } from "./button";
 
@@ -20,7 +20,7 @@ export const DocsLink = ({ id }: Props): JSX.Element => {
   return (
   );
diff --git a/ui-v2/src/components/ui/empty-state.stories.tsx b/ui-v2/src/components/ui/empty-state.stories.tsx
index d28e25207df1..dcbe9ea20647 100644
--- a/ui-v2/src/components/ui/empty-state.stories.tsx
+++ b/ui-v2/src/components/ui/empty-state.stories.tsx
@@ -1,6 +1,6 @@
 import type { Meta, StoryObj } from "@storybook/react";
 
-import { PlusIcon } from "lucide-react";
+import { Icon } from "@/components/ui/icons";
 import { Button } from "./button";
 import { DocsLink } from "./docs-link";
 import {
@@ -40,7 +40,7 @@ function EmptyStateExample(): JSX.Element {
diff --git a/ui-v2/src/components/ui/empty-state.tsx b/ui-v2/src/components/ui/empty-state.tsx
index 50104fb483ca..65ec02571350 100644
--- a/ui-v2/src/components/ui/empty-state.tsx
+++ b/ui-v2/src/components/ui/empty-state.tsx
@@ -1,9 +1,8 @@
 import { Card, CardContent } from "@/components/ui/card";
-import { icons } from "lucide-react";
+import { Icon, type IconId } from "./icons";
 
-const EmptyStateIcon = ({ id }: { id: keyof typeof icons }): JSX.Element => {
-  const LucideIcon = icons[id];
-  return ;
+const EmptyStateIcon = ({ id }: { id: IconId }): JSX.Element => {
+  return ;
 };
 const EmptyStateTitle = ({
   children,
diff --git a/ui-v2/src/components/ui/icons/README.md b/ui-v2/src/components/ui/icons/README.md
new file mode 100644
index 000000000000..57b061c4c444
--- /dev/null
+++ b/ui-v2/src/components/ui/icons/README.md
@@ -0,0 +1,22 @@
+# Adding Icons
+
+Import new icon from https://lucide.dev/icons/ in `./constants` and add entry alphabetically in `ICONS`
+
+Use the named import that **excludes** the `Icon` suffix, e.g. `BanIcon` vs. `Ban`:
+
+```ts
+/** @/components/ui/icons/constants.ts */
+import {
+  AlignVerticalJustifyStart,
+  Ban,
+  Check, // <---- New Icon to add
+  ChevronDown,
+} from "lucide-react";
+
+export const ICONS = {
+  AlignVerticalJustifyStart,
+  Ban,
+  Check, // <---- New Icon to add
+  ChevronDown,
+} as const;
+```
\ No newline at end of file
diff --git a/ui-v2/src/components/ui/icons/constants.ts b/ui-v2/src/components/ui/icons/constants.ts
new file mode 100644
index 000000000000..60154b96c925
--- /dev/null
+++ b/ui-v2/src/components/ui/icons/constants.ts
@@ -0,0 +1,51 @@
+import {
+  AlignVerticalJustifyStart,
+  Ban,
+  Check,
+  ChevronDown,
+  ChevronLeft,
+  ChevronRight,
+  ChevronsLeft,
+  ChevronsRight,
+  CircleArrowOutUpRight,
+  Clock,
+  ExternalLink,
+  Loader2,
+  MoreHorizontal,
+  MoreVertical,
+  PanelLeft,
+  Pause,
+  Play,
+  Plus,
+  Search,
+  ServerCrash,
+  Variable,
+  X,
+} from "lucide-react";
+
+export const ICONS = {
+  AlignVerticalJustifyStart,
+  Ban,
+  Check,
+  ChevronDown,
+  ChevronLeft,
+  ChevronRight,
+  ChevronsLeft,
+  ChevronsRight,
+  CircleArrowOutUpRight,
+  Clock,
+  ExternalLink,
+  Loader2,
+  MoreHorizontal,
+  MoreVertical,
+  PanelLeft,
+  Pause,
+  Play,
+  Plus,
+  Search,
+  ServerCrash,
+  Variable,
+  X,
+} as const;
+
+export type IconId = keyof typeof ICONS;
diff --git a/ui-v2/src/components/ui/icons/icon.tsx b/ui-v2/src/components/ui/icons/icon.tsx
new file mode 100644
index 000000000000..b7e1320e8de2
--- /dev/null
+++ b/ui-v2/src/components/ui/icons/icon.tsx
@@ -0,0 +1,11 @@
+import { type LucideProps } from "lucide-react";
+import { ICONS, type IconId } from "./constants";
+
+type Props = {
+  id: IconId;
+} & LucideProps;
+
+export const Icon = ({ id, ...rest }: Props) => {
+  const IconComponent = ICONS[id];
+  return ;
+};
diff --git a/ui-v2/src/components/ui/icons/icons.stories.tsx b/ui-v2/src/components/ui/icons/icons.stories.tsx
new file mode 100644
index 000000000000..fb9d144b7f89
--- /dev/null
+++ b/ui-v2/src/components/ui/icons/icons.stories.tsx
@@ -0,0 +1,50 @@
+import { Label } from "@/components/ui/label";
+import type { Meta, StoryObj } from "@storybook/react";
+
+import { ICONS, type IconId } from "./constants";
+import { Icon } from "./icon";
+
+const meta: Meta = {
+  title: "UI/Icon",
+  component: Icon,
+  args: {
+    id: "Ban",
+  },
+};
+
+export default meta;
+
+type Story = StoryObj;
+
+export const Icons: Story = {
+  render: () => ,
+};
+
+function IconCatalog(): JSX.Element {
+  return (
+    
+      {Object.keys(ICONS).map((id) => {
+        return (
+          
+            
+              
+            
+            
+              
+            
+          
+        );
+      })}
+    
+  );
+}
+
+export const Usage: Story = {
+  args: { id: "Ban" },
+};
diff --git a/ui-v2/src/components/ui/icons/index.ts b/ui-v2/src/components/ui/icons/index.ts
new file mode 100644
index 000000000000..3798532436ad
--- /dev/null
+++ b/ui-v2/src/components/ui/icons/index.ts
@@ -0,0 +1,2 @@
+export { Icon } from "./icon";
+export { ICONS, type IconId } from "./constants";
diff --git a/ui-v2/src/components/ui/input.tsx b/ui-v2/src/components/ui/input.tsx
index c92f9d0e0e96..ae3e030786df 100644
--- a/ui-v2/src/components/ui/input.tsx
+++ b/ui-v2/src/components/ui/input.tsx
@@ -1,8 +1,8 @@
-import * as React from "react";
-import { useEffect, useState } from "react";
 import useDebounce from "@/hooks/use-debounce";
 import { cn } from "@/lib/utils";
-import { SearchIcon } from "lucide-react";
+import * as React from "react";
+import { useEffect, useState } from "react";
+import { ICONS } from "./icons";
 
 type InputProps = React.ComponentProps<"input"> & {
   className?: string;
@@ -66,7 +66,7 @@
     return (
diff --git a/ui-v2/src/components/ui/pagination.tsx b/ui-v2/src/components/ui/pagination.tsx
--- a/ui-v2/src/components/ui/pagination.tsx
+++ b/ui-v2/src/components/ui/pagination.tsx
 & {
   className?: string;
@@ -89,7 +85,7 @@
     className={cn("gap-1 pl-2.5", className)}
     {...props}
   >
-    
+    
     Previous
   );
@@ -106,7 +102,7 @@
     className={cn("gap-1 pl-2.5", className)}
     {...props}
   >
-    
+    
   );
 PaginationPreviousButton.displayName = "PaginationPreviousButton";
@@ -122,7 +118,7 @@
     {...props}
   >
     Next
-    
+    
   );
 PaginationNext.displayName = "PaginationNext";
@@ -138,7 +134,7 @@
     className={cn("gap-1 pr-2.5", className)}
     {...props}
   >
-    
+    
   );
 PaginationNextButton.displayName = "PaginationNextButton";
@@ -173,7 +169,7 @@
     className={cn("gap-1 pl-2.5", className)}
     {...props}
   >
-    
+    
   );
 PaginationFirstButton.displayName = "PaginationFirstButton";
@@ -189,7 +185,7 @@
     className={cn("gap-1 pr-2.5", className)}
     {...props}
   >
-    
+    
   );
diff --git a/ui-v2/src/components/ui/sidebar/sidebar.tsx b/ui-v2/src/components/ui/sidebar/sidebar.tsx
index 7bff3be677e0..7dc46100d591 100644
--- a/ui-v2/src/components/ui/sidebar/sidebar.tsx
+++ b/ui-v2/src/components/ui/sidebar/sidebar.tsx
@@ -1,11 +1,5 @@
-import * as React from "react";
-import { Slot } from "@radix-ui/react-slot";
-import { VariantProps, cva } from "class-variance-authority";
-import { PanelLeft } from "lucide-react";
-
-import { useIsMobile } from "@/hooks/use-mobile";
-import { cn } from "@/lib/utils";
 import { Button } from "@/components/ui/button";
+import { Icon } from "@/components/ui/icons";
 import { Input } from "@/components/ui/input";
 import { Separator } from "@/components/ui/separator";
 import { Sheet, SheetContent } from "@/components/ui/sheet";
@@ -16,7 +10,12 @@
   TooltipProvider,
   TooltipTrigger,
 } from "@/components/ui/tooltip";
-import { useSidebar, SidebarContext } from "./hooks";
+import { useIsMobile } from "@/hooks/use-mobile";
+import { cn } from "@/lib/utils";
+import { Slot } from "@radix-ui/react-slot";
+import { VariantProps, cva } from "class-variance-authority";
+import * as React from "react";
+import { SidebarContext, useSidebar } from "./hooks";
 
 const SIDEBAR_COOKIE_NAME = "sidebar:state";
 const SIDEBAR_COOKIE_MAX_AGE = 60 * 60 * 24 * 7;
@@ -272,7 +271,7 @@
       }}
       {...props}
     >
-      
+      
       Toggle Sidebar
     );
diff --git a/ui-v2/src/components/ui/state-badge/index.tsx b/ui-v2/src/components/ui/state-badge/index.tsx
index 0b7d622b062e..d8e982016017 100644
--- a/ui-v2/src/components/ui/state-badge/index.tsx
+++ b/ui-v2/src/components/ui/state-badge/index.tsx
@@ -1,26 +1,20 @@
 import type { components } from "@/api/prefect";
 import { cva } from "class-variance-authority";
-import {
-  BanIcon,
-  CheckIcon,
-  ClockIcon,
-  PauseIcon,
-  PlayIcon,
-  ServerCrashIcon,
-  XIcon,
-} from "lucide-react";
+
+import { ICONS as COMPONENT_ICONS } from "@/components/ui/icons";
+
 import { Badge } from "../badge";
 
 const ICONS = {
-  COMPLETED: CheckIcon,
-  FAILED: XIcon,
-  RUNNING: PlayIcon,
-  CANCELLED: BanIcon,
-  CANCELLING: BanIcon,
-  CRASHED: ServerCrashIcon,
-  PAUSED: PauseIcon,
-  PENDING: ClockIcon,
-  SCHEDULED: ClockIcon,
+  COMPLETED: COMPONENT_ICONS.Check,
+  FAILED: COMPONENT_ICONS.X,
+  RUNNING: COMPONENT_ICONS.Play,
+  CANCELLED: COMPONENT_ICONS.Ban,
+  CANCELLING: COMPONENT_ICONS.Ban,
+  CRASHED: COMPONENT_ICONS.ServerCrash,
+  PAUSED: COMPONENT_ICONS.Pause,
+  PENDING: COMPONENT_ICONS.Clock,
+  SCHEDULED: COMPONENT_ICONS.Clock,
 } as const satisfies Record<
   components["schemas"]["StateType"],
   React.ElementType
@@ -49,7 +43,6 @@
   return (
-    
     {state.name}
   );
diff --git a/ui-v2/src/components/ui/state-badge/state-badge.test.tsx b/ui-v2/src/components/ui/state-badge/state-badge.test.tsx
index da73b15ec0fd..0ff9dee251b8 100644
--- a/ui-v2/src/components/ui/state-badge/state-badge.test.tsx
+++ b/ui-v2/src/components/ui/state-badge/state-badge.test.tsx
@@ -1,13 +1,7 @@
 import { render, screen } from "@testing-library/react";
-import {
-  BanIcon,
-  CheckIcon,
-  ClockIcon,
-  PauseIcon,
-  PlayIcon,
-  ServerCrashIcon,
-  XIcon,
-} from "lucide-react";
+
+import { ICONS } from "@/components/ui/icons";
+
 import { describe, expect, test } from "vitest";
 import { StateBadge } from "./index";
 
@@ -16,52 +10,52 @@ describe("StateBadge", () => {
     {
       type: "COMPLETED" as const,
       name: "Completed",
-      expectedIcon: CheckIcon,
+      expectedIcon: ICONS.Check,
     },
     {
       type: "FAILED" as const,
       name: "Failed",
-      expectedIcon: XIcon,
+      expectedIcon: ICONS.X,
     },
     {
       type: "RUNNING" as const,
       name: "Running",
-      expectedIcon: PlayIcon,
+      expectedIcon: ICONS.Play,
     },
     {
       type: "CANCELLED" as const,
       name: "Cancelled",
-      expectedIcon: BanIcon,
+      expectedIcon: ICONS.Ban,
     },
     {
       type: "CANCELLING" as const,
      name: "Cancelling",
-      expectedIcon: BanIcon,
+      expectedIcon: ICONS.Ban,
     },
     {
       type: "CRASHED" as const,
       name: "Crashed",
-      expectedIcon: ServerCrashIcon,
+      expectedIcon: ICONS.ServerCrash,
     },
     {
       type: "PAUSED" as const,
       name: "Paused",
-      expectedIcon: PauseIcon,
+      expectedIcon: ICONS.Pause,
     },
     {
       type: "PENDING" as const,
       name: "Pending",
-      expectedIcon: ClockIcon,
+      expectedIcon: ICONS.Clock,
     },
     {
       type: "SCHEDULED" as const,
       name: "Scheduled",
-      expectedIcon: ClockIcon,
+      expectedIcon: ICONS.Clock,
     },
     {
       type: "SCHEDULED" as const,
       name: "Late",
-      expectedIcon: ClockIcon,
+      expectedIcon: ICONS.Clock,
     },
   ];
diff --git a/ui-v2/src/components/ui/tag-badge.tsx b/ui-v2/src/components/ui/tag-badge.tsx
index 0d3179971f36..de23e8bc7e31 100644
--- a/ui-v2/src/components/ui/tag-badge.tsx
+++ b/ui-v2/src/components/ui/tag-badge.tsx
@@ -1,4 +1,4 @@
-import { X } from "lucide-react";
+import { Icon } from "@/components/ui/icons";
 import { Badge, type BadgeProps } from "./badge";
 
 type TagBadgeProps = {
@@ -18,7 +18,7 @@ export const TagBadge = ({ tag, variant, onRemove }: TagBadgeProps) => {
       className="text-muted-foreground hover:text-foreground"
       aria-label={`Remove ${tag} tag`}
     >
-      
+      
     )}
diff --git a/ui-v2/src/components/variables/data-table/cells.tsx b/ui-v2/src/components/variables/data-table/cells.tsx
index 88657758d642..591a33bcd539 100644
--- a/ui-v2/src/components/variables/data-table/cells.tsx
+++ b/ui-v2/src/components/variables/data-table/cells.tsx
@@ -1,3 +1,5 @@
+import type { components } from "@/api/prefect";
+import { Button } from "@/components/ui/button";
 import {
   DropdownMenu,
   DropdownMenuContent,
@@ -5,20 +7,18 @@ import {
   DropdownMenuLabel,
   DropdownMenuTrigger,
 } from "@/components/ui/dropdown-menu";
-import { Button } from "@/components/ui/button";
-import { MoreVerticalIcon } from "lucide-react";
-import type { CellContext } from "@tanstack/react-table";
-import type { components } from "@/api/prefect";
-import { useToast } from "@/hooks/use-toast";
-import { JsonInput } from "@/components/ui/json-input";
 import {
   HoverCard,
   HoverCardContent,
   HoverCardTrigger,
 } from "@/components/ui/hover-card";
-import { useRef } from "react";
+import { Icon } from "@/components/ui/icons";
+import { JsonInput } from "@/components/ui/json-input";
 import { useIsOverflowing } from "@/hooks/use-is-overflowing";
+import { useToast } from "@/hooks/use-toast";
 import { useDeleteVariable } from "@/hooks/variables";
+import type { CellContext } from "@tanstack/react-table";
+import { useRef } from "react";
 
 type ActionsCellProps = CellContext<
   components["schemas"]["Variable"],
@@ -49,7 +49,7 @@ export const ActionsCell = ({ row, onVariableEdit }: ActionsCellProps) => {
diff --git a/ui-v2/src/components/variables/data-table/search.tsx b/ui-v2/src/components/variables/data-table/search.tsx
index 9a611cec5168..349d6dd5421b 100644
--- a/ui-v2/src/components/variables/data-table/search.tsx
+++ b/ui-v2/src/components/variables/data-table/search.tsx
@@ -1,14 +1,14 @@
+import { Icon } from "@/components/ui/icons";
 import { IconInput } from "@/components/ui/input";
 import {
   Select,
+  SelectContent,
+  SelectItem,
   SelectTrigger,
   SelectValue,
-  SelectItem,
-  SelectContent,
 } from "@/components/ui/select";
 import { TagsInput } from "@/components/ui/tags-input";
 import useDebounce from "@/hooks/use-debounce";
-import { SearchIcon } from "lucide-react";
 import { useEffect, useState } from "react";
 
 type VariablesDataTableSearchProps = {
@@ -32,7 +32,7 @@ export const VariablesDataTableSearch = ({
   return (
 }
       placeholder="Search variables"
       value={searchValue}
       onChange={(e) => setSearchValue(e.target.value)}
diff --git a/ui-v2/src/components/variables/empty-state.tsx b/ui-v2/src/components/variables/empty-state.tsx
index 123c52348cf9..c12adeb5b727 100644
--- a/ui-v2/src/components/variables/empty-state.tsx
+++ b/ui-v2/src/components/variables/empty-state.tsx
@@ -7,7 +7,7 @@ import {
   EmptyStateIcon,
   EmptyStateTitle,
 } from "@/components/ui/empty-state";
-import { PlusIcon } from "lucide-react";
+import { Icon } from "@/components/ui/icons";
 
 type VariablesEmptyStateProps = {
   onAddVariableClick: () => void;
@@ -23,7 +23,7 @@ export const VariablesEmptyState = ({
diff --git a/ui-v2/src/components/variables/layout.tsx b/ui-v2/src/components/variables/layout.tsx
index 12f3d5e192b9..ee2ba683846b 100644
--- a/ui-v2/src/components/variables/layout.tsx
+++ b/ui-v2/src/components/variables/layout.tsx
@@ -4,7 +4,7 @@ import {
   BreadcrumbList,
 } from "@/components/ui/breadcrumb";
 import { Button } from "@/components/ui/button";
-import { PlusIcon } from "lucide-react";
+import { Icon } from "@/components/ui/icons";
 
 export const VariablesLayout = ({
   onAddVariableClick,
@@ -29,7 +29,7 @@
       variant="outline"
       onClick={() => onAddVariableClick()}
     >
-      
+      
{children} From be581bb246fe8a85965fccd539b46c64add08e95 Mon Sep 17 00:00:00 2001 From: Devin Villarosa <102188207+devinvillarosa@users.noreply.github.com> Date: Wed, 4 Dec 2024 14:07:37 -0800 Subject: [PATCH 13/92] [UI v2] feat: Adds global concurrency limit query definitions (#16220) --- .../hooks/global-concurrency-limits.test.tsx | 115 ++++++++++++++++++ ui-v2/src/hooks/global-concurrency-limits.ts | 80 ++++++++++++ 2 files changed, 195 insertions(+) create mode 100644 ui-v2/src/hooks/global-concurrency-limits.test.tsx create mode 100644 ui-v2/src/hooks/global-concurrency-limits.ts diff --git a/ui-v2/src/hooks/global-concurrency-limits.test.tsx b/ui-v2/src/hooks/global-concurrency-limits.test.tsx new file mode 100644 index 000000000000..a9675017440c --- /dev/null +++ b/ui-v2/src/hooks/global-concurrency-limits.test.tsx @@ -0,0 +1,115 @@ +import { QueryClient, QueryClientProvider } from "@tanstack/react-query"; +import { renderHook, waitFor } from "@testing-library/react"; +import { http, HttpResponse } from "msw"; +import { describe, expect, it } from "vitest"; + +import { + type GlobalConcurrencyLimit, + useGetGlobalConcurrencyLimit, + useListGlobalConcurrencyLimits, +} from "./global-concurrency-limits"; + +import { server } from "../../tests/mocks/node"; + +describe("global concurrency limits hooks", () => { + const seedGlobalConcurrencyLimits = () => [ + { + id: "0", + created: "2021-01-01T00:00:00Z", + updated: "2021-01-01T00:00:00Z", + active: false, + name: "global concurrency limit 0", + limit: 0, + active_slots: 0, + slot_decay_per_second: 0, + }, + ]; + + const seedGlobalConcurrencyLimitDetails = () => ({ + id: "0", + created: "2021-01-01T00:00:00Z", + updated: "2021-01-01T00:00:00Z", + active: false, + name: "global concurrency limit 0", + limit: 0, + active_slots: 0, + slot_decay_per_second: 0, + }); + + const mockFetchGlobalConcurrencyLimitsAPI = ( + globalConcurrencyLimits: Array, + ) => { + server.use( + http.post( + "http://localhost:4200/api/v2/concurrency_limits/filter", + () => { + return HttpResponse.json(globalConcurrencyLimits); + }, + ), + ); + }; + + const mockFetchGlobalConcurrencyLimitDetailsAPI = ( + globalConcurrencyLimit: GlobalConcurrencyLimit, + ) => { + server.use( + http.get( + "http://localhost:4200/api/v2/concurrency_limits/:id_or_name", + () => { + return HttpResponse.json(globalConcurrencyLimit); + }, + ), + ); + }; + + const createQueryWrapper = ({ queryClient = new QueryClient() }) => { + const QueryWrapper = ({ children }: { children: React.ReactNode }) => ( + {children} + ); + return QueryWrapper; + }; + + const filter = { + offset: 0, + }; + + /** + * Data Management: + * - Asserts global concurrency limit list data is fetched based on the APIs invoked for the hook + */ + it("is stores list data into the appropriate list query when using useQuery()", async () => { + // ------------ Mock API requests when cache is empty + const mockList = seedGlobalConcurrencyLimits(); + mockFetchGlobalConcurrencyLimitsAPI(mockList); + + // ------------ Initialize hooks to test + const { result } = renderHook( + () => useListGlobalConcurrencyLimits(filter), + { wrapper: createQueryWrapper({}) }, + ); + + // ------------ Assert + await waitFor(() => expect(result.current.isSuccess).toBe(true)); + expect(result.current.data).toEqual(mockList); + }); + + /** + * Data Management: + * - Asserts global concurrency limit details data is fetched based on the APIs invoked for the hook + */ + it("is stores details data into the appropriate details query when using 
useQuery()", async () => { + // ------------ Mock API requests when cache is empty + const mockDetails = seedGlobalConcurrencyLimitDetails(); + mockFetchGlobalConcurrencyLimitDetailsAPI(mockDetails); + + // ------------ Initialize hooks to test + const { result } = renderHook( + () => useGetGlobalConcurrencyLimit(mockDetails.id), + { wrapper: createQueryWrapper({}) }, + ); + + // ------------ Assert + await waitFor(() => expect(result.current.isSuccess).toBe(true)); + expect(result.current.data).toEqual(mockDetails); + }); +}); diff --git a/ui-v2/src/hooks/global-concurrency-limits.ts b/ui-v2/src/hooks/global-concurrency-limits.ts new file mode 100644 index 000000000000..2542f8c0bab7 --- /dev/null +++ b/ui-v2/src/hooks/global-concurrency-limits.ts @@ -0,0 +1,80 @@ +import type { components } from "@/api/prefect"; +import { getQueryService } from "@/api/service"; +import { queryOptions, useQuery } from "@tanstack/react-query"; + +export type GlobalConcurrencyLimit = + components["schemas"]["GlobalConcurrencyLimitResponse"]; +export type GlobalConcurrencyLimitsFilter = + components["schemas"]["Body_read_all_concurrency_limits_v2_v2_concurrency_limits_filter_post"]; + +/** + * ``` + * 🏗️ Variable queries construction 👷 + * all => ['global-concurrency-limits'] // key to match ['global-concurrency-limits', ... + * list => ['global-concurrency-limits', 'list'] // key to match ['global-concurrency-limits', 'list', ... + * ['global-concurrency-limits', 'list', { ...filter1 }] + * ['global-concurrency-limits', 'list', { ...filter2 }] + * details => ['global-concurrency-limits', 'details'] // key to match ['global-concurrency-limits', 'details', ...] + * ['global-concurrency-limits', 'details', { ...globalConcurrencyLimit1 }] + * ['global-concurrency-limits', 'details', { ...globalConcurrencyLimit2 }] + * ``` + * */ +const queryKeyFactory = { + all: () => ["global-concurrency-limits"] as const, + lists: () => [...queryKeyFactory.all(), "list"] as const, + list: (filter: GlobalConcurrencyLimitsFilter) => + [...queryKeyFactory.lists(), filter] as const, + details: () => [...queryKeyFactory.all(), "details"] as const, + detail: (id_or_name: string) => + [...queryKeyFactory.details(), id_or_name] as const, +}; + +// ----- 🔑 Queries 🗄️ +// ---------------------------- +export const buildListGlobalConcurrencyLimitsQuery = ( + filter: GlobalConcurrencyLimitsFilter, +) => + queryOptions({ + queryKey: queryKeyFactory.list(filter), + queryFn: async () => { + const res = await getQueryService().POST( + "/v2/concurrency_limits/filter", + { body: filter }, + ); + return res.data ?? []; + }, + }); + +export const buildGetGlobalConcurrencyLimitQuery = (id_or_name: string) => + queryOptions({ + queryKey: queryKeyFactory.detail(id_or_name), + queryFn: async () => { + const res = await getQueryService().GET( + "/v2/concurrency_limits/{id_or_name}", + { params: { path: { id_or_name } } }, + ); + return res.data ?? 
null; + }, + }); + +/** + * + * @param filter + * @returns list of global concurrency limits as a QueryResult object + */ +export const useListGlobalConcurrencyLimits = ( + filter: GlobalConcurrencyLimitsFilter, +) => useQuery(buildListGlobalConcurrencyLimitsQuery(filter)); + +/** + * + * @param id_or_name + * @returns details about the specified global concurrency limit as a QueryResult object + */ +export const useGetGlobalConcurrencyLimit = (id_or_name: string) => + useQuery(buildGetGlobalConcurrencyLimitQuery(id_or_name)); + +// ----- ✍🏼 Mutations 🗄️ +// ---------------------------- + +// TODO: From 63082078ca6d94095299c2c1a6d583867a667a86 Mon Sep 17 00:00:00 2001 From: nate nowack Date: Wed, 4 Dec 2024 21:35:21 -0600 Subject: [PATCH 14/92] strict type client modules (#16223) --- src/prefect/client/base.py | 32 ++++++++++++++--------------- src/prefect/client/cloud.py | 11 ++++++---- src/prefect/client/collections.py | 4 ++-- src/prefect/client/subscriptions.py | 26 ++++++++++++++++------- src/prefect/client/utilities.py | 19 +++++++++-------- src/prefect/utilities/asyncutils.py | 5 ++++- src/prefect/utilities/math.py | 14 ++++++++----- 7 files changed, 68 insertions(+), 43 deletions(-) diff --git a/src/prefect/client/base.py b/src/prefect/client/base.py index 4a018e2f7352..5071387668aa 100644 --- a/src/prefect/client/base.py +++ b/src/prefect/client/base.py @@ -161,7 +161,7 @@ class PrefectResponse(httpx.Response): Provides more informative error messages. """ - def raise_for_status(self) -> None: + def raise_for_status(self) -> Response: """ Raise an exception if the response contains an HTTPStatusError. @@ -174,7 +174,7 @@ def raise_for_status(self) -> None: raise PrefectHTTPStatusError.from_httpx_error(exc) from exc.__cause__ @classmethod - def from_httpx_response(cls: Type[Self], response: httpx.Response) -> Self: + def from_httpx_response(cls: Type[Self], response: httpx.Response) -> Response: """ Create a `PrefectReponse` from an `httpx.Response`. @@ -200,10 +200,10 @@ class PrefectHttpxAsyncClient(httpx.AsyncClient): def __init__( self, - *args, + *args: Any, enable_csrf_support: bool = False, raise_on_all_errors: bool = True, - **kwargs, + **kwargs: Any, ): self.enable_csrf_support: bool = enable_csrf_support self.csrf_token: Optional[str] = None @@ -222,10 +222,10 @@ async def _send_with_retry( self, request: Request, send: Callable[[Request], Awaitable[Response]], - send_args: Tuple, - send_kwargs: Dict, + send_args: Tuple[Any, ...], + send_kwargs: Dict[str, Any], retry_codes: Set[int] = set(), - retry_exceptions: Tuple[Exception, ...] = tuple(), + retry_exceptions: Tuple[Type[Exception], ...] = tuple(), ): """ Send a request and retry it if it fails. @@ -297,7 +297,7 @@ async def _send_with_retry( if exc_info else ( "Received response with retryable status code" - f" {response.status_code}. " + f" {response.status_code if response else 'unknown'}. " ) ) + f"Another attempt will be made in {retry_seconds}s. 
" @@ -314,7 +314,7 @@ async def _send_with_retry( # We ran out of retries, return the failed response return response - async def send(self, request: Request, *args, **kwargs) -> Response: + async def send(self, request: Request, *args: Any, **kwargs: Any) -> Response: """ Send a request with automatic retry behavior for the following status codes: @@ -414,10 +414,10 @@ class PrefectHttpxSyncClient(httpx.Client): def __init__( self, - *args, + *args: Any, enable_csrf_support: bool = False, raise_on_all_errors: bool = True, - **kwargs, + **kwargs: Any, ): self.enable_csrf_support: bool = enable_csrf_support self.csrf_token: Optional[str] = None @@ -436,10 +436,10 @@ def _send_with_retry( self, request: Request, send: Callable[[Request], Response], - send_args: Tuple, - send_kwargs: Dict, + send_args: Tuple[Any, ...], + send_kwargs: Dict[str, Any], retry_codes: Set[int] = set(), - retry_exceptions: Tuple[Exception, ...] = tuple(), + retry_exceptions: Tuple[Type[Exception], ...] = tuple(), ): """ Send a request and retry it if it fails. @@ -511,7 +511,7 @@ def _send_with_retry( if exc_info else ( "Received response with retryable status code" - f" {response.status_code}. " + f" {response.status_code if response else 'unknown'}. " ) ) + f"Another attempt will be made in {retry_seconds}s. " @@ -528,7 +528,7 @@ def _send_with_retry( # We ran out of retries, return the failed response return response - def send(self, request: Request, *args, **kwargs) -> Response: + def send(self, request: Request, *args: Any, **kwargs: Any) -> Response: """ Send a request with automatic retry behavior for the following status codes: diff --git a/src/prefect/client/cloud.py b/src/prefect/client/cloud.py index 38a69150e922..6542393ed4b7 100644 --- a/src/prefect/client/cloud.py +++ b/src/prefect/client/cloud.py @@ -30,7 +30,7 @@ def get_cloud_client( host: Optional[str] = None, api_key: Optional[str] = None, - httpx_settings: Optional[dict] = None, + httpx_settings: Optional[Dict[str, Any]] = None, infer_cloud_url: bool = False, ) -> "CloudClient": """ @@ -45,6 +45,9 @@ def get_cloud_client( configured_url = prefect.settings.PREFECT_API_URL.value() host = re.sub(PARSE_API_URL_REGEX, "", configured_url) + if host is None: + raise ValueError("Host was not provided and could not be inferred") + return CloudClient( host=host, api_key=api_key or PREFECT_API_KEY.value(), @@ -176,7 +179,7 @@ async def __aenter__(self): await self._client.__aenter__() return self - async def __aexit__(self, *exc_info): + async def __aexit__(self, *exc_info: Any) -> None: return await self._client.__aexit__(*exc_info) def __enter__(self): @@ -188,10 +191,10 @@ def __enter__(self): def __exit__(self, *_): assert False, "This should never be called but must be defined for __enter__" - async def get(self, route, **kwargs): + async def get(self, route: str, **kwargs: Any) -> Any: return await self.request("GET", route, **kwargs) - async def request(self, method, route, **kwargs): + async def request(self, method: str, route: str, **kwargs: Any) -> Any: try: res = await self._client.request(method, route, **kwargs) res.raise_for_status() diff --git a/src/prefect/client/collections.py b/src/prefect/client/collections.py index 12285d50a3d1..e5bd79f04325 100644 --- a/src/prefect/client/collections.py +++ b/src/prefect/client/collections.py @@ -13,12 +13,12 @@ async def read_worker_metadata(self) -> Dict[str, Any]: async def __aenter__(self) -> "CollectionsMetadataClient": ... 
- async def __aexit__(self, *exc_info) -> Any: + async def __aexit__(self, *exc_info: Any) -> Any: ... def get_collections_metadata_client( - httpx_settings: Optional[Dict] = None, + httpx_settings: Optional[Dict[str, Any]] = None, ) -> "CollectionsMetadataClient": """ Creates a client that can be used to fetch metadata for diff --git a/src/prefect/client/subscriptions.py b/src/prefect/client/subscriptions.py index c2ebf0ab673e..d13873e14b05 100644 --- a/src/prefect/client/subscriptions.py +++ b/src/prefect/client/subscriptions.py @@ -27,27 +27,33 @@ def __init__( ): self.model = model self.client_id = client_id - base_url = base_url.replace("http", "ws", 1) + base_url = base_url.replace("http", "ws", 1) if base_url else None self.subscription_url = f"{base_url}{path}" self.keys = list(keys) self._connect = websockets.connect( self.subscription_url, - subprotocols=["prefect"], + subprotocols=[websockets.Subprotocol("prefect")], ) self._websocket = None def __aiter__(self) -> Self: return self + @property + def websocket(self) -> websockets.WebSocketClientProtocol: + if not self._websocket: + raise RuntimeError("Subscription is not connected") + return self._websocket + async def __anext__(self) -> S: while True: try: await self._ensure_connected() - message = await self._websocket.recv() + message = await self.websocket.recv() - await self._websocket.send(orjson.dumps({"type": "ack"}).decode()) + await self.websocket.send(orjson.dumps({"type": "ack"}).decode()) return self.model.model_validate_json(message) except ( @@ -84,13 +90,19 @@ async def _ensure_connected(self): AssertionError, websockets.exceptions.ConnectionClosedError, ) as e: - if isinstance(e, AssertionError) or e.rcvd.code == WS_1008_POLICY_VIOLATION: + if isinstance(e, AssertionError) or ( + e.rcvd and e.rcvd.code == WS_1008_POLICY_VIOLATION + ): if isinstance(e, AssertionError): reason = e.args[0] - elif isinstance(e, websockets.exceptions.ConnectionClosedError): + elif e.rcvd and e.rcvd.reason: reason = e.rcvd.reason + else: + reason = "unknown" + else: + reason = None - if isinstance(e, AssertionError) or e.rcvd.code == WS_1008_POLICY_VIOLATION: + if reason: raise Exception( "Unable to authenticate to the subscription. Please " "ensure the provided `PREFECT_API_KEY` you are using is " diff --git a/src/prefect/client/utilities.py b/src/prefect/client/utilities.py index ffe42e63195f..81ff31199e6e 100644 --- a/src/prefect/client/utilities.py +++ b/src/prefect/client/utilities.py @@ -15,13 +15,14 @@ Optional, Tuple, TypeVar, + Union, cast, ) from typing_extensions import Concatenate, ParamSpec if TYPE_CHECKING: - from prefect.client.orchestration import PrefectClient + from prefect.client.orchestration import PrefectClient, SyncPrefectClient P = ParamSpec("P") R = TypeVar("R") @@ -29,7 +30,7 @@ def get_or_create_client( client: Optional["PrefectClient"] = None, -) -> Tuple["PrefectClient", bool]: +) -> Tuple[Union["PrefectClient", "SyncPrefectClient"], bool]: """ Returns provided client, infers a client from context if available, or creates a new client. 
@@ -48,7 +49,7 @@ def get_or_create_client( flow_run_context = FlowRunContext.get() task_run_context = TaskRunContext.get() - if async_client_context and async_client_context.client._loop == get_running_loop(): + if async_client_context and async_client_context.client._loop == get_running_loop(): # type: ignore[reportPrivateUsage] return async_client_context.client, True elif ( flow_run_context @@ -72,7 +73,7 @@ def client_injector( @wraps(func) async def wrapper(*args: P.args, **kwargs: P.kwargs) -> R: client, _ = get_or_create_client() - return await func(client, *args, **kwargs) + return await func(cast("PrefectClient", client), *args, **kwargs) return wrapper @@ -90,16 +91,18 @@ def inject_client( @wraps(fn) async def with_injected_client(*args: P.args, **kwargs: P.kwargs) -> R: - client = cast(Optional["PrefectClient"], kwargs.pop("client", None)) - client, inferred = get_or_create_client(client) + client, inferred = get_or_create_client( + cast(Optional["PrefectClient"], kwargs.pop("client", None)) + ) + _client = cast("PrefectClient", client) if not inferred: - context = client + context = _client else: from prefect.utilities.asyncutils import asyncnullcontext context = asyncnullcontext() async with context as new_client: - kwargs.setdefault("client", new_client or client) + kwargs.setdefault("client", new_client or _client) return await fn(*args, **kwargs) return with_injected_client diff --git a/src/prefect/utilities/asyncutils.py b/src/prefect/utilities/asyncutils.py index 3939632e1641..ce5a0229b049 100644 --- a/src/prefect/utilities/asyncutils.py +++ b/src/prefect/utilities/asyncutils.py @@ -12,6 +12,7 @@ from functools import partial, wraps from typing import ( Any, + AsyncGenerator, Awaitable, Callable, Coroutine, @@ -410,7 +411,9 @@ async def ctx_call(): @asynccontextmanager -async def asyncnullcontext(value=None, *args, **kwargs): +async def asyncnullcontext( + value: Optional[Any] = None, *args: Any, **kwargs: Any +) -> AsyncGenerator[Any, None]: yield value diff --git a/src/prefect/utilities/math.py b/src/prefect/utilities/math.py index 2ece5eb85fa3..9daca1c74186 100644 --- a/src/prefect/utilities/math.py +++ b/src/prefect/utilities/math.py @@ -2,7 +2,9 @@ import random -def poisson_interval(average_interval, lower=0, upper=1): +def poisson_interval( + average_interval: float, lower: float = 0, upper: float = 1 +) -> float: """ Generates an "inter-arrival time" for a Poisson process. @@ -16,12 +18,12 @@ def poisson_interval(average_interval, lower=0, upper=1): return -math.log(max(1 - random.uniform(lower, upper), 1e-10)) * average_interval -def exponential_cdf(x, average_interval): +def exponential_cdf(x: float, average_interval: float) -> float: ld = 1 / average_interval return 1 - math.exp(-ld * x) -def lower_clamp_multiple(k): +def lower_clamp_multiple(k: float) -> float: """ Computes a lower clamp multiple that can be used to bound a random variate drawn from an exponential distribution. @@ -38,7 +40,9 @@ def lower_clamp_multiple(k): return math.log(max(2**k / (2**k - 1), 1e-10), 2) -def clamped_poisson_interval(average_interval, clamping_factor=0.3): +def clamped_poisson_interval( + average_interval: float, clamping_factor: float = 0.3 +) -> float: """ Bounds Poisson "inter-arrival times" to a range defined by the clamping factor. 
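Taken together, these helpers draw exponentially distributed sleep intervals whose mean is `average_interval`, useful for de-synchronizing polling loops. A small usage sketch built from the `poisson_interval` definition shown in the hunk above (the driver loop is illustrative):

import math
import random


def poisson_interval(
    average_interval: float, lower: float = 0, upper: float = 1
) -> float:
    # inverse-CDF sample of an exponential inter-arrival time; the
    # max(..., 1e-10) guard keeps the logarithm's argument positive
    return -math.log(max(1 - random.uniform(lower, upper), 1e-10)) * average_interval


# sample many intervals; their mean converges on average_interval
samples = [poisson_interval(10.0) for _ in range(100_000)]
print(sum(samples) / len(samples))  # ~10.0
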
@@ -57,7 +61,7 @@ def clamped_poisson_interval(average_interval, clamping_factor=0.3): return poisson_interval(average_interval, lower_rv, upper_rv) -def bounded_poisson_interval(lower_bound, upper_bound): +def bounded_poisson_interval(lower_bound: float, upper_bound: float) -> float: """ Bounds Poisson "inter-arrival times" to a range. From b3b185c5d27a47a9289cd985fd436c1d1b00d566 Mon Sep 17 00:00:00 2001 From: Martijn Pieters Date: Thu, 5 Dec 2024 18:13:51 +0000 Subject: [PATCH 15/92] Bring ORM models and Alembic migrations in sync (#16221) --- .../database/_migrations/MIGRATION-NOTES.md | 4 + .../server/database/_migrations/env.py | 36 +- ...c01be85e_sync_orm_models_and_migrations.py | 111 ++++++ ...11513ad4_sync_orm_models_and_migrations.py | 127 ++++++ src/prefect/server/database/orm_models.py | 363 +++++++++++------- 5 files changed, 493 insertions(+), 148 deletions(-) create mode 100644 src/prefect/server/database/_migrations/versions/postgresql/2024_12_04_165333_5d03c01be85e_sync_orm_models_and_migrations.py create mode 100644 src/prefect/server/database/_migrations/versions/sqlite/2024_12_04_144924_a49711513ad4_sync_orm_models_and_migrations.py diff --git a/src/prefect/server/database/_migrations/MIGRATION-NOTES.md b/src/prefect/server/database/_migrations/MIGRATION-NOTES.md index 285a6f88599f..06e796e63c8f 100644 --- a/src/prefect/server/database/_migrations/MIGRATION-NOTES.md +++ b/src/prefect/server/database/_migrations/MIGRATION-NOTES.md @@ -8,6 +8,10 @@ Each time a database migration is written, an entry is included here with: This gives us a history of changes and will create merge conflicts if two migrations are made at once, flagging situations where a branch needs to be updated before merging. +# Bring ORM models and migrations back in sync +SQLite: `a49711513ad4` +Postgres: `5d03c01be85e` + # Add `labels` column to Flow, FlowRun, TaskRun, and Deployment SQLite: `5952a5498b51` Postgres: `68a44144428d` diff --git a/src/prefect/server/database/_migrations/env.py b/src/prefect/server/database/_migrations/env.py index da92f392335e..a64574bcf85e 100644 --- a/src/prefect/server/database/_migrations/env.py +++ b/src/prefect/server/database/_migrations/env.py @@ -2,6 +2,7 @@ # https://alembic.sqlalchemy.org/en/latest/tutorial.html#creating-an-environment import contextlib +from typing import Optional import sqlalchemy from alembic import context @@ -23,7 +24,7 @@ def include_object( name: str, type_: str, reflected: bool, - compare_to: sqlalchemy.schema.SchemaItem, + compare_to: Optional[sqlalchemy.schema.SchemaItem], ) -> bool: """ Determines whether or not alembic should include an object when autogenerating @@ -53,13 +54,34 @@ def include_object( # * functional indexes (ending in 'desc', 'asc'), if an index with the same name already exists # * trigram indexes that already exist # * case_insensitive indexes that already exist + # * indexes that don't yet exist but have .ddl_if(dialect=...) metadata that doesn't match + # the current dialect. 
if type_ == "index": - if not reflected and any([name.endswith(suffix) for suffix in {"asc", "desc"}]): - return compare_to is None or object.name != compare_to.name - elif reflected and ( - name.startswith("gin") or name.endswith("case_insensitive") - ): - return False + if not reflected: + if name.endswith(("asc", "desc")): + return compare_to is None or object.name != compare_to.name + if (ddl_if := object._ddl_if) is not None and ddl_if.dialect is not None: + desired: set[str] = ( + {ddl_if.dialect} + if isinstance(ddl_if.dialect, str) + else set(ddl_if.dialect) + ) + return dialect.name in desired + + else: # reflected + if name.startswith("gin") or name.endswith("case_insensitive"): + return False + + # SQLite doesn't have an enum type, so reflection always comes back with + # a VARCHAR column, which doesn't match. Skip columns where the type + # doesn't match + if ( + dialect.name == "sqlite" + and type_ == "column" + and object.type.__visit_name__ == "enum" + and compare_to is not None + ): + return compare_to.type.__visit_name__ == "enum" return True diff --git a/src/prefect/server/database/_migrations/versions/postgresql/2024_12_04_165333_5d03c01be85e_sync_orm_models_and_migrations.py b/src/prefect/server/database/_migrations/versions/postgresql/2024_12_04_165333_5d03c01be85e_sync_orm_models_and_migrations.py new file mode 100644 index 000000000000..40dc90ae5dd2 --- /dev/null +++ b/src/prefect/server/database/_migrations/versions/postgresql/2024_12_04_165333_5d03c01be85e_sync_orm_models_and_migrations.py @@ -0,0 +1,111 @@ +"""Sync ORM models and migrations + +Revision ID: 5d03c01be85e +Revises: 68a44144428d +Create Date: 2024-12-04 16:53:33.015870 + +""" + +import sqlalchemy as sa +from alembic import op +from sqlalchemy.dialects import postgresql + +# revision identifiers, used by Alembic. +revision = "5d03c01be85e" +down_revision = "68a44144428d" +branch_labels = None +depends_on = None + + +def upgrade(): + # Column is non-null in the ORM and in SQLite. + op.execute( + "UPDATE artifact_collection SET latest_id=GEN_RANDOM_UUID() WHERE latest_id IS NULL" + ) + op.alter_column( + "artifact_collection", "latest_id", existing_type=sa.UUID(), nullable=False + ) + + # table added in 027c123512befd2bd00a0ef28bd44215e77bece6 but index was + # never created in a migration. 
+ op.create_index( + op.f("ix_artifact_collection__updated"), + "artifact_collection", + ["updated"], + unique=False, + ) + + # columns removed in c53b00bfa1f6850ab43e168c92c627350c090647 + op.drop_column("deployment", "schedule") + op.drop_column("deployment", "is_schedule_active") + + # column removed in 5784c637e7e11a8e88e2b3146e54e9b6c97d50ef + op.drop_column("deployment", "flow_data") + + # Column is no longer a FK since d10c7471a69403bcf88f401091497a2dc8963885 + op.drop_index("ix_flow_run__deployment_id", table_name="flow_run") + + # column removed in eaa7a5063c73718dff56ce4aeb66e53fcafe60e5 + op.drop_column("deployment", "manifest_path") + + # columns removed from orm models in 0b62de684447c6955e04c722c276edac4002fd40 + op.drop_column("deployment_schedule", "catchup") + op.drop_column("deployment_schedule", "max_active_runs") + + +def downgrade(): + op.create_index( + "ix_flow_run__deployment_id", "flow_run", ["deployment_id"], unique=False + ) + op.add_column( + "deployment_schedule", + sa.Column("max_active_runs", sa.INTEGER(), autoincrement=False, nullable=True), + ) + op.add_column( + "deployment_schedule", + sa.Column( + "catchup", + sa.BOOLEAN(), + server_default=sa.text("false"), + autoincrement=False, + nullable=False, + ), + ) + op.add_column( + "deployment", + sa.Column( + "flow_data", + postgresql.JSONB(astext_type=sa.Text()), + autoincrement=False, + nullable=True, + ), + ) + op.add_column( + "deployment", + sa.Column( + "is_schedule_active", + sa.BOOLEAN(), + server_default=sa.text("true"), + autoincrement=False, + nullable=False, + ), + ) + op.add_column( + "deployment", + sa.Column("manifest_path", sa.VARCHAR(), autoincrement=False, nullable=True), + ) + op.add_column( + "deployment", + sa.Column( + "schedule", + postgresql.JSONB(astext_type=sa.Text()), + autoincrement=False, + nullable=True, + ), + ) + op.drop_index( + op.f("ix_artifact_collection__updated"), table_name="artifact_collection" + ) + op.alter_column( + "artifact_collection", "latest_id", existing_type=sa.UUID(), nullable=True + ) diff --git a/src/prefect/server/database/_migrations/versions/sqlite/2024_12_04_144924_a49711513ad4_sync_orm_models_and_migrations.py b/src/prefect/server/database/_migrations/versions/sqlite/2024_12_04_144924_a49711513ad4_sync_orm_models_and_migrations.py new file mode 100644 index 000000000000..f94cb7b6bed8 --- /dev/null +++ b/src/prefect/server/database/_migrations/versions/sqlite/2024_12_04_144924_a49711513ad4_sync_orm_models_and_migrations.py @@ -0,0 +1,127 @@ +"""Sync ORM models and migrations + +Revision ID: a49711513ad4 +Revises: 5952a5498b51 +Create Date: 2024-12-04 14:49:24.099491 + +""" + +import sqlalchemy as sa +from alembic import op +from sqlalchemy.dialects import sqlite + +# revision identifiers, used by Alembic. +revision = "a49711513ad4" +down_revision = "5952a5498b51" +branch_labels = None +depends_on = None + + +def upgrade(): + with op.batch_alter_table("artifact_collection", schema=None) as batch_op: + # table added in 027c123512befd2bd00a0ef28bd44215e77bece6 but index was + # never created in a migration. 
+ batch_op.create_index( + batch_op.f("ix_artifact_collection__updated"), ["updated"], unique=False + ) + # index created on the wrong table in ca9f93463a4c38fce8be972d91e808b5935e5d9c + batch_op.drop_index("ix_artifact__key_created_desc") + + with op.batch_alter_table("artifact", schema=None) as batch_op: + # index created on the wrong table in ca9f93463a4c38fce8be972d91e808b5935e5d9c + batch_op.create_index( + "ix_artifact__key_created_desc", + ["key", sa.text("created DESC")], + unique=False, + postgresql_include=["id", "updated", "type", "task_run_id", "flow_run_id"], + ) + + with op.batch_alter_table("block_document", schema=None) as batch_op: + # Renamed index to remain consistent with PostgreSQL + batch_op.drop_index("ix_block_document__block_type_name_name") + batch_op.create_index( + "ix_block_document__block_type_name__name", + ["block_type_name", "name"], + unique=False, + ) + + with op.batch_alter_table("deployment", schema=None) as batch_op: + # columns removed in c53b00bfa1f6850ab43e168c92c627350c090647 + batch_op.drop_column("schedule") + batch_op.drop_column("is_schedule_active") + + # column removed in 5784c637e7e11a8e88e2b3146e54e9b6c97d50ef + batch_op.drop_column("flow_data") + + # column removed in eaa7a5063c73718dff56ce4aeb66e53fcafe60e5 + batch_op.drop_column("manifest_path") + + with op.batch_alter_table("deployment_schedule", schema=None) as batch_op: + # columns removed from orm models in 0b62de684447c6955e04c722c276edac4002fd40 + batch_op.drop_column("catchup") + batch_op.drop_column("max_active_runs") + + with op.batch_alter_table("flow_run", schema=None) as batch_op: + # Column is no longer a FK since d10c7471a69403bcf88f401091497a2dc8963885 + batch_op.drop_index("ix_flow_run__deployment_id") + # Index accidentally dropped in 519a2ed6e31e2b60136e1a1a163a9cd0a8d3d5c4 + batch_op.create_index( + "ix_flow_run__scheduler_deployment_id_auto_scheduled_next_schedu", + ["deployment_id", "auto_scheduled", "next_scheduled_start_time"], + unique=False, + postgresql_where=sa.text("state_type = 'SCHEDULED'::state_type"), + sqlite_where=sa.text("state_type = 'SCHEDULED'"), + ) + + +def downgrade(): + with op.batch_alter_table("flow_run", schema=None) as batch_op: + batch_op.drop_index( + "ix_flow_run__scheduler_deployment_id_auto_scheduled_next_schedu", + postgresql_where=sa.text("state_type = 'SCHEDULED'::state_type"), + sqlite_where=sa.text("state_type = 'SCHEDULED'"), + ) + batch_op.create_index( + "ix_flow_run__deployment_id", ["deployment_id"], unique=False + ) + + with op.batch_alter_table("deployment_schedule", schema=None) as batch_op: + batch_op.add_column(sa.Column("max_active_runs", sa.INTEGER(), nullable=True)) + batch_op.add_column( + sa.Column( + "catchup", sa.BOOLEAN(), server_default=sa.text("'0'"), nullable=False + ) + ) + + with op.batch_alter_table("deployment", schema=None) as batch_op: + batch_op.add_column( + sa.Column( + "is_schedule_active", + sa.BOOLEAN(), + server_default=sa.text("'1'"), + nullable=False, + ) + ) + batch_op.add_column(sa.Column("flow_data", sqlite.JSON(), nullable=True)) + batch_op.add_column(sa.Column("schedule", sqlite.JSON(), nullable=True)) + batch_op.add_column(sa.Column("manifest_path", sa.VARCHAR(), nullable=True)) + + with op.batch_alter_table("block_document", schema=None) as batch_op: + batch_op.drop_index("ix_block_document__block_type_name__name") + batch_op.create_index( + "ix_block_document__block_type_name_name", + ["block_type_name", "name"], + unique=False, + ) + + with op.batch_alter_table("artifact", schema=None) as 
batch_op: + batch_op.drop_index( + "ix_artifact__key_created_desc", + postgresql_include=["id", "updated", "type", "task_run_id", "flow_run_id"], + ) + + with op.batch_alter_table("artifact_collection", schema=None) as batch_op: + batch_op.create_index( + "ix_artifact__key_created_desc", ["key", "created"], unique=False + ) + batch_op.drop_index(batch_op.f("ix_artifact_collection__updated")) diff --git a/src/prefect/server/database/orm_models.py b/src/prefect/server/database/orm_models.py index 9824bd862cfa..910b232af72a 100644 --- a/src/prefect/server/database/orm_models.py +++ b/src/prefect/server/database/orm_models.py @@ -2,7 +2,7 @@ import uuid from abc import ABC, abstractmethod from pathlib import Path -from typing import Any, Dict, Hashable, List, Tuple, Union, cast +from typing import Any, Dict, Hashable, Iterable, List, Optional, Tuple, Union, cast import pendulum import sqlalchemy as sa @@ -141,7 +141,7 @@ class Flow(Base): tags: Mapped[List[str]] = mapped_column( JSON, server_default="[]", default=list, nullable=False ) - labels: Mapped[Union[schemas.core.KeyValueLabels, None]] = mapped_column( + labels: Mapped[Optional[schemas.core.KeyValueLabels]] = mapped_column( JSON, nullable=True ) @@ -151,6 +151,9 @@ class Flow(Base): __table_args__ = ( sa.UniqueConstraint("name"), sa.Index("ix_flow__created", "created"), + sa.Index("trgm_ix_flow_name", "name", postgresql_using="gin").ddl_if( + dialect="postgresql" + ), ) @@ -178,7 +181,7 @@ class FlowRunState(Base): default=schemas.states.StateDetails, nullable=False, ) - _data = sa.Column(sa.JSON, nullable=True, name="data") + _data: Optional[Any] = sa.Column(JSON, nullable=True, name="data") result_artifact_id = sa.Column( UUID(), @@ -216,14 +219,17 @@ def data(self): def as_state(self) -> schemas.states.State: return schemas.states.State.model_validate(self, from_attributes=True) - __table_args__ = ( - sa.Index( - "uq_flow_run_state__flow_run_id_timestamp_desc", - "flow_run_id", - sa.desc("timestamp"), - unique=True, - ), - ) + @declared_attr.directive + @classmethod + def __table_args__(cls) -> Iterable[sa.Index]: + return ( + sa.Index( + "uq_flow_run_state__flow_run_id_timestamp_desc", + cls.flow_run_id, + cls.timestamp.desc(), + unique=True, + ), + ) class TaskRunState(Base): @@ -252,7 +258,7 @@ class TaskRunState(Base): default=schemas.states.StateDetails, nullable=False, ) - _data = sa.Column(sa.JSON, nullable=True, name="data") + _data: Optional[Any] = sa.Column(JSON, nullable=True, name="data") result_artifact_id = sa.Column( UUID(), @@ -290,14 +296,17 @@ def data(self): def as_state(self) -> schemas.states.State: return schemas.states.State.model_validate(self, from_attributes=True) - __table_args__ = ( - sa.Index( - "uq_task_run_state__task_run_id_timestamp_desc", - "task_run_id", - sa.desc("timestamp"), - unique=True, - ), - ) + @declared_attr.directive + @classmethod + def __table_args__(cls) -> Iterable[sa.Index]: + return ( + sa.Index( + "uq_task_run_state__task_run_id_timestamp_desc", + cls.task_run_id, + cls.timestamp.desc(), + unique=True, + ), + ) class Artifact(Base): @@ -330,12 +339,27 @@ class Artifact(Base): # Suffixed with underscore as attribute name 'metadata' is reserved for the MetaData instance when using a declarative base class. 
metadata_ = sa.Column(sa.JSON, nullable=True) - __table_args__ = ( - sa.Index( - "ix_artifact__key", - "key", - ), - ) + @declared_attr.directive + @classmethod + def __table_args__(cls) -> Iterable[sa.Index]: + return ( + sa.Index( + "ix_artifact__key", + cls.key, + ), + sa.Index( + "ix_artifact__key_created_desc", + cls.key, + cls.created.desc(), + postgresql_include=[ + "id", + "updated", + "type", + "task_run_id", + "flow_run_id", + ], + ), + ) class ArtifactCollection(Base): @@ -383,13 +407,16 @@ class TaskRunStateCache(Base): ) task_run_state_id = sa.Column(UUID(), nullable=False) - __table_args__ = ( - sa.Index( - "ix_task_run_state_cache__cache_key_created_desc", - "cache_key", - sa.desc("created"), - ), - ) + @declared_attr.directive + @classmethod + def __table_args__(cls) -> Iterable[sa.Index]: + return ( + sa.Index( + "ix_task_run_state_cache__cache_key_created_desc", + cls.cache_key, + cls.created.desc(), + ), + ) class Run(Base): @@ -407,14 +434,16 @@ class Run(Base): ) state_type = sa.Column(sa.Enum(schemas.states.StateType, name="state_type")) state_name = sa.Column(sa.String, nullable=True) - state_timestamp: Mapped[Union[pendulum.DateTime, None]] = mapped_column( + state_timestamp: Mapped[Optional[pendulum.DateTime]] = mapped_column( Timestamp(), nullable=True ) run_count = sa.Column(sa.Integer, server_default="0", default=0, nullable=False) - expected_start_time: Mapped[pendulum.DateTime] = mapped_column(Timestamp()) + expected_start_time: Mapped[Optional[pendulum.DateTime]] = mapped_column( + Timestamp() + ) next_scheduled_start_time = sa.Column(Timestamp()) - start_time: Mapped[pendulum.DateTime] = mapped_column(Timestamp()) - end_time: Mapped[pendulum.DateTime] = mapped_column(Timestamp()) + start_time: Mapped[Optional[pendulum.DateTime]] = mapped_column(Timestamp()) + end_time: Mapped[Optional[pendulum.DateTime]] = mapped_column(Timestamp()) total_run_time: Mapped[datetime.timedelta] = mapped_column( sa.Interval(), server_default="0", @@ -501,7 +530,7 @@ class FlowRun(Run): index=True, ) - deployment_id: Mapped[Union[uuid.UUID, None]] = mapped_column(UUID(), nullable=True) + deployment_id: Mapped[Optional[uuid.UUID]] = mapped_column(UUID(), nullable=True) work_queue_name = sa.Column(sa.String, index=True) flow_version = sa.Column(sa.String, index=True) deployment_version = sa.Column(sa.String, index=True) @@ -517,11 +546,11 @@ class FlowRun(Run): tags: Mapped[List[str]] = mapped_column( JSON, server_default="[]", default=list, nullable=False ) - labels: Mapped[Union[schemas.core.KeyValueLabels, None]] = mapped_column( + labels: Mapped[Optional[schemas.core.KeyValueLabels]] = mapped_column( JSON, nullable=True ) - created_by: Mapped[Union[schemas.core.CreatedBy, None]] = mapped_column( + created_by: Mapped[Optional[schemas.core.CreatedBy]] = mapped_column( Pydantic(schemas.core.CreatedBy), server_default=None, default=None, @@ -538,7 +567,7 @@ class FlowRun(Run): index=True, ) - parent_task_run_id: Mapped[uuid.UUID] = mapped_column( + parent_task_run_id: Mapped[Optional[uuid.UUID]] = mapped_column( UUID(), sa.ForeignKey( "task_run.id", @@ -563,7 +592,7 @@ class FlowRun(Run): index=True, ) - work_queue_id: Mapped[Union[uuid.UUID, None]] = mapped_column( + work_queue_id: Mapped[Optional[uuid.UUID]] = mapped_column( UUID, sa.ForeignKey("work_queue.id", ondelete="SET NULL"), nullable=True, @@ -629,50 +658,65 @@ def set_state(self, state): foreign_keys=[work_queue_id], ) - __table_args__ = ( - sa.Index( - "uq_flow_run__flow_id_idempotency_key", - "flow_id", - 
"idempotency_key", - unique=True, - ), - sa.Index( - "ix_flow_run__coalesce_start_time_expected_start_time_desc", - sa.desc(coalesce("start_time", "expected_start_time")), - ), - sa.Index( - "ix_flow_run__coalesce_start_time_expected_start_time_asc", - sa.asc(coalesce("start_time", "expected_start_time")), - ), - sa.Index( - "ix_flow_run__expected_start_time_desc", - sa.desc("expected_start_time"), - ), - sa.Index( - "ix_flow_run__next_scheduled_start_time_asc", - sa.asc("next_scheduled_start_time"), - ), - sa.Index( - "ix_flow_run__end_time_desc", - sa.desc("end_time"), - ), - sa.Index( - "ix_flow_run__start_time", - "start_time", - ), - sa.Index( - "ix_flow_run__state_type", - "state_type", - ), - sa.Index( - "ix_flow_run__state_name", - "state_name", - ), - sa.Index( - "ix_flow_run__state_timestamp", - "state_timestamp", - ), - ) + @declared_attr.directive + @classmethod + def __table_args__(cls) -> Iterable[sa.Index]: + return ( + sa.Index( + "uq_flow_run__flow_id_idempotency_key", + cls.flow_id, + cls.idempotency_key, + unique=True, + ), + sa.Index( + "ix_flow_run__coalesce_start_time_expected_start_time_desc", + coalesce(cls.start_time, cls.expected_start_time).desc(), + ), + sa.Index( + "ix_flow_run__coalesce_start_time_expected_start_time_asc", + coalesce(cls.start_time, cls.expected_start_time).asc(), + ), + sa.Index( + "ix_flow_run__expected_start_time_desc", + cls.expected_start_time.desc(), + ), + sa.Index( + "ix_flow_run__next_scheduled_start_time_asc", + cls.next_scheduled_start_time.asc(), + ), + sa.Index( + "ix_flow_run__end_time_desc", + cls.end_time.desc(), + ), + sa.Index( + "ix_flow_run__start_time", + cls.start_time, + ), + sa.Index( + "ix_flow_run__state_type", + cls.state_type, + ), + sa.Index( + "ix_flow_run__state_name", + cls.state_name, + ), + sa.Index( + "ix_flow_run__state_timestamp", + cls.state_timestamp, + ), + sa.Index("trgm_ix_flow_run_name", cls.name, postgresql_using="gin").ddl_if( + dialect="postgresql" + ), + sa.Index( + # index names are at most 63 characters long. 
+ "ix_flow_run__scheduler_deployment_id_auto_scheduled_next_schedu", + cls.deployment_id, + cls.auto_scheduled, + cls.next_scheduled_start_time, + postgresql_where=cls.state_type == schemas.states.StateType.SCHEDULED, + sqlite_where=cls.state_type == schemas.states.StateType.SCHEDULED, + ), + ) class TaskRun(Run): @@ -719,7 +763,7 @@ class TaskRun(Run): tags: Mapped[List[str]] = mapped_column( JSON, server_default="[]", default=list, nullable=False ) - labels: Mapped[Union[schemas.core.KeyValueLabels, None]] = mapped_column( + labels: Mapped[Optional[schemas.core.KeyValueLabels]] = mapped_column( JSON, nullable=True ) @@ -786,43 +830,49 @@ def set_state(self, state): uselist=False, ) - __table_args__ = ( - sa.Index( - "uq_task_run__flow_run_id_task_key_dynamic_key", - "flow_run_id", - "task_key", - "dynamic_key", - unique=True, - ), - sa.Index( - "ix_task_run__expected_start_time_desc", - sa.desc("expected_start_time"), - ), - sa.Index( - "ix_task_run__next_scheduled_start_time_asc", - sa.asc("next_scheduled_start_time"), - ), - sa.Index( - "ix_task_run__end_time_desc", - sa.desc("end_time"), - ), - sa.Index( - "ix_task_run__start_time", - "start_time", - ), - sa.Index( - "ix_task_run__state_type", - "state_type", - ), - sa.Index( - "ix_task_run__state_name", - "state_name", - ), - sa.Index( - "ix_task_run__state_timestamp", - "state_timestamp", - ), - ) + @declared_attr.directive + @classmethod + def __table_args__(cls) -> Iterable[sa.Index]: + return ( + sa.Index( + "uq_task_run__flow_run_id_task_key_dynamic_key", + cls.flow_run_id, + cls.task_key, + cls.dynamic_key, + unique=True, + ), + sa.Index( + "ix_task_run__expected_start_time_desc", + cls.expected_start_time.desc(), + ), + sa.Index( + "ix_task_run__next_scheduled_start_time_asc", + cls.next_scheduled_start_time.asc(), + ), + sa.Index( + "ix_task_run__end_time_desc", + cls.end_time.desc(), + ), + sa.Index( + "ix_task_run__start_time", + cls.start_time, + ), + sa.Index( + "ix_task_run__state_type", + cls.state_type, + ), + sa.Index( + "ix_task_run__state_name", + cls.state_name, + ), + sa.Index( + "ix_task_run__state_timestamp", + cls.state_timestamp, + ), + sa.Index("trgm_ix_task_run_name", cls.name, postgresql_using="gin").ddl_if( + dialect="postgresql" + ), + ) class DeploymentSchedule(Base): @@ -868,7 +918,7 @@ def job_variables(self): index=True, ) - work_queue_id: Mapped[uuid.UUID] = mapped_column( + work_queue_id: Mapped[Optional[uuid.UUID]] = mapped_column( UUID, sa.ForeignKey("work_queue.id", ondelete="SET NULL"), nullable=True, @@ -885,21 +935,21 @@ def job_variables(self): ) # deprecated in favor of `concurrency_limit_id` FK - _concurrency_limit: Mapped[Union[int, None]] = mapped_column( + _concurrency_limit: Mapped[Optional[int]] = mapped_column( sa.Integer, default=None, nullable=True, name="concurrency_limit" ) - concurrency_limit_id: Mapped[Union[uuid.UUID, None]] = mapped_column( + concurrency_limit_id: Mapped[Optional[uuid.UUID]] = mapped_column( UUID, sa.ForeignKey("concurrency_limit_v2.id", ondelete="SET NULL"), nullable=True, ) global_concurrency_limit: Mapped[ - Union["ConcurrencyLimitV2", None] + Optional["ConcurrencyLimitV2"] ] = sa.orm.relationship( lazy="selectin", ) concurrency_options: Mapped[ - Union[schemas.core.ConcurrencyOptions, None] + Optional[schemas.core.ConcurrencyOptions] ] = mapped_column( Pydantic(schemas.core.ConcurrencyOptions), server_default=None, @@ -910,7 +960,7 @@ def job_variables(self): tags: Mapped[List[str]] = mapped_column( JSON, server_default="[]", default=list, nullable=False ) 
- labels: Mapped[Union[schemas.core.KeyValueLabels, None]] = mapped_column( + labels: Mapped[Optional[schemas.core.KeyValueLabels]] = mapped_column( JSON, nullable=True ) parameters = sa.Column(JSON, server_default="{}", default=dict, nullable=False) @@ -963,6 +1013,9 @@ def job_variables(self): "ix_deployment__created", "created", ), + sa.Index("trgm_ix_deployment_name", "name", postgresql_using="gin").ddl_if( + dialect="postgresql" + ), ) @@ -1029,6 +1082,9 @@ class BlockType(Base): "slug", unique=True, ), + sa.Index("trgm_ix_block_type_name", "name", postgresql_using="gin").ddl_if( + dialect="postgresql" + ), ) @@ -1059,6 +1115,9 @@ class BlockSchema(Base): unique=True, ), sa.Index("ix_block_schema__created", "created"), + sa.Index( + "ix_block_schema__capabilities", "capabilities", postgresql_using="gin" + ).ddl_if(dialect="postgresql"), ) @@ -1108,6 +1167,10 @@ class BlockDocument(Base): "name", unique=True, ), + sa.Index("ix_block_document__block_type_name__name", "block_type_name", "name"), + sa.Index("trgm_ix_block_document_name", "name", postgresql_using="gin").ddl_if( + dialect="postgresql" + ), ) async def encrypt_data(self, session, data): @@ -1181,9 +1244,9 @@ class WorkQueue(Base): sa.Integer, nullable=True, ) - priority: Mapped[int] = mapped_column(sa.Integer, index=True, nullable=False) + priority: Mapped[int] = mapped_column(sa.Integer, nullable=False) - last_polled: Mapped[Union[pendulum.DateTime, None]] = mapped_column( + last_polled: Mapped[Optional[pendulum.DateTime]] = mapped_column( Timestamp(), nullable=True, ) @@ -1194,8 +1257,6 @@ class WorkQueue(Base): server_default=WorkQueueStatus.NOT_READY.value, ) - __table_args__ = (sa.UniqueConstraint("work_pool_id", "name"),) - work_pool_id: Mapped[uuid.UUID] = mapped_column( UUID, sa.ForeignKey("work_pool.id", ondelete="cascade"), @@ -1209,18 +1270,30 @@ class WorkQueue(Base): foreign_keys=[work_pool_id], ) + __table_args__ = ( + sa.UniqueConstraint("work_pool_id", "name"), + sa.Index("ix_work_queue__work_pool_id_priority", "work_pool_id", "priority"), + sa.Index("trgm_ix_work_queue_name", "name", postgresql_using="gin").ddl_if( + dialect="postgresql" + ), + ) + class WorkPool(Base): """SQLAlchemy model of an worker""" name = sa.Column(sa.String, nullable=False) description = sa.Column(sa.String) - type: Mapped[str] = mapped_column(sa.String) + type: Mapped[str] = mapped_column(sa.String, index=True) base_job_template = sa.Column(JSON, nullable=False, server_default="{}", default={}) is_paused: Mapped[bool] = mapped_column( sa.Boolean, nullable=False, server_default="0", default=False ) - default_queue_id: Mapped[UUID] = mapped_column(UUID, nullable=True) + default_queue_id: Mapped[UUID] = mapped_column( + UUID, + sa.ForeignKey("work_queue.id", ondelete="RESTRICT", use_alter=True), + nullable=True, + ) concurrency_limit = sa.Column( sa.Integer, nullable=True, @@ -1232,10 +1305,12 @@ class WorkPool(Base): default=WorkPoolStatus.NOT_READY, server_default=WorkPoolStatus.NOT_READY.value, ) - last_transitioned_status_at: Mapped[Union[pendulum.DateTime, None]] = mapped_column( + last_transitioned_status_at: Mapped[Optional[pendulum.DateTime]] = mapped_column( Timestamp(), nullable=True ) - last_status_event_id: Mapped[uuid.UUID] = mapped_column(UUID, nullable=True) + last_status_event_id: Mapped[Optional[uuid.UUID]] = mapped_column( + UUID, nullable=True + ) __table_args__ = (sa.UniqueConstraint("name"),) @@ -1258,7 +1333,6 @@ def work_pool_id(cls): nullable=False, server_default=now(), default=lambda: pendulum.now("UTC"), - 
index=True, ) heartbeat_interval_seconds = sa.Column(sa.Integer, nullable=True) @@ -1269,7 +1343,14 @@ def work_pool_id(cls): server_default=WorkerStatus.OFFLINE.value, ) - __table_args__ = (sa.UniqueConstraint("work_pool_id", "name"),) + __table_args__ = ( + sa.UniqueConstraint("work_pool_id", "name"), + sa.Index( + "ix_worker__work_pool_id_last_heartbeat_time", + "work_pool_id", + "last_heartbeat_time", + ), + ) class Agent(Base): @@ -1325,7 +1406,7 @@ class FlowRunNotificationQueue(Base): class Variable(Base): name = sa.Column(sa.String, nullable=False) - value = sa.Column(sa.JSON, nullable=False) + value: Optional[Any] = sa.Column(JSON) tags = sa.Column(JSON, server_default="[]", default=list, nullable=False) __table_args__ = (sa.UniqueConstraint("name"),) From ba8aad9738f8fd2145cd1ca47fba3e167ea185cf Mon Sep 17 00:00:00 2001 From: Martijn Pieters Date: Thu, 5 Dec 2024 21:30:22 +0000 Subject: [PATCH 16/92] Use SQLAlchemy 2.0 style for ORM models, make typing complete (#16230) --- setup.cfg | 3 +- src/prefect/server/database/orm_models.py | 1071 +++++++++------------ 2 files changed, 452 insertions(+), 622 deletions(-) diff --git a/setup.cfg b/setup.cfg index fec375d062b9..f32eb182605d 100644 --- a/setup.cfg +++ b/setup.cfg @@ -98,7 +98,8 @@ branch = True exclude_lines = # Don't complain about abstract methods, they aren't run: @(abc\.)?abstractmethod - + # if TYPE_CHECKING: lines are never nun + if TYPE_CHECKING: omit = src/prefect/server/database/migrations/versions/* diff --git a/src/prefect/server/database/orm_models.py b/src/prefect/server/database/orm_models.py index 910b232af72a..49d89e6b6559 100644 --- a/src/prefect/server/database/orm_models.py +++ b/src/prefect/server/database/orm_models.py @@ -2,11 +2,22 @@ import uuid from abc import ABC, abstractmethod from pathlib import Path -from typing import Any, Dict, Hashable, Iterable, List, Optional, Tuple, Union, cast +from typing import ( + TYPE_CHECKING, + Any, + ClassVar, + Dict, + Hashable, + Iterable, + Optional, + Union, + cast, +) import pendulum import sqlalchemy as sa from sqlalchemy import FetchedValue +from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.ext.hybrid import hybrid_property from sqlalchemy.orm import ( DeclarativeBase, @@ -14,12 +25,12 @@ declared_attr, mapped_column, registry, + relationship, synonym, ) -from sqlalchemy.sql.expression import ColumnElement +from sqlalchemy.sql import roles from sqlalchemy.sql.functions import coalesce -import prefect import prefect.server.schemas as schemas from prefect.server.events.actions import ServerActionTypes from prefect.server.events.schemas.automations import ( @@ -55,7 +66,7 @@ class Base(DeclarativeBase): and provides ID, created, and updated columns """ - registry = registry( + registry: ClassVar[sa.orm.registry] = registry( metadata=sa.schema.MetaData( # define naming conventions for our Base class to use # sqlalchemy will use the following templated strings @@ -82,6 +93,7 @@ class Base(DeclarativeBase): ), type_annotation_map={ uuid.UUID: UUID, + pendulum.DateTime: Timestamp, }, ) @@ -93,13 +105,13 @@ class Base(DeclarativeBase): # an INSERT, for example # # https://docs.sqlalchemy.org/en/14/orm/extensions/asyncio.html#preventing-implicit-io-when-using-asyncsession - __mapper_args__ = {"eager_defaults": True} + __mapper_args__: dict[str, Any] = {"eager_defaults": True} - def __repr__(self): + def __repr__(self) -> str: return f"{self.__class__.__name__}(id={self.id})" - @declared_attr - def __tablename__(cls): + @declared_attr.directive + def 
__tablename__(cls) -> str: """ By default, turn the model's camel-case class name into a snake-case table name. Override by providing @@ -114,18 +126,13 @@ def __tablename__(cls): ) created: Mapped[pendulum.DateTime] = mapped_column( - Timestamp(), - nullable=False, - server_default=now(), - default=lambda: pendulum.now("UTC"), + server_default=now(), default=lambda: pendulum.now("UTC") ) # onupdate is only called when statements are actually issued # against the database. until COMMIT is issued, this column # will not be updated - updated = sa.Column( - Timestamp(), - nullable=False, + updated: Mapped[pendulum.DateTime] = mapped_column( index=True, server_default=now(), default=lambda: pendulum.now("UTC"), @@ -137,18 +144,18 @@ def __tablename__(cls): class Flow(Base): """SQLAlchemy mixin of a flow.""" - name = sa.Column(sa.String, nullable=False) - tags: Mapped[List[str]] = mapped_column( - JSON, server_default="[]", default=list, nullable=False + name: Mapped[str] + tags: Mapped[list[str]] = mapped_column(JSON, server_default="[]", default=list) + labels: Mapped[Optional[schemas.core.KeyValueLabels]] = mapped_column(JSON) + + flow_runs: Mapped[list["FlowRun"]] = relationship( + back_populates="flow", lazy="raise" ) - labels: Mapped[Optional[schemas.core.KeyValueLabels]] = mapped_column( - JSON, nullable=True + deployments: Mapped[list["Deployment"]] = relationship( + back_populates="flow", lazy="raise" ) - flow_runs = sa.orm.relationship("FlowRun", back_populates="flow", lazy="raise") - deployments = sa.orm.relationship("Deployment", back_populates="flow", lazy="raise") - - __table_args__ = ( + __table_args__: Any = ( sa.UniqueConstraint("name"), sa.Index("ix_flow__created", "created"), sa.Index("trgm_ix_flow_name", "name", postgresql_using="gin").ddl_if( @@ -160,61 +167,49 @@ class Flow(Base): class FlowRunState(Base): """SQLAlchemy mixin of a flow run state.""" - flow_run_id = sa.Column( - UUID(), sa.ForeignKey("flow_run.id", ondelete="cascade"), nullable=False + flow_run_id: Mapped[uuid.UUID] = mapped_column( + sa.ForeignKey("flow_run.id", ondelete="cascade") ) - type = sa.Column( - sa.Enum(schemas.states.StateType, name="state_type"), nullable=False, index=True + type: Mapped[schemas.states.StateType] = mapped_column( + sa.Enum(schemas.states.StateType, name="state_type"), index=True ) - timestamp = sa.Column( - Timestamp(), - nullable=False, - server_default=now(), - default=lambda: pendulum.now("UTC"), + timestamp: Mapped[pendulum.DateTime] = mapped_column( + server_default=now(), default=lambda: pendulum.now("UTC") ) - name = sa.Column(sa.String, nullable=False, index=True) - message = sa.Column(sa.String) - state_details = sa.Column( + name: Mapped[str] = mapped_column(index=True) + message: Mapped[Optional[str]] + state_details: Mapped[schemas.states.StateDetails] = mapped_column( Pydantic(schemas.states.StateDetails), server_default="{}", default=schemas.states.StateDetails, - nullable=False, ) - _data: Optional[Any] = sa.Column(JSON, nullable=True, name="data") + _data: Mapped[Optional[Any]] = mapped_column(JSON, name="data") - result_artifact_id = sa.Column( - UUID(), - sa.ForeignKey( - "artifact.id", - ondelete="SET NULL", - use_alter=True, - ), + result_artifact_id: Mapped[Optional[uuid.UUID]] = mapped_column( + sa.ForeignKey("artifact.id", ondelete="SET NULL", use_alter=True), index=True, ) - _result_artifact = sa.orm.relationship( - "Artifact", + _result_artifact: Mapped[Optional["Artifact"]] = relationship( lazy="selectin", foreign_keys=[result_artifact_id], 
primaryjoin="Artifact.id==FlowRunState.result_artifact_id", ) @hybrid_property - def data(self): + def data(self) -> Optional[Any]: if self._data: # ensures backwards compatibility for results stored on state objects return self._data if not self.result_artifact_id: # do not try to load the relationship if there's no artifact id return None + if TYPE_CHECKING: + assert self._result_artifact is not None return self._result_artifact.data - flow_run = sa.orm.relationship( - "FlowRun", - lazy="raise", - foreign_keys=[flow_run_id], - ) + flow_run: Mapped["FlowRun"] = relationship(lazy="raise", foreign_keys=[flow_run_id]) def as_state(self) -> schemas.states.State: return schemas.states.State.model_validate(self, from_attributes=True) @@ -237,61 +232,48 @@ class TaskRunState(Base): # this column isn't explicitly indexed because it is included in # the unique compound index on (task_run_id, timestamp) - task_run_id = sa.Column( - UUID(), sa.ForeignKey("task_run.id", ondelete="cascade"), nullable=False + task_run_id: Mapped[uuid.UUID] = mapped_column( + sa.ForeignKey("task_run.id", ondelete="cascade") ) - type = sa.Column( - sa.Enum(schemas.states.StateType, name="state_type"), nullable=False, index=True + type: Mapped[schemas.states.StateType] = mapped_column( + sa.Enum(schemas.states.StateType, name="state_type"), index=True ) - timestamp = sa.Column( - Timestamp(), - nullable=False, - server_default=now(), - default=lambda: pendulum.now("UTC"), + timestamp: Mapped[pendulum.DateTime] = mapped_column( + server_default=now(), default=lambda: pendulum.now("UTC") ) - name = sa.Column(sa.String, nullable=False, index=True) - message = sa.Column(sa.String) - state_details = sa.Column( + name: Mapped[str] = mapped_column(index=True) + message: Mapped[Optional[str]] + state_details: Mapped[schemas.states.StateDetails] = mapped_column( Pydantic(schemas.states.StateDetails), server_default="{}", default=schemas.states.StateDetails, - nullable=False, ) - _data: Optional[Any] = sa.Column(JSON, nullable=True, name="data") + _data: Mapped[Optional[Any]] = mapped_column(JSON, name="data") - result_artifact_id = sa.Column( - UUID(), - sa.ForeignKey( - "artifact.id", - ondelete="SET NULL", - use_alter=True, - ), - index=True, + result_artifact_id: Mapped[Optional[uuid.UUID]] = mapped_column( + sa.ForeignKey("artifact.id", ondelete="SET NULL", use_alter=True), index=True ) - _result_artifact = sa.orm.relationship( - "Artifact", + _result_artifact: Mapped[Optional["Artifact"]] = relationship( lazy="selectin", foreign_keys=[result_artifact_id], primaryjoin="Artifact.id==TaskRunState.result_artifact_id", ) @hybrid_property - def data(self): + def data(self) -> Optional[Any]: if self._data: # ensures backwards compatibility for results stored on state objects return self._data if not self.result_artifact_id: # do not try to load the relationship if there's no artifact id return None + if TYPE_CHECKING: + assert self._result_artifact is not None return self._result_artifact.data - task_run = sa.orm.relationship( - "TaskRun", - lazy="raise", - foreign_keys=[task_run_id], - ) + task_run: Mapped["TaskRun"] = relationship(lazy="raise", foreign_keys=[task_run_id]) def as_state(self) -> schemas.states.State: return schemas.states.State.model_validate(self, from_attributes=True) @@ -314,30 +296,18 @@ class Artifact(Base): SQLAlchemy model of artifacts. 
""" - key = sa.Column( - sa.String, - nullable=True, - index=True, - ) + key: Mapped[Optional[str]] = mapped_column(index=True) - task_run_id = sa.Column( - UUID(), - nullable=True, - index=True, - ) + task_run_id: Mapped[Optional[uuid.UUID]] = mapped_column(index=True) - flow_run_id = sa.Column( - UUID(), - nullable=True, - index=True, - ) + flow_run_id: Mapped[Optional[uuid.UUID]] = mapped_column(index=True) - type = sa.Column(sa.String) - data = sa.Column(sa.JSON, nullable=True) - description = sa.Column(sa.String, nullable=True) + type: Mapped[Optional[str]] + data: Mapped[Optional[Any]] = mapped_column(sa.JSON) + description: Mapped[Optional[str]] # Suffixed with underscore as attribute name 'metadata' is reserved for the MetaData instance when using a declarative base class. - metadata_ = sa.Column(sa.JSON, nullable=True) + metadata_: Mapped[Optional[dict[str, str]]] = mapped_column(sa.JSON) @declared_attr.directive @classmethod @@ -363,29 +333,20 @@ def __table_args__(cls) -> Iterable[sa.Index]: class ArtifactCollection(Base): - key = sa.Column( - sa.String, - nullable=False, - ) + key: Mapped[str] - latest_id: Mapped[UUID] = mapped_column(UUID(), nullable=False) + latest_id: Mapped[uuid.UUID] - task_run_id = sa.Column( - UUID(), - nullable=True, - ) + task_run_id: Mapped[Optional[uuid.UUID]] - flow_run_id = sa.Column( - UUID(), - nullable=True, - ) + flow_run_id: Mapped[Optional[uuid.UUID]] - type = sa.Column(sa.String) - data = sa.Column(sa.JSON, nullable=True) - description = sa.Column(sa.String, nullable=True) - metadata_ = sa.Column(sa.JSON, nullable=True) + type: Mapped[Optional[str]] + data: Mapped[Optional[Any]] = mapped_column(sa.JSON) + description: Mapped[Optional[str]] + metadata_: Mapped[Optional[dict[str, str]]] = mapped_column(sa.JSON) - __table_args__ = ( + __table_args__: Any = ( sa.UniqueConstraint("key"), sa.Index( "ix_artifact_collection__key_latest_id", @@ -400,12 +361,9 @@ class TaskRunStateCache(Base): SQLAlchemy model of a task run state cache. 
""" - cache_key = sa.Column(sa.String, nullable=False) - cache_expiration = sa.Column( - Timestamp(), - nullable=True, - ) - task_run_state_id = sa.Column(UUID(), nullable=False) + cache_key: Mapped[str] = mapped_column() + cache_expiration: Mapped[Optional[pendulum.DateTime]] + task_run_state_id: Mapped[uuid.UUID] @declared_attr.directive @classmethod @@ -426,43 +384,36 @@ class Run(Base): __abstract__ = True - name: Mapped[str] = mapped_column( - sa.String, - default=lambda: generate_slug(2), - nullable=False, - index=True, - ) - state_type = sa.Column(sa.Enum(schemas.states.StateType, name="state_type")) - state_name = sa.Column(sa.String, nullable=True) - state_timestamp: Mapped[Optional[pendulum.DateTime]] = mapped_column( - Timestamp(), nullable=True - ) - run_count = sa.Column(sa.Integer, server_default="0", default=0, nullable=False) - expected_start_time: Mapped[Optional[pendulum.DateTime]] = mapped_column( - Timestamp() - ) - next_scheduled_start_time = sa.Column(Timestamp()) - start_time: Mapped[Optional[pendulum.DateTime]] = mapped_column(Timestamp()) - end_time: Mapped[Optional[pendulum.DateTime]] = mapped_column(Timestamp()) + name: Mapped[str] = mapped_column(default=lambda: generate_slug(2), index=True) + state_type: Mapped[Optional[schemas.states.StateType]] = mapped_column( + sa.Enum(schemas.states.StateType, name="state_type") + ) + state_name: Mapped[Optional[str]] + state_timestamp: Mapped[Optional[pendulum.DateTime]] + run_count: Mapped[int] = mapped_column(server_default="0", default=0) + expected_start_time: Mapped[Optional[pendulum.DateTime]] + next_scheduled_start_time: Mapped[Optional[pendulum.DateTime]] + start_time: Mapped[Optional[pendulum.DateTime]] + end_time: Mapped[Optional[pendulum.DateTime]] total_run_time: Mapped[datetime.timedelta] = mapped_column( - sa.Interval(), - server_default="0", - default=datetime.timedelta(0), - nullable=False, + server_default="0", default=datetime.timedelta(0) ) @hybrid_property - def estimated_run_time(self): + def estimated_run_time(self) -> datetime.timedelta: """Total run time is incremented in the database whenever a RUNNING state is exited. 
To give up-to-date estimates, we estimate incremental run time for any runs currently in a RUNNING state.""" if self.state_type and self.state_type == schemas.states.StateType.RUNNING: + if TYPE_CHECKING: + assert self.state_timestamp is not None return self.total_run_time + (pendulum.now("UTC") - self.state_timestamp) else: return self.total_run_time - @estimated_run_time.expression - def estimated_run_time(cls): + @estimated_run_time.inplace.expression + @classmethod + def _estimated_run_time_expression(cls) -> sa.Label[datetime.timedelta]: return ( sa.select( sa.case( @@ -489,7 +440,11 @@ def estimated_start_time_delta(self) -> datetime.timedelta: give up-to-date estimates, we estimate lateness for any runs that don't have a start time and are not in a final state and were expected to start already.""" - if self.start_time and self.start_time > self.expected_start_time: + if ( + self.start_time + and self.expected_start_time is not None + and self.start_time > (self.expected_start_time) + ): return self.start_time - self.expected_start_time elif ( self.start_time is None @@ -501,8 +456,11 @@ def estimated_start_time_delta(self) -> datetime.timedelta: else: return datetime.timedelta(0) - @estimated_start_time_delta.expression - def estimated_start_time_delta(cls): + @estimated_start_time_delta.inplace.expression + @classmethod + def _estimated_start_time_delta_expression( + cls, + ) -> sa.SQLColumnExpression[datetime.timedelta]: return sa.case( ( cls.start_time > cls.expected_start_time, @@ -524,97 +482,72 @@ class FlowRun(Run): """SQLAlchemy model of a flow run.""" flow_id: Mapped[uuid.UUID] = mapped_column( - UUID(), - sa.ForeignKey("flow.id", ondelete="cascade"), - nullable=False, - index=True, + sa.ForeignKey("flow.id", ondelete="cascade"), index=True ) - deployment_id: Mapped[Optional[uuid.UUID]] = mapped_column(UUID(), nullable=True) - work_queue_name = sa.Column(sa.String, index=True) - flow_version = sa.Column(sa.String, index=True) - deployment_version = sa.Column(sa.String, index=True) - parameters = sa.Column(JSON, server_default="{}", default=dict, nullable=False) - idempotency_key = sa.Column(sa.String) - context = sa.Column(JSON, server_default="{}", default=dict, nullable=False) - empirical_policy = sa.Column( + deployment_id: Mapped[Optional[uuid.UUID]] = mapped_column() + work_queue_name: Mapped[Optional[str]] = mapped_column(index=True) + flow_version: Mapped[Optional[str]] = mapped_column(index=True) + deployment_version: Mapped[Optional[str]] = mapped_column(index=True) + parameters: Mapped[dict[str, Any]] = mapped_column( + JSON, server_default="{}", default=dict + ) + idempotency_key: Mapped[Optional[str]] = mapped_column() + context: Mapped[dict[str, Any]] = mapped_column( + JSON, server_default="{}", default=dict + ) + empirical_policy: Mapped[schemas.core.FlowRunPolicy] = mapped_column( Pydantic(schemas.core.FlowRunPolicy), server_default="{}", default=schemas.core.FlowRunPolicy, - nullable=False, - ) - tags: Mapped[List[str]] = mapped_column( - JSON, server_default="[]", default=list, nullable=False - ) - labels: Mapped[Optional[schemas.core.KeyValueLabels]] = mapped_column( - JSON, nullable=True ) + tags: Mapped[list[str]] = mapped_column(JSON, server_default="[]", default=list) + labels: Mapped[Optional[schemas.core.KeyValueLabels]] = mapped_column(JSON) created_by: Mapped[Optional[schemas.core.CreatedBy]] = mapped_column( - Pydantic(schemas.core.CreatedBy), - server_default=None, - default=None, - nullable=True, + Pydantic(schemas.core.CreatedBy) ) - 
infrastructure_pid = sa.Column(sa.String) - job_variables = sa.Column(JSON, server_default="{}", default=dict, nullable=True) + infrastructure_pid: Mapped[Optional[str]] + job_variables: Mapped[Optional[dict[str, Any]]] = mapped_column( + JSON, server_default="{}", default=dict + ) - infrastructure_document_id = sa.Column( - UUID, - sa.ForeignKey("block_document.id", ondelete="CASCADE"), - nullable=True, - index=True, + infrastructure_document_id: Mapped[Optional[uuid.UUID]] = mapped_column( + sa.ForeignKey("block_document.id", ondelete="CASCADE"), index=True ) parent_task_run_id: Mapped[Optional[uuid.UUID]] = mapped_column( - UUID(), - sa.ForeignKey( - "task_run.id", - ondelete="SET NULL", - use_alter=True, - ), - index=True, + sa.ForeignKey("task_run.id", ondelete="SET NULL", use_alter=True), index=True ) - auto_scheduled = sa.Column( - sa.Boolean, server_default="0", default=False, nullable=False - ) + auto_scheduled: Mapped[bool] = mapped_column(server_default="0", default=False) # TODO remove this foreign key for significant delete performance gains - state_id = sa.Column( - UUID(), - sa.ForeignKey( - "flow_run_state.id", - ondelete="SET NULL", - use_alter=True, - ), + state_id: Mapped[Optional[uuid.UUID]] = mapped_column( + sa.ForeignKey("flow_run_state.id", ondelete="SET NULL", use_alter=True), index=True, ) work_queue_id: Mapped[Optional[uuid.UUID]] = mapped_column( - UUID, - sa.ForeignKey("work_queue.id", ondelete="SET NULL"), - nullable=True, - index=True, + sa.ForeignKey("work_queue.id", ondelete="SET NULL"), index=True ) # -------------------------- relationships # current states are eagerly loaded unless otherwise specified - _state = sa.orm.relationship( - "FlowRunState", + _state: Mapped[Optional["FlowRunState"]] = relationship( lazy="selectin", foreign_keys=[state_id], primaryjoin="FlowRunState.id==FlowRun.state_id", ) @hybrid_property - def state(self): + def state(self) -> Optional[FlowRunState]: return self._state - @state.setter - def state(self, value): + @state.inplace.setter + def _set_state(self, value: Optional[FlowRunState]) -> None: # because this is a slightly non-standard SQLAlchemy relationship, we # prefer an explicit setter method to a setter property, because # user expectations about SQLAlchemy attribute assignment might not be @@ -623,7 +556,7 @@ def state(self, value): # still works because the ORM model's __init__ depends on it. return self.set_state(value) - def set_state(self, state): + def set_state(self, state: Optional[FlowRunState]) -> None: """ If a state is assigned to this run, populate its run id. 
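# Editorial aside between hunks: the ``@state.inplace.setter`` idiom in the hunk
# above is a SQLAlchemy 2.0 feature that registers a setter under a different
# method name without rebinding the hybrid, which keeps type checkers happy.
# A minimal, self-contained sketch of the idiom; the class, table, and column
# names below are invented for illustration and are not part of the Prefect
# models:

from sqlalchemy.ext.hybrid import hybrid_property
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column


class _Base(DeclarativeBase):
    pass


class ExampleRun(_Base):
    __tablename__ = "example_run"

    id: Mapped[int] = mapped_column(primary_key=True)
    # the mapped column lives under a private attribute; the hybrid fronts it
    _state_name: Mapped[str] = mapped_column("state_name", default="PENDING")

    @hybrid_property
    def state_name(self) -> str:
        return self._state_name

    # ``inplace`` attaches the setter without shadowing ``state_name``; extra
    # bookkeeping (like back-populating a run id, as set_state does above) can
    # live in the differently named method
    @state_name.inplace.setter
    def _set_state_name(self, value: str) -> None:
        self._state_name = value.upper()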
@@ -635,27 +568,23 @@ def set_state(self, state): state.flow_run_id = self.id self._state = state - flow = sa.orm.relationship("Flow", back_populates="flow_runs", lazy="raise") + flow: Mapped["Flow"] = relationship(back_populates="flow_runs", lazy="raise") - task_runs = sa.orm.relationship( - "TaskRun", + task_runs: Mapped[list["TaskRun"]] = relationship( back_populates="flow_run", lazy="raise", # foreign_keys=lambda: [flow_run_id], primaryjoin="TaskRun.flow_run_id==FlowRun.id", ) - parent_task_run = sa.orm.relationship( - "TaskRun", + parent_task_run: Mapped[Optional["TaskRun"]] = relationship( back_populates="subflow_run", lazy="raise", foreign_keys=[parent_task_run_id], ) - work_queue = sa.orm.relationship( - "WorkQueue", - lazy="selectin", - foreign_keys=[work_queue_id], + work_queue: Mapped[Optional["WorkQueue"]] = relationship( + lazy="selectin", foreign_keys=[work_queue_id] ) @declared_attr.directive @@ -719,81 +648,57 @@ def __table_args__(cls) -> Iterable[sa.Index]: ) +_TaskInput = Union[ + schemas.core.TaskRunResult, schemas.core.Parameter, schemas.core.Constant +] +_TaskInputs = dict[str, list[_TaskInput]] + + class TaskRun(Run): """SQLAlchemy model of a task run.""" - flow_run_id = sa.Column( - UUID(), - sa.ForeignKey("flow_run.id", ondelete="cascade"), - nullable=True, - index=True, + flow_run_id: Mapped[Optional[uuid.UUID]] = mapped_column( + sa.ForeignKey("flow_run.id", ondelete="cascade"), index=True ) - task_key = sa.Column(sa.String, nullable=False) - dynamic_key = sa.Column(sa.String, nullable=False) - cache_key = sa.Column(sa.String) - cache_expiration = sa.Column(Timestamp()) - task_version = sa.Column(sa.String) - flow_run_run_count = sa.Column( - sa.Integer, server_default="0", default=0, nullable=False - ) - empirical_policy = sa.Column( + task_key: Mapped[str] = mapped_column() + dynamic_key: Mapped[str] = mapped_column() + cache_key: Mapped[Optional[str]] + cache_expiration: Mapped[Optional[pendulum.DateTime]] + task_version: Mapped[Optional[str]] + flow_run_run_count: Mapped[int] = mapped_column(server_default="0", default=0) + empirical_policy: Mapped[schemas.core.TaskRunPolicy] = mapped_column( Pydantic(schemas.core.TaskRunPolicy), server_default="{}", default=schemas.core.TaskRunPolicy, - nullable=False, - ) - task_inputs = sa.Column( - Pydantic( - Dict[ - str, - List[ - Union[ - schemas.core.TaskRunResult, - schemas.core.Parameter, - schemas.core.Constant, - ] - ], - ] - ), - server_default="{}", - default=dict, - nullable=False, ) - tags: Mapped[List[str]] = mapped_column( - JSON, server_default="[]", default=list, nullable=False - ) - labels: Mapped[Optional[schemas.core.KeyValueLabels]] = mapped_column( - JSON, nullable=True + task_inputs: Mapped[_TaskInputs] = mapped_column( + Pydantic(_TaskInputs), server_default="{}", default=dict ) + tags: Mapped[list[str]] = mapped_column(JSON, server_default="[]", default=list) + labels: Mapped[Optional[schemas.core.KeyValueLabels]] = mapped_column(JSON) # TODO remove this foreign key for significant delete performance gains - state_id = sa.Column( - UUID(), - sa.ForeignKey( - "task_run_state.id", - ondelete="SET NULL", - use_alter=True, - ), + state_id: Mapped[Optional[uuid.UUID]] = mapped_column( + sa.ForeignKey("task_run_state.id", ondelete="SET NULL", use_alter=True), index=True, ) # -------------------------- relationships # current states are eagerly loaded unless otherwise specified - _state = sa.orm.relationship( - "TaskRunState", + _state: Mapped[Optional[TaskRunState]] = relationship( lazy="selectin", 
foreign_keys=[state_id], primaryjoin="TaskRunState.id==TaskRun.state_id", ) @hybrid_property - def state(self): + def state(self) -> Optional[TaskRunState]: return self._state - @state.setter - def state(self, value): + @state.inplace.setter + def _set_state(self, value: Optional[TaskRunState]) -> None: # because this is a slightly non-standard SQLAlchemy relationship, we # prefer an explicit setter method to a setter property, because # user expectations about SQLAlchemy attribute assignment might not be @@ -802,7 +707,7 @@ def state(self, value): # still works because the ORM model's __init__ depends on it. return self.set_state(value) - def set_state(self, state): + def set_state(self, state: Optional[TaskRunState]) -> None: """ If a state is assigned to this run, populate its run id. @@ -814,15 +719,13 @@ def set_state(self, state): state.task_run_id = self.id self._state = state - flow_run = sa.orm.relationship( - "FlowRun", + flow_run: Mapped[Optional["FlowRun"]] = relationship( back_populates="task_runs", lazy="raise", foreign_keys=[flow_run_id], ) - subflow_run = sa.orm.relationship( - "FlowRun", + subflow_run: Mapped["FlowRun"] = relationship( back_populates="parent_task_run", lazy="raise", # foreign_keys=["FlowRun.parent_task_run_id"], @@ -876,72 +779,58 @@ def __table_args__(cls) -> Iterable[sa.Index]: class DeploymentSchedule(Base): - deployment_id = sa.Column( - UUID(), - sa.ForeignKey("deployment.id", ondelete="CASCADE"), - nullable=False, - index=True, + deployment_id: Mapped[uuid.UUID] = mapped_column( + sa.ForeignKey("deployment.id", ondelete="CASCADE"), index=True ) - schedule = sa.Column(Pydantic(schemas.schedules.SCHEDULE_TYPES), nullable=False) - active = sa.Column(sa.Boolean, nullable=False, default=True) - max_scheduled_runs = sa.Column(sa.Integer, nullable=True) + schedule: Mapped[schemas.schedules.SCHEDULE_TYPES] = mapped_column( + Pydantic(schemas.schedules.SCHEDULE_TYPES) + ) + active: Mapped[bool] = mapped_column(default=True) + max_scheduled_runs: Mapped[Optional[int]] class Deployment(Base): """SQLAlchemy model of a deployment.""" - name = sa.Column(sa.String, nullable=False) - version = sa.Column(sa.String, nullable=True) - description = sa.Column(sa.Text(), nullable=True) - work_queue_name = sa.Column(sa.String, nullable=True, index=True) - infra_overrides = sa.Column(JSON, server_default="{}", default=dict, nullable=False) - path = sa.Column(sa.String, nullable=True) - entrypoint = sa.Column(sa.String, nullable=True) + name: Mapped[str] + version: Mapped[Optional[str]] + description: Mapped[Optional[str]] = mapped_column(sa.Text()) + work_queue_name: Mapped[Optional[str]] = mapped_column(index=True) + infra_overrides: Mapped[dict[str, Any]] = mapped_column( + JSON, server_default="{}", default=dict + ) + path: Mapped[Optional[str]] + entrypoint: Mapped[Optional[str]] - last_polled = sa.Column(Timestamp(), nullable=True) - status = sa.Column( + last_polled: Mapped[Optional[pendulum.DateTime]] + status: Mapped[DeploymentStatus] = mapped_column( sa.Enum(DeploymentStatus, name="deployment_status"), - nullable=False, default=DeploymentStatus.NOT_READY, server_default="NOT_READY", ) @declared_attr - def job_variables(self): + def job_variables(self) -> Mapped[dict[str, Any]]: return synonym("infra_overrides") flow_id: Mapped[uuid.UUID] = mapped_column( - UUID, - sa.ForeignKey("flow.id", ondelete="CASCADE"), - nullable=False, - index=True, + sa.ForeignKey("flow.id", ondelete="CASCADE"), index=True ) work_queue_id: Mapped[Optional[uuid.UUID]] = mapped_column( - 
UUID, - sa.ForeignKey("work_queue.id", ondelete="SET NULL"), - nullable=True, - index=True, - ) - paused = sa.Column( - sa.Boolean, nullable=False, server_default="0", default=False, index=True + sa.ForeignKey("work_queue.id", ondelete="SET NULL"), index=True ) + paused: Mapped[bool] = mapped_column(server_default="0", default=False, index=True) - schedules = sa.orm.relationship( - "DeploymentSchedule", - lazy="selectin", - order_by=sa.desc(sa.text("updated")), + schedules: Mapped[list["DeploymentSchedule"]] = relationship( + lazy="selectin", order_by=sa.desc(sa.text("updated")) ) # deprecated in favor of `concurrency_limit_id` FK - _concurrency_limit: Mapped[Optional[int]] = mapped_column( - sa.Integer, default=None, nullable=True, name="concurrency_limit" - ) + _concurrency_limit: Mapped[Optional[int]] = mapped_column(name="concurrency_limit") concurrency_limit_id: Mapped[Optional[uuid.UUID]] = mapped_column( - UUID, sa.ForeignKey("concurrency_limit_v2.id", ondelete="SET NULL"), - nullable=True, ) global_concurrency_limit: Mapped[ Optional["ConcurrencyLimitV2"] @@ -957,52 +846,45 @@ def job_variables(self): default=None, ) - tags: Mapped[List[str]] = mapped_column( - JSON, server_default="[]", default=list, nullable=False + tags: Mapped[list[str]] = mapped_column(JSON, server_default="[]", default=list) + labels: Mapped[Optional[schemas.core.KeyValueLabels]] = mapped_column(JSON) + parameters: Mapped[dict[str, Any]] = mapped_column( + JSON, server_default="{}", default=dict ) - labels: Mapped[Optional[schemas.core.KeyValueLabels]] = mapped_column( - JSON, nullable=True + pull_steps: Mapped[Optional[list[dict[str, Any]]]] = mapped_column( + JSON, default=list ) - parameters = sa.Column(JSON, server_default="{}", default=dict, nullable=False) - pull_steps = sa.Column(JSON, default=list, nullable=True) - parameter_openapi_schema = sa.Column(JSON, default=dict, nullable=True) - enforce_parameter_schema = sa.Column( - sa.Boolean, default=True, server_default="0", nullable=False + parameter_openapi_schema: Mapped[Optional[dict[str, Any]]] = mapped_column( + JSON, default=dict ) - created_by = sa.Column( - Pydantic(schemas.core.CreatedBy), - server_default=None, - default=None, - nullable=True, + enforce_parameter_schema: Mapped[bool] = mapped_column( + default=True, server_default="0" ) - updated_by = sa.Column( - Pydantic(schemas.core.UpdatedBy), - server_default=None, - default=None, - nullable=True, + created_by: Mapped[Optional[schemas.core.CreatedBy]] = mapped_column( + Pydantic(schemas.core.CreatedBy) + ) + updated_by: Mapped[Optional[schemas.core.UpdatedBy]] = mapped_column( + Pydantic(schemas.core.UpdatedBy) ) - infrastructure_document_id = sa.Column( - UUID, - sa.ForeignKey("block_document.id", ondelete="CASCADE"), - nullable=True, - index=False, + infrastructure_document_id: Mapped[Optional[uuid.UUID]] = mapped_column( + sa.ForeignKey("block_document.id", ondelete="CASCADE"), index=False ) - storage_document_id = sa.Column( - UUID, + storage_document_id: Mapped[Optional[uuid.UUID]] = mapped_column( sa.ForeignKey("block_document.id", ondelete="CASCADE"), - nullable=True, index=False, ) - flow = sa.orm.relationship("Flow", back_populates="deployments", lazy="raise") + flow: Mapped["Flow"] = relationship( + "Flow", back_populates="deployments", lazy="raise" + ) - work_queue = sa.orm.relationship( - "WorkQueue", lazy="selectin", foreign_keys=[work_queue_id] + work_queue: Mapped[Optional["WorkQueue"]] = relationship( + lazy="selectin", foreign_keys=[work_queue_id] ) - __table_args__ = ( + 
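# Worth spelling out for reviewers: throughout this file the migration drops
# explicit ``nullable=`` arguments because SQLAlchemy 2.0 derives NULL versus
# NOT NULL from the ``Mapped[...]`` annotation when ``mapped_column()`` is not
# told otherwise. A small standalone sketch of that rule; the table and column
# names are made up for the example:

from typing import Optional

from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column


class _NullabilityBase(DeclarativeBase):
    pass


class ExampleDoc(_NullabilityBase):
    __tablename__ = "example_doc"

    id: Mapped[int] = mapped_column(primary_key=True)
    name: Mapped[str]  # no Optional -> rendered NOT NULL
    description: Mapped[Optional[str]]  # Optional -> rendered nullable


# the derived nullability is visible on the generated Table metadata
assert ExampleDoc.__table__.c.name.nullable is False
assert ExampleDoc.__table__.c.description.nullable is True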
__table_args__: Any = ( sa.Index( "uq_deployment__flow_id_name", "flow_id", @@ -1024,16 +906,16 @@ class Log(Base): SQLAlchemy model of a logging statement. """ - name = sa.Column(sa.String, nullable=False) - level = sa.Column(sa.SmallInteger, nullable=False, index=True) - flow_run_id = sa.Column(UUID(), nullable=True, index=True) - task_run_id = sa.Column(UUID(), nullable=True, index=True) - message = sa.Column(sa.Text, nullable=False) + name: Mapped[str] + level: Mapped[int] = mapped_column(sa.SmallInteger, index=True) + flow_run_id: Mapped[Optional[uuid.UUID]] = mapped_column(index=True) + task_run_id: Mapped[Optional[uuid.UUID]] = mapped_column(index=True) + message: Mapped[str] = mapped_column(sa.Text) # The client-side timestamp of this logged statement. - timestamp = sa.Column(Timestamp(), nullable=False, index=True) + timestamp: Mapped[pendulum.DateTime] = mapped_column(index=True) - __table_args__ = ( + __table_args__: Any = ( sa.Index( "ix_log__flow_run_id_timestamp", "flow_run_id", @@ -1043,40 +925,38 @@ class Log(Base): class ConcurrencyLimit(Base): - tag = sa.Column(sa.String, nullable=False) - concurrency_limit = sa.Column(sa.Integer, nullable=False) - active_slots: Mapped[List[str]] = mapped_column( - JSON, server_default="[]", default=list, nullable=False + tag: Mapped[str] + concurrency_limit: Mapped[int] + active_slots: Mapped[list[str]] = mapped_column( + JSON, server_default="[]", default=list ) - __table_args__ = (sa.Index("uq_concurrency_limit__tag", "tag", unique=True),) + __table_args__: Any = (sa.Index("uq_concurrency_limit__tag", "tag", unique=True),) class ConcurrencyLimitV2(Base): - active = sa.Column(sa.Boolean, nullable=False, default=True) - name = sa.Column(sa.String, nullable=False) - limit = sa.Column(sa.Integer, nullable=False) - active_slots = sa.Column(sa.Integer, nullable=False, default=0) - denied_slots = sa.Column(sa.Integer, nullable=False, default=0) + active: Mapped[bool] = mapped_column(default=True) + name: Mapped[str] + limit: Mapped[int] + active_slots: Mapped[int] = mapped_column(default=0) + denied_slots: Mapped[int] = mapped_column(default=0) - slot_decay_per_second = sa.Column(sa.Float, default=0.0, nullable=False) - avg_slot_occupancy_seconds = sa.Column(sa.Float, default=2.0, nullable=False) + slot_decay_per_second: Mapped[float] = mapped_column(default=0.0) + avg_slot_occupancy_seconds: Mapped[float] = mapped_column(default=2.0) - __table_args__ = (sa.UniqueConstraint("name"),) + __table_args__: Any = (sa.UniqueConstraint("name"),) class BlockType(Base): - name = sa.Column(sa.String, nullable=False) - slug = sa.Column(sa.String, nullable=False) - logo_url = sa.Column(sa.String, nullable=True) - documentation_url = sa.Column(sa.String, nullable=True) - description = sa.Column(sa.String, nullable=True) - code_example = sa.Column(sa.String, nullable=True) - is_protected = sa.Column( - sa.Boolean, nullable=False, server_default="0", default=False - ) - - __table_args__ = ( + name: Mapped[str] + slug: Mapped[str] + logo_url: Mapped[Optional[str]] + documentation_url: Mapped[Optional[str]] + description: Mapped[Optional[str]] + code_example: Mapped[Optional[str]] + is_protected: Mapped[bool] = mapped_column(server_default="0", default=False) + + __table_args__: Any = ( sa.Index( "uq_block_type__slug", "slug", @@ -1089,25 +969,24 @@ class BlockType(Base): class BlockSchema(Base): - checksum = sa.Column(sa.String, nullable=False) - fields = sa.Column(JSON, server_default="{}", default=dict, nullable=False) - capabilities = sa.Column(JSON, 
server_default="[]", default=list, nullable=False) - version = sa.Column( - sa.String, + checksum: Mapped[str] + fields: Mapped[dict[str, Any]] = mapped_column( + JSON, server_default="{}", default=dict + ) + capabilities: Mapped[list[str]] = mapped_column( + JSON, server_default="[]", default=list + ) + version: Mapped[str] = mapped_column( server_default=schemas.core.DEFAULT_BLOCK_SCHEMA_VERSION, - nullable=False, ) - block_type_id: Mapped[UUID] = mapped_column( - UUID(), - sa.ForeignKey("block_type.id", ondelete="cascade"), - nullable=False, - index=True, + block_type_id: Mapped[uuid.UUID] = mapped_column( + sa.ForeignKey("block_type.id", ondelete="cascade"), index=True ) - block_type = sa.orm.relationship("BlockType", lazy="selectin") + block_type: Mapped["BlockType"] = relationship(lazy="selectin") - __table_args__ = ( + __table_args__: Any = ( sa.Index( "uq_block_schema__checksum_version", "checksum", @@ -1122,45 +1001,37 @@ class BlockSchema(Base): class BlockSchemaReference(Base): - name = sa.Column(sa.String, nullable=False) + name: Mapped[str] - parent_block_schema_id = sa.Column( - UUID(), - sa.ForeignKey("block_schema.id", ondelete="cascade"), - nullable=False, + parent_block_schema_id: Mapped[uuid.UUID] = mapped_column( + sa.ForeignKey("block_schema.id", ondelete="cascade") ) - reference_block_schema_id = sa.Column( - UUID(), - sa.ForeignKey("block_schema.id", ondelete="cascade"), - nullable=False, + reference_block_schema_id: Mapped[uuid.UUID] = mapped_column( + sa.ForeignKey("block_schema.id", ondelete="cascade") ) class BlockDocument(Base): - name = sa.Column(sa.String, nullable=False, index=True) - data = sa.Column(JSON, server_default="{}", default=dict, nullable=False) - is_anonymous = sa.Column(sa.Boolean, server_default="0", index=True, nullable=False) + name: Mapped[str] = mapped_column(index=True) + data: Mapped[Any] = mapped_column(JSON, server_default="{}", default=dict) + is_anonymous: Mapped[bool] = mapped_column(server_default="0", index=True) - block_type_name = sa.Column(sa.String, nullable=True) + block_type_name: Mapped[Optional[str]] - block_type_id = sa.Column( - UUID(), - sa.ForeignKey("block_type.id", ondelete="cascade"), - nullable=False, + block_type_id: Mapped[uuid.UUID] = mapped_column( + sa.ForeignKey("block_type.id", ondelete="cascade") ) - block_type = sa.orm.relationship("BlockType", lazy="selectin") + block_type: Mapped["BlockType"] = relationship(lazy="selectin") - block_schema_id = sa.Column( - UUID(), - sa.ForeignKey("block_schema.id", ondelete="cascade"), - nullable=False, + block_schema_id: Mapped[uuid.UUID] = mapped_column( + sa.ForeignKey("block_schema.id", ondelete="cascade") ) - block_schema = sa.orm.relationship("BlockSchema", lazy="selectin") + block_schema: Mapped["BlockSchema"] = relationship(lazy="selectin") - __table_args__ = ( + __table_args__: Any = ( sa.Index( "uq_block__type_id_name", "block_type_id", @@ -1173,7 +1044,7 @@ class BlockDocument(Base): ), ) - async def encrypt_data(self, session, data): + async def encrypt_data(self, session: AsyncSession, data: dict[str, Any]) -> None: """ Store encrypted data on the ORM model @@ -1181,7 +1052,7 @@ async def encrypt_data(self, session, data): """ self.data = await encrypt_fernet(session, data) - async def decrypt_data(self, session): + async def decrypt_data(self, session: AsyncSession) -> dict[str, Any]: """ Retrieve decrypted data from the ORM model. 
@@ -1191,83 +1062,61 @@ async def decrypt_data(self, session): class BlockDocumentReference(Base): - name: Mapped[str] = mapped_column(sa.String, nullable=False) + name: Mapped[str] - parent_block_document_id: Mapped[UUID] = mapped_column( - UUID(), + parent_block_document_id: Mapped[uuid.UUID] = mapped_column( sa.ForeignKey("block_document.id", ondelete="cascade"), - nullable=False, ) - reference_block_document_id: Mapped[UUID] = mapped_column( - UUID(), + reference_block_document_id: Mapped[uuid.UUID] = mapped_column( sa.ForeignKey("block_document.id", ondelete="cascade"), - nullable=False, ) class Configuration(Base): - key = sa.Column(sa.String, nullable=False, index=True) - value: Mapped[Dict[str, Any]] = mapped_column(JSON, nullable=False) + key: Mapped[str] = mapped_column(index=True) + value: Mapped[Dict[str, Any]] = mapped_column(JSON) - __table_args__ = (sa.UniqueConstraint("key"),) + __table_args__: Any = (sa.UniqueConstraint("key"),) class SavedSearch(Base): """SQLAlchemy model of a saved search.""" - name = sa.Column(sa.String, nullable=False) - filters = sa.Column( - JSON, - server_default="[]", - default=list, - nullable=False, + name: Mapped[str] + filters: Mapped[list[dict[str, Any]]] = mapped_column( + JSON, server_default="[]", default=list ) - __table_args__ = (sa.UniqueConstraint("name"),) + __table_args__: Any = (sa.UniqueConstraint("name"),) class WorkQueue(Base): """SQLAlchemy model of a work queue""" - name = sa.Column(sa.String, nullable=False) + name: Mapped[str] - filter = sa.Column( - Pydantic(schemas.core.QueueFilter), - server_default=None, - default=None, - nullable=True, + filter: Mapped[Optional[schemas.core.QueueFilter]] = mapped_column( + Pydantic(schemas.core.QueueFilter) ) - description = sa.Column(sa.String, nullable=False, default="", server_default="") - is_paused = sa.Column(sa.Boolean, nullable=False, server_default="0", default=False) - concurrency_limit: Mapped[int] = mapped_column( - sa.Integer, - nullable=True, - ) - priority: Mapped[int] = mapped_column(sa.Integer, nullable=False) + description: Mapped[str] = mapped_column(default="", server_default="") + is_paused: Mapped[bool] = mapped_column(server_default="0", default=False) + concurrency_limit: Mapped[Optional[int]] + priority: Mapped[int] - last_polled: Mapped[Optional[pendulum.DateTime]] = mapped_column( - Timestamp(), - nullable=True, - ) - status = sa.Column( + last_polled: Mapped[Optional[pendulum.DateTime]] + status: Mapped[WorkQueueStatus] = mapped_column( sa.Enum(WorkQueueStatus, name="work_queue_status"), - nullable=False, default=WorkQueueStatus.NOT_READY, - server_default=WorkQueueStatus.NOT_READY.value, + server_default=WorkQueueStatus.NOT_READY, ) work_pool_id: Mapped[uuid.UUID] = mapped_column( - UUID, - sa.ForeignKey("work_pool.id", ondelete="cascade"), - nullable=False, - index=True, + sa.ForeignKey("work_pool.id", ondelete="cascade"), index=True ) - work_pool = sa.orm.relationship( - "WorkPool", - lazy="selectin", - foreign_keys=[work_pool_id], + work_pool: Mapped["WorkPool"] = relationship( + lazy="selectin", foreign_keys=[work_pool_id] ) __table_args__ = ( @@ -1282,68 +1131,51 @@ class WorkQueue(Base): class WorkPool(Base): """SQLAlchemy model of an worker""" - name = sa.Column(sa.String, nullable=False) - description = sa.Column(sa.String) - type: Mapped[str] = mapped_column(sa.String, index=True) - base_job_template = sa.Column(JSON, nullable=False, server_default="{}", default={}) - is_paused: Mapped[bool] = mapped_column( - sa.Boolean, nullable=False, 
server_default="0", default=False + name: Mapped[str] + description: Mapped[Optional[str]] + type: Mapped[str] = mapped_column(index=True) + base_job_template: Mapped[dict[str, Any]] = mapped_column( + JSON, server_default="{}", default={} ) - default_queue_id: Mapped[UUID] = mapped_column( + is_paused: Mapped[bool] = mapped_column(server_default="0", default=False) + default_queue_id: Mapped[Optional[uuid.UUID]] = mapped_column( UUID, sa.ForeignKey("work_queue.id", ondelete="RESTRICT", use_alter=True), nullable=True, ) - concurrency_limit = sa.Column( - sa.Integer, - nullable=True, - ) + concurrency_limit: Mapped[Optional[int]] status: Mapped[WorkPoolStatus] = mapped_column( sa.Enum(WorkPoolStatus, name="work_pool_status"), - nullable=False, default=WorkPoolStatus.NOT_READY, - server_default=WorkPoolStatus.NOT_READY.value, - ) - last_transitioned_status_at: Mapped[Optional[pendulum.DateTime]] = mapped_column( - Timestamp(), nullable=True - ) - last_status_event_id: Mapped[Optional[uuid.UUID]] = mapped_column( - UUID, nullable=True + server_default=WorkPoolStatus.NOT_READY, ) + last_transitioned_status_at: Mapped[Optional[pendulum.DateTime]] + last_status_event_id: Mapped[Optional[uuid.UUID]] - __table_args__ = (sa.UniqueConstraint("name"),) + __table_args__: Any = (sa.UniqueConstraint("name"),) class Worker(Base): """SQLAlchemy model of an worker""" - @declared_attr - def work_pool_id(cls): - return sa.Column( - UUID, - sa.ForeignKey("work_pool.id", ondelete="cascade"), - nullable=False, - index=True, - ) + work_pool_id: Mapped[uuid.UUID] = mapped_column( + sa.ForeignKey("work_pool.id", ondelete="cascade"), index=True + ) - name = sa.Column(sa.String, nullable=False) - last_heartbeat_time = sa.Column( - Timestamp(), - nullable=False, - server_default=now(), - default=lambda: pendulum.now("UTC"), + name: Mapped[str] + last_heartbeat_time: Mapped[pendulum.DateTime] = mapped_column( + server_default=now(), default=lambda: pendulum.now("UTC") ) - heartbeat_interval_seconds = sa.Column(sa.Integer, nullable=True) + heartbeat_interval_seconds: Mapped[Optional[int]] - status = sa.Column( + status: Mapped[WorkerStatus] = mapped_column( sa.Enum(WorkerStatus, name="worker_status"), - nullable=False, default=WorkerStatus.OFFLINE, - server_default=WorkerStatus.OFFLINE.value, + server_default=WorkerStatus.OFFLINE, ) - __table_args__ = ( + __table_args__: Any = ( sa.UniqueConstraint("work_pool_id", "name"), sa.Index( "ix_worker__work_pool_id_last_heartbeat_time", @@ -1356,43 +1188,33 @@ def work_pool_id(cls): class Agent(Base): """SQLAlchemy model of an agent""" - name = sa.Column(sa.String, nullable=False) + name: Mapped[str] - work_queue_id = sa.Column( - UUID, - sa.ForeignKey("work_queue.id"), - nullable=False, - index=True, + work_queue_id: Mapped[uuid.UUID] = mapped_column( + sa.ForeignKey("work_queue.id"), index=True ) - last_activity_time = sa.Column( - Timestamp(), - nullable=False, - server_default=now(), - default=lambda: pendulum.now("UTC"), + last_activity_time: Mapped[pendulum.DateTime] = mapped_column( + server_default=now(), default=lambda: pendulum.now("UTC") ) - __table_args__ = (sa.UniqueConstraint("name"),) + __table_args__: Any = (sa.UniqueConstraint("name"),) class FlowRunNotificationPolicy(Base): - is_active = sa.Column(sa.Boolean, server_default="1", default=True, nullable=False) - state_names = sa.Column(JSON, server_default="[]", default=[], nullable=False) - tags: Mapped[List[str]] = mapped_column( - JSON, server_default="[]", default=[], nullable=False + is_active: 
Mapped[bool] = mapped_column(server_default="1", default=True) + state_names: Mapped[list[str]] = mapped_column( + JSON, server_default="[]", default=[] ) - message_template = sa.Column(sa.String, nullable=True) + tags: Mapped[list[str]] = mapped_column(JSON, server_default="[]", default=[]) + message_template: Mapped[Optional[str]] - block_document_id = sa.Column( - UUID(), - sa.ForeignKey("block_document.id", ondelete="cascade"), - nullable=False, + block_document_id: Mapped[uuid.UUID] = mapped_column( + sa.ForeignKey("block_document.id", ondelete="cascade") ) - block_document = sa.orm.relationship( - "BlockDocument", - lazy="selectin", - foreign_keys=[block_document_id], + block_document: Mapped["BlockDocument"] = relationship( + lazy="selectin", foreign_keys=[block_document_id] ) @@ -1400,64 +1222,60 @@ class FlowRunNotificationQueue(Base): # these are both foreign keys but there is no need to enforce that constraint # as this is just a queue for service workers; if the keys don't match at the # time work is pulled, the work can be discarded - flow_run_notification_policy_id = sa.Column(UUID, nullable=False) - flow_run_state_id = sa.Column(UUID, nullable=False) + flow_run_notification_policy_id: Mapped[uuid.UUID] + flow_run_state_id: Mapped[uuid.UUID] class Variable(Base): - name = sa.Column(sa.String, nullable=False) - value: Optional[Any] = sa.Column(JSON) - tags = sa.Column(JSON, server_default="[]", default=list, nullable=False) + name: Mapped[str] + value: Mapped[Optional[Any]] = mapped_column(JSON) + tags: Mapped[list[str]] = mapped_column(JSON, server_default="[]", default=list) - __table_args__ = (sa.UniqueConstraint("name"),) + __table_args__: Any = (sa.UniqueConstraint("name"),) class FlowRunInput(Base): - flow_run_id = sa.Column( - UUID(), sa.ForeignKey("flow_run.id", ondelete="cascade"), nullable=False + flow_run_id: Mapped[uuid.UUID] = mapped_column( + sa.ForeignKey("flow_run.id", ondelete="cascade") ) - key = sa.Column(sa.String, nullable=False) - value = sa.Column(sa.Text(), nullable=False) - sender = sa.Column(sa.String, nullable=True) + key: Mapped[str] + value: Mapped[str] = mapped_column(sa.Text()) + sender: Mapped[Optional[str]] - __table_args__ = (sa.UniqueConstraint("flow_run_id", "key"),) + __table_args__: Any = (sa.UniqueConstraint("flow_run_id", "key"),) class CsrfToken(Base): - token = sa.Column(sa.String, nullable=False) - client = sa.Column(sa.String, nullable=False, unique=True) - expiration = sa.Column(Timestamp(), nullable=False) + token: Mapped[str] + client: Mapped[str] = mapped_column(unique=True) + expiration: Mapped[pendulum.DateTime] class Automation(Base): - name = sa.Column(sa.String, nullable=False) - description = sa.Column(sa.String, nullable=False, default="") + name: Mapped[str] + description: Mapped[str] = mapped_column(default="") - enabled = sa.Column(sa.Boolean, nullable=False, server_default="1", default=True) + enabled: Mapped[bool] = mapped_column(server_default="1", default=True) - trigger = sa.Column(Pydantic(ServerTriggerTypes), nullable=False) + trigger: Mapped[ServerTriggerTypes] = mapped_column(Pydantic(ServerTriggerTypes)) - actions = sa.Column(Pydantic(List[ServerActionTypes]), nullable=False) - actions_on_trigger = sa.Column( - Pydantic(List[ServerActionTypes]), - server_default="[]", - default=list, - nullable=False, + actions: Mapped[ServerActionTypes] = mapped_column( + Pydantic(list[ServerActionTypes]) + ) + actions_on_trigger: Mapped[list[ServerActionTypes]] = mapped_column( + Pydantic(list[ServerActionTypes]), 
server_default="[]", default=list ) - actions_on_resolve = sa.Column( - Pydantic(List[ServerActionTypes]), - server_default="[]", - default=list, - nullable=False, + actions_on_resolve: Mapped[list[ServerActionTypes]] = mapped_column( + Pydantic(list[ServerActionTypes]), server_default="[]", default=list ) - related_resources = sa.orm.relationship( + related_resources: Mapped[list["AutomationRelatedResource"]] = relationship( "AutomationRelatedResource", back_populates="automation", lazy="raise" ) @classmethod - def sort_expression(cls, value: AutomationSort) -> ColumnElement: + def sort_expression(cls, value: AutomationSort) -> sa.ColumnExpressionArgument[Any]: """Return an expression used to sort Automations""" sort_mapping = { AutomationSort.CREATED_DESC: cls.created.desc(), @@ -1469,7 +1287,7 @@ def sort_expression(cls, value: AutomationSort) -> ColumnElement: class AutomationBucket(Base): - __table_args__ = ( + __table_args__: Any = ( sa.Index( "uq_automation_bucket__automation_id__trigger_id__bucketing_key", "automation_id", @@ -1484,28 +1302,30 @@ class AutomationBucket(Base): ), ) - automation_id = sa.Column( - UUID(), sa.ForeignKey("automation.id", ondelete="CASCADE"), nullable=False + automation_id: Mapped[uuid.UUID] = mapped_column( + sa.ForeignKey("automation.id", ondelete="CASCADE") ) - trigger_id = sa.Column(UUID, nullable=False) + trigger_id: Mapped[uuid.UUID] - bucketing_key = sa.Column(JSON, server_default="[]", default=list, nullable=False) + bucketing_key: Mapped[list[str]] = mapped_column( + JSON, server_default="[]", default=list + ) - last_event = sa.Column(Pydantic(ReceivedEvent), nullable=True) + last_event: Mapped[Optional[ReceivedEvent]] = mapped_column(Pydantic(ReceivedEvent)) - start = sa.Column(Timestamp(), nullable=False) - end = sa.Column(Timestamp(), nullable=False) + start: Mapped[pendulum.DateTime] + end: Mapped[pendulum.DateTime] - count = sa.Column(sa.Integer, nullable=False) + count: Mapped[int] - last_operation = sa.Column(sa.String, nullable=True) + last_operation: Mapped[Optional[str]] - triggered_at = sa.Column(Timestamp(), nullable=True) + triggered_at: Mapped[Optional[pendulum.DateTime]] class AutomationRelatedResource(Base): - __table_args__ = ( + __table_args__: Any = ( sa.Index( "uq_automation_related_resource__automation_id__resource_id", "automation_id", @@ -1514,22 +1334,22 @@ class AutomationRelatedResource(Base): ), ) - automation_id = sa.Column( - UUID(), sa.ForeignKey("automation.id", ondelete="CASCADE"), nullable=False + automation_id: Mapped[uuid.UUID] = mapped_column( + sa.ForeignKey("automation.id", ondelete="CASCADE") ) - resource_id = sa.Column(sa.String, index=True) - automation_owned_by_resource = sa.Column( - sa.Boolean, nullable=False, default=False, server_default="0" + resource_id: Mapped[Optional[str]] = mapped_column(index=True) + automation_owned_by_resource: Mapped[bool] = mapped_column( + default=False, server_default="0" ) - automation = sa.orm.relationship( + automation: Mapped["Automation"] = relationship( "Automation", back_populates="related_resources", lazy="raise" ) class CompositeTriggerChildFiring(Base): - __table_args__ = ( + __table_args__: Any = ( sa.Index( "uq_composite_trigger_child_firing__a_id__pt_id__ct__id", "automation_id", @@ -1539,20 +1359,20 @@ class CompositeTriggerChildFiring(Base): ), ) - automation_id = sa.Column( - UUID(), sa.ForeignKey("automation.id", ondelete="CASCADE"), nullable=False + automation_id: Mapped[uuid.UUID] = mapped_column( + sa.ForeignKey("automation.id", ondelete="CASCADE") ) 
- parent_trigger_id = sa.Column(UUID(), nullable=False) + parent_trigger_id: Mapped[uuid.UUID] - child_trigger_id = sa.Column(UUID(), nullable=False) - child_firing_id = sa.Column(UUID(), nullable=False) - child_fired_at = sa.Column(Timestamp()) - child_firing = sa.Column(Pydantic(Firing), nullable=False) + child_trigger_id: Mapped[uuid.UUID] + child_firing_id: Mapped[uuid.UUID] + child_fired_at: Mapped[Optional[pendulum.DateTime]] + child_firing: Mapped[Firing] = mapped_column(Pydantic(Firing)) class AutomationEventFollower(Base): - __table_args__ = ( + __table_args__: Any = ( sa.Index( "uq_follower_for_scope", "scope", @@ -1560,19 +1380,19 @@ class AutomationEventFollower(Base): unique=True, ), ) - scope = sa.Column(sa.String, nullable=False, default="", index=True) - leader_event_id = sa.Column(UUID(), nullable=False, index=True) - follower_event_id = sa.Column(UUID(), nullable=False) - received = sa.Column(Timestamp(), nullable=False, index=True) - follower = sa.Column(Pydantic(ReceivedEvent), nullable=False) + scope: Mapped[str] = mapped_column(default="", index=True) + leader_event_id: Mapped[uuid.UUID] = mapped_column(index=True) + follower_event_id: Mapped[uuid.UUID] + received: Mapped[pendulum.DateTime] = mapped_column(index=True) + follower: Mapped[ReceivedEvent] = mapped_column(Pydantic(ReceivedEvent)) class Event(Base): - @declared_attr - def __tablename__(cls): + @declared_attr.directive + def __tablename__(cls) -> str: return "events" - __table_args__ = ( + __table_args__: Any = ( sa.Index("ix_events__related_resource_ids", "related_resource_ids"), sa.Index("ix_events__occurred", "occurred"), sa.Index("ix_events__event__id", "event", "id"), @@ -1587,26 +1407,28 @@ def __tablename__(cls): sa.Index("ix_events__event_related_occurred", "event", "related", "occurred"), ) - occurred = sa.Column(Timestamp(), nullable=False) - event = sa.Column(sa.Text(), nullable=False) - resource_id = sa.Column(sa.Text(), nullable=False) - resource = sa.Column(JSON(), nullable=False) - related_resource_ids = sa.Column( - JSON(), server_default="[]", default=list, nullable=False + occurred: Mapped[pendulum.DateTime] + event: Mapped[str] = mapped_column(sa.Text()) + resource_id: Mapped[str] = mapped_column(sa.Text()) + resource: Mapped[dict[str, Any]] = mapped_column(JSON()) + related_resource_ids: Mapped[list[str]] = mapped_column( + JSON(), server_default="[]", default=list ) - related = sa.Column(JSON(), server_default="[]", default=list, nullable=False) - payload = sa.Column(JSON(), nullable=False) - received = sa.Column(Timestamp(), nullable=False) - recorded = sa.Column(Timestamp(), nullable=False) - follows = sa.Column(UUID(), nullable=True) + related: Mapped[list[dict[str, Any]]] = mapped_column( + JSON(), server_default="[]", default=list + ) + payload: Mapped[dict[str, Any]] = mapped_column(JSON()) + received: Mapped[pendulum.DateTime] + recorded: Mapped[pendulum.DateTime] + follows: Mapped[Optional[uuid.UUID]] class EventResource(Base): - @declared_attr - def __tablename__(cls): + @declared_attr.directive + def __tablename__(cls) -> str: return "event_resources" - __table_args__ = ( + __table_args__: Any = ( sa.Index( "ix_event_resources__resource_id__occurred", "resource_id", @@ -1614,11 +1436,11 @@ def __tablename__(cls): ), ) - occurred = sa.Column("occurred", Timestamp(), nullable=False) - resource_id = sa.Column("resource_id", sa.Text(), nullable=False) - resource_role = sa.Column("resource_role", sa.Text(), nullable=False) - resource = sa.Column("resource", sa.JSON(), 
nullable=False) - event_id = sa.Column("event_id", UUID(), nullable=False) + occurred: Mapped[pendulum.DateTime] + resource_id: Mapped[str] = mapped_column(sa.Text()) + resource_role: Mapped[str] = mapped_column(sa.Text()) + resource: Mapped[dict[str, Any]] = mapped_column(sa.JSON()) + event_id: Mapped[uuid.UUID] # These are temporary until we've migrated all the references to the new, @@ -1663,6 +1485,9 @@ def __tablename__(cls): ORMEventResource = EventResource +_UpsertColumns = Iterable[Union[str, "sa.Column[Any]", roles.DDLConstraintColumnRole]] + + class BaseORMConfiguration(ABC): """ Abstract base class used to inject database-specific ORM configuration into Prefect. @@ -1671,7 +1496,7 @@ class BaseORMConfiguration(ABC): Use with caution. """ - def _unique_key(self) -> Tuple[Hashable, ...]: + def _unique_key(self) -> tuple[Hashable, ...]: """ Returns a key used to determine whether to instantiate a new DB interface. """ @@ -1679,52 +1504,52 @@ def _unique_key(self) -> Tuple[Hashable, ...]: @property @abstractmethod - def versions_dir(self): + def versions_dir(self) -> Path: """Directory containing migrations""" ... @property - def deployment_unique_upsert_columns(self): + def deployment_unique_upsert_columns(self) -> _UpsertColumns: """Unique columns for upserting a Deployment""" return [Deployment.flow_id, Deployment.name] @property - def concurrency_limit_unique_upsert_columns(self): + def concurrency_limit_unique_upsert_columns(self) -> _UpsertColumns: """Unique columns for upserting a ConcurrencyLimit""" return [ConcurrencyLimit.tag] @property - def flow_run_unique_upsert_columns(self): + def flow_run_unique_upsert_columns(self) -> _UpsertColumns: """Unique columns for upserting a FlowRun""" return [FlowRun.flow_id, FlowRun.idempotency_key] @property - def block_type_unique_upsert_columns(self): + def block_type_unique_upsert_columns(self) -> _UpsertColumns: """Unique columns for upserting a BlockType""" return [BlockType.slug] @property - def artifact_collection_unique_upsert_columns(self): + def artifact_collection_unique_upsert_columns(self) -> _UpsertColumns: """Unique columns for upserting an ArtifactCollection""" return [ArtifactCollection.key] @property - def block_schema_unique_upsert_columns(self): + def block_schema_unique_upsert_columns(self) -> _UpsertColumns: """Unique columns for upserting a BlockSchema""" return [BlockSchema.checksum, BlockSchema.version] @property - def flow_unique_upsert_columns(self): + def flow_unique_upsert_columns(self) -> _UpsertColumns: """Unique columns for upserting a Flow""" return [Flow.name] @property - def saved_search_unique_upsert_columns(self): + def saved_search_unique_upsert_columns(self) -> _UpsertColumns: """Unique columns for upserting a SavedSearch""" return [SavedSearch.name] @property - def task_run_unique_upsert_columns(self): + def task_run_unique_upsert_columns(self) -> _UpsertColumns: """Unique columns for upserting a TaskRun""" return [ TaskRun.flow_run_id, @@ -1733,7 +1558,7 @@ def task_run_unique_upsert_columns(self): ] @property - def block_document_unique_upsert_columns(self): + def block_document_unique_upsert_columns(self) -> _UpsertColumns: """Unique columns for upserting a BlockDocument""" return [BlockDocument.block_type_id, BlockDocument.name] @@ -1744,6 +1569,8 @@ class AsyncPostgresORMConfiguration(BaseORMConfiguration): @property def versions_dir(self) -> Path: """Directory containing migrations""" + import prefect.server.database + return ( Path(prefect.server.database.__file__).parent / "_migrations" @@ 
-1758,6 +1585,8 @@ class AioSqliteORMConfiguration(BaseORMConfiguration): @property def versions_dir(self) -> Path: """Directory containing migrations""" + import prefect.server.database + return ( Path(prefect.server.database.__file__).parent / "_migrations" From 53224d086c183207223e08a219d39aa9b22c91d6 Mon Sep 17 00:00:00 2001 From: Devin Villarosa <102188207+devinvillarosa@users.noreply.github.com> Date: Thu, 5 Dec 2024 14:28:11 -0800 Subject: [PATCH 17/92] [UI v2] feat: Adding mutations for global concurrency limit (#16224) --- .../hooks/global-concurrency-limits.test.tsx | 212 +++++++++++++++--- ui-v2/src/hooks/global-concurrency-limits.ts | 172 +++++++++++--- ui-v2/tests/mocks/handlers.ts | 22 ++ 3 files changed, 342 insertions(+), 64 deletions(-) diff --git a/ui-v2/src/hooks/global-concurrency-limits.test.tsx b/ui-v2/src/hooks/global-concurrency-limits.test.tsx index a9675017440c..cd94a8f47ab0 100644 --- a/ui-v2/src/hooks/global-concurrency-limits.test.tsx +++ b/ui-v2/src/hooks/global-concurrency-limits.test.tsx @@ -1,12 +1,15 @@ import { QueryClient, QueryClientProvider } from "@tanstack/react-query"; -import { renderHook, waitFor } from "@testing-library/react"; +import { act, renderHook, waitFor } from "@testing-library/react"; import { http, HttpResponse } from "msw"; import { describe, expect, it } from "vitest"; import { type GlobalConcurrencyLimit, - useGetGlobalConcurrencyLimit, + queryKeyFactory, + useCreateGlobalConcurrencyLimit, + useDeleteGlobalConcurrencyLimit, useListGlobalConcurrencyLimits, + useUpdateGlobalConcurrencyLimit, } from "./global-concurrency-limits"; import { server } from "../../tests/mocks/node"; @@ -25,17 +28,6 @@ describe("global concurrency limits hooks", () => { }, ]; - const seedGlobalConcurrencyLimitDetails = () => ({ - id: "0", - created: "2021-01-01T00:00:00Z", - updated: "2021-01-01T00:00:00Z", - active: false, - name: "global concurrency limit 0", - limit: 0, - active_slots: 0, - slot_decay_per_second: 0, - }); - const mockFetchGlobalConcurrencyLimitsAPI = ( globalConcurrencyLimits: Array, ) => { @@ -49,19 +41,6 @@ describe("global concurrency limits hooks", () => { ); }; - const mockFetchGlobalConcurrencyLimitDetailsAPI = ( - globalConcurrencyLimit: GlobalConcurrencyLimit, - ) => { - server.use( - http.get( - "http://localhost:4200/api/v2/concurrency_limits/:id_or_name", - () => { - return HttpResponse.json(globalConcurrencyLimit); - }, - ), - ); - }; - const createQueryWrapper = ({ queryClient = new QueryClient() }) => { const QueryWrapper = ({ children }: { children: React.ReactNode }) => ( {children} @@ -95,21 +74,182 @@ describe("global concurrency limits hooks", () => { /** * Data Management: - * - Asserts global concurrency limit details data is fetched based on the APIs invoked for the hook + * - Asserts global concurrency limit calls delete API and refetches updated list */ - it("is stores details data into the appropriate details query when using useQuery()", async () => { - // ------------ Mock API requests when cache is empty - const mockDetails = seedGlobalConcurrencyLimitDetails(); - mockFetchGlobalConcurrencyLimitDetailsAPI(mockDetails); + it("useDeleteGlobalConcurrencyLimit() invalidates cache and fetches updated value", async () => { + const ID_TO_DELETE = "0"; + const queryClient = new QueryClient(); + + // ------------ Mock API requests after queries are invalidated + const mockData = seedGlobalConcurrencyLimits().filter( + (limit) => limit.id !== ID_TO_DELETE, + ); + mockFetchGlobalConcurrencyLimitsAPI(mockData); + + // 
------------ Initialize cache + queryClient.setQueryData( + queryKeyFactory.list(filter), + seedGlobalConcurrencyLimits(), + ); // ------------ Initialize hooks to test - const { result } = renderHook( - () => useGetGlobalConcurrencyLimit(mockDetails.id), - { wrapper: createQueryWrapper({}) }, + const { result: useListGlobalConcurrencyLimitsResult } = renderHook( + () => useListGlobalConcurrencyLimits(filter), + { wrapper: createQueryWrapper({ queryClient }) }, + ); + + const { result: useDeleteGlobalConcurrencyLimitResult } = renderHook( + useDeleteGlobalConcurrencyLimit, + { wrapper: createQueryWrapper({ queryClient }) }, + ); + + // ------------ Invoke mutation + act(() => + useDeleteGlobalConcurrencyLimitResult.current.deleteGlobalConcurrencyLimit( + ID_TO_DELETE, + ), ); // ------------ Assert - await waitFor(() => expect(result.current.isSuccess).toBe(true)); - expect(result.current.data).toEqual(mockDetails); + await waitFor(() => + expect(useDeleteGlobalConcurrencyLimitResult.current.isSuccess).toBe( + true, + ), + ); + expect(useListGlobalConcurrencyLimitsResult.current.data).toHaveLength(0); + }); + + /** + * Data Management: + * - Asserts create mutation API is called. + * - Upon create mutation API being called, cache is invalidated and asserts cache invalidation APIS are called + */ + it("useCreateGlobalConcurrencyLimit() invalidates cache and fetches updated value", async () => { + const queryClient = new QueryClient(); + const MOCK_NEW_LIMIT_ID = "1"; + const MOCK_NEW_LIMIT = { + active: true, + active_slots: 0, + denied_slots: 0, + limit: 0, + name: "global concurrency limit 1", + slot_decay_per_second: 0, + }; + + // ------------ Mock API requests after queries are invalidated + const NEW_LIMIT_DATA = { + ...MOCK_NEW_LIMIT, + id: MOCK_NEW_LIMIT_ID, + created: "2021-01-01T00:00:00Z", + updated: "2021-01-01T00:00:00Z", + active_slots: 0, + slot_decay_per_second: 0, + }; + + const mockData = [...seedGlobalConcurrencyLimits(), NEW_LIMIT_DATA]; + mockFetchGlobalConcurrencyLimitsAPI(mockData); + + // ------------ Initialize cache + queryClient.setQueryData( + queryKeyFactory.list(filter), + seedGlobalConcurrencyLimits(), + ); + + // ------------ Initialize hooks to test + const { result: useListGlobalConcurrencyLimitsResult } = renderHook( + () => useListGlobalConcurrencyLimits(filter), + { wrapper: createQueryWrapper({ queryClient }) }, + ); + const { result: useCreateGlobalConcurrencyLimitResult } = renderHook( + useCreateGlobalConcurrencyLimit, + { wrapper: createQueryWrapper({ queryClient }) }, + ); + + // ------------ Invoke mutation + act(() => + useCreateGlobalConcurrencyLimitResult.current.createGlobalConcurrencyLimit( + MOCK_NEW_LIMIT, + ), + ); + + // ------------ Assert + await waitFor(() => + expect(useCreateGlobalConcurrencyLimitResult.current.isSuccess).toBe( + true, + ), + ); + expect(useListGlobalConcurrencyLimitsResult.current.data).toHaveLength(2); + const newLimit = useListGlobalConcurrencyLimitsResult.current.data?.find( + (limit) => limit.id === MOCK_NEW_LIMIT_ID, + ); + expect(newLimit).toMatchObject(NEW_LIMIT_DATA); + }); + + /** + * Data Management: + * - Asserts update mutation API is called. 
+	 * - Upon update mutation API being called, the list cache is invalidated and refetched with the updated value
+	 */
+	it("useUpdateGlobalConcurrencyLimit() invalidates cache and fetches updated value", async () => {
+		const queryClient = new QueryClient();
+		const MOCK_UPDATE_LIMIT_ID = "0";
+		const UPDATED_LIMIT_BODY = {
+			active: true,
+			active_slots: 0,
+			denied_slots: 0,
+			limit: 0,
+			name: "global concurrency limit updated",
+			slot_decay_per_second: 0,
+		};
+		const UPDATED_LIMIT = {
+			...UPDATED_LIMIT_BODY,
+			id: MOCK_UPDATE_LIMIT_ID,
+		};
+
+		// ------------ Mock API requests after queries are invalidated
+		const mockData = seedGlobalConcurrencyLimits().map((limit) =>
+			limit.id === MOCK_UPDATE_LIMIT_ID ? UPDATED_LIMIT : limit,
+		);
+		mockFetchGlobalConcurrencyLimitsAPI(mockData);
+
+		// ------------ Initialize cache
+		queryClient.setQueryData(
+			queryKeyFactory.list(filter),
+			seedGlobalConcurrencyLimits(),
+		);
+
+		// ------------ Initialize hooks to test
+		const { result: useListGlobalConcurrencyLimitsResult } = renderHook(
+			() => useListGlobalConcurrencyLimits(filter),
+			{ wrapper: createQueryWrapper({ queryClient }) },
+		);
+
+		const { result: useUpdateGlobalConcurrencyLimitResult } = renderHook(
+			useUpdateGlobalConcurrencyLimit,
+			{ wrapper: createQueryWrapper({ queryClient }) },
+		);
+
+		// ------------ Invoke mutation
+		act(() =>
+			useUpdateGlobalConcurrencyLimitResult.current.updateGlobalConcurrencyLimit(
+				{
+					id_or_name: MOCK_UPDATE_LIMIT_ID,
+					...UPDATED_LIMIT_BODY,
+				},
+			),
+		);
+
+		// ------------ Assert
+		await waitFor(() =>
+			expect(useUpdateGlobalConcurrencyLimitResult.current.isSuccess).toBe(
+				true,
+			),
+		);
+
+		const limit = useListGlobalConcurrencyLimitsResult.current.data?.find(
+			(limit) => limit.id === MOCK_UPDATE_LIMIT_ID,
+		);
+		expect(limit).toMatchObject(UPDATED_LIMIT);
+	});
+});
diff --git a/ui-v2/src/hooks/global-concurrency-limits.ts b/ui-v2/src/hooks/global-concurrency-limits.ts
index 2542f8c0bab7..1fd78bb4fd23 100644
--- a/ui-v2/src/hooks/global-concurrency-limits.ts
+++ b/ui-v2/src/hooks/global-concurrency-limits.ts
@@ -1,6 +1,11 @@
 import type { components } from "@/api/prefect";
 import { getQueryService } from "@/api/service";
-import { queryOptions, useQuery } from "@tanstack/react-query";
+import {
+	queryOptions,
+	useMutation,
+	useQuery,
+	useQueryClient,
+} from "@tanstack/react-query";
 
 export type GlobalConcurrencyLimit =
 	components["schemas"]["GlobalConcurrencyLimitResponse"];
@@ -9,24 +14,18 @@ export type GlobalConcurrencyLimitsFilter =
 
 /**
  * ```
- * 🏗️ Variable queries construction 👷
+ * 🏗️ Global concurrency limits queries construction 👷
  * all   =>   ['global-concurrency-limits'] // key to match ['global-concurrency-limits', ...
  * list  =>   ['global-concurrency-limits', 'list'] // key to match ['global-concurrency-limits', 'list', ...
  *            ['global-concurrency-limits', 'list', { ...filter1 }]
  *            ['global-concurrency-limits', 'list', { ...filter2 }]
- * details => ['global-concurrency-limits', 'details'] // key to match ['global-concurrency-limits', 'details', ...]
- *            ['global-concurrency-limits', 'details', { ...globalConcurrencyLimit1 }]
- *            ['global-concurrency-limits', 'details', { ...globalConcurrencyLimit2 }]
  * ```
  * */
-const queryKeyFactory = {
+export const queryKeyFactory = {
 	all: () => ["global-concurrency-limits"] as const,
 	lists: () => [...queryKeyFactory.all(), "list"] as const,
 	list: (filter: GlobalConcurrencyLimitsFilter) =>
 		[...queryKeyFactory.lists(), filter] as const,
-	details: () => [...queryKeyFactory.all(), "details"] as const,
-	detail: (id_or_name: string) =>
-		[...queryKeyFactory.details(), id_or_name] as const,
 };
 
 // ----- 🔑 Queries 🗄️
 // ----------------------------
@@ -45,18 +44,6 @@ export const buildListGlobalConcurrencyLimitsQuery = (
 	},
 });
 
-export const buildGetGlobalConcurrencyLimitQuery = (id_or_name: string) =>
-	queryOptions({
-		queryKey: queryKeyFactory.detail(id_or_name),
-		queryFn: async () => {
-			const res = await getQueryService().GET(
-				"/v2/concurrency_limits/{id_or_name}",
-				{ params: { path: { id_or_name } } },
-			);
-			return res.data ?? null;
-		},
-	});
-
 /**
  *
  * @param filter
  * @returns list of global concurrency limits as a QueryResult object
  */
 export const useListGlobalConcurrencyLimits = (
 	filter: GlobalConcurrencyLimitsFilter,
 ) => useQuery(buildListGlobalConcurrencyLimitsQuery(filter));
 
+// ----- ✍🏼 Mutations 🗄️
+// ----------------------------
+
 /**
+ * Hook for deleting a global concurrency limit
  *
- * @param id_or_name
- * @returns details about the specified global concurrency limit as a QueryResult object
+ * @returns Mutation object for deleting a global concurrency limit with loading/error states and trigger function
+ *
+ * @example
+ * ```ts
+ * const { deleteGlobalConcurrencyLimit } = useDeleteGlobalConcurrencyLimit();
+ *
+ * // Delete a global concurrency limit by id or name
+ * deleteGlobalConcurrencyLimit('id-to-delete', {
+ *   onSuccess: () => {
+ *     // Handle successful deletion
+ *   },
+ *   onError: (error) => {
+ *     console.error('Failed to delete global concurrency limit:', error);
+ *   }
+ * });
+ * ```
  */
-export const useGetGlobalConcurrencyLimit = (id_or_name: string) =>
-	useQuery(buildGetGlobalConcurrencyLimitQuery(id_or_name));
+export const useDeleteGlobalConcurrencyLimit = () => {
+	const queryClient = useQueryClient();
+	const { mutate: deleteGlobalConcurrencyLimit, ...rest } = useMutation({
+		mutationFn: (id_or_name: string) =>
+			getQueryService().DELETE("/v2/concurrency_limits/{id_or_name}", {
+				params: { path: { id_or_name } },
+			}),
+		onSuccess: () => {
+			// After a successful deletion, invalidate only the list queries so they refetch
+			return queryClient.invalidateQueries({
+				queryKey: queryKeyFactory.lists(),
+			});
+		},
+	});
+	return {
+		deleteGlobalConcurrencyLimit,
+		...rest,
+	};
+};
 
-// ----- ✍🏼 Mutations 🗄️
-// ----------------------------
+/**
+ * Hook for creating a new global concurrency limit
+ *
+ * @returns Mutation object for creating a global concurrency limit with loading/error states and trigger function
+ *
+ * @example
+ * ```ts
+ * const { createGlobalConcurrencyLimit, isLoading } = useCreateGlobalConcurrencyLimit();
+ *
+ * // Create a new global concurrency limit
+ * createGlobalConcurrencyLimit({
+ *   active: true,
+ *   limit: 0,
+ *   name: "my limit",
+ *   slot_decay_per_second: 0
+ * }, {
+ *   onSuccess: () => {
+ *     // Handle successful creation
+ *     console.log('Global concurrency limit created successfully');
+ *   },
+ *   onError: (error) => {
+ *     // Handle error
+ *     console.error('Failed to create global concurrency limit:', error);
+ *   }
+ * });
+ * ```
+ */
+export const useCreateGlobalConcurrencyLimit = () => {
+	const queryClient = useQueryClient();
+	const { mutate: createGlobalConcurrencyLimit, ...rest } = useMutation({
+		mutationFn: (body: components["schemas"]["ConcurrencyLimitV2Create"]) =>
+			getQueryService().POST("/v2/concurrency_limits/", {
+				body,
+			}),
+		onSuccess: () => {
+			// After a successful creation, invalidate only the list queries so they refetch
+			return queryClient.invalidateQueries({
+				queryKey: queryKeyFactory.lists(),
+			});
+		},
+	});
+	return {
+		createGlobalConcurrencyLimit,
+		...rest,
+	};
+};
+
+type GlobalConcurrencyLimitUpdateWithId =
+	components["schemas"]["ConcurrencyLimitV2Update"] & {
+		id_or_name: string;
+	};
 
-// TODO:
+/**
+ * Hook for updating an existing global concurrency limit
+ *
+ * @returns Mutation object for updating a global concurrency limit with loading/error states and trigger function
+ *
+ * @example
+ * ```ts
+ * const { updateGlobalConcurrencyLimit } = useUpdateGlobalConcurrencyLimit();
+ *
+ * // Update an existing global concurrency limit
+ * updateGlobalConcurrencyLimit({
+ *   id_or_name: "1",
+ *   active: true,
+ *   limit: 0,
+ *   name: "my limit",
+ *   slot_decay_per_second: 0
+ * }, {
+ *   onSuccess: () => {
+ *     // Handle successful update
+ *   },
+ *   onError: (error) => {
+ *     console.error('Failed to update global concurrency limit:', error);
+ *   }
+ * });
+ * ```
+ */
+export const useUpdateGlobalConcurrencyLimit = () => {
+	const queryClient = useQueryClient();
+	const { mutate: updateGlobalConcurrencyLimit, ...rest } = useMutation({
+		mutationFn: ({ id_or_name, ...body }: GlobalConcurrencyLimitUpdateWithId) =>
+			getQueryService().PATCH("/v2/concurrency_limits/{id_or_name}", {
+				body,
+				params: { path: { id_or_name } },
+			}),
+		onSuccess: () => {
+			// After a successful update, invalidate only the list queries so they refetch
+			return queryClient.invalidateQueries({
+				queryKey: queryKeyFactory.lists(),
+			});
+		},
+	});
+	return {
+		updateGlobalConcurrencyLimit,
+		...rest,
+	};
+};
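The hierarchical keys built by `queryKeyFactory` are what let every mutation above get away with a single `invalidateQueries` call: TanStack Query matches query keys by array prefix, so invalidating the shared `lists()` prefix marks every cached filter variant stale at once. A standalone sketch of that matching behavior, outside the patch (the filter shape here is an illustrative assumption):

```ts
import { QueryClient } from "@tanstack/react-query";

const queryKeyFactory = {
	all: () => ["global-concurrency-limits"] as const,
	lists: () => [...queryKeyFactory.all(), "list"] as const,
	list: (filter: { offset: number }) =>
		[...queryKeyFactory.lists(), filter] as const,
};

const queryClient = new QueryClient();

// Two cached lists under different filters share the same "list" prefix.
queryClient.setQueryData(queryKeyFactory.list({ offset: 0 }), []);
queryClient.setQueryData(queryKeyFactory.list({ offset: 10 }), []);

// Prefix matching: this single call marks both entries stale, which is
// exactly what the mutations' onSuccess handlers rely on.
void queryClient.invalidateQueries({ queryKey: queryKeyFactory.lists() });
```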
diff --git a/ui-v2/tests/mocks/handlers.ts b/ui-v2/tests/mocks/handlers.ts
index d649a6da6aac..bf199395ac5c 100644
--- a/ui-v2/tests/mocks/handlers.ts
+++ b/ui-v2/tests/mocks/handlers.ts
@@ -1,5 +1,26 @@
 import { http, HttpResponse } from "msw";
 
+const globalConcurrencyLimitsHandlers = [
+	http.post("http://localhost:4200/api/v2/concurrency_limits/filter", () => {
+		return HttpResponse.json([]);
+	}),
+	http.post("http://localhost:4200/api/v2/concurrency_limits/", () => {
+		return HttpResponse.json({ status: "success" }, { status: 201 });
+	}),
+	http.patch(
+		"http://localhost:4200/api/v2/concurrency_limits/:id_or_name",
+		() => {
+			return new HttpResponse(null, { status: 204 });
+		},
+	),
+	http.delete(
+		"http://localhost:4200/api/v2/concurrency_limits/:id_or_name",
+		() => {
+			return HttpResponse.json({ status: 204 });
+		},
+	),
+];
+
 const variablesHandlers = [
 	http.post("http://localhost:4200/api/variables/", () => {
 		return HttpResponse.json({ status: "success" }, { status: 201 });
@@ -40,5 +61,6 @@ export const handlers = [
 	http.post("http://localhost:4200/api/deployments/count", () => {
 		return HttpResponse.json(1);
 	}),
+	...globalConcurrencyLimitsHandlers,
 	...variablesHandlers,
 ];
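End to end, a consumer component would pair the list query with these mutations and let the cache invalidation drive re-renders. A hedged sketch under assumptions not in the patch (the `{ offset: 0 }` filter, the field values, and the plain markup are illustrative):

```tsx
import {
	useCreateGlobalConcurrencyLimit,
	useDeleteGlobalConcurrencyLimit,
	useListGlobalConcurrencyLimits,
} from "@/hooks/global-concurrency-limits";

export const GlobalConcurrencyLimitsDemo = () => {
	// Assumed filter shape; offset pagination is the common case.
	const { data: limits } = useListGlobalConcurrencyLimits({ offset: 0 });
	const { createGlobalConcurrencyLimit } = useCreateGlobalConcurrencyLimit();
	const { deleteGlobalConcurrencyLimit } = useDeleteGlobalConcurrencyLimit();

	return (
		<div>
			<button
				onClick={() =>
					// On success the hook invalidates the list cache, so the new
					// limit appears below without any manual refetching.
					createGlobalConcurrencyLimit({
						active: true,
						limit: 10,
						name: "demo-limit",
						slot_decay_per_second: 0,
					})
				}
			>
				Add limit
			</button>
			<ul>
				{limits?.map((limit) => (
					<li key={limit.id}>
						{limit.name}
						<button onClick={() => deleteGlobalConcurrencyLimit(limit.id)}>
							Delete
						</button>
					</li>
				))}
			</ul>
		</div>
	);
};
```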
From 41ec634d98be0991b14702201802b436eac3e2be Mon Sep 17 00:00:00 2001
From: Devin Villarosa <102188207+devinvillarosa@users.noreply.github.com>
Date: Thu, 5 Dec 2024 14:36:24 -0800
Subject: [PATCH 18/92] [UI v2] feat: Simplifies typography by introducing a
 simple typography component (#16231)

---
 ui-v2/src/components/ui/typography.stories.tsx | 34 +++++++++
 ui-v2/src/components/ui/typography.tsx         | 76 +++++++++++++++++++
 2 files changed, 110 insertions(+)
 create mode 100644 ui-v2/src/components/ui/typography.stories.tsx
 create mode 100644 ui-v2/src/components/ui/typography.tsx

diff --git a/ui-v2/src/components/ui/typography.stories.tsx b/ui-v2/src/components/ui/typography.stories.tsx
new file mode 100644
index 000000000000..1414f1f47360
--- /dev/null
+++ b/ui-v2/src/components/ui/typography.stories.tsx
@@ -0,0 +1,34 @@
+import type { Meta, StoryObj } from "@storybook/react";
+
+import { Typography } from "./typography";
+
+const meta: Meta<typeof Typography> = {
+	title: "UI/Typography",
+	component: Typography,
+	parameters: {
+		docs: {
+			description: {
+				component: "Typography is used as your basic text component",
+			},
+		},
+	},
+};
+export default meta;
+
+type Story = StoryObj<typeof Typography>;
+
+export const Usage: Story = {
+	render: () => {
+		return (
+			<div className="flex flex-col gap-4">
+				<Typography variant="h1">h1 Typography</Typography>
+				<Typography variant="h2">h2 Typography</Typography>
+				<Typography variant="h3">h3 Typography</Typography>
+				<Typography variant="h4">h4 Typography</Typography>
+				<Typography variant="bodyLarge">bodyLarge Typography</Typography>
+				<Typography variant="body">body Typography</Typography>
+				<Typography variant="bodySmall">bodySmall Typography</Typography>
+			</div>
+		);
+	},
+};
diff --git a/ui-v2/src/components/ui/typography.tsx b/ui-v2/src/components/ui/typography.tsx
new file mode 100644
index 000000000000..7df605bb9408
--- /dev/null
+++ b/ui-v2/src/components/ui/typography.tsx
@@ -0,0 +1,76 @@
+import { cn } from "@/lib/utils";
+import { forwardRef } from "react";
+
+type Variant = "h1" | "h2" | "h3" | "h4" | "bodyLarge" | "body" | "bodySmall";
+
+type Props = {
+	className?: string;
+	variant?: Variant;
+	children: React.ReactNode;
+};
+
+export const Typography = forwardRef<HTMLElement, Props>(
+	({ variant = "body", className, children }, ref) => {
+		switch (variant) {
+			case "h1":
+				return (
+					<h1
+						ref={ref}
+						className={cn("text-4xl font-extrabold tracking-tight", className)}
+					>
+						{children}
+					</h1>
+				);
+			case "h2":
+				return (
+					<h2
+						ref={ref}
+						className={cn("text-3xl font-semibold tracking-tight", className)}
+					>
+						{children}
+					</h2>
+				);
+			case "h3":
+				return (
+					<h3
+						ref={ref}
+						className={cn("text-2xl font-semibold tracking-tight", className)}
+					>
+						{children}
+					</h3>
+				);
+			case "h4":
+				return (
+					<h4
+						ref={ref}
+						className={cn("text-xl font-semibold tracking-tight", className)}
+					>
+						{children}
+					</h4>
+				);
+
+			case "bodyLarge":
+				return (
+					<p ref={ref} className={cn("text-lg", className)}>
+						{children}
+					</p>
+				);
+
+			case "bodySmall":
+				return (
+					<p ref={ref} className={cn("text-sm", className)}>
+						{children}
+					</p>
+				);
+
+			case "body":
+			default:
+				return (
+					<p ref={ref} className={cn("text-base", className)}>
+						{children}
+					</p>
+				);
+		}
+	},
+);
+Typography.displayName = "Typography";

From c9712965a2597e7ff6ad0a75fb30fba6c0512030 Mon Sep 17 00:00:00 2001
From: Devin Villarosa <102188207+devinvillarosa@users.noreply.github.com>
Date: Thu, 5 Dec 2024 15:45:06 -0800
Subject: [PATCH 19/92] [UI v2] feat: Moves Typography component to use cva
 style (#16236)

---
 ui-v2/src/components/ui/typography.tsx         | 76 -------------------
 ui-v2/src/components/ui/typography/index.ts    |  1 +
 ui-v2/src/components/ui/typography/styles.ts   | 18 +++++
 .../ui/{ => typography}/typography.stories.tsx |  0
 .../components/ui/typography/typography.tsx    | 34 +++++++++
 5 files changed, 53 insertions(+), 76 deletions(-)
 delete mode 100644 ui-v2/src/components/ui/typography.tsx
 create mode 100644 ui-v2/src/components/ui/typography/index.ts
 create mode 100644 ui-v2/src/components/ui/typography/styles.ts
 rename ui-v2/src/components/ui/{ => typography}/typography.stories.tsx (100%)
 create mode 100644 ui-v2/src/components/ui/typography/typography.tsx

diff --git a/ui-v2/src/components/ui/typography.tsx b/ui-v2/src/components/ui/typography.tsx
deleted file mode 100644
index 7df605bb9408..000000000000
--- a/ui-v2/src/components/ui/typography.tsx
+++ /dev/null
@@ -1,76 +0,0 @@
-import { cn } from "@/lib/utils";
-import { forwardRef } from "react";
-
-type Variant = "h1" | "h2" | "h3" | "h4" | "bodyLarge" | "body" | "bodySmall";
-
-type Props = {
-	className?: string;
-	variant?: Variant;
-	children: React.ReactNode;
-};
-
-export const Typography = forwardRef<HTMLElement, Props>(
-	({ variant = "body", className, children }, ref) => {
-		switch (variant) {
-			case "h1":
-				return (
-					<h1
-						ref={ref}
-						className={cn("text-4xl font-extrabold tracking-tight", className)}
-					>
-						{children}
-					</h1>
-				);
-			case "h2":
-				return (
-					<h2
-						ref={ref}
-						className={cn("text-3xl font-semibold tracking-tight", className)}
-					>
-						{children}
-					</h2>
-				);
-			case "h3":
-				return (
-					<h3
-						ref={ref}
-						className={cn("text-2xl font-semibold tracking-tight", className)}
-					>
-						{children}
-					</h3>
-				);
-			case "h4":
-				return (
-					<h4
-						ref={ref}
-						className={cn("text-xl font-semibold tracking-tight", className)}
-					>
-						{children}
-					</h4>
-				);
-
-			case "bodyLarge":
-				return (
-					<p ref={ref} className={cn("text-lg", className)}>
-						{children}
-					</p>
-				);
-
-			case "bodySmall":
-				return (
-					<p ref={ref} className={cn("text-sm", className)}>
-						{children}
-					</p>
-				);
-
-			case "body":
-			default:
-				return (
-					<p ref={ref} className={cn("text-base", className)}>
-						{children}
-					</p>
-				);
-		}
-	},
-);
-Typography.displayName = "Typography";
diff --git a/ui-v2/src/components/ui/typography/index.ts b/ui-v2/src/components/ui/typography/index.ts
new file mode 100644
index 000000000000..70b4ee91e8e9
--- /dev/null
+++ b/ui-v2/src/components/ui/typography/index.ts
@@ -0,0 +1 @@
+export { Typography } from "./typography";
diff --git a/ui-v2/src/components/ui/typography/styles.ts b/ui-v2/src/components/ui/typography/styles.ts
new file mode 100644
index 000000000000..356d5d3355a3
--- /dev/null
+++ b/ui-v2/src/components/ui/typography/styles.ts
@@ -0,0 +1,18 @@
+import { cva } from "class-variance-authority";
+
+export const typographyVariants = cva("", {
+	variants: {
+		variant: {
+			h1: "text-4xl font-extrabold tracking-tight",
+			h2: "text-3xl font-semibold tracking-tight",
+			h3: "text-2xl font-semibold tracking-tight",
+			h4: "text-xl font-semibold tracking-tight",
+			bodyLarge: "text-lg",
+			body: "text-base",
+			bodySmall: "text-sm",
+		},
+	},
+	defaultVariants: {
+		variant: "body",
+	},
+});
diff --git a/ui-v2/src/components/ui/typography.stories.tsx b/ui-v2/src/components/ui/typography/typography.stories.tsx
similarity index 100%
rename from ui-v2/src/components/ui/typography.stories.tsx
rename to ui-v2/src/components/ui/typography/typography.stories.tsx
diff --git a/ui-v2/src/components/ui/typography/typography.tsx b/ui-v2/src/components/ui/typography/typography.tsx
new file mode 100644
index 000000000000..dcfc206931c3
--- /dev/null
+++ b/ui-v2/src/components/ui/typography/typography.tsx
@@ -0,0 +1,34 @@
+import { cn } from "@/lib/utils";
+import { createElement, forwardRef } from "react";
+
+import { typographyVariants } from "./styles";
+
+const VARIANTS_TO_ELEMENT_MAP = {
+	h1: "h1",
+	h2: "h2",
+	h3: "h3",
+	h4: "h4",
+	bodyLarge: "p",
+	body: "p",
+	bodySmall: "p",
+} as const;
+
+type Variant = "h1" | "h2" | "h3" | "h4" | "bodyLarge" | "body" | "bodySmall";
+
+type Props = {
+	className?: string;
+	variant?: Variant;
+	children: React.ReactNode;
+};
+
+export const Typography = forwardRef<HTMLElement, Props>(
+	({ className, variant = "body", ...props }, ref) => {
+		return createElement(VARIANTS_TO_ELEMENT_MAP[variant], {
+			className: cn(typographyVariants({ variant }), className),
+			ref,
+			...props,
+		});
+	},
+);
+
+Typography.displayName = "Typography";
From a8bb685772fcbec18ab58662a95f8d8476869a30 Mon Sep 17 00:00:00 2001
From: Devin Villarosa <102188207+devinvillarosa@users.noreply.github.com>
Date: Thu, 5 Dec 2024 15:55:27 -0800
Subject: [PATCH 20/92] [UI v2] feat: Starts page layout for concurrency page
 (#16226)

---
 .../concurrency/concurrency-constants.ts      |  5 +++
 .../concurrency/concurrency-page.tsx          | 27 +++++++++++++++
 .../concurrency/concurrency-tabs.tsx          | 34 +++++++++++++++++++
 ...bal-concurrency-limit-empty-state.test.tsx |  2 +-
 .../global-concurrency-limit-empty-state.tsx  |  6 ++--
 .../global-concurrency-limits-header.tsx      | 18 ++++++++++
 .../global-concurrency-view/index.tsx         | 18 ++++++++++
 .../task-run-concurrenct-view/index.tsx       |  3 ++
 ...run-concurrency-limit-empty-state.test.tsx |  2 +-
 ...task-run-concurrency-limit-empty-state.tsx |  6 ++--
 ui-v2/src/components/layouts/MainLayout.tsx   |  4 +--
 ui-v2/src/components/variables/layout.tsx     |  2 +-
 ui-v2/src/routes/concurrency-limits.tsx       |  4 ++-
 13 files changed, 119 insertions(+), 12 deletions(-)
 create mode 100644 ui-v2/src/components/concurrency/concurrency-constants.ts
 create mode 100644 ui-v2/src/components/concurrency/concurrency-page.tsx
 create mode 100644 ui-v2/src/components/concurrency/concurrency-tabs.tsx
 rename ui-v2/src/components/concurrency/{ => global-concurrency-view}/global-concurrency-limit-empty-state.test.tsx (89%)
 rename ui-v2/src/components/concurrency/{ => global-concurrency-view}/global-concurrency-limit-empty-state.tsx (86%)
 create mode 100644 ui-v2/src/components/concurrency/global-concurrency-view/global-concurrency-limits-header.tsx
 create mode 100644 ui-v2/src/components/concurrency/global-concurrency-view/index.tsx
 create mode 100644 ui-v2/src/components/concurrency/task-run-concurrenct-view/index.tsx
 rename ui-v2/src/components/concurrency/{ => task-run-concurrenct-view}/task-run-concurrency-limit-empty-state.test.tsx (90%)
 rename ui-v2/src/components/concurrency/{ => task-run-concurrenct-view}/task-run-concurrency-limit-empty-state.tsx (86%)

diff --git a/ui-v2/src/components/concurrency/concurrency-constants.ts b/ui-v2/src/components/concurrency/concurrency-constants.ts
new file mode 100644
index 000000000000..2ffc86f41409
--- /dev/null
+++ b/ui-v2/src/components/concurrency/concurrency-constants.ts
@@ -0,0 +1,5 @@
+export const TAB_OPTIONS = {
+	Global: "Global",
+	"Task Run": "Task Run",
+} as const;
+export type TabOptions = keyof typeof TAB_OPTIONS;
diff --git a/ui-v2/src/components/concurrency/concurrency-page.tsx b/ui-v2/src/components/concurrency/concurrency-page.tsx
new file mode 100644
index 000000000000..17012f025ba1
--- /dev/null
+++ b/ui-v2/src/components/concurrency/concurrency-page.tsx
@@ -0,0 +1,27 @@
+import { useState } from "react";
+
+import { Typography } from "@/components/ui/typography";
+
+import { TAB_OPTIONS, TabOptions } from "./concurrency-constants";
+import { ConcurrencyTabs } from "./concurrency-tabs";
+import { GlobalConcurrencyView } from "./global-concurrency-view";
+import { TaskRunConcurrencyView } from "./task-run-concurrenct-view";
+
+export const ConcurrencyPage = (): JSX.Element => {
+	// TODO: Use URL query instead
+	const [tab, setTab] = useState<TabOptions>(TAB_OPTIONS.Global);
+
+	return (
+		<div className="flex flex-col gap-4">
+			<Typography variant="h2">Concurrency</Typography>
+			<ConcurrencyTabs
+				value={tab}
+				onValueChange={setTab}
+				globalView={<GlobalConcurrencyView />}
+				taskRunView={<TaskRunConcurrencyView />}
+			/>
+		</div>
+	);
+};
diff --git a/ui-v2/src/components/concurrency/concurrency-tabs.tsx b/ui-v2/src/components/concurrency/concurrency-tabs.tsx
new file mode 100644
index 000000000000..b3396114bca5
--- /dev/null
+++ b/ui-v2/src/components/concurrency/concurrency-tabs.tsx
@@ -0,0 +1,34 @@
+import { Tabs, TabsContent, TabsList, TabsTrigger } from "@/components/ui/tabs";
+import { TAB_OPTIONS, TabOptions } from "./concurrency-constants";
+
+type Props = {
+	globalView: React.ReactNode;
+	onValueChange: (value: TabOptions) => void;
+	taskRunView: React.ReactNode;
+	value: TabOptions;
+};
+
+// TODO: Move Tabs for navigation to a generic styled component
+
+export const ConcurrencyTabs = ({
+	globalView,
+	onValueChange,
+	taskRunView,
+	value,
+}: Props): JSX.Element => {
+	return (
+		<Tabs
+			value={value}
+			onValueChange={(value) => onValueChange(value as TabOptions)}
+		>
+			<TabsList>
+				<TabsTrigger value={TAB_OPTIONS.Global}>
+					{TAB_OPTIONS.Global}
+				</TabsTrigger>
+				<TabsTrigger value={TAB_OPTIONS["Task Run"]}>
+					{TAB_OPTIONS["Task Run"]}
+				</TabsTrigger>
+			</TabsList>
+			<TabsContent value={TAB_OPTIONS.Global}>{globalView}</TabsContent>
+			<TabsContent value={TAB_OPTIONS["Task Run"]}>{taskRunView}</TabsContent>
+		</Tabs>
+	);
+};
diff --git a/ui-v2/src/components/concurrency/global-concurrency-limit-empty-state.test.tsx b/ui-v2/src/components/concurrency/global-concurrency-view/global-concurrency-limit-empty-state.test.tsx
similarity index 89%
rename from ui-v2/src/components/concurrency/global-concurrency-limit-empty-state.test.tsx
rename to ui-v2/src/components/concurrency/global-concurrency-view/global-concurrency-limit-empty-state.test.tsx
index d26184237fc3..48cf3bb962a1 100644
--- a/ui-v2/src/components/concurrency/global-concurrency-limit-empty-state.test.tsx
+++ b/ui-v2/src/components/concurrency/global-concurrency-view/global-concurrency-limit-empty-state.test.tsx
@@ -9,7 +9,7 @@ describe("GlobalConcurrencyLimitEmptyState", () => {
 
 		const mockFn = vi.fn();
 
-		render(<GlobalConcurrencyLimitEmptyState onClick={mockFn} />);
+		render(<GlobalConcurrencyLimitEmptyState onAdd={mockFn} />);
 		await user.click(
 			screen.getByRole("button", { name: /Add Concurrency Limit/i }),
 		);
diff --git a/ui-v2/src/components/concurrency/global-concurrency-limit-empty-state.tsx b/ui-v2/src/components/concurrency/global-concurrency-view/global-concurrency-limit-empty-state.tsx
similarity index 86%
rename from ui-v2/src/components/concurrency/global-concurrency-limit-empty-state.tsx
rename to ui-v2/src/components/concurrency/global-concurrency-view/global-concurrency-limit-empty-state.tsx
index 1759a711e465..5ed33c7800c8 100644
--- a/ui-v2/src/components/concurrency/global-concurrency-limit-empty-state.tsx
+++ b/ui-v2/src/components/concurrency/global-concurrency-view/global-concurrency-limit-empty-state.tsx
@@ -10,9 +10,9 @@ import {
 import { Icon } from "@/components/ui/icons";
 
 type Props = {
-	onClick: () => void;
+	onAdd: () => void;
 };
-export const GlobalConcurrencyLimitEmptyState = ({ onClick }: Props) => (
+export const GlobalConcurrencyLimitEmptyState = ({ onAdd }: Props) => (
 	<EmptyState>
 		<EmptyStateIcon id="AlarmClock" />
 		<EmptyStateTitle>Add a concurrency limit</EmptyStateTitle>
@@ -21,6 +21,6 @@ export const GlobalConcurrencyLimitEmptyState = ({ onAdd }: Props) => (
 			operation where you want to control concurrency.
 		</EmptyStateDescription>
 		<EmptyStateActions>
-			<Button onClick={onClick}>
+			<Button onClick={onAdd}>
 				Add Concurrency Limit
 			</Button>
diff --git a/ui-v2/src/components/concurrency/global-concurrency-view/global-concurrency-limits-header.tsx b/ui-v2/src/components/concurrency/global-concurrency-view/global-concurrency-limits-header.tsx
new file mode 100644
index 000000000000..d64699434c3a
--- /dev/null
+++ b/ui-v2/src/components/concurrency/global-concurrency-view/global-concurrency-limits-header.tsx
@@ -0,0 +1,18 @@
+import { Button } from "@/components/ui/button";
+import { Icon } from "@/components/ui/icons";
+import { Typography } from "@/components/ui/typography";
+
+type Props = {
+	onAdd: () => void;
+};
+
+export const GlobalConcurrencyLimitsHeader = ({ onAdd }: Props) => {
+	return (
+		<div className="flex items-center gap-2">
+			<Typography variant="h4">Global Concurrency Limits</Typography>
+			<Button onClick={onAdd}>
+				<Icon id="Plus" />
+			</Button>
+		</div>
+	);
+};
diff --git a/ui-v2/src/components/concurrency/global-concurrency-view/index.tsx b/ui-v2/src/components/concurrency/global-concurrency-view/index.tsx
new file mode 100644
index 000000000000..20b5b7b78a63
--- /dev/null
+++ b/ui-v2/src/components/concurrency/global-concurrency-view/index.tsx
@@ -0,0 +1,18 @@
+import { useState } from "react";
+import { GlobalConcurrencyLimitsHeader } from "./global-concurrency-limits-header";
+
+export const GlobalConcurrencyView = () => {
+	const [showAddDialog, setShowAddDialog] = useState(false);
+
+	const openAddDialog = () => setShowAddDialog(true);
+	const closeAddDialog = () => setShowAddDialog(false);
+
+	return (
+		<>
+			<GlobalConcurrencyLimitsHeader onAdd={openAddDialog} />
+			{showAddDialog && <div>TODO: DIALOG</div>}
+		</>
+	);
+};
diff --git a/ui-v2/src/components/concurrency/task-run-concurrenct-view/index.tsx b/ui-v2/src/components/concurrency/task-run-concurrenct-view/index.tsx
new file mode 100644
index 000000000000..8902ed44ec37
--- /dev/null
+++ b/ui-v2/src/components/concurrency/task-run-concurrenct-view/index.tsx
@@ -0,0 +1,3 @@
+export const TaskRunConcurrencyView = () => {
+	return <div>🚧🚧 Pardon our dust! 🚧🚧</div>;
+};
diff --git a/ui-v2/src/components/concurrency/task-run-concurrency-limit-empty-state.test.tsx b/ui-v2/src/components/concurrency/task-run-concurrenct-view/task-run-concurrency-limit-empty-state.test.tsx
similarity index 90%
rename from ui-v2/src/components/concurrency/task-run-concurrency-limit-empty-state.test.tsx
rename to ui-v2/src/components/concurrency/task-run-concurrenct-view/task-run-concurrency-limit-empty-state.test.tsx
index 34b64a9531b6..d3bbdf8127aa 100644
--- a/ui-v2/src/components/concurrency/task-run-concurrency-limit-empty-state.test.tsx
+++ b/ui-v2/src/components/concurrency/task-run-concurrenct-view/task-run-concurrency-limit-empty-state.test.tsx
@@ -9,7 +9,7 @@ describe("TaskRunConcurrencyLimitEmptyState", () => {
 
 		const mockFn = vi.fn();
 
-		render(<TaskRunConcurrencyLimitEmptyState onClick={mockFn} />);
+		render(<TaskRunConcurrencyLimitEmptyState onAdd={mockFn} />);
 		await user.click(
 			screen.getByRole("button", { name: /Add Concurrency Limit/i }),
 		);
diff --git a/ui-v2/src/components/concurrency/task-run-concurrency-limit-empty-state.tsx b/ui-v2/src/components/concurrency/task-run-concurrenct-view/task-run-concurrency-limit-empty-state.tsx
similarity index 86%
rename from ui-v2/src/components/concurrency/task-run-concurrency-limit-empty-state.tsx
rename to ui-v2/src/components/concurrency/task-run-concurrenct-view/task-run-concurrency-limit-empty-state.tsx
index 7d16f4977196..f89e28bdd934 100644
--- a/ui-v2/src/components/concurrency/task-run-concurrency-limit-empty-state.tsx
+++ b/ui-v2/src/components/concurrency/task-run-concurrenct-view/task-run-concurrency-limit-empty-state.tsx
@@ -10,9 +10,9 @@ import {
 import { Icon } from "@/components/ui/icons";
 
 type Props = {
-	onClick: () => void;
+	onAdd: () => void;
 };
-export const TaskRunConcurrencyLimitEmptyState = ({ onClick }: Props) => (
+export const TaskRunConcurrencyLimitEmptyState = ({ onAdd }: Props) => (
 	<EmptyState>
 		<EmptyStateIcon id="AlarmClock" />
 		<EmptyStateTitle>Add a concurrency limit</EmptyStateTitle>
@@ -22,6 +22,6 @@ export const TaskRunConcurrencyLimitEmptyState = ({ onAdd }: Props) => (
 			runs can run simultaneously with a given tag.
 		</EmptyStateDescription>
 		<EmptyStateActions>
-			<Button onClick={onClick}>
+			<Button onClick={onAdd}>
 				Add Concurrency Limit
 			</Button>
diff --git a/ui-v2/src/components/layouts/MainLayout.tsx b/ui-v2/src/components/layouts/MainLayout.tsx
index 6c6b89cebe14..5840d6513786 100644
--- a/ui-v2/src/components/layouts/MainLayout.tsx
+++ b/ui-v2/src/components/layouts/MainLayout.tsx
@@ -1,12 +1,12 @@
-import { SidebarProvider } from "@/components/ui/sidebar";
 import { AppSidebar } from "@/components/ui/app-sidebar";
+import { SidebarProvider } from "@/components/ui/sidebar";
 import { Toaster } from "../ui/toaster";
 
 export function MainLayout({ children }: { children: React.ReactNode }) {
 	return (
 		<SidebarProvider>
 			<AppSidebar />
-
{children}
+
{children}
); diff --git a/ui-v2/src/components/variables/layout.tsx b/ui-v2/src/components/variables/layout.tsx index ee2ba683846b..f483cab72960 100644 --- a/ui-v2/src/components/variables/layout.tsx +++ b/ui-v2/src/components/variables/layout.tsx @@ -14,7 +14,7 @@ export const VariablesLayout = ({ children: React.ReactNode; }) => { return ( -
+
 
 
diff --git a/ui-v2/src/routes/concurrency-limits.tsx b/ui-v2/src/routes/concurrency-limits.tsx
index 1cd00ccde0bc..e425901c1452 100644
--- a/ui-v2/src/routes/concurrency-limits.tsx
+++ b/ui-v2/src/routes/concurrency-limits.tsx
@@ -1,9 +1,11 @@
 import { createFileRoute } from "@tanstack/react-router";
 
+import { ConcurrencyPage } from "@/components/concurrency/concurrency-page";
+
 export const Route = createFileRoute("/concurrency-limits")({
 	component: RouteComponent,
 });
 
 function RouteComponent() {
-	return "🚧🚧 Pardon our dust! 🚧🚧";
+	return <ConcurrencyPage />;
 }

From fda6a4ed5edbb287e213961125c3a3a74bc205e8 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Patrick=20D=C3=BCggelin?=
Date: Fri, 6 Dec 2024 16:32:06 +0100
Subject: [PATCH 21/92] Include info about registering built-in blocks (#16244)

Co-authored-by: Alexander Streed
---
 docs/v3/develop/blocks.mdx | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/docs/v3/develop/blocks.mdx b/docs/v3/develop/blocks.mdx
index eb8eada8c7c4..075f11e6d970 100644
--- a/docs/v3/develop/blocks.mdx
+++ b/docs/v3/develop/blocks.mdx
@@ -405,6 +405,11 @@ You can create and use these block types through the UI without installing any a
 | SMB | `smb` | Store data as a file on a SMB share. |
 | Twilio SMS | `twilio-sms` | Send notifications through Twilio SMS. |
 
+<Note>
+Built-in blocks should be registered the first time you start a Prefect server. If the auto-registration fails, you can manually register the blocks using `prefect block register`.
+
+For example, to register all built-in notification blocks, run `prefect block register -m prefect.blocks.notifications`.
+</Note>
 
 <Warning>
 The `S3`, `Azure`, `GCS`, and `GitHub` blocks are deprecated in favor of the corresponding `S3Bucket`,

From 9ff7d0bebb55a8cb730c8c6262ea56c5f606d053 Mon Sep 17 00:00:00 2001
From: Alexander Streed
Date: Fri, 6 Dec 2024 11:22:47 -0600
Subject: [PATCH 22/92] Move UI pre-commit hooks from `husky` to `pre-commit`
 (#16246)

---
 .pre-commit-config.yaml  |    43 +-
 ui-v2/.husky/pre-commit  |     3 -
 ui-v2/biome.json         |     2 +-
 ui-v2/package-lock.json  |   362 +-
 ui-v2/package.json       |     2 -
 ui-v2/src/api/prefect.ts | 31527 ++++++++++++++++++------------------
 6 files changed, 15868 insertions(+), 16071 deletions(-)
 delete mode 100644 ui-v2/.husky/pre-commit

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 30a626c49756..bf4a16c4ca6a 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -45,9 +45,6 @@ repos:
             src/prefect/server/events/.*|
             scripts/generate_mintlify_openapi_docs.py
           )$
-
-  - repo: local
-    hooks:
       - id: generate-settings-schema
         name: Generating Settings Schema
         language: system
         files: |
           (?x)^(
             src/prefect/settings/models/.*|
             scripts/generate_settings_schema.py
           )$
-
-  - repo: local
-    hooks:
       - id: generate-settings-ref
         name: Generating Settings Reference
         language: system
         files: |
           (?x)^(
             src/prefect/settings/models/.*|
             scripts/generate_settings_ref.py
           )$
+      - id: lint-ui-v2
+        name: Lint UI v2
+        language: system
+        entry: sh
+        args: ['-c', 'cd ui-v2 && npm i --no-upgrade --silent --no-progress && npm run lint']
+        files: |
+          (?x)^(
+            .pre-commit-config.yaml|
+            ui-v2/.*
+          )$
+        pass_filenames: false
+      - id: format-ui-v2
+        name: Format UI v2
+        language: system
+        entry: sh
+        args: ['-c', 'cd ui-v2 && npm i --no-upgrade --silent --no-progress && npm run format']
+        files: |
+          (?x)^(
+            .pre-commit-config.yaml|
+            ui-v2/.*
+          )$
+        pass_filenames: false
+      - id: service-sync-ui-v2-openapi
+        name: Sync UI v2 OpenAPI
+        language: system
+        entry: sh
+        args: ['-c', 'cd ui-v2 && npm i --no-upgrade --silent
--no-progress && npm run service-sync'] + files: | + (?x)^( + .pre-commit-config.yaml| + .pre-commit-config.yaml| + src/prefect/server/api/.*| + src/prefect/server/schemas/.*| + src/prefect/server/events/.*| + ui-v2/package.json + )$ + pass_filenames: false \ No newline at end of file diff --git a/ui-v2/.husky/pre-commit b/ui-v2/.husky/pre-commit deleted file mode 100644 index 9cb33f011be4..000000000000 --- a/ui-v2/.husky/pre-commit +++ /dev/null @@ -1,3 +0,0 @@ -cd ui-v2 -npm run lint -npm run format diff --git a/ui-v2/biome.json b/ui-v2/biome.json index 4af4c5e34f5c..9c7fdb62c7f0 100644 --- a/ui-v2/biome.json +++ b/ui-v2/biome.json @@ -7,7 +7,7 @@ }, "files": { "ignoreUnknown": false, - "ignore": [] + "ignore": ["src/api/prefect.ts"] }, "formatter": { "enabled": true, diff --git a/ui-v2/package-lock.json b/ui-v2/package-lock.json index 3f1cee99dea3..ba67571b4e1f 100644 --- a/ui-v2/package-lock.json +++ b/ui-v2/package-lock.json @@ -76,7 +76,6 @@ "eslint-plugin-testing-library": "^6.4.0", "eslint-plugin-unused-imports": "^4.1.4", "globals": "^15.10.0", - "husky": "^9.1.6", "jsdom": "^25.0.1", "msw": "^2.6.0", "postcss": "^8.4.47", @@ -3785,152 +3784,6 @@ "eslint": "^8.57.0 || ^9.0.0" } }, - "node_modules/@tanstack/eslint-plugin-query/node_modules/@typescript-eslint/scope-manager": { - "version": "8.16.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.16.0.tgz", - "integrity": "sha512-mwsZWubQvBki2t5565uxF0EYvG+FwdFb8bMtDuGQLdCCnGPrDEDvm1gtfynuKlnpzeBRqdFCkMf9jg1fnAK8sg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@typescript-eslint/types": "8.16.0", - "@typescript-eslint/visitor-keys": "8.16.0" - }, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/@tanstack/eslint-plugin-query/node_modules/@typescript-eslint/types": { - "version": "8.16.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.16.0.tgz", - "integrity": "sha512-NzrHj6thBAOSE4d9bsuRNMvk+BvaQvmY4dDglgkgGC0EW/tB3Kelnp3tAKH87GEwzoxgeQn9fNGRyFJM/xd+GQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/@tanstack/eslint-plugin-query/node_modules/@typescript-eslint/typescript-estree": { - "version": "8.16.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.16.0.tgz", - "integrity": "sha512-E2+9IzzXMc1iaBy9zmo+UYvluE3TW7bCGWSF41hVWUE01o8nzr1rvOQYSxelxr6StUvRcTMe633eY8mXASMaNw==", - "dev": true, - "license": "BSD-2-Clause", - "dependencies": { - "@typescript-eslint/types": "8.16.0", - "@typescript-eslint/visitor-keys": "8.16.0", - "debug": "^4.3.4", - "fast-glob": "^3.3.2", - "is-glob": "^4.0.3", - "minimatch": "^9.0.4", - "semver": "^7.6.0", - "ts-api-utils": "^1.3.0" - }, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - }, - "peerDependenciesMeta": { - "typescript": { - "optional": true - } - } - }, - "node_modules/@tanstack/eslint-plugin-query/node_modules/@typescript-eslint/utils": { - "version": "8.16.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.16.0.tgz", - "integrity": 
"sha512-C1zRy/mOL8Pj157GiX4kaw7iyRLKfJXBR3L82hk5kS/GyHcOFmy4YUq/zfZti72I9wnuQtA/+xzft4wCC8PJdA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@eslint-community/eslint-utils": "^4.4.0", - "@typescript-eslint/scope-manager": "8.16.0", - "@typescript-eslint/types": "8.16.0", - "@typescript-eslint/typescript-estree": "8.16.0" - }, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - }, - "peerDependencies": { - "eslint": "^8.57.0 || ^9.0.0" - }, - "peerDependenciesMeta": { - "typescript": { - "optional": true - } - } - }, - "node_modules/@tanstack/eslint-plugin-query/node_modules/@typescript-eslint/visitor-keys": { - "version": "8.16.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.16.0.tgz", - "integrity": "sha512-pq19gbaMOmFE3CbL0ZB8J8BFCo2ckfHBfaIsaOZgBIF4EoISJIdLX5xRhd0FGB0LlHReNRuzoJoMGpTjq8F2CQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@typescript-eslint/types": "8.16.0", - "eslint-visitor-keys": "^4.2.0" - }, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/@tanstack/eslint-plugin-query/node_modules/brace-expansion": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", - "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", - "dev": true, - "license": "MIT", - "dependencies": { - "balanced-match": "^1.0.0" - } - }, - "node_modules/@tanstack/eslint-plugin-query/node_modules/minimatch": { - "version": "9.0.5", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", - "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", - "dev": true, - "license": "ISC", - "dependencies": { - "brace-expansion": "^2.0.1" - }, - "engines": { - "node": ">=16 || 14 >=14.17" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/@tanstack/eslint-plugin-query/node_modules/semver": { - "version": "7.6.3", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz", - "integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==", - "dev": true, - "license": "ISC", - "bin": { - "semver": "bin/semver.js" - }, - "engines": { - "node": ">=10" - } - }, "node_modules/@tanstack/eslint-plugin-router": { "version": "1.77.7", "resolved": "https://registry.npmjs.org/@tanstack/eslint-plugin-router/-/eslint-plugin-router-1.77.7.tgz", @@ -4568,6 +4421,28 @@ } } }, + "node_modules/@typescript-eslint/eslint-plugin/node_modules/@typescript-eslint/utils": { + "version": "8.12.2", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.12.2.tgz", + "integrity": "sha512-UTTuDIX3fkfAz6iSVa5rTuSfWIYZ6ATtEocQ/umkRSyC9O919lbZ8dcH7mysshrCdrAM03skJOEYaBugxN+M6A==", + "dev": true, + "dependencies": { + "@eslint-community/eslint-utils": "^4.4.0", + "@typescript-eslint/scope-manager": "8.12.2", + "@typescript-eslint/types": "8.12.2", + "@typescript-eslint/typescript-estree": "8.12.2" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^8.57.0 || ^9.0.0" + } 
+ }, "node_modules/@typescript-eslint/parser": { "version": "8.12.2", "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.12.2.tgz", @@ -4637,6 +4512,28 @@ } } }, + "node_modules/@typescript-eslint/type-utils/node_modules/@typescript-eslint/utils": { + "version": "8.12.2", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.12.2.tgz", + "integrity": "sha512-UTTuDIX3fkfAz6iSVa5rTuSfWIYZ6ATtEocQ/umkRSyC9O919lbZ8dcH7mysshrCdrAM03skJOEYaBugxN+M6A==", + "dev": true, + "dependencies": { + "@eslint-community/eslint-utils": "^4.4.0", + "@typescript-eslint/scope-manager": "8.12.2", + "@typescript-eslint/types": "8.12.2", + "@typescript-eslint/typescript-estree": "8.12.2" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^8.57.0 || ^9.0.0" + } + }, "node_modules/@typescript-eslint/types": { "version": "8.12.2", "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.12.2.tgz", @@ -4715,15 +4612,15 @@ } }, "node_modules/@typescript-eslint/utils": { - "version": "8.12.2", - "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.12.2.tgz", - "integrity": "sha512-UTTuDIX3fkfAz6iSVa5rTuSfWIYZ6ATtEocQ/umkRSyC9O919lbZ8dcH7mysshrCdrAM03skJOEYaBugxN+M6A==", + "version": "8.17.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.17.0.tgz", + "integrity": "sha512-bQC8BnEkxqG8HBGKwG9wXlZqg37RKSMY7v/X8VEWD8JG2JuTHuNK0VFvMPMUKQcbk6B+tf05k+4AShAEtCtJ/w==", "dev": true, "dependencies": { "@eslint-community/eslint-utils": "^4.4.0", - "@typescript-eslint/scope-manager": "8.12.2", - "@typescript-eslint/types": "8.12.2", - "@typescript-eslint/typescript-estree": "8.12.2" + "@typescript-eslint/scope-manager": "8.17.0", + "@typescript-eslint/types": "8.17.0", + "@typescript-eslint/typescript-estree": "8.17.0" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -4734,6 +4631,122 @@ }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/scope-manager": { + "version": "8.17.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.17.0.tgz", + "integrity": "sha512-/ewp4XjvnxaREtqsZjF4Mfn078RD/9GmiEAtTeLQ7yFdKnqwTOgRMSvFz4et9U5RiJQ15WTGXPLj89zGusvxBg==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "8.17.0", + "@typescript-eslint/visitor-keys": "8.17.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/types": { + "version": "8.17.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.17.0.tgz", + "integrity": "sha512-gY2TVzeve3z6crqh2Ic7Cr+CAv6pfb0Egee7J5UAVWCpVvDI/F71wNfolIim4FE6hT15EbpZFVUj9j5i38jYXA==", + "dev": true, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/typescript-estree": { + "version": "8.17.0", + "resolved": 
"https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.17.0.tgz", + "integrity": "sha512-JqkOopc1nRKZpX+opvKqnM3XUlM7LpFMD0lYxTqOTKQfCWAmxw45e3qlOCsEqEB2yuacujivudOFpCnqkBDNMw==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "8.17.0", + "@typescript-eslint/visitor-keys": "8.17.0", + "debug": "^4.3.4", + "fast-glob": "^3.3.2", + "is-glob": "^4.0.3", + "minimatch": "^9.0.4", + "semver": "^7.6.0", + "ts-api-utils": "^1.3.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/visitor-keys": { + "version": "8.17.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.17.0.tgz", + "integrity": "sha512-1Hm7THLpO6ww5QU6H/Qp+AusUUl+z/CAm3cNZZ0jQvon9yicgO7Rwd+/WWRpMKLYV6p2UvdbR27c86rzCPpreg==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "8.17.0", + "eslint-visitor-keys": "^4.2.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/utils/node_modules/brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/@typescript-eslint/utils/node_modules/minimatch": { + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", + "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", + "dev": true, + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/@typescript-eslint/utils/node_modules/semver": { + "version": "7.6.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz", + "integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==", + "dev": true, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" } }, "node_modules/@typescript-eslint/visitor-keys": { @@ -6527,9 +6540,9 @@ } }, "node_modules/eslint-plugin-testing-library": { - "version": "6.4.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-testing-library/-/eslint-plugin-testing-library-6.4.0.tgz", - "integrity": "sha512-yeWF+YgCgvNyPNI9UKnG0FjeE2sk93N/3lsKqcmR8dSfeXJwFT5irnWo7NjLf152HkRzfoFjh3LsBUrhvFz4eA==", + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-testing-library/-/eslint-plugin-testing-library-6.5.0.tgz", + "integrity": "sha512-Ls5TUfLm5/snocMAOlofSOJxNN0aKqwTlco7CrNtMjkTdQlkpSMaeTCDHCuXfzrI97xcx2rSCNeKeJjtpkNC1w==", "dev": true, "dependencies": { "@typescript-eslint/utils": "^5.62.0" @@ -7386,21 +7399,6 @@ "node": ">= 14" } }, - "node_modules/husky": { - "version": "9.1.6", - "resolved": "https://registry.npmjs.org/husky/-/husky-9.1.6.tgz", - "integrity": "sha512-sqbjZKK7kf44hfdE94EoX8MZNk0n7HeW37O4YrVGCF4wzgQjp+akPAkfUK5LZ6KuR/6sqeAVuXHji+RzQgOn5A==", - "dev": true, - "bin": { - 
"husky": "bin.js" - }, - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/typicode" - } - }, "node_modules/iconv-lite": { "version": "0.6.3", "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", @@ -10492,6 +10490,28 @@ } } }, + "node_modules/typescript-eslint/node_modules/@typescript-eslint/utils": { + "version": "8.12.2", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.12.2.tgz", + "integrity": "sha512-UTTuDIX3fkfAz6iSVa5rTuSfWIYZ6ATtEocQ/umkRSyC9O919lbZ8dcH7mysshrCdrAM03skJOEYaBugxN+M6A==", + "dev": true, + "dependencies": { + "@eslint-community/eslint-utils": "^4.4.0", + "@typescript-eslint/scope-manager": "8.12.2", + "@typescript-eslint/types": "8.12.2", + "@typescript-eslint/typescript-estree": "8.12.2" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^8.57.0 || ^9.0.0" + } + }, "node_modules/unbox-primitive": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/unbox-primitive/-/unbox-primitive-1.0.2.tgz", diff --git a/ui-v2/package.json b/ui-v2/package.json index 0a8658674491..46fe5dbd6922 100644 --- a/ui-v2/package.json +++ b/ui-v2/package.json @@ -13,7 +13,6 @@ "format": "biome format --write", "preview": "vite preview", "service-sync": "uv run ../scripts/generate_oss_openapi_schema.py && npx openapi-typescript oss_schema.json -o src/api/prefect.ts && rm oss_schema.json", - "prepare": "cd .. && husky ui-v2/.husky", "storybook": "storybook dev -p 6006", "build-storybook": "storybook build" }, @@ -86,7 +85,6 @@ "eslint-plugin-testing-library": "^6.4.0", "eslint-plugin-unused-imports": "^4.1.4", "globals": "^15.10.0", - "husky": "^9.1.6", "jsdom": "^25.0.1", "msw": "^2.6.0", "postcss": "^8.4.47", diff --git a/ui-v2/src/api/prefect.ts b/ui-v2/src/api/prefect.ts index f49fc08bb2ba..4879b49f1811 100644 --- a/ui-v2/src/api/prefect.ts +++ b/ui-v2/src/api/prefect.ts @@ -4,15898 +4,15649 @@ */ export interface paths { - "/health": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - /** Health Check */ - get: operations["health_check_health_get"]; - put?: never; - post?: never; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/version": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - /** Server Version */ - get: operations["server_version_version_get"]; - put?: never; - post?: never; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/flows/": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** - * Create Flow - * @description Gracefully creates a new flow from the provided schema. If a flow with the - * same name already exists, the existing flow is returned. - */ - post: operations["create_flow_flows__post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/flows/{id}": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - /** - * Read Flow - * @description Get a flow by id. - */ - get: operations["read_flow_flows__id__get"]; - put?: never; - post?: never; - /** - * Delete Flow - * @description Delete a flow by id. 
- */ - delete: operations["delete_flow_flows__id__delete"]; - options?: never; - head?: never; - /** - * Update Flow - * @description Updates a flow. - */ - patch: operations["update_flow_flows__id__patch"]; - trace?: never; - }; - "/flows/count": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** - * Count Flows - * @description Count flows. - */ - post: operations["count_flows_flows_count_post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/flows/name/{name}": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - /** - * Read Flow By Name - * @description Get a flow by name. - */ - get: operations["read_flow_by_name_flows_name__name__get"]; - put?: never; - post?: never; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/flows/filter": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** - * Read Flows - * @description Query for flows. - */ - post: operations["read_flows_flows_filter_post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/flows/paginate": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** - * Paginate Flows - * @description Pagination query for flows. - */ - post: operations["paginate_flows_flows_paginate_post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/flow_runs/": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** - * Create Flow Run - * @description Create a flow run. If a flow run with the same flow_id and - * idempotency key already exists, the existing flow run will be returned. - * - * If no state is provided, the flow run will be created in a PENDING state. - */ - post: operations["create_flow_run_flow_runs__post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/flow_runs/{id}": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - /** - * Read Flow Run - * @description Get a flow run by id. - */ - get: operations["read_flow_run_flow_runs__id__get"]; - put?: never; - post?: never; - /** - * Delete Flow Run - * @description Delete a flow run by id. - */ - delete: operations["delete_flow_run_flow_runs__id__delete"]; - options?: never; - head?: never; - /** - * Update Flow Run - * @description Updates a flow run. - */ - patch: operations["update_flow_run_flow_runs__id__patch"]; - trace?: never; - }; - "/flow_runs/count": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** - * Count Flow Runs - * @description Query for flow runs. - */ - post: operations["count_flow_runs_flow_runs_count_post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/flow_runs/lateness": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** - * Average Flow Run Lateness - * @description Query for average flow-run lateness in seconds. 
- */ - post: operations["average_flow_run_lateness_flow_runs_lateness_post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/flow_runs/history": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** - * Flow Run History - * @description Query for flow run history data across a given range and interval. - */ - post: operations["flow_run_history_flow_runs_history_post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/flow_runs/{id}/graph": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - /** - * Read Flow Run Graph V1 - * @description Get a task run dependency map for a given flow run. - */ - get: operations["read_flow_run_graph_v1_flow_runs__id__graph_get"]; - put?: never; - post?: never; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/flow_runs/{id}/graph-v2": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - /** - * Read Flow Run Graph V2 - * @description Get a graph of the tasks and subflow runs for the given flow run - */ - get: operations["read_flow_run_graph_v2_flow_runs__id__graph_v2_get"]; - put?: never; - post?: never; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/flow_runs/{id}/resume": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** - * Resume Flow Run - * @description Resume a paused flow run. - */ - post: operations["resume_flow_run_flow_runs__id__resume_post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/flow_runs/filter": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** - * Read Flow Runs - * @description Query for flow runs. - */ - post: operations["read_flow_runs_flow_runs_filter_post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/flow_runs/{id}/set_state": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** - * Set Flow Run State - * @description Set a flow run state, invoking any orchestration rules. - */ - post: operations["set_flow_run_state_flow_runs__id__set_state_post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/flow_runs/{id}/input": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** - * Create Flow Run Input - * @description Create a key/value input for a flow run. 
- */ - post: operations["create_flow_run_input_flow_runs__id__input_post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/flow_runs/{id}/input/filter": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** - * Filter Flow Run Input - * @description Filter flow run inputs by key prefix - */ - post: operations["filter_flow_run_input_flow_runs__id__input_filter_post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/flow_runs/{id}/input/{key}": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - /** - * Read Flow Run Input - * @description Create a value from a flow run input - */ - get: operations["read_flow_run_input_flow_runs__id__input__key__get"]; - put?: never; - post?: never; - /** - * Delete Flow Run Input - * @description Delete a flow run input - */ - delete: operations["delete_flow_run_input_flow_runs__id__input__key__delete"]; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/flow_runs/paginate": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** - * Paginate Flow Runs - * @description Pagination query for flow runs. - */ - post: operations["paginate_flow_runs_flow_runs_paginate_post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/flow_runs/{id}/logs/download": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - /** - * Download Logs - * @description Download all flow run logs as a CSV file, collecting all logs until there are no more logs to retrieve. - */ - get: operations["download_logs_flow_runs__id__logs_download_get"]; - put?: never; - post?: never; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/task_runs/": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** - * Create Task Run - * @description Create a task run. If a task run with the same flow_run_id, - * task_key, and dynamic_key already exists, the existing task - * run will be returned. - * - * If no state is provided, the task run will be created in a PENDING state. - */ - post: operations["create_task_run_task_runs__post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/task_runs/{id}": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - /** - * Read Task Run - * @description Get a task run by id. - */ - get: operations["read_task_run_task_runs__id__get"]; - put?: never; - post?: never; - /** - * Delete Task Run - * @description Delete a task run by id. - */ - delete: operations["delete_task_run_task_runs__id__delete"]; - options?: never; - head?: never; - /** - * Update Task Run - * @description Updates a task run. - */ - patch: operations["update_task_run_task_runs__id__patch"]; - trace?: never; - }; - "/task_runs/count": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** - * Count Task Runs - * @description Count task runs. 
- */ - post: operations["count_task_runs_task_runs_count_post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/task_runs/history": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** - * Task Run History - * @description Query for task run history data across a given range and interval. - */ - post: operations["task_run_history_task_runs_history_post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/task_runs/filter": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** - * Read Task Runs - * @description Query for task runs. - */ - post: operations["read_task_runs_task_runs_filter_post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/task_runs/{id}/set_state": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** - * Set Task Run State - * @description Set a task run state, invoking any orchestration rules. - */ - post: operations["set_task_run_state_task_runs__id__set_state_post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/flow_run_states/{id}": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - /** - * Read Flow Run State - * @description Get a flow run state by id. - */ - get: operations["read_flow_run_state_flow_run_states__id__get"]; - put?: never; - post?: never; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/flow_run_states/": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - /** - * Read Flow Run States - * @description Get states associated with a flow run. - */ - get: operations["read_flow_run_states_flow_run_states__get"]; - put?: never; - post?: never; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/task_run_states/{id}": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - /** - * Read Task Run State - * @description Get a task run state by id. - */ - get: operations["read_task_run_state_task_run_states__id__get"]; - put?: never; - post?: never; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/task_run_states/": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - /** - * Read Task Run States - * @description Get states associated with a task run. - */ - get: operations["read_task_run_states_task_run_states__get"]; - put?: never; - post?: never; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/flow_run_notification_policies/": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** - * Create Flow Run Notification Policy - * @description Creates a new flow run notification policy. 
- */ - post: operations["create_flow_run_notification_policy_flow_run_notification_policies__post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/flow_run_notification_policies/{id}": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - /** - * Read Flow Run Notification Policy - * @description Get a flow run notification policy by id. - */ - get: operations["read_flow_run_notification_policy_flow_run_notification_policies__id__get"]; - put?: never; - post?: never; - /** - * Delete Flow Run Notification Policy - * @description Delete a flow run notification policy by id. - */ - delete: operations["delete_flow_run_notification_policy_flow_run_notification_policies__id__delete"]; - options?: never; - head?: never; - /** - * Update Flow Run Notification Policy - * @description Updates an existing flow run notification policy. - */ - patch: operations["update_flow_run_notification_policy_flow_run_notification_policies__id__patch"]; - trace?: never; - }; - "/flow_run_notification_policies/filter": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** - * Read Flow Run Notification Policies - * @description Query for flow run notification policies. - */ - post: operations["read_flow_run_notification_policies_flow_run_notification_policies_filter_post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/deployments/": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** - * Create Deployment - * @description Gracefully creates a new deployment from the provided schema. If a deployment with - * the same name and flow_id already exists, the deployment is updated. - * - * If the deployment has an active schedule, flow runs will be scheduled. - * When upserting, any scheduled runs from the existing deployment will be deleted. - */ - post: operations["create_deployment_deployments__post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/deployments/{id}": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - /** - * Read Deployment - * @description Get a deployment by id. - */ - get: operations["read_deployment_deployments__id__get"]; - put?: never; - post?: never; - /** - * Delete Deployment - * @description Delete a deployment by id. - */ - delete: operations["delete_deployment_deployments__id__delete"]; - options?: never; - head?: never; - /** Update Deployment */ - patch: operations["update_deployment_deployments__id__patch"]; - trace?: never; - }; - "/deployments/name/{flow_name}/{deployment_name}": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - /** - * Read Deployment By Name - * @description Get a deployment using the name of the flow and the deployment. - */ - get: operations["read_deployment_by_name_deployments_name__flow_name___deployment_name__get"]; - put?: never; - post?: never; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/deployments/filter": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** - * Read Deployments - * @description Query for deployments. 
- */ - post: operations["read_deployments_deployments_filter_post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/deployments/paginate": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** - * Paginate Deployments - * @description Pagination query for flow runs. - */ - post: operations["paginate_deployments_deployments_paginate_post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/deployments/get_scheduled_flow_runs": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** - * Get Scheduled Flow Runs For Deployments - * @description Get scheduled runs for a set of deployments. Used by a runner to poll for work. - */ - post: operations["get_scheduled_flow_runs_for_deployments_deployments_get_scheduled_flow_runs_post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/deployments/count": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** - * Count Deployments - * @description Count deployments. - */ - post: operations["count_deployments_deployments_count_post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/deployments/{id}/schedule": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** - * Schedule Deployment - * @description Schedule runs for a deployment. For backfills, provide start/end times in the past. - * - * This function will generate the minimum number of runs that satisfy the min - * and max times, and the min and max counts. Specifically, the following order - * will be respected. - * - * - Runs will be generated starting on or after the `start_time` - * - No more than `max_runs` runs will be generated - * - No runs will be generated after `end_time` is reached - * - At least `min_runs` runs will be generated - * - Runs will be generated until at least `start_time + min_time` is reached - */ - post: operations["schedule_deployment_deployments__id__schedule_post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/deployments/{id}/resume_deployment": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** - * Resume Deployment - * @description Set a deployment schedule to active. Runs will be scheduled immediately. - */ - post: operations["resume_deployment_deployments__id__resume_deployment_post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/deployments/{id}/pause_deployment": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** - * Pause Deployment - * @description Set a deployment schedule to inactive. Any auto-scheduled runs still in a Scheduled - * state will be deleted. 
- */ - post: operations["pause_deployment_deployments__id__pause_deployment_post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/deployments/{id}/create_flow_run": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** - * Create Flow Run From Deployment - * @description Create a flow run from a deployment. - * - * Any parameters not provided will be inferred from the deployment's parameters. - * If tags are not provided, the deployment's tags will be used. - * - * If no state is provided, the flow run will be created in a SCHEDULED state. - */ - post: operations["create_flow_run_from_deployment_deployments__id__create_flow_run_post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/deployments/{id}/work_queue_check": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - /** - * Work Queue Check For Deployment - * @deprecated - * @description Get list of work-queues that are able to pick up the specified deployment. - * - * This endpoint is intended to be used by the UI to provide users warnings - * about deployments that are unable to be executed because there are no work - * queues that will pick up their runs, based on existing filter criteria. It - * may be deprecated in the future because there is not a strict relationship - * between work queues and deployments. - */ - get: operations["work_queue_check_for_deployment_deployments__id__work_queue_check_get"]; - put?: never; - post?: never; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/deployments/{id}/schedules": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - /** Read Deployment Schedules */ - get: operations["read_deployment_schedules_deployments__id__schedules_get"]; - put?: never; - /** Create Deployment Schedules */ - post: operations["create_deployment_schedules_deployments__id__schedules_post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/deployments/{id}/schedules/{schedule_id}": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - post?: never; - /** Delete Deployment Schedule */ - delete: operations["delete_deployment_schedule_deployments__id__schedules__schedule_id__delete"]; - options?: never; - head?: never; - /** Update Deployment Schedule */ - patch: operations["update_deployment_schedule_deployments__id__schedules__schedule_id__patch"]; - trace?: never; - }; - "/saved_searches/": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - /** - * Create Saved Search - * @description Gracefully creates a new saved search from the provided schema. - * - * If a saved search with the same name already exists, the saved search's fields are - * replaced. - */ - put: operations["create_saved_search_saved_searches__put"]; - post?: never; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/saved_searches/{id}": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - /** - * Read Saved Search - * @description Get a saved search by id. 
- */ - get: operations["read_saved_search_saved_searches__id__get"]; - put?: never; - post?: never; - /** - * Delete Saved Search - * @description Delete a saved search by id. - */ - delete: operations["delete_saved_search_saved_searches__id__delete"]; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/saved_searches/filter": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** - * Read Saved Searches - * @description Query for saved searches. - */ - post: operations["read_saved_searches_saved_searches_filter_post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/logs/": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** - * Create Logs - * @description Create new logs from the provided schema. - */ - post: operations["create_logs_logs__post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/logs/filter": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** - * Read Logs - * @description Query for logs. - */ - post: operations["read_logs_logs_filter_post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/concurrency_limits/": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** Create Concurrency Limit */ - post: operations["create_concurrency_limit_concurrency_limits__post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/concurrency_limits/{id}": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - /** - * Read Concurrency Limit - * @description Get a concurrency limit by id. - * - * The `active slots` field contains a list of TaskRun IDs currently using a - * concurrency slot for the specified tag. - */ - get: operations["read_concurrency_limit_concurrency_limits__id__get"]; - put?: never; - post?: never; - /** Delete Concurrency Limit */ - delete: operations["delete_concurrency_limit_concurrency_limits__id__delete"]; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/concurrency_limits/tag/{tag}": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - /** - * Read Concurrency Limit By Tag - * @description Get a concurrency limit by tag. - * - * The `active slots` field contains a list of TaskRun IDs currently using a - * concurrency slot for the specified tag. - */ - get: operations["read_concurrency_limit_by_tag_concurrency_limits_tag__tag__get"]; - put?: never; - post?: never; - /** Delete Concurrency Limit By Tag */ - delete: operations["delete_concurrency_limit_by_tag_concurrency_limits_tag__tag__delete"]; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/concurrency_limits/filter": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** - * Read Concurrency Limits - * @description Query for concurrency limits. - * - * For each concurrency limit the `active slots` field contains a list of TaskRun IDs - * currently using a concurrency slot for the specified tag. 
- */ - post: operations["read_concurrency_limits_concurrency_limits_filter_post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/concurrency_limits/tag/{tag}/reset": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** Reset Concurrency Limit By Tag */ - post: operations["reset_concurrency_limit_by_tag_concurrency_limits_tag__tag__reset_post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/concurrency_limits/increment": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** Increment Concurrency Limits V1 */ - post: operations["increment_concurrency_limits_v1_concurrency_limits_increment_post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/concurrency_limits/decrement": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** Decrement Concurrency Limits V1 */ - post: operations["decrement_concurrency_limits_v1_concurrency_limits_decrement_post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/v2/concurrency_limits/": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** Create Concurrency Limit V2 */ - post: operations["create_concurrency_limit_v2_v2_concurrency_limits__post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/v2/concurrency_limits/{id_or_name}": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - /** Read Concurrency Limit V2 */ - get: operations["read_concurrency_limit_v2_v2_concurrency_limits__id_or_name__get"]; - put?: never; - post?: never; - /** Delete Concurrency Limit V2 */ - delete: operations["delete_concurrency_limit_v2_v2_concurrency_limits__id_or_name__delete"]; - options?: never; - head?: never; - /** Update Concurrency Limit V2 */ - patch: operations["update_concurrency_limit_v2_v2_concurrency_limits__id_or_name__patch"]; - trace?: never; - }; - "/v2/concurrency_limits/filter": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** Read All Concurrency Limits V2 */ - post: operations["read_all_concurrency_limits_v2_v2_concurrency_limits_filter_post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/v2/concurrency_limits/increment": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** Bulk Increment Active Slots */ - post: operations["bulk_increment_active_slots_v2_concurrency_limits_increment_post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/v2/concurrency_limits/decrement": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** Bulk Decrement Active Slots */ - post: operations["bulk_decrement_active_slots_v2_concurrency_limits_decrement_post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/block_types/": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: 
never; - /** - * Create Block Type - * @description Create a new block type - */ - post: operations["create_block_type_block_types__post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/block_types/{id}": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - /** - * Read Block Type By Id - * @description Get a block type by ID. - */ - get: operations["read_block_type_by_id_block_types__id__get"]; - put?: never; - post?: never; - /** Delete Block Type */ - delete: operations["delete_block_type_block_types__id__delete"]; - options?: never; - head?: never; - /** - * Update Block Type - * @description Update a block type. - */ - patch: operations["update_block_type_block_types__id__patch"]; - trace?: never; - }; - "/block_types/slug/{slug}": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - /** - * Read Block Type By Slug - * @description Get a block type by name. - */ - get: operations["read_block_type_by_slug_block_types_slug__slug__get"]; - put?: never; - post?: never; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/block_types/filter": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** - * Read Block Types - * @description Gets all block types. Optionally limit return with limit and offset. - */ - post: operations["read_block_types_block_types_filter_post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/block_types/slug/{slug}/block_documents": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - /** Read Block Documents For Block Type */ - get: operations["read_block_documents_for_block_type_block_types_slug__slug__block_documents_get"]; - put?: never; - post?: never; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/block_types/slug/{slug}/block_documents/name/{block_document_name}": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - /** Read Block Document By Name For Block Type */ - get: operations["read_block_document_by_name_for_block_type_block_types_slug__slug__block_documents_name__block_document_name__get"]; - put?: never; - post?: never; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/block_types/install_system_block_types": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** Install System Block Types */ - post: operations["install_system_block_types_block_types_install_system_block_types_post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/block_documents/": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** - * Create Block Document - * @description Create a new block document. - */ - post: operations["create_block_document_block_documents__post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/block_documents/filter": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** - * Read Block Documents - * @description Query for block documents. 
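Block types are likewise addressable by slug as well as id. A sketch under the same openapi-fetch assumption:

import createClient from "openapi-fetch";
import type { paths } from "./prefect";

const client = createClient<paths>({ baseUrl: "http://127.0.0.1:4200/api" });

// GET /block_types/slug/{slug}
export async function readBlockType(slug: string) {
  const { data, error } = await client.GET("/block_types/slug/{slug}", {
    params: { path: { slug } },
  });
  if (error) throw error;
  return data;
}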
- */ - post: operations["read_block_documents_block_documents_filter_post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/block_documents/count": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** - * Count Block Documents - * @description Count block documents. - */ - post: operations["count_block_documents_block_documents_count_post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/block_documents/{id}": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - /** Read Block Document By Id */ - get: operations["read_block_document_by_id_block_documents__id__get"]; - put?: never; - post?: never; - /** Delete Block Document */ - delete: operations["delete_block_document_block_documents__id__delete"]; - options?: never; - head?: never; - /** Update Block Document Data */ - patch: operations["update_block_document_data_block_documents__id__patch"]; - trace?: never; - }; - "/work_pools/": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** - * Create Work Pool - * @description Creates a new work pool. If a work pool with the same - * name already exists, an error will be raised. - */ - post: operations["create_work_pool_work_pools__post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/work_pools/{name}": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - /** - * Read Work Pool - * @description Read a work pool by name - */ - get: operations["read_work_pool_work_pools__name__get"]; - put?: never; - post?: never; - /** - * Delete Work Pool - * @description Delete a work pool - */ - delete: operations["delete_work_pool_work_pools__name__delete"]; - options?: never; - head?: never; - /** - * Update Work Pool - * @description Update a work pool - */ - patch: operations["update_work_pool_work_pools__name__patch"]; - trace?: never; - }; - "/work_pools/filter": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** - * Read Work Pools - * @description Read multiple work pools - */ - post: operations["read_work_pools_work_pools_filter_post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/work_pools/count": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** - * Count Work Pools - * @description Count work pools - */ - post: operations["count_work_pools_work_pools_count_post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/work_pools/{name}/get_scheduled_flow_runs": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** - * Get Scheduled Flow Runs - * @description Load scheduled runs for a worker - */ - post: operations["get_scheduled_flow_runs_work_pools__name__get_scheduled_flow_runs_post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/work_pools/{work_pool_name}/queues": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** - * Create Work Queue - * @description Creates a new work pool 
queue. If a work pool queue with the same - * name already exists, an error will be raised. - */ - post: operations["create_work_queue_work_pools__work_pool_name__queues_post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/work_pools/{work_pool_name}/queues/{name}": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - /** - * Read Work Queue - * @description Read a work pool queue - */ - get: operations["read_work_queue_work_pools__work_pool_name__queues__name__get"]; - put?: never; - post?: never; - /** - * Delete Work Queue - * @description Delete a work pool queue - */ - delete: operations["delete_work_queue_work_pools__work_pool_name__queues__name__delete"]; - options?: never; - head?: never; - /** - * Update Work Queue - * @description Update a work pool queue - */ - patch: operations["update_work_queue_work_pools__work_pool_name__queues__name__patch"]; - trace?: never; - }; - "/work_pools/{work_pool_name}/queues/filter": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** - * Read Work Queues - * @description Read all work pool queues - */ - post: operations["read_work_queues_work_pools__work_pool_name__queues_filter_post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/work_pools/{work_pool_name}/workers/heartbeat": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** Worker Heartbeat */ - post: operations["worker_heartbeat_work_pools__work_pool_name__workers_heartbeat_post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/work_pools/{work_pool_name}/workers/filter": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** - * Read Workers - * @description Read all worker processes - */ - post: operations["read_workers_work_pools__work_pool_name__workers_filter_post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/work_pools/{work_pool_name}/workers/{name}": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - post?: never; - /** - * Delete Worker - * @description Delete a work pool's worker - */ - delete: operations["delete_worker_work_pools__work_pool_name__workers__name__delete"]; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/task_workers/filter": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** - * Read Task Workers - * @description Read active task workers. Optionally filter by task keys. - */ - post: operations["read_task_workers_task_workers_filter_post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/work_queues/": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** - * Create Work Queue - * @description Creates a new work queue. - * - * If a work queue with the same name already exists, an error - * will be raised. 
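Work-pool queues are created against their parent pool's name, and per the description the route errors on duplicate names rather than upserting. A sketch; the minimal `{ name }` body is an assumption:

import createClient from "openapi-fetch";
import type { paths } from "./prefect";

const client = createClient<paths>({ baseUrl: "http://127.0.0.1:4200/api" });

// POST /work_pools/{work_pool_name}/queues
export async function createPoolQueue(pool: string, name: string) {
  const { data, error } = await client.POST(
    "/work_pools/{work_pool_name}/queues",
    {
      params: { path: { work_pool_name: pool } },
      body: { name }, // assumed minimal create body
    },
  );
  if (error) throw error;
  return data;
}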
- */ - post: operations["create_work_queue_work_queues__post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/work_queues/{id}": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - /** - * Read Work Queue - * @description Get a work queue by id. - */ - get: operations["read_work_queue_work_queues__id__get"]; - put?: never; - post?: never; - /** - * Delete Work Queue - * @description Delete a work queue by id. - */ - delete: operations["delete_work_queue_work_queues__id__delete"]; - options?: never; - head?: never; - /** - * Update Work Queue - * @description Updates an existing work queue. - */ - patch: operations["update_work_queue_work_queues__id__patch"]; - trace?: never; - }; - "/work_queues/name/{name}": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - /** - * Read Work Queue By Name - * @description Get a work queue by id. - */ - get: operations["read_work_queue_by_name_work_queues_name__name__get"]; - put?: never; - post?: never; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/work_queues/{id}/get_runs": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** - * Read Work Queue Runs - * @description Get flow runs from the work queue. - */ - post: operations["read_work_queue_runs_work_queues__id__get_runs_post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/work_queues/filter": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** - * Read Work Queues - * @description Query for work queues. - */ - post: operations["read_work_queues_work_queues_filter_post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/work_queues/{id}/status": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - /** - * Read Work Queue Status - * @description Get the status of a work queue. - */ - get: operations["read_work_queue_status_work_queues__id__status_get"]; - put?: never; - post?: never; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/artifacts/": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** Create Artifact */ - post: operations["create_artifact_artifacts__post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/artifacts/{id}": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - /** - * Read Artifact - * @description Retrieve an artifact from the database. - */ - get: operations["read_artifact_artifacts__id__get"]; - put?: never; - post?: never; - /** - * Delete Artifact - * @description Delete an artifact from the database. - */ - delete: operations["delete_artifact_artifacts__id__delete"]; - options?: never; - head?: never; - /** - * Update Artifact - * @description Update an artifact in the database. - */ - patch: operations["update_artifact_artifacts__id__patch"]; - trace?: never; - }; - "/artifacts/{key}/latest": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - /** - * Read Latest Artifact - * @description Retrieve the latest artifact from the artifact table. 
- */ - get: operations["read_latest_artifact_artifacts__key__latest_get"]; - put?: never; - post?: never; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/artifacts/filter": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** - * Read Artifacts - * @description Retrieve artifacts from the database. - */ - post: operations["read_artifacts_artifacts_filter_post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/artifacts/latest/filter": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** - * Read Latest Artifacts - * @description Retrieve artifacts from the database. - */ - post: operations["read_latest_artifacts_artifacts_latest_filter_post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/artifacts/count": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** - * Count Artifacts - * @description Count artifacts from the database. - */ - post: operations["count_artifacts_artifacts_count_post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/artifacts/latest/count": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** - * Count Latest Artifacts - * @description Count artifacts from the database. - */ - post: operations["count_latest_artifacts_artifacts_latest_count_post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/block_schemas/": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** Create Block Schema */ - post: operations["create_block_schema_block_schemas__post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/block_schemas/{id}": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - /** - * Read Block Schema By Id - * @description Get a block schema by id. - */ - get: operations["read_block_schema_by_id_block_schemas__id__get"]; - put?: never; - post?: never; - /** - * Delete Block Schema - * @description Delete a block schema by id. 
- */ - delete: operations["delete_block_schema_block_schemas__id__delete"]; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/block_schemas/filter": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** - * Read Block Schemas - * @description Read all block schemas, optionally filtered by type - */ - post: operations["read_block_schemas_block_schemas_filter_post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/block_schemas/checksum/{checksum}": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - /** Read Block Schema By Checksum */ - get: operations["read_block_schema_by_checksum_block_schemas_checksum__checksum__get"]; - put?: never; - post?: never; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/block_capabilities/": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - /** Read Available Block Capabilities */ - get: operations["read_available_block_capabilities_block_capabilities__get"]; - put?: never; - post?: never; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/collections/views/{view}": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - /** - * Read View Content - * @description Reads the content of a view from the prefect-collection-registry. - */ - get: operations["read_view_content_collections_views__view__get"]; - put?: never; - post?: never; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/variables/": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** Create Variable */ - post: operations["create_variable_variables__post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/variables/{id}": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - /** Read Variable */ - get: operations["read_variable_variables__id__get"]; - put?: never; - post?: never; - /** Delete Variable */ - delete: operations["delete_variable_variables__id__delete"]; - options?: never; - head?: never; - /** Update Variable */ - patch: operations["update_variable_variables__id__patch"]; - trace?: never; - }; - "/variables/name/{name}": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - /** Read Variable By Name */ - get: operations["read_variable_by_name_variables_name__name__get"]; - put?: never; - post?: never; - /** Delete Variable By Name */ - delete: operations["delete_variable_by_name_variables_name__name__delete"]; - options?: never; - head?: never; - /** Update Variable By Name */ - patch: operations["update_variable_by_name_variables_name__name__patch"]; - trace?: never; - }; - "/variables/filter": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** Read Variables */ - post: operations["read_variables_variables_filter_post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/variables/count": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** Count Variables */ - post: 
operations["count_variables_variables_count_post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/csrf-token": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - /** - * Create Csrf Token - * @description Create or update a CSRF token for a client - */ - get: operations["create_csrf_token_csrf_token_get"]; - put?: never; - post?: never; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/events": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** - * Create Events - * @description Record a batch of Events - */ - post: operations["create_events_events_post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/events/filter": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** - * Read Events - * @description Queries for Events matching the given filter criteria in the given Account. Returns - * the first page of results, and the URL to request the next page (if there are more - * results). - */ - post: operations["read_events_events_filter_post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/events/filter/next": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - /** - * Read Account Events Page - * @description Returns the next page of Events for a previous query against the given Account, and - * the URL to request the next page (if there are more results). - */ - get: operations["read_account_events_page_events_filter_next_get"]; - put?: never; - post?: never; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/events/count-by/{countable}": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** - * Count Account Events - * @description Returns distinct objects and the count of events associated with them. Objects - * that can be counted include the day the event occurred, the type of event, or - * the IDs of the resources associated with the event. 
- */ - post: operations["count_account_events_events_count_by__countable__post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/automations/": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** Create Automation */ - post: operations["create_automation_automations__post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/automations/{id}": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - /** Read Automation */ - get: operations["read_automation_automations__id__get"]; - /** Update Automation */ - put: operations["update_automation_automations__id__put"]; - post?: never; - /** Delete Automation */ - delete: operations["delete_automation_automations__id__delete"]; - options?: never; - head?: never; - /** Patch Automation */ - patch: operations["patch_automation_automations__id__patch"]; - trace?: never; - }; - "/automations/filter": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** Read Automations */ - post: operations["read_automations_automations_filter_post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/automations/count": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** Count Automations */ - post: operations["count_automations_automations_count_post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/automations/related-to/{resource_id}": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - /** Read Automations Related To Resource */ - get: operations["read_automations_related_to_resource_automations_related_to__resource_id__get"]; - put?: never; - post?: never; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/automations/owned-by/{resource_id}": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - post?: never; - /** Delete Automations Owned By Resource */ - delete: operations["delete_automations_owned_by_resource_automations_owned_by__resource_id__delete"]; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/templates/validate": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** Validate Template */ - post: operations["validate_template_templates_validate_post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/ui/flows/count-deployments": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** - * Count Deployments By Flow - * @description Get deployment counts by flow id. - */ - post: operations["count_deployments_by_flow_ui_flows_count_deployments_post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/ui/flows/next-runs": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** - * Next Runs By Flow - * @description Get the next flow run by flow id. 
- */ - post: operations["next_runs_by_flow_ui_flows_next_runs_post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/ui/flow_runs/history": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** Read Flow Run History */ - post: operations["read_flow_run_history_ui_flow_runs_history_post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/ui/flow_runs/count-task-runs": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** - * Count Task Runs By Flow Run - * @description Get task run counts by flow run id. - */ - post: operations["count_task_runs_by_flow_run_ui_flow_runs_count_task_runs_post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/ui/schemas/validate": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** Validate Obj */ - post: operations["validate_obj_ui_schemas_validate_post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/ui/task_runs/dashboard/counts": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** Read Dashboard Task Run Counts */ - post: operations["read_dashboard_task_run_counts_ui_task_runs_dashboard_counts_post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/ui/task_runs/count": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** Read Task Run Counts By State */ - post: operations["read_task_run_counts_by_state_ui_task_runs_count_post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/admin/settings": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - /** - * Read Settings - * @description Get the current Prefect REST API settings. - * - * Secret setting values will be obfuscated. - */ - get: operations["read_settings_admin_settings_get"]; - put?: never; - post?: never; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/admin/version": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - /** - * Read Version - * @description Returns the Prefect version number - */ - get: operations["read_version_admin_version_get"]; - put?: never; - post?: never; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/admin/database/clear": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** - * Clear Database - * @description Clear all database tables without dropping them. - */ - post: operations["clear_database_admin_database_clear_post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/admin/database/drop": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** - * Drop Database - * @description Drop all database objects. 
- */ - post: operations["drop_database_admin_database_drop_post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/admin/database/create": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - get?: never; - put?: never; - /** - * Create Database - * @description Create all database objects. - */ - post: operations["create_database_admin_database_create_post"]; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/hello": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - /** - * Hello - * @description Say hello! - */ - get: operations["hello_hello_get"]; - put?: never; - post?: never; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; - "/ready": { - parameters: { - query?: never; - header?: never; - path?: never; - cookie?: never; - }; - /** Perform Readiness Check */ - get: operations["perform_readiness_check_ready_get"]; - put?: never; - post?: never; - delete?: never; - options?: never; - head?: never; - patch?: never; - trace?: never; - }; + "/health": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + /** Health Check */ + get: operations["health_check_health_get"]; + put?: never; + post?: never; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/version": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + /** Server Version */ + get: operations["server_version_version_get"]; + put?: never; + post?: never; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/flows/": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** + * Create Flow + * @description Gracefully creates a new flow from the provided schema. If a flow with the + * same name already exists, the existing flow is returned. + */ + post: operations["create_flow_flows__post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/flows/{id}": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + /** + * Read Flow + * @description Get a flow by id. + */ + get: operations["read_flow_flows__id__get"]; + put?: never; + post?: never; + /** + * Delete Flow + * @description Delete a flow by id. + */ + delete: operations["delete_flow_flows__id__delete"]; + options?: never; + head?: never; + /** + * Update Flow + * @description Updates a flow. + */ + patch: operations["update_flow_flows__id__patch"]; + trace?: never; + }; + "/flows/count": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** + * Count Flows + * @description Count flows. + */ + post: operations["count_flows_flows_count_post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/flows/name/{name}": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + /** + * Read Flow By Name + * @description Get a flow by name. 
+ */ + get: operations["read_flow_by_name_flows_name__name__get"]; + put?: never; + post?: never; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/flows/filter": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** + * Read Flows + * @description Query for flows. + */ + post: operations["read_flows_flows_filter_post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/flows/paginate": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** + * Paginate Flows + * @description Pagination query for flows. + */ + post: operations["paginate_flows_flows_paginate_post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/flow_runs/": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** + * Create Flow Run + * @description Create a flow run. If a flow run with the same flow_id and + * idempotency key already exists, the existing flow run will be returned. + * + * If no state is provided, the flow run will be created in a PENDING state. + */ + post: operations["create_flow_run_flow_runs__post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/flow_runs/{id}": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + /** + * Read Flow Run + * @description Get a flow run by id. + */ + get: operations["read_flow_run_flow_runs__id__get"]; + put?: never; + post?: never; + /** + * Delete Flow Run + * @description Delete a flow run by id. + */ + delete: operations["delete_flow_run_flow_runs__id__delete"]; + options?: never; + head?: never; + /** + * Update Flow Run + * @description Updates a flow run. + */ + patch: operations["update_flow_run_flow_runs__id__patch"]; + trace?: never; + }; + "/flow_runs/count": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** + * Count Flow Runs + * @description Query for flow runs. + */ + post: operations["count_flow_runs_flow_runs_count_post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/flow_runs/lateness": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** + * Average Flow Run Lateness + * @description Query for average flow-run lateness in seconds. + */ + post: operations["average_flow_run_lateness_flow_runs_lateness_post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/flow_runs/history": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** + * Flow Run History + * @description Query for flow run history data across a given range and interval. + */ + post: operations["flow_run_history_flow_runs_history_post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/flow_runs/{id}/graph": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + /** + * Read Flow Run Graph V1 + * @description Get a task run dependency map for a given flow run. 
+ */ + get: operations["read_flow_run_graph_v1_flow_runs__id__graph_get"]; + put?: never; + post?: never; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/flow_runs/{id}/graph-v2": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + /** + * Read Flow Run Graph V2 + * @description Get a graph of the tasks and subflow runs for the given flow run + */ + get: operations["read_flow_run_graph_v2_flow_runs__id__graph_v2_get"]; + put?: never; + post?: never; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/flow_runs/{id}/resume": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** + * Resume Flow Run + * @description Resume a paused flow run. + */ + post: operations["resume_flow_run_flow_runs__id__resume_post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/flow_runs/filter": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** + * Read Flow Runs + * @description Query for flow runs. + */ + post: operations["read_flow_runs_flow_runs_filter_post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/flow_runs/{id}/set_state": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** + * Set Flow Run State + * @description Set a flow run state, invoking any orchestration rules. + */ + post: operations["set_flow_run_state_flow_runs__id__set_state_post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/flow_runs/{id}/input": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** + * Create Flow Run Input + * @description Create a key/value input for a flow run. + */ + post: operations["create_flow_run_input_flow_runs__id__input_post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/flow_runs/{id}/input/filter": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** + * Filter Flow Run Input + * @description Filter flow run inputs by key prefix + */ + post: operations["filter_flow_run_input_flow_runs__id__input_filter_post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/flow_runs/{id}/input/{key}": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + /** + * Read Flow Run Input + * @description Create a value from a flow run input + */ + get: operations["read_flow_run_input_flow_runs__id__input__key__get"]; + put?: never; + post?: never; + /** + * Delete Flow Run Input + * @description Delete a flow run input + */ + delete: operations["delete_flow_run_input_flow_runs__id__input__key__delete"]; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/flow_runs/paginate": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** + * Paginate Flow Runs + * @description Pagination query for flow runs. 
+ */ + post: operations["paginate_flow_runs_flow_runs_paginate_post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/flow_runs/{id}/logs/download": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + /** + * Download Logs + * @description Download all flow run logs as a CSV file, collecting all logs until there are no more logs to retrieve. + */ + get: operations["download_logs_flow_runs__id__logs_download_get"]; + put?: never; + post?: never; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/task_runs/": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** + * Create Task Run + * @description Create a task run. If a task run with the same flow_run_id, + * task_key, and dynamic_key already exists, the existing task + * run will be returned. + * + * If no state is provided, the task run will be created in a PENDING state. + */ + post: operations["create_task_run_task_runs__post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/task_runs/{id}": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + /** + * Read Task Run + * @description Get a task run by id. + */ + get: operations["read_task_run_task_runs__id__get"]; + put?: never; + post?: never; + /** + * Delete Task Run + * @description Delete a task run by id. + */ + delete: operations["delete_task_run_task_runs__id__delete"]; + options?: never; + head?: never; + /** + * Update Task Run + * @description Updates a task run. + */ + patch: operations["update_task_run_task_runs__id__patch"]; + trace?: never; + }; + "/task_runs/count": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** + * Count Task Runs + * @description Count task runs. + */ + post: operations["count_task_runs_task_runs_count_post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/task_runs/history": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** + * Task Run History + * @description Query for task run history data across a given range and interval. + */ + post: operations["task_run_history_task_runs_history_post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/task_runs/filter": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** + * Read Task Runs + * @description Query for task runs. + */ + post: operations["read_task_runs_task_runs_filter_post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/task_runs/{id}/set_state": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** + * Set Task Run State + * @description Set a task run state, invoking any orchestration rules. + */ + post: operations["set_task_run_state_task_runs__id__set_state_post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/flow_run_states/{id}": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + /** + * Read Flow Run State + * @description Get a flow run state by id. 
+ */ + get: operations["read_flow_run_state_flow_run_states__id__get"]; + put?: never; + post?: never; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/flow_run_states/": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + /** + * Read Flow Run States + * @description Get states associated with a flow run. + */ + get: operations["read_flow_run_states_flow_run_states__get"]; + put?: never; + post?: never; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/task_run_states/{id}": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + /** + * Read Task Run State + * @description Get a task run state by id. + */ + get: operations["read_task_run_state_task_run_states__id__get"]; + put?: never; + post?: never; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/task_run_states/": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + /** + * Read Task Run States + * @description Get states associated with a task run. + */ + get: operations["read_task_run_states_task_run_states__get"]; + put?: never; + post?: never; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/flow_run_notification_policies/": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** + * Create Flow Run Notification Policy + * @description Creates a new flow run notification policy. + */ + post: operations["create_flow_run_notification_policy_flow_run_notification_policies__post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/flow_run_notification_policies/{id}": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + /** + * Read Flow Run Notification Policy + * @description Get a flow run notification policy by id. + */ + get: operations["read_flow_run_notification_policy_flow_run_notification_policies__id__get"]; + put?: never; + post?: never; + /** + * Delete Flow Run Notification Policy + * @description Delete a flow run notification policy by id. + */ + delete: operations["delete_flow_run_notification_policy_flow_run_notification_policies__id__delete"]; + options?: never; + head?: never; + /** + * Update Flow Run Notification Policy + * @description Updates an existing flow run notification policy. + */ + patch: operations["update_flow_run_notification_policy_flow_run_notification_policies__id__patch"]; + trace?: never; + }; + "/flow_run_notification_policies/filter": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** + * Read Flow Run Notification Policies + * @description Query for flow run notification policies. + */ + post: operations["read_flow_run_notification_policies_flow_run_notification_policies_filter_post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/deployments/": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** + * Create Deployment + * @description Gracefully creates a new deployment from the provided schema. If a deployment with + * the same name and flow_id already exists, the deployment is updated. 
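+     *
+     * In other words, deployments are upserted on the (flow_id, name) pair
+     * rather than erroring on a conflict.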
+ * + * If the deployment has an active schedule, flow runs will be scheduled. + * When upserting, any scheduled runs from the existing deployment will be deleted. + */ + post: operations["create_deployment_deployments__post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/deployments/{id}": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + /** + * Read Deployment + * @description Get a deployment by id. + */ + get: operations["read_deployment_deployments__id__get"]; + put?: never; + post?: never; + /** + * Delete Deployment + * @description Delete a deployment by id. + */ + delete: operations["delete_deployment_deployments__id__delete"]; + options?: never; + head?: never; + /** Update Deployment */ + patch: operations["update_deployment_deployments__id__patch"]; + trace?: never; + }; + "/deployments/name/{flow_name}/{deployment_name}": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + /** + * Read Deployment By Name + * @description Get a deployment using the name of the flow and the deployment. + */ + get: operations["read_deployment_by_name_deployments_name__flow_name___deployment_name__get"]; + put?: never; + post?: never; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/deployments/filter": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** + * Read Deployments + * @description Query for deployments. + */ + post: operations["read_deployments_deployments_filter_post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/deployments/paginate": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** + * Paginate Deployments + * @description Pagination query for flow runs. + */ + post: operations["paginate_deployments_deployments_paginate_post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/deployments/get_scheduled_flow_runs": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** + * Get Scheduled Flow Runs For Deployments + * @description Get scheduled runs for a set of deployments. Used by a runner to poll for work. + */ + post: operations["get_scheduled_flow_runs_for_deployments_deployments_get_scheduled_flow_runs_post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/deployments/count": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** + * Count Deployments + * @description Count deployments. + */ + post: operations["count_deployments_deployments_count_post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/deployments/{id}/schedule": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** + * Schedule Deployment + * @description Schedule runs for a deployment. For backfills, provide start/end times in the past. + * + * This function will generate the minimum number of runs that satisfy the min + * and max times, and the min and max counts. Specifically, the following order + * will be respected. 
+ * + * - Runs will be generated starting on or after the `start_time` + * - No more than `max_runs` runs will be generated + * - No runs will be generated after `end_time` is reached + * - At least `min_runs` runs will be generated + * - Runs will be generated until at least `start_time + min_time` is reached + */ + post: operations["schedule_deployment_deployments__id__schedule_post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/deployments/{id}/resume_deployment": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** + * Resume Deployment + * @description Set a deployment schedule to active. Runs will be scheduled immediately. + */ + post: operations["resume_deployment_deployments__id__resume_deployment_post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/deployments/{id}/pause_deployment": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** + * Pause Deployment + * @description Set a deployment schedule to inactive. Any auto-scheduled runs still in a Scheduled + * state will be deleted. + */ + post: operations["pause_deployment_deployments__id__pause_deployment_post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/deployments/{id}/create_flow_run": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** + * Create Flow Run From Deployment + * @description Create a flow run from a deployment. + * + * Any parameters not provided will be inferred from the deployment's parameters. + * If tags are not provided, the deployment's tags will be used. + * + * If no state is provided, the flow run will be created in a SCHEDULED state. + */ + post: operations["create_flow_run_from_deployment_deployments__id__create_flow_run_post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/deployments/{id}/work_queue_check": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + /** + * Work Queue Check For Deployment + * @deprecated + * @description Get list of work-queues that are able to pick up the specified deployment. + * + * This endpoint is intended to be used by the UI to provide users warnings + * about deployments that are unable to be executed because there are no work + * queues that will pick up their runs, based on existing filter criteria. It + * may be deprecated in the future because there is not a strict relationship + * between work queues and deployments. 
+ */ + get: operations["work_queue_check_for_deployment_deployments__id__work_queue_check_get"]; + put?: never; + post?: never; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/deployments/{id}/schedules": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + /** Read Deployment Schedules */ + get: operations["read_deployment_schedules_deployments__id__schedules_get"]; + put?: never; + /** Create Deployment Schedules */ + post: operations["create_deployment_schedules_deployments__id__schedules_post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/deployments/{id}/schedules/{schedule_id}": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + post?: never; + /** Delete Deployment Schedule */ + delete: operations["delete_deployment_schedule_deployments__id__schedules__schedule_id__delete"]; + options?: never; + head?: never; + /** Update Deployment Schedule */ + patch: operations["update_deployment_schedule_deployments__id__schedules__schedule_id__patch"]; + trace?: never; + }; + "/saved_searches/": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + /** + * Create Saved Search + * @description Gracefully creates a new saved search from the provided schema. + * + * If a saved search with the same name already exists, the saved search's fields are + * replaced. + */ + put: operations["create_saved_search_saved_searches__put"]; + post?: never; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/saved_searches/{id}": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + /** + * Read Saved Search + * @description Get a saved search by id. + */ + get: operations["read_saved_search_saved_searches__id__get"]; + put?: never; + post?: never; + /** + * Delete Saved Search + * @description Delete a saved search by id. + */ + delete: operations["delete_saved_search_saved_searches__id__delete"]; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/saved_searches/filter": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** + * Read Saved Searches + * @description Query for saved searches. + */ + post: operations["read_saved_searches_saved_searches_filter_post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/logs/": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** + * Create Logs + * @description Create new logs from the provided schema. + */ + post: operations["create_logs_logs__post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/logs/filter": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** + * Read Logs + * @description Query for logs. 
+ */ + post: operations["read_logs_logs_filter_post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/concurrency_limits/": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** Create Concurrency Limit */ + post: operations["create_concurrency_limit_concurrency_limits__post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/concurrency_limits/{id}": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + /** + * Read Concurrency Limit + * @description Get a concurrency limit by id. + * + * The `active slots` field contains a list of TaskRun IDs currently using a + * concurrency slot for the specified tag. + */ + get: operations["read_concurrency_limit_concurrency_limits__id__get"]; + put?: never; + post?: never; + /** Delete Concurrency Limit */ + delete: operations["delete_concurrency_limit_concurrency_limits__id__delete"]; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/concurrency_limits/tag/{tag}": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + /** + * Read Concurrency Limit By Tag + * @description Get a concurrency limit by tag. + * + * The `active slots` field contains a list of TaskRun IDs currently using a + * concurrency slot for the specified tag. + */ + get: operations["read_concurrency_limit_by_tag_concurrency_limits_tag__tag__get"]; + put?: never; + post?: never; + /** Delete Concurrency Limit By Tag */ + delete: operations["delete_concurrency_limit_by_tag_concurrency_limits_tag__tag__delete"]; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/concurrency_limits/filter": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** + * Read Concurrency Limits + * @description Query for concurrency limits. + * + * For each concurrency limit the `active slots` field contains a list of TaskRun IDs + * currently using a concurrency slot for the specified tag. 
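+     *
+     * (These are the tag-based v1 limits; the `/v2/concurrency_limits/`
+     * endpoints below expose the newer named global concurrency limits.)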
+ */ + post: operations["read_concurrency_limits_concurrency_limits_filter_post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/concurrency_limits/tag/{tag}/reset": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** Reset Concurrency Limit By Tag */ + post: operations["reset_concurrency_limit_by_tag_concurrency_limits_tag__tag__reset_post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/concurrency_limits/increment": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** Increment Concurrency Limits V1 */ + post: operations["increment_concurrency_limits_v1_concurrency_limits_increment_post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/concurrency_limits/decrement": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** Decrement Concurrency Limits V1 */ + post: operations["decrement_concurrency_limits_v1_concurrency_limits_decrement_post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/v2/concurrency_limits/": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** Create Concurrency Limit V2 */ + post: operations["create_concurrency_limit_v2_v2_concurrency_limits__post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/v2/concurrency_limits/{id_or_name}": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + /** Read Concurrency Limit V2 */ + get: operations["read_concurrency_limit_v2_v2_concurrency_limits__id_or_name__get"]; + put?: never; + post?: never; + /** Delete Concurrency Limit V2 */ + delete: operations["delete_concurrency_limit_v2_v2_concurrency_limits__id_or_name__delete"]; + options?: never; + head?: never; + /** Update Concurrency Limit V2 */ + patch: operations["update_concurrency_limit_v2_v2_concurrency_limits__id_or_name__patch"]; + trace?: never; + }; + "/v2/concurrency_limits/filter": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** Read All Concurrency Limits V2 */ + post: operations["read_all_concurrency_limits_v2_v2_concurrency_limits_filter_post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/v2/concurrency_limits/increment": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** Bulk Increment Active Slots */ + post: operations["bulk_increment_active_slots_v2_concurrency_limits_increment_post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/v2/concurrency_limits/decrement": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** Bulk Decrement Active Slots */ + post: operations["bulk_decrement_active_slots_v2_concurrency_limits_decrement_post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/block_types/": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: 
never; + /** + * Create Block Type + * @description Create a new block type + */ + post: operations["create_block_type_block_types__post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/block_types/{id}": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + /** + * Read Block Type By Id + * @description Get a block type by ID. + */ + get: operations["read_block_type_by_id_block_types__id__get"]; + put?: never; + post?: never; + /** Delete Block Type */ + delete: operations["delete_block_type_block_types__id__delete"]; + options?: never; + head?: never; + /** + * Update Block Type + * @description Update a block type. + */ + patch: operations["update_block_type_block_types__id__patch"]; + trace?: never; + }; + "/block_types/slug/{slug}": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + /** + * Read Block Type By Slug + * @description Get a block type by name. + */ + get: operations["read_block_type_by_slug_block_types_slug__slug__get"]; + put?: never; + post?: never; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/block_types/filter": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** + * Read Block Types + * @description Gets all block types. Optionally limit return with limit and offset. + */ + post: operations["read_block_types_block_types_filter_post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/block_types/slug/{slug}/block_documents": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + /** Read Block Documents For Block Type */ + get: operations["read_block_documents_for_block_type_block_types_slug__slug__block_documents_get"]; + put?: never; + post?: never; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/block_types/slug/{slug}/block_documents/name/{block_document_name}": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + /** Read Block Document By Name For Block Type */ + get: operations["read_block_document_by_name_for_block_type_block_types_slug__slug__block_documents_name__block_document_name__get"]; + put?: never; + post?: never; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/block_types/install_system_block_types": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** Install System Block Types */ + post: operations["install_system_block_types_block_types_install_system_block_types_post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/block_documents/": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** + * Create Block Document + * @description Create a new block document. + */ + post: operations["create_block_document_block_documents__post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/block_documents/filter": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** + * Read Block Documents + * @description Query for block documents. 
+ */ + post: operations["read_block_documents_block_documents_filter_post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/block_documents/count": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** + * Count Block Documents + * @description Count block documents. + */ + post: operations["count_block_documents_block_documents_count_post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/block_documents/{id}": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + /** Read Block Document By Id */ + get: operations["read_block_document_by_id_block_documents__id__get"]; + put?: never; + post?: never; + /** Delete Block Document */ + delete: operations["delete_block_document_block_documents__id__delete"]; + options?: never; + head?: never; + /** Update Block Document Data */ + patch: operations["update_block_document_data_block_documents__id__patch"]; + trace?: never; + }; + "/work_pools/": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** + * Create Work Pool + * @description Creates a new work pool. If a work pool with the same + * name already exists, an error will be raised. + */ + post: operations["create_work_pool_work_pools__post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/work_pools/{name}": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + /** + * Read Work Pool + * @description Read a work pool by name + */ + get: operations["read_work_pool_work_pools__name__get"]; + put?: never; + post?: never; + /** + * Delete Work Pool + * @description Delete a work pool + */ + delete: operations["delete_work_pool_work_pools__name__delete"]; + options?: never; + head?: never; + /** + * Update Work Pool + * @description Update a work pool + */ + patch: operations["update_work_pool_work_pools__name__patch"]; + trace?: never; + }; + "/work_pools/filter": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** + * Read Work Pools + * @description Read multiple work pools + */ + post: operations["read_work_pools_work_pools_filter_post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/work_pools/count": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** + * Count Work Pools + * @description Count work pools + */ + post: operations["count_work_pools_work_pools_count_post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/work_pools/{name}/get_scheduled_flow_runs": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** + * Get Scheduled Flow Runs + * @description Load scheduled runs for a worker + */ + post: operations["get_scheduled_flow_runs_work_pools__name__get_scheduled_flow_runs_post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/work_pools/{work_pool_name}/queues": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** + * Create Work Queue + * @description Creates a new work pool 
queue. If a work pool queue with the same + * name already exists, an error will be raised. + */ + post: operations["create_work_queue_work_pools__work_pool_name__queues_post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/work_pools/{work_pool_name}/queues/{name}": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + /** + * Read Work Queue + * @description Read a work pool queue + */ + get: operations["read_work_queue_work_pools__work_pool_name__queues__name__get"]; + put?: never; + post?: never; + /** + * Delete Work Queue + * @description Delete a work pool queue + */ + delete: operations["delete_work_queue_work_pools__work_pool_name__queues__name__delete"]; + options?: never; + head?: never; + /** + * Update Work Queue + * @description Update a work pool queue + */ + patch: operations["update_work_queue_work_pools__work_pool_name__queues__name__patch"]; + trace?: never; + }; + "/work_pools/{work_pool_name}/queues/filter": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** + * Read Work Queues + * @description Read all work pool queues + */ + post: operations["read_work_queues_work_pools__work_pool_name__queues_filter_post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/work_pools/{work_pool_name}/workers/heartbeat": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** Worker Heartbeat */ + post: operations["worker_heartbeat_work_pools__work_pool_name__workers_heartbeat_post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/work_pools/{work_pool_name}/workers/filter": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** + * Read Workers + * @description Read all worker processes + */ + post: operations["read_workers_work_pools__work_pool_name__workers_filter_post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/work_pools/{work_pool_name}/workers/{name}": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + post?: never; + /** + * Delete Worker + * @description Delete a work pool's worker + */ + delete: operations["delete_worker_work_pools__work_pool_name__workers__name__delete"]; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/task_workers/filter": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** + * Read Task Workers + * @description Read active task workers. Optionally filter by task keys. + */ + post: operations["read_task_workers_task_workers_filter_post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/work_queues/": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** + * Create Work Queue + * @description Creates a new work queue. + * + * If a work queue with the same name already exists, an error + * will be raised. 
+ */ + post: operations["create_work_queue_work_queues__post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/work_queues/{id}": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + /** + * Read Work Queue + * @description Get a work queue by id. + */ + get: operations["read_work_queue_work_queues__id__get"]; + put?: never; + post?: never; + /** + * Delete Work Queue + * @description Delete a work queue by id. + */ + delete: operations["delete_work_queue_work_queues__id__delete"]; + options?: never; + head?: never; + /** + * Update Work Queue + * @description Updates an existing work queue. + */ + patch: operations["update_work_queue_work_queues__id__patch"]; + trace?: never; + }; + "/work_queues/name/{name}": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + /** + * Read Work Queue By Name + * @description Get a work queue by id. + */ + get: operations["read_work_queue_by_name_work_queues_name__name__get"]; + put?: never; + post?: never; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/work_queues/{id}/get_runs": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** + * Read Work Queue Runs + * @description Get flow runs from the work queue. + */ + post: operations["read_work_queue_runs_work_queues__id__get_runs_post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/work_queues/filter": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** + * Read Work Queues + * @description Query for work queues. + */ + post: operations["read_work_queues_work_queues_filter_post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/work_queues/{id}/status": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + /** + * Read Work Queue Status + * @description Get the status of a work queue. + */ + get: operations["read_work_queue_status_work_queues__id__status_get"]; + put?: never; + post?: never; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/artifacts/": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** Create Artifact */ + post: operations["create_artifact_artifacts__post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/artifacts/{id}": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + /** + * Read Artifact + * @description Retrieve an artifact from the database. + */ + get: operations["read_artifact_artifacts__id__get"]; + put?: never; + post?: never; + /** + * Delete Artifact + * @description Delete an artifact from the database. + */ + delete: operations["delete_artifact_artifacts__id__delete"]; + options?: never; + head?: never; + /** + * Update Artifact + * @description Update an artifact in the database. + */ + patch: operations["update_artifact_artifacts__id__patch"]; + trace?: never; + }; + "/artifacts/{key}/latest": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + /** + * Read Latest Artifact + * @description Retrieve the latest artifact from the artifact table. 
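+     *
+     * (Latest artifacts are tracked per key: see the `ArtifactCollection`
+     * schema below, whose `latest_id` is the newest artifact ID for a key.)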
+ */ + get: operations["read_latest_artifact_artifacts__key__latest_get"]; + put?: never; + post?: never; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/artifacts/filter": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** + * Read Artifacts + * @description Retrieve artifacts from the database. + */ + post: operations["read_artifacts_artifacts_filter_post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/artifacts/latest/filter": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** + * Read Latest Artifacts + * @description Retrieve artifacts from the database. + */ + post: operations["read_latest_artifacts_artifacts_latest_filter_post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/artifacts/count": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** + * Count Artifacts + * @description Count artifacts from the database. + */ + post: operations["count_artifacts_artifacts_count_post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/artifacts/latest/count": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** + * Count Latest Artifacts + * @description Count artifacts from the database. + */ + post: operations["count_latest_artifacts_artifacts_latest_count_post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/block_schemas/": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** Create Block Schema */ + post: operations["create_block_schema_block_schemas__post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/block_schemas/{id}": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + /** + * Read Block Schema By Id + * @description Get a block schema by id. + */ + get: operations["read_block_schema_by_id_block_schemas__id__get"]; + put?: never; + post?: never; + /** + * Delete Block Schema + * @description Delete a block schema by id. 
+ */ + delete: operations["delete_block_schema_block_schemas__id__delete"]; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/block_schemas/filter": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** + * Read Block Schemas + * @description Read all block schemas, optionally filtered by type + */ + post: operations["read_block_schemas_block_schemas_filter_post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/block_schemas/checksum/{checksum}": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + /** Read Block Schema By Checksum */ + get: operations["read_block_schema_by_checksum_block_schemas_checksum__checksum__get"]; + put?: never; + post?: never; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/block_capabilities/": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + /** Read Available Block Capabilities */ + get: operations["read_available_block_capabilities_block_capabilities__get"]; + put?: never; + post?: never; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/collections/views/{view}": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + /** + * Read View Content + * @description Reads the content of a view from the prefect-collection-registry. + */ + get: operations["read_view_content_collections_views__view__get"]; + put?: never; + post?: never; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/variables/": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** Create Variable */ + post: operations["create_variable_variables__post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/variables/{id}": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + /** Read Variable */ + get: operations["read_variable_variables__id__get"]; + put?: never; + post?: never; + /** Delete Variable */ + delete: operations["delete_variable_variables__id__delete"]; + options?: never; + head?: never; + /** Update Variable */ + patch: operations["update_variable_variables__id__patch"]; + trace?: never; + }; + "/variables/name/{name}": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + /** Read Variable By Name */ + get: operations["read_variable_by_name_variables_name__name__get"]; + put?: never; + post?: never; + /** Delete Variable By Name */ + delete: operations["delete_variable_by_name_variables_name__name__delete"]; + options?: never; + head?: never; + /** Update Variable By Name */ + patch: operations["update_variable_by_name_variables_name__name__patch"]; + trace?: never; + }; + "/variables/filter": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** Read Variables */ + post: operations["read_variables_variables_filter_post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/variables/count": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** Count Variables */ + post: 
operations["count_variables_variables_count_post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/csrf-token": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + /** + * Create Csrf Token + * @description Create or update a CSRF token for a client + */ + get: operations["create_csrf_token_csrf_token_get"]; + put?: never; + post?: never; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/events": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** + * Create Events + * @description Record a batch of Events + */ + post: operations["create_events_events_post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/events/filter": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** + * Read Events + * @description Queries for Events matching the given filter criteria in the given Account. Returns + * the first page of results, and the URL to request the next page (if there are more + * results). + */ + post: operations["read_events_events_filter_post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/events/filter/next": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + /** + * Read Account Events Page + * @description Returns the next page of Events for a previous query against the given Account, and + * the URL to request the next page (if there are more results). + */ + get: operations["read_account_events_page_events_filter_next_get"]; + put?: never; + post?: never; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/events/count-by/{countable}": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** + * Count Account Events + * @description Returns distinct objects and the count of events associated with them. Objects + * that can be counted include the day the event occurred, the type of event, or + * the IDs of the resources associated with the event. 
+ */ + post: operations["count_account_events_events_count_by__countable__post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/automations/": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** Create Automation */ + post: operations["create_automation_automations__post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/automations/{id}": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + /** Read Automation */ + get: operations["read_automation_automations__id__get"]; + /** Update Automation */ + put: operations["update_automation_automations__id__put"]; + post?: never; + /** Delete Automation */ + delete: operations["delete_automation_automations__id__delete"]; + options?: never; + head?: never; + /** Patch Automation */ + patch: operations["patch_automation_automations__id__patch"]; + trace?: never; + }; + "/automations/filter": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** Read Automations */ + post: operations["read_automations_automations_filter_post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/automations/count": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** Count Automations */ + post: operations["count_automations_automations_count_post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/automations/related-to/{resource_id}": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + /** Read Automations Related To Resource */ + get: operations["read_automations_related_to_resource_automations_related_to__resource_id__get"]; + put?: never; + post?: never; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/automations/owned-by/{resource_id}": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + post?: never; + /** Delete Automations Owned By Resource */ + delete: operations["delete_automations_owned_by_resource_automations_owned_by__resource_id__delete"]; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/templates/validate": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** Validate Template */ + post: operations["validate_template_templates_validate_post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/ui/flows/count-deployments": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** + * Count Deployments By Flow + * @description Get deployment counts by flow id. + */ + post: operations["count_deployments_by_flow_ui_flows_count_deployments_post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/ui/flows/next-runs": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** + * Next Runs By Flow + * @description Get the next flow run by flow id. 
+ */ + post: operations["next_runs_by_flow_ui_flows_next_runs_post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/ui/flow_runs/history": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** Read Flow Run History */ + post: operations["read_flow_run_history_ui_flow_runs_history_post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/ui/flow_runs/count-task-runs": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** + * Count Task Runs By Flow Run + * @description Get task run counts by flow run id. + */ + post: operations["count_task_runs_by_flow_run_ui_flow_runs_count_task_runs_post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/ui/schemas/validate": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** Validate Obj */ + post: operations["validate_obj_ui_schemas_validate_post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/ui/task_runs/dashboard/counts": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** Read Dashboard Task Run Counts */ + post: operations["read_dashboard_task_run_counts_ui_task_runs_dashboard_counts_post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/ui/task_runs/count": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** Read Task Run Counts By State */ + post: operations["read_task_run_counts_by_state_ui_task_runs_count_post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/admin/settings": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + /** + * Read Settings + * @description Get the current Prefect REST API settings. + * + * Secret setting values will be obfuscated. + */ + get: operations["read_settings_admin_settings_get"]; + put?: never; + post?: never; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/admin/version": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + /** + * Read Version + * @description Returns the Prefect version number + */ + get: operations["read_version_admin_version_get"]; + put?: never; + post?: never; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/admin/database/clear": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** + * Clear Database + * @description Clear all database tables without dropping them. + */ + post: operations["clear_database_admin_database_clear_post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/admin/database/drop": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** + * Drop Database + * @description Drop all database objects. 
+ */ + post: operations["drop_database_admin_database_drop_post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/admin/database/create": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** + * Create Database + * @description Create all database objects. + */ + post: operations["create_database_admin_database_create_post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/hello": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + /** + * Hello + * @description Say hello! + */ + get: operations["hello_hello_get"]; + put?: never; + post?: never; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/ready": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + /** Perform Readiness Check */ + get: operations["perform_readiness_check_ready_get"]; + put?: never; + post?: never; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; } export type webhooks = Record; export interface components { - schemas: { - /** Artifact */ - Artifact: { - /** - * Id - * Format: uuid - */ - id?: string; - /** Created */ - created?: string | null; - /** Updated */ - updated?: string | null; - /** - * Key - * @description An optional unique reference key for this artifact. - */ - key?: string | null; - /** - * Type - * @description An identifier that describes the shape of the data field. e.g. 'result', 'table', 'markdown' - */ - type?: string | null; - /** - * Description - * @description A markdown-enabled description of the artifact. - */ - description?: string | null; - /** - * Data - * @description Data associated with the artifact, e.g. a result.; structure depends on the artifact type. - */ - data?: Record | unknown | null; - /** - * Metadata - * @description User-defined artifact metadata. Content must be string key and value pairs. - */ - metadata_?: { - [key: string]: string; - } | null; - /** - * Flow Run Id - * @description The flow run associated with the artifact. - */ - flow_run_id?: string | null; - /** - * Task Run Id - * @description The task run associated with the artifact. - */ - task_run_id?: string | null; - }; - /** ArtifactCollection */ - ArtifactCollection: { - /** - * Id - * Format: uuid - */ - id?: string; - /** Created */ - created?: string | null; - /** Updated */ - updated?: string | null; - /** - * Key - * @description An optional unique reference key for this artifact. - */ - key: string; - /** - * Latest Id - * Format: uuid - * @description The latest artifact ID associated with the key. - */ - latest_id: string; - /** - * Type - * @description An identifier that describes the shape of the data field. e.g. 'result', 'table', 'markdown' - */ - type?: string | null; - /** - * Description - * @description A markdown-enabled description of the artifact. - */ - description?: string | null; - /** - * Data - * @description Data associated with the artifact, e.g. a result.; structure depends on the artifact type. - */ - data?: Record | unknown | null; - /** - * Metadata - * @description User-defined artifact metadata. Content must be string key and value pairs. - */ - metadata_?: { - [key: string]: string; - } | null; - /** - * Flow Run Id - * @description The flow run associated with the artifact. 
- */ - flow_run_id?: string | null; - /** - * Task Run Id - * @description The task run associated with the artifact. - */ - task_run_id?: string | null; - }; - /** - * ArtifactCollectionFilter - * @description Filter artifact collections. Only artifact collections matching all criteria will be returned - */ - ArtifactCollectionFilter: { - /** - * @description Operator for combining filter criteria. Defaults to 'and_'. - * @default and_ - */ - operator: components["schemas"]["Operator"]; - /** @description Filter criteria for `Artifact.id` */ - latest_id?: - | components["schemas"]["ArtifactCollectionFilterLatestId"] - | null; - /** @description Filter criteria for `Artifact.key` */ - key?: components["schemas"]["ArtifactCollectionFilterKey"] | null; - /** @description Filter criteria for `Artifact.flow_run_id` */ - flow_run_id?: - | components["schemas"]["ArtifactCollectionFilterFlowRunId"] - | null; - /** @description Filter criteria for `Artifact.task_run_id` */ - task_run_id?: - | components["schemas"]["ArtifactCollectionFilterTaskRunId"] - | null; - /** @description Filter criteria for `Artifact.type` */ - type?: components["schemas"]["ArtifactCollectionFilterType"] | null; - }; - /** - * ArtifactCollectionFilterFlowRunId - * @description Filter by `ArtifactCollection.flow_run_id`. - */ - ArtifactCollectionFilterFlowRunId: { - /** - * Any - * @description A list of flow run IDs to include - */ - any_?: string[] | null; - }; - /** - * ArtifactCollectionFilterKey - * @description Filter by `ArtifactCollection.key`. - */ - ArtifactCollectionFilterKey: { - /** - * Any - * @description A list of artifact keys to include - */ - any_?: string[] | null; - /** - * Like - * @description A string to match artifact keys against. This can include SQL wildcard characters like `%` and `_`. - */ - like_?: string | null; - /** - * Exists - * @description If `true`, only include artifacts with a non-null key. If `false`, only include artifacts with a null key. Should return all rows in the ArtifactCollection table if specified. - */ - exists_?: boolean | null; - }; - /** - * ArtifactCollectionFilterLatestId - * @description Filter by `ArtifactCollection.latest_id`. - */ - ArtifactCollectionFilterLatestId: { - /** - * Any - * @description A list of artifact ids to include - */ - any_?: string[] | null; - }; - /** - * ArtifactCollectionFilterTaskRunId - * @description Filter by `ArtifactCollection.task_run_id`. - */ - ArtifactCollectionFilterTaskRunId: { - /** - * Any - * @description A list of task run IDs to include - */ - any_?: string[] | null; - }; - /** - * ArtifactCollectionFilterType - * @description Filter by `ArtifactCollection.type`. - */ - ArtifactCollectionFilterType: { - /** - * Any - * @description A list of artifact types to include - */ - any_?: string[] | null; - /** - * Not Any - * @description A list of artifact types to exclude - */ - not_any_?: string[] | null; - }; - /** - * ArtifactCollectionSort - * @description Defines artifact collection sorting options. - * @enum {string} - */ - ArtifactCollectionSort: - | "CREATED_DESC" - | "UPDATED_DESC" - | "ID_DESC" - | "KEY_DESC" - | "KEY_ASC"; - /** - * ArtifactCreate - * @description Data used by the Prefect REST API to create an artifact. - */ - ArtifactCreate: { - /** - * Key - * @description An optional unique reference key for this artifact. - */ - key?: string | null; - /** - * Type - * @description An identifier that describes the shape of the data field. e.g. 
'result', 'table', 'markdown' - */ - type?: string | null; - /** - * Description - * @description A markdown-enabled description of the artifact. - */ - description?: string | null; - /** - * Data - * @description Data associated with the artifact, e.g. a result.; structure depends on the artifact type. - */ - data?: Record | unknown | null; - /** - * Metadata - * @description User-defined artifact metadata. Content must be string key and value pairs. - */ - metadata_?: { - [key: string]: string; - } | null; - /** - * Flow Run Id - * @description The flow run associated with the artifact. - */ - flow_run_id?: string | null; - /** - * Task Run Id - * @description The task run associated with the artifact. - */ - task_run_id?: string | null; - }; - /** - * ArtifactFilter - * @description Filter artifacts. Only artifacts matching all criteria will be returned - */ - ArtifactFilter: { - /** - * @description Operator for combining filter criteria. Defaults to 'and_'. - * @default and_ - */ - operator: components["schemas"]["Operator"]; - /** @description Filter criteria for `Artifact.id` */ - id?: components["schemas"]["ArtifactFilterId"] | null; - /** @description Filter criteria for `Artifact.key` */ - key?: components["schemas"]["ArtifactFilterKey"] | null; - /** @description Filter criteria for `Artifact.flow_run_id` */ - flow_run_id?: components["schemas"]["ArtifactFilterFlowRunId"] | null; - /** @description Filter criteria for `Artifact.task_run_id` */ - task_run_id?: components["schemas"]["ArtifactFilterTaskRunId"] | null; - /** @description Filter criteria for `Artifact.type` */ - type?: components["schemas"]["ArtifactFilterType"] | null; - }; - /** - * ArtifactFilterFlowRunId - * @description Filter by `Artifact.flow_run_id`. - */ - ArtifactFilterFlowRunId: { - /** - * Any - * @description A list of flow run IDs to include - */ - any_?: string[] | null; - }; - /** - * ArtifactFilterId - * @description Filter by `Artifact.id`. - */ - ArtifactFilterId: { - /** - * Any - * @description A list of artifact ids to include - */ - any_?: string[] | null; - }; - /** - * ArtifactFilterKey - * @description Filter by `Artifact.key`. - */ - ArtifactFilterKey: { - /** - * Any - * @description A list of artifact keys to include - */ - any_?: string[] | null; - /** - * Like - * @description A string to match artifact keys against. This can include SQL wildcard characters like `%` and `_`. - */ - like_?: string | null; - /** - * Exists - * @description If `true`, only include artifacts with a non-null key. If `false`, only include artifacts with a null key. - */ - exists_?: boolean | null; - }; - /** - * ArtifactFilterTaskRunId - * @description Filter by `Artifact.task_run_id`. - */ - ArtifactFilterTaskRunId: { - /** - * Any - * @description A list of task run IDs to include - */ - any_?: string[] | null; - }; - /** - * ArtifactFilterType - * @description Filter by `Artifact.type`. - */ - ArtifactFilterType: { - /** - * Any - * @description A list of artifact types to include - */ - any_?: string[] | null; - /** - * Not Any - * @description A list of artifact types to exclude - */ - not_any_?: string[] | null; - }; - /** - * ArtifactSort - * @description Defines artifact sorting options. - * @enum {string} - */ - ArtifactSort: - | "CREATED_DESC" - | "UPDATED_DESC" - | "ID_DESC" - | "KEY_DESC" - | "KEY_ASC"; - /** - * ArtifactUpdate - * @description Data used by the Prefect REST API to update an artifact. 
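-     * @example
-     * // Illustrative sketch only: every field of the update payload is
-     * // optional, so a partial update such as replacing the description is valid.
-     * const update: components["schemas"]["ArtifactUpdate"] = {
-     *   description: "Refreshed markdown summary",
-     * };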
- */ - ArtifactUpdate: { - /** Data */ - data?: Record | unknown | null; - /** Description */ - description?: string | null; - /** Metadata */ - metadata_?: { - [key: string]: string; - } | null; - }; - /** Automation */ - Automation: { - /** - * Name - * @description The name of this automation - */ - name: string; - /** - * Description - * @description A longer description of this automation - * @default - */ - description: string; - /** - * Enabled - * @description Whether this automation will be evaluated - * @default true - */ - enabled: boolean; - /** - * Trigger - * @description The criteria for which events this Automation covers and how it will respond to the presence or absence of those events - */ - trigger: - | components["schemas"]["EventTrigger"] - | components["schemas"]["CompoundTrigger-Output"] - | components["schemas"]["SequenceTrigger-Output"]; - /** - * Actions - * @description The actions to perform when this Automation triggers - */ - actions: ( - | components["schemas"]["DoNothing"] - | components["schemas"]["RunDeployment"] - | components["schemas"]["PauseDeployment"] - | components["schemas"]["ResumeDeployment"] - | components["schemas"]["CancelFlowRun"] - | components["schemas"]["ChangeFlowRunState"] - | components["schemas"]["PauseWorkQueue"] - | components["schemas"]["ResumeWorkQueue"] - | components["schemas"]["SendNotification"] - | components["schemas"]["CallWebhook"] - | components["schemas"]["PauseAutomation"] - | components["schemas"]["ResumeAutomation"] - | components["schemas"]["SuspendFlowRun"] - | components["schemas"]["ResumeFlowRun"] - | components["schemas"]["PauseWorkPool"] - | components["schemas"]["ResumeWorkPool"] - )[]; - /** - * Actions On Trigger - * @description The actions to perform when an Automation goes into a triggered state - */ - actions_on_trigger?: ( - | components["schemas"]["DoNothing"] - | components["schemas"]["RunDeployment"] - | components["schemas"]["PauseDeployment"] - | components["schemas"]["ResumeDeployment"] - | components["schemas"]["CancelFlowRun"] - | components["schemas"]["ChangeFlowRunState"] - | components["schemas"]["PauseWorkQueue"] - | components["schemas"]["ResumeWorkQueue"] - | components["schemas"]["SendNotification"] - | components["schemas"]["CallWebhook"] - | components["schemas"]["PauseAutomation"] - | components["schemas"]["ResumeAutomation"] - | components["schemas"]["SuspendFlowRun"] - | components["schemas"]["ResumeFlowRun"] - | components["schemas"]["PauseWorkPool"] - | components["schemas"]["ResumeWorkPool"] - )[]; - /** - * Actions On Resolve - * @description The actions to perform when an Automation goes into a resolving state - */ - actions_on_resolve?: ( - | components["schemas"]["DoNothing"] - | components["schemas"]["RunDeployment"] - | components["schemas"]["PauseDeployment"] - | components["schemas"]["ResumeDeployment"] - | components["schemas"]["CancelFlowRun"] - | components["schemas"]["ChangeFlowRunState"] - | components["schemas"]["PauseWorkQueue"] - | components["schemas"]["ResumeWorkQueue"] - | components["schemas"]["SendNotification"] - | components["schemas"]["CallWebhook"] - | components["schemas"]["PauseAutomation"] - | components["schemas"]["ResumeAutomation"] - | components["schemas"]["SuspendFlowRun"] - | components["schemas"]["ResumeFlowRun"] - | components["schemas"]["PauseWorkPool"] - | components["schemas"]["ResumeWorkPool"] - )[]; - /** - * Id - * Format: uuid - */ - id?: string; - /** Created */ - created?: string | null; - /** Updated */ - updated?: string | null; - }; - 
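-    /**
-     * Illustrative note (a sketch, not part of the generated schema): because
-     * these are plain type aliases, consumers can derive the automation action
-     * union without redeclaring it, e.g.
-     * `type AutomationAction = components["schemas"]["Automation"]["actions"][number];`
-     */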
/** AutomationCreate */ - AutomationCreate: { - /** - * Name - * @description The name of this automation - */ - name: string; - /** - * Description - * @description A longer description of this automation - * @default - */ - description: string; - /** - * Enabled - * @description Whether this automation will be evaluated - * @default true - */ - enabled: boolean; - /** - * Trigger - * @description The criteria for which events this Automation covers and how it will respond to the presence or absence of those events - */ - trigger: - | components["schemas"]["EventTrigger"] - | components["schemas"]["CompoundTrigger-Input"] - | components["schemas"]["SequenceTrigger-Input"]; - /** - * Actions - * @description The actions to perform when this Automation triggers - */ - actions: ( - | components["schemas"]["DoNothing"] - | components["schemas"]["RunDeployment"] - | components["schemas"]["PauseDeployment"] - | components["schemas"]["ResumeDeployment"] - | components["schemas"]["CancelFlowRun"] - | components["schemas"]["ChangeFlowRunState"] - | components["schemas"]["PauseWorkQueue"] - | components["schemas"]["ResumeWorkQueue"] - | components["schemas"]["SendNotification"] - | components["schemas"]["CallWebhook"] - | components["schemas"]["PauseAutomation"] - | components["schemas"]["ResumeAutomation"] - | components["schemas"]["SuspendFlowRun"] - | components["schemas"]["ResumeFlowRun"] - | components["schemas"]["PauseWorkPool"] - | components["schemas"]["ResumeWorkPool"] - )[]; - /** - * Actions On Trigger - * @description The actions to perform when an Automation goes into a triggered state - */ - actions_on_trigger?: ( - | components["schemas"]["DoNothing"] - | components["schemas"]["RunDeployment"] - | components["schemas"]["PauseDeployment"] - | components["schemas"]["ResumeDeployment"] - | components["schemas"]["CancelFlowRun"] - | components["schemas"]["ChangeFlowRunState"] - | components["schemas"]["PauseWorkQueue"] - | components["schemas"]["ResumeWorkQueue"] - | components["schemas"]["SendNotification"] - | components["schemas"]["CallWebhook"] - | components["schemas"]["PauseAutomation"] - | components["schemas"]["ResumeAutomation"] - | components["schemas"]["SuspendFlowRun"] - | components["schemas"]["ResumeFlowRun"] - | components["schemas"]["PauseWorkPool"] - | components["schemas"]["ResumeWorkPool"] - )[]; - /** - * Actions On Resolve - * @description The actions to perform when an Automation goes into a resolving state - */ - actions_on_resolve?: ( - | components["schemas"]["DoNothing"] - | components["schemas"]["RunDeployment"] - | components["schemas"]["PauseDeployment"] - | components["schemas"]["ResumeDeployment"] - | components["schemas"]["CancelFlowRun"] - | components["schemas"]["ChangeFlowRunState"] - | components["schemas"]["PauseWorkQueue"] - | components["schemas"]["ResumeWorkQueue"] - | components["schemas"]["SendNotification"] - | components["schemas"]["CallWebhook"] - | components["schemas"]["PauseAutomation"] - | components["schemas"]["ResumeAutomation"] - | components["schemas"]["SuspendFlowRun"] - | components["schemas"]["ResumeFlowRun"] - | components["schemas"]["PauseWorkPool"] - | components["schemas"]["ResumeWorkPool"] - )[]; - /** - * Owner Resource - * @description The resource to which this automation belongs - */ - owner_resource?: string | null; - }; - /** AutomationFilter */ - AutomationFilter: { - /** - * @description Operator for combining filter criteria. Defaults to 'and_'. 
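-     * @example
-     * // Illustrative sketch only: "and_" is the documented default `Operator`
-     * // value, and the automation name is a placeholder.
-     * const filter: components["schemas"]["AutomationFilter"] = {
-     *   operator: "and_",
-     *   name: { any_: ["nightly-cleanup"] },
-     * };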
- * @default and_ - */ - operator: components["schemas"]["Operator"]; - /** @description Filter criteria for `Automation.name` */ - name?: components["schemas"]["AutomationFilterName"] | null; - /** @description Filter criteria for `Automation.created` */ - created?: components["schemas"]["AutomationFilterCreated"] | null; - }; - /** - * AutomationFilterCreated - * @description Filter by `Automation.created`. - */ - AutomationFilterCreated: { - /** - * Before - * @description Only include automations created before this datetime - */ - before_?: string | null; - }; - /** - * AutomationFilterName - * @description Filter by `Automation.created`. - */ - AutomationFilterName: { - /** - * Any - * @description Only include automations with names that match any of these strings - */ - any_?: string[] | null; - }; - /** AutomationPartialUpdate */ - AutomationPartialUpdate: { - /** - * Enabled - * @description Whether this automation will be evaluated - * @default true - */ - enabled: boolean; - }; - /** - * AutomationSort - * @description Defines automations sorting options. - * @enum {string} - */ - AutomationSort: "CREATED_DESC" | "UPDATED_DESC" | "NAME_ASC" | "NAME_DESC"; - /** AutomationUpdate */ - AutomationUpdate: { - /** - * Name - * @description The name of this automation - */ - name: string; - /** - * Description - * @description A longer description of this automation - * @default - */ - description: string; - /** - * Enabled - * @description Whether this automation will be evaluated - * @default true - */ - enabled: boolean; - /** - * Trigger - * @description The criteria for which events this Automation covers and how it will respond to the presence or absence of those events - */ - trigger: - | components["schemas"]["EventTrigger"] - | components["schemas"]["CompoundTrigger-Input"] - | components["schemas"]["SequenceTrigger-Input"]; - /** - * Actions - * @description The actions to perform when this Automation triggers - */ - actions: ( - | components["schemas"]["DoNothing"] - | components["schemas"]["RunDeployment"] - | components["schemas"]["PauseDeployment"] - | components["schemas"]["ResumeDeployment"] - | components["schemas"]["CancelFlowRun"] - | components["schemas"]["ChangeFlowRunState"] - | components["schemas"]["PauseWorkQueue"] - | components["schemas"]["ResumeWorkQueue"] - | components["schemas"]["SendNotification"] - | components["schemas"]["CallWebhook"] - | components["schemas"]["PauseAutomation"] - | components["schemas"]["ResumeAutomation"] - | components["schemas"]["SuspendFlowRun"] - | components["schemas"]["ResumeFlowRun"] - | components["schemas"]["PauseWorkPool"] - | components["schemas"]["ResumeWorkPool"] - )[]; - /** - * Actions On Trigger - * @description The actions to perform when an Automation goes into a triggered state - */ - actions_on_trigger?: ( - | components["schemas"]["DoNothing"] - | components["schemas"]["RunDeployment"] - | components["schemas"]["PauseDeployment"] - | components["schemas"]["ResumeDeployment"] - | components["schemas"]["CancelFlowRun"] - | components["schemas"]["ChangeFlowRunState"] - | components["schemas"]["PauseWorkQueue"] - | components["schemas"]["ResumeWorkQueue"] - | components["schemas"]["SendNotification"] - | components["schemas"]["CallWebhook"] - | components["schemas"]["PauseAutomation"] - | components["schemas"]["ResumeAutomation"] - | components["schemas"]["SuspendFlowRun"] - | components["schemas"]["ResumeFlowRun"] - | components["schemas"]["PauseWorkPool"] - | components["schemas"]["ResumeWorkPool"] - )[]; - /** - 
* Actions On Resolve - * @description The actions to perform when an Automation goes into a resolving state - */ - actions_on_resolve?: ( - | components["schemas"]["DoNothing"] - | components["schemas"]["RunDeployment"] - | components["schemas"]["PauseDeployment"] - | components["schemas"]["ResumeDeployment"] - | components["schemas"]["CancelFlowRun"] - | components["schemas"]["ChangeFlowRunState"] - | components["schemas"]["PauseWorkQueue"] - | components["schemas"]["ResumeWorkQueue"] - | components["schemas"]["SendNotification"] - | components["schemas"]["CallWebhook"] - | components["schemas"]["PauseAutomation"] - | components["schemas"]["ResumeAutomation"] - | components["schemas"]["SuspendFlowRun"] - | components["schemas"]["ResumeFlowRun"] - | components["schemas"]["PauseWorkPool"] - | components["schemas"]["ResumeWorkPool"] - )[]; - }; - /** - * BlockDocument - * @description An ORM representation of a block document. - */ - BlockDocument: { - /** - * Id - * Format: uuid - */ - id?: string; - /** Created */ - created?: string | null; - /** Updated */ - updated?: string | null; - /** - * Name - * @description The block document's name. Not required for anonymous block documents. - */ - name?: string | null; - /** - * Data - * @description The block document's data - */ - data?: Record; - /** - * Block Schema Id - * Format: uuid - * @description A block schema ID - */ - block_schema_id: string; - /** @description The associated block schema */ - block_schema?: components["schemas"]["BlockSchema"] | null; - /** - * Block Type Id - * Format: uuid - * @description A block type ID - */ - block_type_id: string; - /** - * Block Type Name - * @description The associated block type's name - */ - block_type_name?: string | null; - /** @description The associated block type */ - block_type?: components["schemas"]["BlockType"] | null; - /** - * Block Document References - * @description Record of the block document's references - */ - block_document_references?: { - [key: string]: Record; - }; - /** - * Is Anonymous - * @description Whether the block is anonymous (anonymous blocks are usually created by Prefect automatically) - * @default false - */ - is_anonymous: boolean; - }; - /** - * BlockDocumentCreate - * @description Data used by the Prefect REST API to create a block document. - */ - BlockDocumentCreate: { - /** - * Name - * @description The block document's name. Not required for anonymous block documents. - */ - name?: string | null; - /** - * Data - * @description The block document's data - */ - data?: Record; - /** - * Block Schema Id - * Format: uuid - * @description A block schema ID - */ - block_schema_id: string; - /** - * Block Type Id - * Format: uuid - * @description A block type ID - */ - block_type_id: string; - /** - * Is Anonymous - * @description Whether the block is anonymous (anonymous blocks are usually created by Prefect automatically) - * @default false - */ - is_anonymous: boolean; - }; - /** - * BlockDocumentFilter - * @description Filter BlockDocuments. Only BlockDocuments matching all criteria will be returned - */ - BlockDocumentFilter: { - /** - * @description Operator for combining filter criteria. Defaults to 'and_'. - * @default and_ - */ - operator: components["schemas"]["Operator"]; - /** @description Filter criteria for `BlockDocument.id` */ - id?: components["schemas"]["BlockDocumentFilterId"] | null; - /** - * @description Filter criteria for `BlockDocument.is_anonymous`. Defaults to excluding anonymous blocks. 
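-     * @example
-     * // Illustrative sketch only: override the default shown below to return
-     * // only anonymous block documents instead of excluding them.
-     * const filter: components["schemas"]["BlockDocumentFilter"] = {
-     *   operator: "and_",
-     *   is_anonymous: { eq_: true },
-     * };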
- * @default { - * "eq_": false - * } - */ - is_anonymous: - | components["schemas"]["BlockDocumentFilterIsAnonymous"] - | null; - /** @description Filter criteria for `BlockDocument.block_type_id` */ - block_type_id?: - | components["schemas"]["BlockDocumentFilterBlockTypeId"] - | null; - /** @description Filter criteria for `BlockDocument.name` */ - name?: components["schemas"]["BlockDocumentFilterName"] | null; - }; - /** - * BlockDocumentFilterBlockTypeId - * @description Filter by `BlockDocument.block_type_id`. - */ - BlockDocumentFilterBlockTypeId: { - /** - * Any - * @description A list of block type ids to include - */ - any_?: string[] | null; - }; - /** - * BlockDocumentFilterId - * @description Filter by `BlockDocument.id`. - */ - BlockDocumentFilterId: { - /** - * Any - * @description A list of block ids to include - */ - any_?: string[] | null; - }; - /** - * BlockDocumentFilterIsAnonymous - * @description Filter by `BlockDocument.is_anonymous`. - */ - BlockDocumentFilterIsAnonymous: { - /** - * Eq - * @description Filter block documents for only those that are or are not anonymous. - */ - eq_?: boolean | null; - }; - /** - * BlockDocumentFilterName - * @description Filter by `BlockDocument.name`. - */ - BlockDocumentFilterName: { - /** - * Any - * @description A list of block names to include - */ - any_?: string[] | null; - /** - * Like - * @description A string to match block names against. This can include SQL wildcard characters like `%` and `_`. - */ - like_?: string | null; - }; - /** - * BlockDocumentSort - * @description Defines block document sorting options. - * @enum {string} - */ - BlockDocumentSort: "NAME_DESC" | "NAME_ASC" | "BLOCK_TYPE_AND_NAME_ASC"; - /** - * BlockDocumentUpdate - * @description Data used by the Prefect REST API to update a block document. - */ - BlockDocumentUpdate: { - /** - * Block Schema Id - * @description A block schema ID - */ - block_schema_id?: string | null; - /** - * Data - * @description The block document's data - */ - data?: Record; - /** - * Merge Existing Data - * @default true - */ - merge_existing_data: boolean; - }; - /** - * BlockSchema - * @description An ORM representation of a block schema. - */ - BlockSchema: { - /** - * Id - * Format: uuid - */ - id?: string; - /** Created */ - created?: string | null; - /** Updated */ - updated?: string | null; - /** - * Checksum - * @description The block schema's unique checksum - */ - checksum: string; - /** - * Fields - * @description The block schema's field schema - */ - fields?: Record; - /** - * Block Type Id - * @description A block type ID - */ - block_type_id: string | null; - /** @description The associated block type */ - block_type?: components["schemas"]["BlockType"] | null; - /** - * Capabilities - * @description A list of Block capabilities - */ - capabilities?: string[]; - /** - * Version - * @description Human readable identifier for the block schema - * @default non-versioned - */ - version: string; - }; - /** - * BlockSchemaCreate - * @description Data used by the Prefect REST API to create a block schema. 
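-     * @example
-     * // Illustrative sketch only: a minimal create payload. The UUID is a
-     * // placeholder and the capability name is made up.
-     * const body: components["schemas"]["BlockSchemaCreate"] = {
-     *   block_type_id: "00000000-0000-0000-0000-000000000000",
-     *   capabilities: ["storage"],
-     *   version: "non-versioned",
-     * };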
- */ - BlockSchemaCreate: { - /** - * Fields - * @description The block schema's field schema - */ - fields?: Record; - /** - * Block Type Id - * Format: uuid - * @description A block type ID - */ - block_type_id: string; - /** - * Capabilities - * @description A list of Block capabilities - */ - capabilities?: string[]; - /** - * Version - * @description Human readable identifier for the block schema - * @default non-versioned - */ - version: string; - }; - /** - * BlockSchemaFilter - * @description Filter BlockSchemas - */ - BlockSchemaFilter: { - /** - * @description Operator for combining filter criteria. Defaults to 'and_'. - * @default and_ - */ - operator: components["schemas"]["Operator"]; - /** @description Filter criteria for `BlockSchema.block_type_id` */ - block_type_id?: - | components["schemas"]["BlockSchemaFilterBlockTypeId"] - | null; - /** @description Filter criteria for `BlockSchema.capabilities` */ - block_capabilities?: - | components["schemas"]["BlockSchemaFilterCapabilities"] - | null; - /** @description Filter criteria for `BlockSchema.id` */ - id?: components["schemas"]["BlockSchemaFilterId"] | null; - /** @description Filter criteria for `BlockSchema.version` */ - version?: components["schemas"]["BlockSchemaFilterVersion"] | null; - }; - /** - * BlockSchemaFilterBlockTypeId - * @description Filter by `BlockSchema.block_type_id`. - */ - BlockSchemaFilterBlockTypeId: { - /** - * Any - * @description A list of block type ids to include - */ - any_?: string[] | null; - }; - /** - * BlockSchemaFilterCapabilities - * @description Filter by `BlockSchema.capabilities` - */ - BlockSchemaFilterCapabilities: { - /** - * All - * @description A list of block capabilities. Block entities will be returned only if an associated block schema has a superset of the defined capabilities. - */ - all_?: string[] | null; - }; - /** - * BlockSchemaFilterId - * @description Filter by BlockSchema.id - */ - BlockSchemaFilterId: { - /** - * Any - * @description A list of IDs to include - */ - any_?: string[] | null; - }; - /** - * BlockSchemaFilterVersion - * @description Filter by `BlockSchema.capabilities` - */ - BlockSchemaFilterVersion: { - /** - * Any - * @description A list of block schema versions. - */ - any_?: string[] | null; - }; - /** - * BlockType - * @description An ORM representation of a block type - */ - BlockType: { - /** - * Id - * Format: uuid - */ - id?: string; - /** Created */ - created?: string | null; - /** Updated */ - updated?: string | null; - /** - * Name - * @description A block type's name - */ - name: string; - /** - * Slug - * @description A block type's slug - */ - slug: string; - /** - * Logo Url - * @description Web URL for the block type's logo - */ - logo_url?: string | null; - /** - * Documentation Url - * @description Web URL for the block type's documentation - */ - documentation_url?: string | null; - /** - * Description - * @description A short blurb about the corresponding block's intended use - */ - description?: string | null; - /** - * Code Example - * @description A code snippet demonstrating use of the corresponding block - */ - code_example?: string | null; - /** - * Is Protected - * @description Protected block types cannot be modified via API. - * @default false - */ - is_protected: boolean; - }; - /** - * BlockTypeCreate - * @description Data used by the Prefect REST API to create a block type. 
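-     * @example
-     * // Illustrative sketch only; the name and slug values are placeholders.
-     * const body: components["schemas"]["BlockTypeCreate"] = {
-     *   name: "My Storage",
-     *   slug: "my-storage",
-     * };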
- */ - BlockTypeCreate: { - /** - * Name - * @description A block type's name - */ - name: string; - /** - * Slug - * @description A block type's slug - */ - slug: string; - /** - * Logo Url - * @description Web URL for the block type's logo - */ - logo_url?: string | null; - /** - * Documentation Url - * @description Web URL for the block type's documentation - */ - documentation_url?: string | null; - /** - * Description - * @description A short blurb about the corresponding block's intended use - */ - description?: string | null; - /** - * Code Example - * @description A code snippet demonstrating use of the corresponding block - */ - code_example?: string | null; - }; - /** - * BlockTypeFilter - * @description Filter BlockTypes - */ - BlockTypeFilter: { - /** @description Filter criteria for `BlockType.name` */ - name?: components["schemas"]["BlockTypeFilterName"] | null; - /** @description Filter criteria for `BlockType.slug` */ - slug?: components["schemas"]["BlockTypeFilterSlug"] | null; - }; - /** - * BlockTypeFilterName - * @description Filter by `BlockType.name` - */ - BlockTypeFilterName: { - /** - * Like - * @description A case-insensitive partial match. For example, passing 'marvin' will match 'marvin', 'sad-Marvin', and 'marvin-robot'. - */ - like_?: string | null; - }; - /** - * BlockTypeFilterSlug - * @description Filter by `BlockType.slug` - */ - BlockTypeFilterSlug: { - /** - * Any - * @description A list of slugs to match - */ - any_?: string[] | null; - }; - /** - * BlockTypeUpdate - * @description Data used by the Prefect REST API to update a block type. - */ - BlockTypeUpdate: { - /** Logo Url */ - logo_url?: string | null; - /** Documentation Url */ - documentation_url?: string | null; - /** Description */ - description?: string | null; - /** Code Example */ - code_example?: string | null; - }; - /** Body_average_flow_run_lateness_flow_runs_lateness_post */ - Body_average_flow_run_lateness_flow_runs_lateness_post: { - flows?: components["schemas"]["FlowFilter"] | null; - flow_runs?: components["schemas"]["FlowRunFilter"] | null; - task_runs?: components["schemas"]["TaskRunFilter"] | null; - deployments?: components["schemas"]["DeploymentFilter"] | null; - work_pools?: components["schemas"]["WorkPoolFilter"] | null; - work_pool_queues?: components["schemas"]["WorkQueueFilter"] | null; - }; - /** Body_bulk_decrement_active_slots_v2_concurrency_limits_decrement_post */ - Body_bulk_decrement_active_slots_v2_concurrency_limits_decrement_post: { - /** Slots */ - slots: number; - /** Names */ - names: string[]; - /** Occupancy Seconds */ - occupancy_seconds?: number | null; - /** - * Create If Missing - * @default true - */ - create_if_missing: boolean; - }; - /** Body_bulk_increment_active_slots_v2_concurrency_limits_increment_post */ - Body_bulk_increment_active_slots_v2_concurrency_limits_increment_post: { - /** Slots */ - slots: number; - /** Names */ - names: string[]; - /** - * Mode - * @default concurrency - * @enum {string} - */ - mode: "concurrency" | "rate_limit"; - /** Create If Missing */ - create_if_missing?: boolean | null; - }; - /** Body_clear_database_admin_database_clear_post */ - Body_clear_database_admin_database_clear_post: { - /** - * Confirm - * @description Pass confirm=True to confirm you want to modify the database. 
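-     * @example
-     * // Illustrative sketch only: per the description above, destructive admin
-     * // routes are opted into explicitly by sending confirm=true.
-     * const body: components["schemas"]["Body_clear_database_admin_database_clear_post"] = {
-     *   confirm: true,
-     * };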
- * @default false - */ - confirm: boolean; - }; - /** Body_count_account_events_events_count_by__countable__post */ - Body_count_account_events_events_count_by__countable__post: { - filter: components["schemas"]["EventFilter"]; - /** @default day */ - time_unit: components["schemas"]["TimeUnit"]; - /** - * Time Interval - * @default 1 - */ - time_interval: number; - }; - /** Body_count_artifacts_artifacts_count_post */ - Body_count_artifacts_artifacts_count_post: { - artifacts?: components["schemas"]["ArtifactFilter"]; - flow_runs?: components["schemas"]["FlowRunFilter"]; - task_runs?: components["schemas"]["TaskRunFilter"]; - flows?: components["schemas"]["FlowFilter"]; - deployments?: components["schemas"]["DeploymentFilter"]; - }; - /** Body_count_block_documents_block_documents_count_post */ - Body_count_block_documents_block_documents_count_post: { - block_documents?: components["schemas"]["BlockDocumentFilter"] | null; - block_types?: components["schemas"]["BlockTypeFilter"] | null; - block_schemas?: components["schemas"]["BlockSchemaFilter"] | null; - }; - /** Body_count_deployments_by_flow_ui_flows_count_deployments_post */ - Body_count_deployments_by_flow_ui_flows_count_deployments_post: { - /** Flow Ids */ - flow_ids: string[]; - }; - /** Body_count_deployments_deployments_count_post */ - Body_count_deployments_deployments_count_post: { - flows?: components["schemas"]["FlowFilter"]; - flow_runs?: components["schemas"]["FlowRunFilter"]; - task_runs?: components["schemas"]["TaskRunFilter"]; - deployments?: components["schemas"]["DeploymentFilter"]; - work_pools?: components["schemas"]["WorkPoolFilter"]; - work_pool_queues?: components["schemas"]["WorkQueueFilter"]; - }; - /** Body_count_flow_runs_flow_runs_count_post */ - Body_count_flow_runs_flow_runs_count_post: { - flows?: components["schemas"]["FlowFilter"]; - flow_runs?: components["schemas"]["FlowRunFilter"]; - task_runs?: components["schemas"]["TaskRunFilter"]; - deployments?: components["schemas"]["DeploymentFilter"]; - work_pools?: components["schemas"]["WorkPoolFilter"]; - work_pool_queues?: components["schemas"]["WorkQueueFilter"]; - }; - /** Body_count_flows_flows_count_post */ - Body_count_flows_flows_count_post: { - flows?: components["schemas"]["FlowFilter"]; - flow_runs?: components["schemas"]["FlowRunFilter"]; - task_runs?: components["schemas"]["TaskRunFilter"]; - deployments?: components["schemas"]["DeploymentFilter"]; - work_pools?: components["schemas"]["WorkPoolFilter"]; - }; - /** Body_count_latest_artifacts_artifacts_latest_count_post */ - Body_count_latest_artifacts_artifacts_latest_count_post: { - artifacts?: components["schemas"]["ArtifactCollectionFilter"]; - flow_runs?: components["schemas"]["FlowRunFilter"]; - task_runs?: components["schemas"]["TaskRunFilter"]; - flows?: components["schemas"]["FlowFilter"]; - deployments?: components["schemas"]["DeploymentFilter"]; - }; - /** Body_count_task_runs_by_flow_run_ui_flow_runs_count_task_runs_post */ - Body_count_task_runs_by_flow_run_ui_flow_runs_count_task_runs_post: { - /** Flow Run Ids */ - flow_run_ids: string[]; - }; - /** Body_count_task_runs_task_runs_count_post */ - Body_count_task_runs_task_runs_count_post: { - flows?: components["schemas"]["FlowFilter"]; - flow_runs?: components["schemas"]["FlowRunFilter"]; - task_runs?: components["schemas"]["TaskRunFilter"]; - deployments?: components["schemas"]["DeploymentFilter"]; - }; - /** Body_count_variables_variables_count_post */ - Body_count_variables_variables_count_post: { - variables?: 
components["schemas"]["VariableFilter"] | null; - }; - /** Body_count_work_pools_work_pools_count_post */ - Body_count_work_pools_work_pools_count_post: { - work_pools?: components["schemas"]["WorkPoolFilter"] | null; - }; - /** Body_create_database_admin_database_create_post */ - Body_create_database_admin_database_create_post: { - /** - * Confirm - * @description Pass confirm=True to confirm you want to modify the database. - * @default false - */ - confirm: boolean; - }; - /** Body_create_flow_run_input_flow_runs__id__input_post */ - Body_create_flow_run_input_flow_runs__id__input_post: { - /** - * Key - * @description The input key - */ - key: string; - /** - * Value - * Format: binary - * @description The value of the input - */ - value: string; - /** - * Sender - * @description The sender of the input - */ - sender?: string | null; - }; - /** Body_decrement_concurrency_limits_v1_concurrency_limits_decrement_post */ - Body_decrement_concurrency_limits_v1_concurrency_limits_decrement_post: { - /** - * Names - * @description The tags to release a slot for - */ - names: string[]; - /** - * Task Run Id - * Format: uuid - * @description The ID of the task run releasing the slot - */ - task_run_id: string; - }; - /** Body_drop_database_admin_database_drop_post */ - Body_drop_database_admin_database_drop_post: { - /** - * Confirm - * @description Pass confirm=True to confirm you want to modify the database. - * @default false - */ - confirm: boolean; - }; - /** Body_filter_flow_run_input_flow_runs__id__input_filter_post */ - Body_filter_flow_run_input_flow_runs__id__input_filter_post: { - /** - * Prefix - * @description The input key prefix - */ - prefix: string; - /** - * Limit - * @description The maximum number of results to return - * @default 1 - */ - limit: number; - /** - * Exclude Keys - * @description Exclude inputs with these keys - * @default [] - */ - exclude_keys: string[]; - }; - /** Body_flow_run_history_flow_runs_history_post */ - Body_flow_run_history_flow_runs_history_post: { - /** - * History Start - * Format: date-time - * @description The history's start time. - */ - history_start: string; - /** - * History End - * Format: date-time - * @description The history's end time. - */ - history_end: string; - /** - * History Interval - * Format: time-delta - * @description The size of each history interval, in seconds. Must be at least 1 second. - */ - history_interval: number; - flows?: components["schemas"]["FlowFilter"]; - flow_runs?: components["schemas"]["FlowRunFilter"]; - task_runs?: components["schemas"]["TaskRunFilter"]; - deployments?: components["schemas"]["DeploymentFilter"]; - work_pools?: components["schemas"]["WorkPoolFilter"]; - work_queues?: components["schemas"]["WorkQueueFilter"]; - }; - /** Body_get_scheduled_flow_runs_for_deployments_deployments_get_scheduled_flow_runs_post */ - Body_get_scheduled_flow_runs_for_deployments_deployments_get_scheduled_flow_runs_post: { - /** - * Deployment Ids - * @description The deployment IDs to get scheduled runs for - */ - deployment_ids: string[]; - /** - * Scheduled Before - * Format: date-time - * @description The maximum time to look for scheduled flow runs - */ - scheduled_before?: string; - /** - * Limit - * @description Defaults to PREFECT_API_DEFAULT_LIMIT if not provided. 
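-     * @example
-     * // Illustrative sketch only: poll for runs of one deployment scheduled
-     * // before a cutoff; the ID and timestamp are placeholders.
-     * const body: components["schemas"]["Body_get_scheduled_flow_runs_for_deployments_deployments_get_scheduled_flow_runs_post"] = {
-     *   deployment_ids: ["00000000-0000-0000-0000-000000000000"],
-     *   scheduled_before: "2025-01-01T00:00:00Z",
-     *   limit: 10,
-     * };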
- */ - limit?: number; - }; - /** Body_get_scheduled_flow_runs_work_pools__name__get_scheduled_flow_runs_post */ - Body_get_scheduled_flow_runs_work_pools__name__get_scheduled_flow_runs_post: { - /** - * Work Queue Names - * @description The names of work pool queues - */ - work_queue_names?: string[]; - /** - * Scheduled Before - * Format: date-time - * @description The maximum time to look for scheduled flow runs - */ - scheduled_before?: string; - /** - * Scheduled After - * Format: date-time - * @description The minimum time to look for scheduled flow runs - */ - scheduled_after?: string; - /** - * Limit - * @description Defaults to PREFECT_API_DEFAULT_LIMIT if not provided. - */ - limit?: number; - }; - /** Body_increment_concurrency_limits_v1_concurrency_limits_increment_post */ - Body_increment_concurrency_limits_v1_concurrency_limits_increment_post: { - /** - * Names - * @description The tags to acquire a slot for - */ - names: string[]; - /** - * Task Run Id - * Format: uuid - * @description The ID of the task run acquiring the slot - */ - task_run_id: string; - }; - /** Body_next_runs_by_flow_ui_flows_next_runs_post */ - Body_next_runs_by_flow_ui_flows_next_runs_post: { - /** Flow Ids */ - flow_ids: string[]; - }; - /** Body_paginate_deployments_deployments_paginate_post */ - Body_paginate_deployments_deployments_paginate_post: { - /** - * Page - * @default 1 - */ - page: number; - flows?: components["schemas"]["FlowFilter"]; - flow_runs?: components["schemas"]["FlowRunFilter"]; - task_runs?: components["schemas"]["TaskRunFilter"]; - deployments?: components["schemas"]["DeploymentFilter"]; - work_pools?: components["schemas"]["WorkPoolFilter"]; - work_pool_queues?: components["schemas"]["WorkQueueFilter"]; - /** @default NAME_ASC */ - sort: components["schemas"]["DeploymentSort"]; - /** - * Limit - * @description Defaults to PREFECT_API_DEFAULT_LIMIT if not provided. - */ - limit?: number; - }; - /** Body_paginate_flow_runs_flow_runs_paginate_post */ - Body_paginate_flow_runs_flow_runs_paginate_post: { - /** @default ID_DESC */ - sort: components["schemas"]["FlowRunSort"]; - /** - * Page - * @default 1 - */ - page: number; - flows?: components["schemas"]["FlowFilter"] | null; - flow_runs?: components["schemas"]["FlowRunFilter"] | null; - task_runs?: components["schemas"]["TaskRunFilter"] | null; - deployments?: components["schemas"]["DeploymentFilter"] | null; - work_pools?: components["schemas"]["WorkPoolFilter"] | null; - work_pool_queues?: components["schemas"]["WorkQueueFilter"] | null; - /** - * Limit - * @description Defaults to PREFECT_API_DEFAULT_LIMIT if not provided. - */ - limit?: number; - }; - /** Body_paginate_flows_flows_paginate_post */ - Body_paginate_flows_flows_paginate_post: { - /** - * Page - * @default 1 - */ - page: number; - flows?: components["schemas"]["FlowFilter"] | null; - flow_runs?: components["schemas"]["FlowRunFilter"] | null; - task_runs?: components["schemas"]["TaskRunFilter"] | null; - deployments?: components["schemas"]["DeploymentFilter"] | null; - work_pools?: components["schemas"]["WorkPoolFilter"] | null; - /** @default NAME_ASC */ - sort: components["schemas"]["FlowSort"]; - /** - * Limit - * @description Defaults to PREFECT_API_DEFAULT_LIMIT if not provided. 
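-     * @example
-     * // Illustrative sketch only: page-based pagination, in contrast to the
-     * // offset-based `*_filter_post` bodies; the sort value comes from `FlowSort`.
-     * const body: components["schemas"]["Body_paginate_flows_flows_paginate_post"] = {
-     *   page: 2,
-     *   sort: "NAME_ASC",
-     *   limit: 25,
-     * };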
- */ - limit?: number; - }; - /** Body_read_all_concurrency_limits_v2_v2_concurrency_limits_filter_post */ - Body_read_all_concurrency_limits_v2_v2_concurrency_limits_filter_post: { - /** - * Offset - * @default 0 - */ - offset: number; - /** - * Limit - * @description Defaults to PREFECT_API_DEFAULT_LIMIT if not provided. - */ - limit?: number; - }; - /** Body_read_artifacts_artifacts_filter_post */ - Body_read_artifacts_artifacts_filter_post: { - /** @default ID_DESC */ - sort: components["schemas"]["ArtifactSort"]; - /** - * Offset - * @default 0 - */ - offset: number; - artifacts?: components["schemas"]["ArtifactFilter"]; - flow_runs?: components["schemas"]["FlowRunFilter"]; - task_runs?: components["schemas"]["TaskRunFilter"]; - flows?: components["schemas"]["FlowFilter"]; - deployments?: components["schemas"]["DeploymentFilter"]; - /** - * Limit - * @description Defaults to PREFECT_API_DEFAULT_LIMIT if not provided. - */ - limit?: number; - }; - /** Body_read_automations_automations_filter_post */ - Body_read_automations_automations_filter_post: { - /** @default NAME_ASC */ - sort: components["schemas"]["AutomationSort"]; - /** - * Offset - * @default 0 - */ - offset: number; - automations?: components["schemas"]["AutomationFilter"] | null; - /** - * Limit - * @description Defaults to PREFECT_API_DEFAULT_LIMIT if not provided. - */ - limit?: number; - }; - /** Body_read_block_documents_block_documents_filter_post */ - Body_read_block_documents_block_documents_filter_post: { - block_documents?: components["schemas"]["BlockDocumentFilter"] | null; - block_types?: components["schemas"]["BlockTypeFilter"] | null; - block_schemas?: components["schemas"]["BlockSchemaFilter"] | null; - /** - * Include Secrets - * @description Whether to include sensitive values in the block document. - * @default false - */ - include_secrets: boolean; - /** @default NAME_ASC */ - sort: components["schemas"]["BlockDocumentSort"] | null; - /** - * Offset - * @default 0 - */ - offset: number; - /** - * Limit - * @description Defaults to PREFECT_API_DEFAULT_LIMIT if not provided. - */ - limit?: number; - }; - /** Body_read_block_schemas_block_schemas_filter_post */ - Body_read_block_schemas_block_schemas_filter_post: { - block_schemas?: components["schemas"]["BlockSchemaFilter"] | null; - /** - * Offset - * @default 0 - */ - offset: number; - /** - * Limit - * @description Defaults to PREFECT_API_DEFAULT_LIMIT if not provided. - */ - limit?: number; - }; - /** Body_read_block_types_block_types_filter_post */ - Body_read_block_types_block_types_filter_post: { - block_types?: components["schemas"]["BlockTypeFilter"] | null; - block_schemas?: components["schemas"]["BlockSchemaFilter"] | null; - /** - * Offset - * @default 0 - */ - offset: number; - /** - * Limit - * @description Defaults to PREFECT_API_DEFAULT_LIMIT if not provided. - */ - limit?: number; - }; - /** Body_read_concurrency_limits_concurrency_limits_filter_post */ - Body_read_concurrency_limits_concurrency_limits_filter_post: { - /** - * Offset - * @default 0 - */ - offset: number; - /** - * Limit - * @description Defaults to PREFECT_API_DEFAULT_LIMIT if not provided. 
- */ - limit?: number; - }; - /** Body_read_dashboard_task_run_counts_ui_task_runs_dashboard_counts_post */ - Body_read_dashboard_task_run_counts_ui_task_runs_dashboard_counts_post: { - task_runs: components["schemas"]["TaskRunFilter"]; - flows?: components["schemas"]["FlowFilter"] | null; - flow_runs?: components["schemas"]["FlowRunFilter"] | null; - deployments?: components["schemas"]["DeploymentFilter"] | null; - work_pools?: components["schemas"]["WorkPoolFilter"] | null; - work_queues?: components["schemas"]["WorkQueueFilter"] | null; - }; - /** Body_read_deployments_deployments_filter_post */ - Body_read_deployments_deployments_filter_post: { - /** - * Offset - * @default 0 - */ - offset: number; - flows?: components["schemas"]["FlowFilter"]; - flow_runs?: components["schemas"]["FlowRunFilter"]; - task_runs?: components["schemas"]["TaskRunFilter"]; - deployments?: components["schemas"]["DeploymentFilter"]; - work_pools?: components["schemas"]["WorkPoolFilter"]; - work_pool_queues?: components["schemas"]["WorkQueueFilter"]; - /** @default NAME_ASC */ - sort: components["schemas"]["DeploymentSort"]; - /** - * Limit - * @description Defaults to PREFECT_API_DEFAULT_LIMIT if not provided. - */ - limit?: number; - }; - /** Body_read_events_events_filter_post */ - Body_read_events_events_filter_post: { - /** @description Additional optional filter criteria to narrow down the set of Events */ - filter?: components["schemas"]["EventFilter"] | null; - /** - * Limit - * @description The number of events to return with each page - * @default 50 - */ - limit: number; - }; - /** Body_read_flow_run_history_ui_flow_runs_history_post */ - Body_read_flow_run_history_ui_flow_runs_history_post: { - /** @default EXPECTED_START_TIME_DESC */ - sort: components["schemas"]["FlowRunSort"]; - /** - * Limit - * @default 1000 - */ - limit: number; - /** - * Offset - * @default 0 - */ - offset: number; - flows?: components["schemas"]["FlowFilter"]; - flow_runs?: components["schemas"]["FlowRunFilter"]; - task_runs?: components["schemas"]["TaskRunFilter"]; - deployments?: components["schemas"]["DeploymentFilter"]; - work_pools?: components["schemas"]["WorkPoolFilter"]; - }; - /** Body_read_flow_run_notification_policies_flow_run_notification_policies_filter_post */ - Body_read_flow_run_notification_policies_flow_run_notification_policies_filter_post: { - flow_run_notification_policy_filter?: components["schemas"]["FlowRunNotificationPolicyFilter"]; - /** - * Offset - * @default 0 - */ - offset: number; - /** - * Limit - * @description Defaults to PREFECT_API_DEFAULT_LIMIT if not provided. - */ - limit?: number; - }; - /** Body_read_flow_runs_flow_runs_filter_post */ - Body_read_flow_runs_flow_runs_filter_post: { - /** @default ID_DESC */ - sort: components["schemas"]["FlowRunSort"]; - /** - * Offset - * @default 0 - */ - offset: number; - flows?: components["schemas"]["FlowFilter"] | null; - flow_runs?: components["schemas"]["FlowRunFilter"] | null; - task_runs?: components["schemas"]["TaskRunFilter"] | null; - deployments?: components["schemas"]["DeploymentFilter"] | null; - work_pools?: components["schemas"]["WorkPoolFilter"] | null; - work_pool_queues?: components["schemas"]["WorkQueueFilter"] | null; - /** - * Limit - * @description Defaults to PREFECT_API_DEFAULT_LIMIT if not provided. 
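-     * @example
-     * // Illustrative sketch only: offset-based listing of flow runs, newest ID
-     * // first; the optional nested filters are omitted here.
-     * const body: components["schemas"]["Body_read_flow_runs_flow_runs_filter_post"] = {
-     *   sort: "ID_DESC",
-     *   offset: 0,
-     *   limit: 50,
-     * };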
- */ - limit?: number; - }; - /** Body_read_flows_flows_filter_post */ - Body_read_flows_flows_filter_post: { - /** - * Offset - * @default 0 - */ - offset: number; - flows?: components["schemas"]["FlowFilter"]; - flow_runs?: components["schemas"]["FlowRunFilter"]; - task_runs?: components["schemas"]["TaskRunFilter"]; - deployments?: components["schemas"]["DeploymentFilter"]; - work_pools?: components["schemas"]["WorkPoolFilter"]; - /** @default NAME_ASC */ - sort: components["schemas"]["FlowSort"]; - /** - * Limit - * @description Defaults to PREFECT_API_DEFAULT_LIMIT if not provided. - */ - limit?: number; - }; - /** Body_read_latest_artifacts_artifacts_latest_filter_post */ - Body_read_latest_artifacts_artifacts_latest_filter_post: { - /** @default ID_DESC */ - sort: components["schemas"]["ArtifactCollectionSort"]; - /** - * Offset - * @default 0 - */ - offset: number; - artifacts?: components["schemas"]["ArtifactCollectionFilter"]; - flow_runs?: components["schemas"]["FlowRunFilter"]; - task_runs?: components["schemas"]["TaskRunFilter"]; - flows?: components["schemas"]["FlowFilter"]; - deployments?: components["schemas"]["DeploymentFilter"]; - /** - * Limit - * @description Defaults to PREFECT_API_DEFAULT_LIMIT if not provided. - */ - limit?: number; - }; - /** Body_read_logs_logs_filter_post */ - Body_read_logs_logs_filter_post: { - /** - * Offset - * @default 0 - */ - offset: number; - logs?: components["schemas"]["LogFilter"]; - /** @default TIMESTAMP_ASC */ - sort: components["schemas"]["LogSort"]; - /** - * Limit - * @description Defaults to PREFECT_API_DEFAULT_LIMIT if not provided. - */ - limit?: number; - }; - /** Body_read_saved_searches_saved_searches_filter_post */ - Body_read_saved_searches_saved_searches_filter_post: { - /** - * Offset - * @default 0 - */ - offset: number; - /** - * Limit - * @description Defaults to PREFECT_API_DEFAULT_LIMIT if not provided. - */ - limit?: number; - }; - /** Body_read_task_run_counts_by_state_ui_task_runs_count_post */ - Body_read_task_run_counts_by_state_ui_task_runs_count_post: { - flows?: components["schemas"]["FlowFilter"] | null; - flow_runs?: components["schemas"]["FlowRunFilter"] | null; - task_runs?: components["schemas"]["TaskRunFilter"] | null; - deployments?: components["schemas"]["DeploymentFilter"] | null; - }; - /** Body_read_task_runs_task_runs_filter_post */ - Body_read_task_runs_task_runs_filter_post: { - /** @default ID_DESC */ - sort: components["schemas"]["TaskRunSort"]; - /** - * Offset - * @default 0 - */ - offset: number; - flows?: components["schemas"]["FlowFilter"] | null; - flow_runs?: components["schemas"]["FlowRunFilter"] | null; - task_runs?: components["schemas"]["TaskRunFilter"] | null; - deployments?: components["schemas"]["DeploymentFilter"] | null; - /** - * Limit - * @description Defaults to PREFECT_API_DEFAULT_LIMIT if not provided. - */ - limit?: number; - }; - /** Body_read_task_workers_task_workers_filter_post */ - Body_read_task_workers_task_workers_filter_post: { - /** @description The task worker filter */ - task_worker_filter?: components["schemas"]["TaskWorkerFilter"] | null; - }; - /** Body_read_variables_variables_filter_post */ - Body_read_variables_variables_filter_post: { - /** - * Offset - * @default 0 - */ - offset: number; - variables?: components["schemas"]["VariableFilter"] | null; - /** @default NAME_ASC */ - sort: components["schemas"]["VariableSort"]; - /** - * Limit - * @description Defaults to PREFECT_API_DEFAULT_LIMIT if not provided. 
- */ - limit?: number; - }; - /** Body_read_work_pools_work_pools_filter_post */ - Body_read_work_pools_work_pools_filter_post: { - work_pools?: components["schemas"]["WorkPoolFilter"] | null; - /** - * Offset - * @default 0 - */ - offset: number; - /** - * Limit - * @description Defaults to PREFECT_API_DEFAULT_LIMIT if not provided. - */ - limit?: number; - }; - /** Body_read_work_queue_runs_work_queues__id__get_runs_post */ - Body_read_work_queue_runs_work_queues__id__get_runs_post: { - /** - * Scheduled Before - * Format: date-time - * @description Only flow runs scheduled to start before this time will be returned. - */ - scheduled_before?: string; - /** - * Agent Id - * @description An optional unique identifier for the agent making this query. If provided, the Prefect REST API will track the last time this agent polled the work queue. - */ - agent_id?: string | null; - /** - * Limit - * @description Defaults to PREFECT_API_DEFAULT_LIMIT if not provided. - */ - limit?: number; - }; - /** Body_read_work_queues_work_pools__work_pool_name__queues_filter_post */ - Body_read_work_queues_work_pools__work_pool_name__queues_filter_post: { - work_queues?: components["schemas"]["WorkQueueFilter"]; - /** - * Offset - * @default 0 - */ - offset: number; - /** - * Limit - * @description Defaults to PREFECT_API_DEFAULT_LIMIT if not provided. - */ - limit?: number; - }; - /** Body_read_work_queues_work_queues_filter_post */ - Body_read_work_queues_work_queues_filter_post: { - /** - * Offset - * @default 0 - */ - offset: number; - work_queues?: components["schemas"]["WorkQueueFilter"]; - /** - * Limit - * @description Defaults to PREFECT_API_DEFAULT_LIMIT if not provided. - */ - limit?: number; - }; - /** Body_read_workers_work_pools__work_pool_name__workers_filter_post */ - Body_read_workers_work_pools__work_pool_name__workers_filter_post: { - workers?: components["schemas"]["WorkerFilter"] | null; - /** - * Offset - * @default 0 - */ - offset: number; - /** - * Limit - * @description Defaults to PREFECT_API_DEFAULT_LIMIT if not provided. - */ - limit?: number; - }; - /** Body_reset_concurrency_limit_by_tag_concurrency_limits_tag__tag__reset_post */ - Body_reset_concurrency_limit_by_tag_concurrency_limits_tag__tag__reset_post: { - /** - * Slot Override - * @description Manual override for active concurrency limit slots. - */ - slot_override?: string[] | null; - }; - /** Body_resume_flow_run_flow_runs__id__resume_post */ - Body_resume_flow_run_flow_runs__id__resume_post: { - /** Run Input */ - run_input?: Record | null; - }; - /** Body_schedule_deployment_deployments__id__schedule_post */ - Body_schedule_deployment_deployments__id__schedule_post: { - /** - * Start Time - * Format: date-time - * @description The earliest date to schedule - */ - start_time?: string; - /** - * End Time - * Format: date-time - * @description The latest date to schedule - */ - end_time?: string; - /** - * Min Time - * Format: time-delta - * @description Runs will be scheduled until at least this long after the `start_time` - */ - min_time?: number; - /** - * Min Runs - * @description The minimum number of runs to schedule - */ - min_runs?: number; - /** - * Max Runs - * @description The maximum number of runs to schedule - */ - max_runs?: number; - }; - /** Body_set_flow_run_state_flow_runs__id__set_state_post */ - Body_set_flow_run_state_flow_runs__id__set_state_post: { - /** @description The intended state. 
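-     * @example
-     * // Illustrative sketch only: the `StateCreate` shape is defined elsewhere
-     * // in this file; `type` is assumed here to take a `StateType` value.
-     * // With force=true, orchestration rules are bypassed per the field below.
-     * const body: components["schemas"]["Body_set_flow_run_state_flow_runs__id__set_state_post"] = {
-     *   state: { type: "CANCELLED" },
-     *   force: true,
-     * };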
*/ - state: components["schemas"]["StateCreate"]; - /** - * Force - * @description If false, orchestration rules will be applied that may alter or prevent the state transition. If True, orchestration rules are not applied. - * @default false - */ - force: boolean; - }; - /** Body_set_task_run_state_task_runs__id__set_state_post */ - Body_set_task_run_state_task_runs__id__set_state_post: { - /** @description The intended state. */ - state: components["schemas"]["StateCreate"]; - /** - * Force - * @description If false, orchestration rules will be applied that may alter or prevent the state transition. If True, orchestration rules are not applied. - * @default false - */ - force: boolean; - }; - /** Body_task_run_history_task_runs_history_post */ - Body_task_run_history_task_runs_history_post: { - /** - * History Start - * Format: date-time - * @description The history's start time. - */ - history_start: string; - /** - * History End - * Format: date-time - * @description The history's end time. - */ - history_end: string; - /** - * History Interval - * Format: time-delta - * @description The size of each history interval, in seconds. Must be at least 1 second. - */ - history_interval: number; - flows?: components["schemas"]["FlowFilter"]; - flow_runs?: components["schemas"]["FlowRunFilter"]; - task_runs?: components["schemas"]["TaskRunFilter"]; - deployments?: components["schemas"]["DeploymentFilter"]; - }; - /** Body_validate_obj_ui_schemas_validate_post */ - Body_validate_obj_ui_schemas_validate_post: { - /** Json Schema */ - json_schema: Record; - /** Values */ - values: Record; - }; - /** Body_worker_heartbeat_work_pools__work_pool_name__workers_heartbeat_post */ - Body_worker_heartbeat_work_pools__work_pool_name__workers_heartbeat_post: { - /** - * Name - * @description The worker process name - */ - name: string; - /** - * Heartbeat Interval Seconds - * @description The worker's heartbeat interval in seconds - */ - heartbeat_interval_seconds?: number | null; - }; - /** - * CallWebhook - * @description Call a webhook when an Automation is triggered. - */ - CallWebhook: { - /** - * Type - * @default call-webhook - * @constant - * @enum {string} - */ - type: "call-webhook"; - /** - * Block Document Id - * Format: uuid - * @description The identifier of the webhook block to use - */ - block_document_id: string; - /** - * Payload - * @description An optional templatable payload to send when calling the webhook. 
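-     * @example
-     * // Illustrative sketch only: a static JSON payload (the string may also be
-     * // templated, per the description above); the block document ID is a placeholder.
-     * const action: components["schemas"]["CallWebhook"] = {
-     *   type: "call-webhook",
-     *   block_document_id: "00000000-0000-0000-0000-000000000000",
-     *   payload: '{"message": "automation fired"}',
-     * };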
- * @default - */ - payload: string; - }; - /** - * CancelFlowRun - * @description Cancels a flow run associated with the trigger - */ - CancelFlowRun: { - /** - * Type - * @default cancel-flow-run - * @constant - * @enum {string} - */ - type: "cancel-flow-run"; - }; - /** - * ChangeFlowRunState - * @description Changes the state of a flow run associated with the trigger - */ - ChangeFlowRunState: { - /** - * Type - * @default change-flow-run-state - * @constant - * @enum {string} - */ - type: "change-flow-run-state"; - /** - * Name - * @description The name of the state to change the flow run to - */ - name?: string | null; - /** @description The type of the state to change the flow run to */ - state: components["schemas"]["StateType"]; - /** - * Message - * @description An optional message to associate with the state change - */ - message?: string | null; - }; - /** - * CompoundTrigger - * @description A composite trigger that requires some number of triggers to have - * fired within the given time period - */ - "CompoundTrigger-Input": { - /** - * Type - * @default compound - * @constant - * @enum {string} - */ - type: "compound"; - /** - * Id - * Format: uuid - * @description The unique ID of this trigger - */ - id?: string; - /** Triggers */ - triggers: ( - | components["schemas"]["EventTrigger"] - | components["schemas"]["CompoundTrigger-Input"] - | components["schemas"]["SequenceTrigger-Input"] - )[]; - /** Within */ - within: number | null; - /** Require */ - require: number | ("any" | "all"); - }; - /** - * CompoundTrigger - * @description A composite trigger that requires some number of triggers to have - * fired within the given time period - */ - "CompoundTrigger-Output": { - /** - * Type - * @default compound - * @constant - * @enum {string} - */ - type: "compound"; - /** - * Id - * Format: uuid - * @description The unique ID of this trigger - */ - id?: string; - /** Triggers */ - triggers: ( - | components["schemas"]["EventTrigger"] - | components["schemas"]["CompoundTrigger-Output"] - | components["schemas"]["SequenceTrigger-Output"] - )[]; - /** Within */ - within: number | null; - /** Require */ - require: number | ("any" | "all"); - }; - /** - * ConcurrencyLimit - * @description An ORM representation of a concurrency limit. - */ - ConcurrencyLimit: { - /** - * Id - * Format: uuid - */ - id?: string; - /** Created */ - created?: string | null; - /** Updated */ - updated?: string | null; - /** - * Tag - * @description A tag the concurrency limit is applied to. - */ - tag: string; - /** - * Concurrency Limit - * @description The concurrency limit. - */ - concurrency_limit: number; - /** - * Active Slots - * @description A list of active run ids using a concurrency slot - */ - active_slots?: string[]; - }; - /** - * ConcurrencyLimitCreate - * @description Data used by the Prefect REST API to create a concurrency limit. - */ - ConcurrencyLimitCreate: { - /** - * Tag - * @description A tag the concurrency limit is applied to. - */ - tag: string; - /** - * Concurrency Limit - * @description The concurrency limit. - */ - concurrency_limit: number; - }; - /** - * ConcurrencyLimitStrategy - * @description Enumeration of concurrency collision strategies. - * @enum {string} - */ - ConcurrencyLimitStrategy: "ENQUEUE" | "CANCEL_NEW"; - /** - * ConcurrencyLimitV2 - * @description An ORM representation of a v2 concurrency limit. 
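-     * @example
-     * // Illustrative sketch only: a create payload (see `ConcurrencyLimitV2Create`
-     * // below) that behaves like a rate limit via `slot_decay_per_second`; the
-     * // name and numbers are placeholders.
-     * const body: components["schemas"]["ConcurrencyLimitV2Create"] = {
-     *   active: true,
-     *   name: "db-writes",
-     *   limit: 10,
-     *   active_slots: 0,
-     *   denied_slots: 0,
-     *   slot_decay_per_second: 0.5,
-     * };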
- */ - ConcurrencyLimitV2: { - /** - * Id - * Format: uuid - */ - id?: string; - /** Created */ - created?: string | null; - /** Updated */ - updated?: string | null; - /** - * Active - * @description Whether the concurrency limit is active. - * @default true - */ - active: boolean; - /** - * Name - * @description The name of the concurrency limit. - */ - name: string; - /** - * Limit - * @description The concurrency limit. - */ - limit: number; - /** - * Active Slots - * @description The number of active slots. - * @default 0 - */ - active_slots: number; - /** - * Denied Slots - * @description The number of denied slots. - * @default 0 - */ - denied_slots: number; - /** - * Slot Decay Per Second - * @description The decay rate for active slots when used as a rate limit. - * @default 0 - */ - slot_decay_per_second: number; - /** - * Avg Slot Occupancy Seconds - * @description The average amount of time a slot is occupied. - * @default 2 - */ - avg_slot_occupancy_seconds: number; - }; - /** - * ConcurrencyLimitV2Create - * @description Data used by the Prefect REST API to create a v2 concurrency limit. - */ - ConcurrencyLimitV2Create: { - /** - * Active - * @description Whether the concurrency limit is active. - * @default true - */ - active: boolean; - /** - * Name - * @description The name of the concurrency limit. - */ - name: string; - /** - * Limit - * @description The concurrency limit. - */ - limit: number; - /** - * Active Slots - * @description The number of active slots. - * @default 0 - */ - active_slots: number; - /** - * Denied Slots - * @description The number of denied slots. - * @default 0 - */ - denied_slots: number; - /** - * Slot Decay Per Second - * @description The decay rate for active slots when used as a rate limit. - * @default 0 - */ - slot_decay_per_second: number; - }; - /** - * ConcurrencyLimitV2Update - * @description Data used by the Prefect REST API to update a v2 concurrency limit. - */ - ConcurrencyLimitV2Update: { - /** Active */ - active?: boolean | null; - /** Name */ - name?: string | null; - /** Limit */ - limit?: number | null; - /** Active Slots */ - active_slots?: number | null; - /** Denied Slots */ - denied_slots?: number | null; - /** Slot Decay Per Second */ - slot_decay_per_second?: number | null; - }; - /** - * ConcurrencyOptions - * @description Class for storing the concurrency config in database. - */ - ConcurrencyOptions: { - collision_strategy: components["schemas"]["ConcurrencyLimitStrategy"]; - }; - /** - * Constant - * @description Represents constant input value to a task run. - */ - Constant: { - /** - * Input Type - * @default constant - * @constant - * @enum {string} - */ - input_type: "constant"; - /** Type */ - type: string; - }; - /** CountByState */ - CountByState: { - /** - * Completed - * @default 0 - */ - COMPLETED: number; - /** - * Pending - * @default 0 - */ - PENDING: number; - /** - * Running - * @default 0 - */ - RUNNING: number; - /** - * Failed - * @default 0 - */ - FAILED: number; - /** - * Cancelled - * @default 0 - */ - CANCELLED: number; - /** - * Crashed - * @default 0 - */ - CRASHED: number; - /** - * Paused - * @default 0 - */ - PAUSED: number; - /** - * Cancelling - * @default 0 - */ - CANCELLING: number; - /** - * Scheduled - * @default 0 - */ - SCHEDULED: number; - }; - /** - * Countable - * @enum {string} - */ - Countable: "day" | "time" | "event" | "resource"; - /** CreatedBy */ - CreatedBy: { - /** - * Id - * @description The id of the creator of the object. 
- */ - id?: string | null; - /** - * Type - * @description The type of the creator of the object. - */ - type?: string | null; - /** - * Display Value - * @description The display value for the creator. - */ - display_value?: string | null; - }; - /** - * CronSchedule - * @description Cron schedule - * - * NOTE: If the timezone is a DST-observing one, then the schedule will adjust - * itself appropriately. Cron's rules for DST are based on schedule times, not - * intervals. This means that an hourly cron schedule will fire on every new - * schedule hour, not every elapsed hour; for example, when clocks are set back - * this will result in a two-hour pause as the schedule will fire *the first - * time* 1am is reached and *the first time* 2am is reached, 120 minutes later. - * Longer schedules, such as one that fires at 9am every morning, will - * automatically adjust for DST. - * - * Args: - * cron (str): a valid cron string - * timezone (str): a valid timezone string in IANA tzdata format (for example, - * America/New_York). - * day_or (bool, optional): Control how croniter handles `day` and `day_of_week` - * entries. Defaults to True, matching cron which connects those values using - * OR. If the switch is set to False, the values are connected using AND. This - * behaves like fcron and enables you to e.g. define a job that executes each - * 2nd friday of a month by setting the days of month and the weekday. - */ - CronSchedule: { - /** Cron */ - cron: string; - /** Timezone */ - timezone?: string | null; - /** - * Day Or - * @description Control croniter behavior for handling day and day_of_week entries. - * @default true - */ - day_or: boolean; - }; - /** CsrfToken */ - CsrfToken: { - /** - * Id - * Format: uuid - */ - id?: string; - /** Created */ - created?: string | null; - /** Updated */ - updated?: string | null; - /** - * Token - * @description The CSRF token - */ - token: string; - /** - * Client - * @description The client id associated with the CSRF token - */ - client: string; - /** - * Expiration - * Format: date-time - * @description The expiration time of the CSRF token - */ - expiration: string; - }; - /** DependencyResult */ - DependencyResult: { - /** - * Id - * Format: uuid - */ - id: string; - /** Name */ - name: string; - /** Upstream Dependencies */ - upstream_dependencies: components["schemas"]["TaskRunResult"][]; - state: components["schemas"]["State"] | null; - /** Expected Start Time */ - expected_start_time: string | null; - /** Start Time */ - start_time: string | null; - /** End Time */ - end_time: string | null; - /** Total Run Time */ - total_run_time: number | null; - /** Estimated Run Time */ - estimated_run_time: number | null; - /** Untrackable Result */ - untrackable_result: boolean; - }; - /** - * DeploymentCreate - * @description Data used by the Prefect REST API to create a deployment. - */ - DeploymentCreate: { - /** - * Name - * @description The name of the deployment. - */ - name: string; - /** - * Flow Id - * Format: uuid - * @description The ID of the flow associated with the deployment. - */ - flow_id: string; - /** - * Paused - * @description Whether or not the deployment is paused. - * @default false - */ - paused: boolean; - /** - * Schedules - * @description A list of schedules for the deployment. - */ - schedules?: components["schemas"]["DeploymentScheduleCreate"][]; - /** - * Concurrency Limit - * @description The deployment's concurrency limit. 
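- *
- * Illustrative note (an assumption, not from the spec): e.g. a value of 5
- * would allow at most five flow runs from this deployment to be active at
- * once; `concurrency_options` below governs what happens to runs over the
- * limit via `collision_strategy` (ENQUEUE or CANCEL_NEW).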
- */
- concurrency_limit?: number | null;
- /** @description The deployment's concurrency options. */
- concurrency_options?: components["schemas"]["ConcurrencyOptions"] | null;
- /**
- * Enforce Parameter Schema
- * @description Whether or not the deployment should enforce the parameter schema.
- * @default true
- */
- enforce_parameter_schema: boolean;
- /**
- * Parameter Openapi Schema
- * @description The parameter schema of the flow, including defaults.
- */
- parameter_openapi_schema?: Record<string, never> | null;
- /**
- * Parameters
- * @description Parameters for flow runs scheduled by the deployment.
- */
- parameters?: Record<string, never>;
- /**
- * Tags
- * @description A list of deployment tags.
- */
- tags?: string[];
- /** Pull Steps */
- pull_steps?: Record<string, never>[] | null;
- /** Work Queue Name */
- work_queue_name?: string | null;
- /**
- * Work Pool Name
- * @description The name of the deployment's work pool.
- */
- work_pool_name?: string | null;
- /** Storage Document Id */
- storage_document_id?: string | null;
- /** Infrastructure Document Id */
- infrastructure_document_id?: string | null;
- /** Description */
- description?: string | null;
- /** Path */
- path?: string | null;
- /** Version */
- version?: string | null;
- /** Entrypoint */
- entrypoint?: string | null;
- /**
- * Job Variables
- * @description Overrides for the flow's infrastructure configuration.
- */
- job_variables?: Record<string, never>;
- };
- /**
- * DeploymentFilter
- * @description Filter for deployments. Only deployments matching all criteria will be returned.
- */
- DeploymentFilter: {
- /**
- * @description Operator for combining filter criteria. Defaults to 'and_'.
- * @default and_
- */
- operator: components["schemas"]["Operator"];
- /** @description Filter criteria for `Deployment.id` */
- id?: components["schemas"]["DeploymentFilterId"] | null;
- /** @description Filter criteria for `Deployment.name` */
- name?: components["schemas"]["DeploymentFilterName"] | null;
- /** @description Filter criteria for `Deployment.name` or `Flow.name` */
- flow_or_deployment_name?:
- | components["schemas"]["DeploymentOrFlowNameFilter"]
- | null;
- /** @description Filter criteria for `Deployment.paused` */
- paused?: components["schemas"]["DeploymentFilterPaused"] | null;
- /** @description Filter criteria for `Deployment.tags` */
- tags?: components["schemas"]["DeploymentFilterTags"] | null;
- /** @description Filter criteria for `Deployment.work_queue_name` */
- work_queue_name?:
- | components["schemas"]["DeploymentFilterWorkQueueName"]
- | null;
- /**
- * @deprecated
- * @description DEPRECATED: Prefer `Deployment.concurrency_limit_id` over `Deployment.concurrency_limit`. If provided, will be ignored for backwards-compatibility. Will be removed after December 2024.
- */
- concurrency_limit?:
- | components["schemas"]["DeploymentFilterConcurrencyLimit"]
- | null;
- };
- /**
- * DeploymentFilterConcurrencyLimit
- * @description DEPRECATED: Prefer `Deployment.concurrency_limit_id` over `Deployment.concurrency_limit`.
- */
- DeploymentFilterConcurrencyLimit: {
- /**
- * Ge
- * @description Only include deployments with a concurrency limit greater than or equal to this value
- */
- ge_?: number | null;
- /**
- * Le
- * @description Only include deployments with a concurrency limit less than or equal to this value
- */
- le_?: number | null;
- /**
- * Is Null
- * @description If true, only include deployments without a concurrency limit
- */
- is_null_?: boolean | null;
- };
- /**
- * DeploymentFilterId
- * @description Filter by `Deployment.id`.
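- *
- * Hedged, illustrative sketch only (ids invented): a filter body selecting
- * two specific deployments might look like
- * @example
- * {
- *   "any_": [
- *     "11111111-1111-1111-1111-111111111111",
- *     "22222222-2222-2222-2222-222222222222"
- *   ]
- * }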
- */
- DeploymentFilterId: {
- /**
- * Any
- * @description A list of deployment ids to include
- */
- any_?: string[] | null;
- };
- /**
- * DeploymentFilterName
- * @description Filter by `Deployment.name`.
- */
- DeploymentFilterName: {
- /**
- * Any
- * @description A list of deployment names to include
- */
- any_?: string[] | null;
- /**
- * Like
- * @description A case-insensitive partial match. For example, passing 'marvin' will match 'marvin', 'sad-Marvin', and 'marvin-robot'.
- */
- like_?: string | null;
- };
- /**
- * DeploymentFilterPaused
- * @description Filter by `Deployment.paused`.
- */
- DeploymentFilterPaused: {
- /**
- * Eq
- * @description Only include deployments that are/are not paused
- */
- eq_?: boolean | null;
- };
- /**
- * DeploymentFilterTags
- * @description Filter by `Deployment.tags`.
- */
- DeploymentFilterTags: {
- /**
- * @description Operator for combining filter criteria. Defaults to 'and_'.
- * @default and_
- */
- operator: components["schemas"]["Operator"];
- /**
- * All
- * @description A list of tags. Deployments will be returned only if their tags are a superset of the list
- */
- all_?: string[] | null;
- /**
- * Is Null
- * @description If true, only include deployments without tags
- */
- is_null_?: boolean | null;
- };
- /**
- * DeploymentFilterWorkQueueName
- * @description Filter by `Deployment.work_queue_name`.
- */
- DeploymentFilterWorkQueueName: {
- /**
- * Any
- * @description A list of work queue names to include
- */
- any_?: string[] | null;
- };
- /**
- * DeploymentFlowRunCreate
- * @description Data used by the Prefect REST API to create a flow run from a deployment.
- */
- DeploymentFlowRunCreate: {
- /** @description The state of the flow run to create */
- state?: components["schemas"]["StateCreate"] | null;
- /**
- * Name
- * @description The name of the flow run. Defaults to a random slug if not specified.
- */
- name?: string;
- /** Parameters */
- parameters?: Record<string, never>;
- /**
- * Enforce Parameter Schema
- * @description Whether or not to enforce the parameter schema on this run.
- */
- enforce_parameter_schema?: boolean | null;
- /** Context */
- context?: Record<string, never>;
- /** Infrastructure Document Id */
- infrastructure_document_id?: string | null;
- /** @description The empirical policy for the flow run. */
- empirical_policy?: components["schemas"]["FlowRunPolicy"];
- /**
- * Tags
- * @description A list of tags for the flow run.
- */
- tags?: string[];
- /**
- * Idempotency Key
- * @description An optional idempotency key. If a flow run with the same idempotency key has already been created, the existing flow run will be returned.
- */
- idempotency_key?: string | null;
- /** Parent Task Run Id */
- parent_task_run_id?: string | null;
- /** Work Queue Name */
- work_queue_name?: string | null;
- /** Job Variables */
- job_variables?: Record<string, never> | null;
- };
- /**
- * DeploymentOrFlowNameFilter
- * @description Filter by `Deployment.name` or `Flow.name` with a single input string for ilike filtering.
- */
- DeploymentOrFlowNameFilter: {
- /**
- * Like
- * @description A case-insensitive partial match on deployment or flow names. For example, passing 'example' might match deployments or flows with 'example' in their names.
- */
- like_?: string | null;
- };
- /** DeploymentPaginationResponse */
- DeploymentPaginationResponse: {
- /** Results */
- results: components["schemas"]["DeploymentResponse"][];
- /** Count */
- count: number;
- /** Limit */
- limit: number;
- /** Pages */
- pages: number;
- /** Page */
- page: number;
- };
- /** DeploymentResponse */
- DeploymentResponse: {
- /**
- * Id
- * Format: uuid
- */
- id?: string;
- /** Created */
- created?: string | null;
- /** Updated */
- updated?: string | null;
- /**
- * Name
- * @description The name of the deployment.
- */
- name: string;
- /**
- * Version
- * @description An optional version for the deployment.
- */
- version?: string | null;
- /**
- * Description
- * @description A description for the deployment.
- */
- description?: string | null;
- /**
- * Flow Id
- * Format: uuid
- * @description The flow id associated with the deployment.
- */
- flow_id: string;
- /**
- * Paused
- * @description Whether or not the deployment is paused.
- * @default false
- */
- paused: boolean;
- /**
- * Schedules
- * @description A list of schedules for the deployment.
- */
- schedules?: components["schemas"]["DeploymentSchedule"][];
- /**
- * Concurrency Limit
- * @deprecated
- * @description DEPRECATED: Prefer `global_concurrency_limit`. Will always be None for backwards compatibility. Will be removed after December 2024.
- */
- concurrency_limit?: number | null;
- /** @description The global concurrency limit object for enforcing the maximum number of flow runs that can be active at once. */
- global_concurrency_limit?:
- | components["schemas"]["GlobalConcurrencyLimitResponse"]
- | null;
- /** @description The concurrency options for the deployment. */
- concurrency_options?: components["schemas"]["ConcurrencyOptions"] | null;
- /**
- * Job Variables
- * @description Overrides to apply to the base infrastructure block at runtime.
- */
- job_variables?: Record<string, never>;
- /**
- * Parameters
- * @description Parameters for flow runs scheduled by the deployment.
- */
- parameters?: Record<string, never>;
- /**
- * Tags
- * @description A list of tags for the deployment
- */
- tags?: string[];
- /**
- * Work Queue Name
- * @description The work queue for the deployment. If no work queue is set, work will not be scheduled.
- */
- work_queue_name?: string | null;
- /**
- * Last Polled
- * @description The last time the deployment was polled for status updates.
- */
- last_polled?: string | null;
- /**
- * Parameter Openapi Schema
- * @description The parameter schema of the flow, including defaults.
- */
- parameter_openapi_schema?: Record<string, never> | null;
- /**
- * Path
- * @description The path to the working directory for the workflow, relative to remote storage or an absolute path.
- */
- path?: string | null;
- /**
- * Pull Steps
- * @description Pull steps for cloning and running this deployment.
- */
- pull_steps?: Record<string, never>[] | null;
- /**
- * Entrypoint
- * @description The path to the entrypoint for the workflow, relative to the `path`.
- */
- entrypoint?: string | null;
- /**
- * Storage Document Id
- * @description The block document defining storage used for this flow.
- */
- storage_document_id?: string | null;
- /**
- * Infrastructure Document Id
- * @description The block document defining infrastructure to use for flow runs.
- */
- infrastructure_document_id?: string | null;
- /** @description Optional information about the creator of this deployment. 
*/ - created_by?: components["schemas"]["CreatedBy"] | null; - /** @description Optional information about the updater of this deployment. */ - updated_by?: components["schemas"]["UpdatedBy"] | null; - /** - * Work Pool Name - * @description The name of the deployment's work pool. - */ - work_pool_name?: string | null; - /** - * @description Whether the deployment is ready to run flows. - * @default NOT_READY - */ - status: components["schemas"]["DeploymentStatus"] | null; - /** - * Enforce Parameter Schema - * @description Whether or not the deployment should enforce the parameter schema. - * @default true - */ - enforce_parameter_schema: boolean; - }; - /** DeploymentSchedule */ - DeploymentSchedule: { - /** - * Id - * Format: uuid - */ - id?: string; - /** Created */ - created?: string | null; - /** Updated */ - updated?: string | null; - /** - * Deployment Id - * @description The deployment id associated with this schedule. - */ - deployment_id?: string | null; - /** - * Schedule - * @description The schedule for the deployment. - */ - schedule: - | components["schemas"]["IntervalSchedule"] - | components["schemas"]["CronSchedule"] - | components["schemas"]["RRuleSchedule"]; - /** - * Active - * @description Whether or not the schedule is active. - * @default true - */ - active: boolean; - /** - * Max Scheduled Runs - * @description The maximum number of scheduled runs for the schedule. - */ - max_scheduled_runs?: number | null; - }; - /** DeploymentScheduleCreate */ - DeploymentScheduleCreate: { - /** - * Active - * @description Whether or not the schedule is active. - * @default true - */ - active: boolean; - /** - * Schedule - * @description The schedule for the deployment. - */ - schedule: - | components["schemas"]["IntervalSchedule"] - | components["schemas"]["CronSchedule"] - | components["schemas"]["RRuleSchedule"]; - /** - * Max Scheduled Runs - * @description The maximum number of scheduled runs for the schedule. - */ - max_scheduled_runs?: number | null; - }; - /** DeploymentScheduleUpdate */ - DeploymentScheduleUpdate: { - /** - * Active - * @description Whether or not the schedule is active. - */ - active?: boolean | null; - /** - * Schedule - * @description The schedule for the deployment. - */ - schedule?: - | components["schemas"]["IntervalSchedule"] - | components["schemas"]["CronSchedule"] - | components["schemas"]["RRuleSchedule"] - | null; - /** - * Max Scheduled Runs - * @description The maximum number of scheduled runs for the schedule. - */ - max_scheduled_runs?: number | null; - }; - /** - * DeploymentSort - * @description Defines deployment sorting options. - * @enum {string} - */ - DeploymentSort: "CREATED_DESC" | "UPDATED_DESC" | "NAME_ASC" | "NAME_DESC"; - /** - * DeploymentStatus - * @description Enumeration of deployment statuses. - * @enum {string} - */ - DeploymentStatus: "READY" | "NOT_READY"; - /** - * DeploymentUpdate - * @description Data used by the Prefect REST API to update a deployment. - */ - DeploymentUpdate: { - /** Version */ - version?: string | null; - /** Description */ - description?: string | null; - /** - * Paused - * @description Whether or not the deployment is paused. - * @default false - */ - paused: boolean; - /** - * Schedules - * @description A list of schedules for the deployment. - */ - schedules?: components["schemas"]["DeploymentScheduleCreate"][]; - /** - * Concurrency Limit - * @description The deployment's concurrency limit. - */ - concurrency_limit?: number | null; - /** @description The deployment's concurrency options. 
*/
- concurrency_options?: components["schemas"]["ConcurrencyOptions"] | null;
- /**
- * Parameters
- * @description Parameters for flow runs scheduled by the deployment.
- */
- parameters?: Record<string, never> | null;
- /**
- * Tags
- * @description A list of deployment tags.
- */
- tags?: string[];
- /** Work Queue Name */
- work_queue_name?: string | null;
- /**
- * Work Pool Name
- * @description The name of the deployment's work pool.
- */
- work_pool_name?: string | null;
- /** Path */
- path?: string | null;
- /**
- * Job Variables
- * @description Overrides for the flow's infrastructure configuration.
- */
- job_variables?: Record<string, never> | null;
- /** Entrypoint */
- entrypoint?: string | null;
- /** Storage Document Id */
- storage_document_id?: string | null;
- /** Infrastructure Document Id */
- infrastructure_document_id?: string | null;
- /**
- * Enforce Parameter Schema
- * @description Whether or not the deployment should enforce the parameter schema.
- */
- enforce_parameter_schema?: boolean | null;
- };
- /**
- * DoNothing
- * @description Do nothing when an Automation is triggered
- */
- DoNothing: {
- /**
- * Type
- * @default do-nothing
- * @constant
- * @enum {string}
- */
- type: "do-nothing";
- };
- /** Edge */
- Edge: {
- /**
- * Id
- * Format: uuid
- */
- id: string;
- };
- /**
- * Event
- * @description The client-side view of an event that has happened to a Resource
- */
- Event: {
- /**
- * Occurred
- * Format: date-time
- * @description When the event happened from the sender's perspective
- */
- occurred: string;
- /**
- * Event
- * @description The name of the event that happened
- */
- event: string;
- /** @description The primary Resource this event concerns */
- resource: components["schemas"]["Resource"];
- /**
- * Related
- * @description A list of additional Resources involved in this event
- */
- related?: components["schemas"]["RelatedResource"][];
- /**
- * Payload
- * @description An open-ended set of data describing what happened
- */
- payload?: Record<string, never>;
- /**
- * Id
- * Format: uuid
- * @description The client-provided identifier of this event
- */
- id: string;
- /**
- * Follows
- * @description The ID of an event that is known to have occurred prior to this one. If set, this may be used to establish a more precise ordering of causally-related events when they occur close enough together in time that the system may receive them out-of-order. 
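- *
- * Illustrative note (an assumption, not from the spec): a client emitting
- * an event B caused by an earlier event A would set B's `follows` to A's
- * `id`, letting the server order the pair even when B happens to arrive
- * first.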
- */ - follows?: string | null; - }; - /** EventAnyResourceFilter */ - EventAnyResourceFilter: { - /** - * Id - * @description Only include events for resources with these IDs - */ - id?: string[] | null; - /** - * Id Prefix - * @description Only include events for resources with IDs starting with these prefixes - */ - id_prefix?: string[] | null; - /** @description Only include events for related resources with these labels */ - labels?: components["schemas"]["ResourceSpecification"] | null; - }; - /** - * EventCount - * @description The count of events with the given filter value - */ - EventCount: { - /** - * Value - * @description The value to use for filtering - */ - value: string; - /** - * Label - * @description The value to display for this count - */ - label: string; - /** - * Count - * @description The count of matching events - */ - count: number; - /** - * Start Time - * Format: date-time - * @description The start time of this group of events - */ - start_time: string; - /** - * End Time - * Format: date-time - * @description The end time of this group of events - */ - end_time: string; - }; - /** EventFilter */ - EventFilter: { - /** @description Filter criteria for when the events occurred */ - occurred?: components["schemas"]["EventOccurredFilter"]; - /** @description Filter criteria for the event name */ - event?: components["schemas"]["EventNameFilter"] | null; - /** @description Filter criteria for any resource involved in the event */ - any_resource?: components["schemas"]["EventAnyResourceFilter"] | null; - /** @description Filter criteria for the resource of the event */ - resource?: components["schemas"]["EventResourceFilter"] | null; - /** @description Filter criteria for the related resources of the event */ - related?: components["schemas"]["EventRelatedFilter"] | null; - /** @description Filter criteria for the events' ID */ - id?: components["schemas"]["EventIDFilter"]; - /** - * @description The order to return filtered events - * @default DESC - */ - order: components["schemas"]["EventOrder"]; - }; - /** EventIDFilter */ - EventIDFilter: { - /** - * Id - * @description Only include events with one of these IDs - */ - id?: string[] | null; - }; - /** EventNameFilter */ - EventNameFilter: { - /** - * Prefix - * @description Only include events matching one of these prefixes - */ - prefix?: string[] | null; - /** - * Exclude Prefix - * @description Exclude events matching one of these prefixes - */ - exclude_prefix?: string[] | null; - /** - * Name - * @description Only include events matching one of these names exactly - */ - name?: string[] | null; - /** - * Exclude Name - * @description Exclude events matching one of these names exactly - */ - exclude_name?: string[] | null; - }; - /** EventOccurredFilter */ - EventOccurredFilter: { - /** - * Since - * Format: date-time - * @description Only include events after this time (inclusive) - */ - since?: string; - /** - * Until - * Format: date-time - * @description Only include events prior to this time (inclusive) - */ - until?: string; - }; - /** - * EventOrder - * @enum {string} - */ - EventOrder: "ASC" | "DESC"; - /** - * EventPage - * @description A single page of events returned from the API, with an optional link to the - * next page of results - */ - EventPage: { - /** - * Events - * @description The Events matching the query - */ - events: components["schemas"]["ReceivedEvent"][]; - /** - * Total - * @description The total number of matching Events - */ - total: number; - /** - * Next Page - * @description 
The URL for the next page of results, if there are more
- */
- next_page: string | null;
- };
- /** EventRelatedFilter */
- EventRelatedFilter: {
- /**
- * Id
- * @description Only include events for related resources with these IDs
- */
- id?: string[] | null;
- /**
- * Role
- * @description Only include events for related resources in these roles
- */
- role?: string[] | null;
- /**
- * Resources In Roles
- * @description Only include events with specific related resources in specific roles
- */
- resources_in_roles?: [string, string][] | null;
- /** @description Only include events for related resources with these labels */
- labels?: components["schemas"]["ResourceSpecification"] | null;
- };
- /** EventResourceFilter */
- EventResourceFilter: {
- /**
- * Id
- * @description Only include events for resources with these IDs
- */
- id?: string[] | null;
- /**
- * Id Prefix
- * @description Only include events for resources with IDs starting with these prefixes.
- */
- id_prefix?: string[] | null;
- /** @description Only include events for resources with these labels */
- labels?: components["schemas"]["ResourceSpecification"] | null;
- /**
- * Distinct
- * @description Only include events for distinct resources
- * @default false
- */
- distinct: boolean;
- };
- /**
- * EventTrigger
- * @description A trigger that fires based on the presence or absence of events within a given
- * period of time.
- */
- EventTrigger: {
- /**
- * Type
- * @default event
- * @constant
- * @enum {string}
- */
- type: "event";
- /**
- * Id
- * Format: uuid
- * @description The unique ID of this trigger
- */
- id?: string;
- /** @description Labels for resources which this trigger will match. */
- match?: components["schemas"]["ResourceSpecification"];
- /** @description Labels for related resources which this trigger will match. */
- match_related?: components["schemas"]["ResourceSpecification"];
- /**
- * After
- * @description The event(s) which must first have been seen to fire this trigger. If empty, then fire this trigger immediately. Events may include trailing wildcards, like `prefect.flow-run.*`
- */
- after?: string[];
- /**
- * Expect
- * @description The event(s) this trigger is expecting to see. If empty, this trigger will match any event. Events may include trailing wildcards, like `prefect.flow-run.*`
- */
- expect?: string[];
- /**
- * For Each
- * @description Evaluate the trigger separately for each distinct value of these labels on the resource. By default, labels refer to the primary resource of the triggering event. You may also refer to labels from related resources by specifying `related::