			<Typography variant="h2">Concurrency</Typography>
			<ConcurrencyTabs
				globalView={<GlobalConcurrencyView />}
				taskRunView={<TaskRunConcurrencyView />}
			/>
diff --git a/ui-v2/src/components/concurrency/concurrency-tabs.tsx b/ui-v2/src/components/concurrency/concurrency-tabs.tsx
index b3396114bca5..2c7fdd94d6b9 100644
--- a/ui-v2/src/components/concurrency/concurrency-tabs.tsx
+++ b/ui-v2/src/components/concurrency/concurrency-tabs.tsx
@@ -1,34 +1,81 @@
import { Tabs, TabsContent, TabsList, TabsTrigger } from "@/components/ui/tabs";
-import { TAB_OPTIONS, TabOptions } from "./concurrency-constants";
+import { TabOptions } from "@/routes/concurrency-limits";
+import { getRouteApi } from "@tanstack/react-router";
+
+const routeApi = getRouteApi("/concurrency-limits");
+
+type TabOptionValues = {
+	/** Value of the tab search param in the URL */
+ tabSearchValue: TabOptions;
+ /** Display value for the UI */
+ displayValue: string;
+};
+
+/** Maps url tab option to visual name */
+const TAB_OPTIONS: Record<TabOptions, TabOptionValues> = {
+ global: {
+ tabSearchValue: "global",
+ displayValue: "Global",
+ },
+ ["task-run"]: {
+ tabSearchValue: "task-run",
+ displayValue: "Task Run",
+ },
+} as const;
type Props = {
globalView: React.ReactNode;
- onValueChange: (value: TabOptions) => void;
taskRunView: React.ReactNode;
- value: TabOptions;
};
// TODO: Move Tabs for navigation to a generic styled component
export const ConcurrencyTabs = ({
globalView,
- onValueChange,
taskRunView,
- value,
}: Props): JSX.Element => {
+ const { tab } = routeApi.useSearch();
+ const navigate = routeApi.useNavigate();
+
 	return (
-		<Tabs
-			value={value}
-			onValueChange={(value) => onValueChange(value as TabOptions)}
-		>
+		<Tabs value={tab}>
 			<TabsList>
-				<TabsTrigger value="Global">{TAB_OPTIONS.Global}</TabsTrigger>
-				<TabsTrigger value="Task Run">{TAB_OPTIONS["Task Run"]}</TabsTrigger>
+				<TabsTrigger
+					value={TAB_OPTIONS.global.tabSearchValue}
+					onClick={() => {
+						void navigate({
+							to: "/concurrency-limits",
+							search: (prev) => ({
+								...prev,
+								tab: TAB_OPTIONS.global.tabSearchValue,
+							}),
+						});
+					}}
+				>
+					{TAB_OPTIONS.global.displayValue}
+				</TabsTrigger>
+				<TabsTrigger
+					value={TAB_OPTIONS["task-run"].tabSearchValue}
+					onClick={() => {
+						void navigate({
+							to: "/concurrency-limits",
+							search: (prev) => ({
+								...prev,
+								tab: TAB_OPTIONS["task-run"].tabSearchValue,
+							}),
+						});
+					}}
+				>
+					{TAB_OPTIONS["task-run"].displayValue}
+				</TabsTrigger>
 			</TabsList>
-			<TabsContent value="Global">{globalView}</TabsContent>
-			<TabsContent value="Task Run">{taskRunView}</TabsContent>
+			<TabsContent value={TAB_OPTIONS.global.tabSearchValue}>
+				{globalView}
+			</TabsContent>
+			<TabsContent value={TAB_OPTIONS["task-run"].tabSearchValue}>
+				{taskRunView}
+			</TabsContent>
 		</Tabs>
 	);
};
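With this change the selected tab lives entirely in the URL: `routeApi.useSearch()` reads the validated `tab` param and `useNavigate()` writes it back, so the component no longer needs `value`/`onValueChange` props. A minimal sketch of the same pattern, assuming a hypothetical `/settings` route with a `view` search param standing in for the real route and param:

```tsx
import { getRouteApi } from "@tanstack/react-router";

// Hypothetical route id and search param, standing in for
// "/concurrency-limits" and its `tab` param.
const routeApi = getRouteApi("/settings");

export const ViewSwitcher = () => {
	// `useSearch` returns params already validated by the route's
	// `validateSearch`, so `view` carries the narrowed union type.
	const { view } = routeApi.useSearch();
	const navigate = routeApi.useNavigate();

	const setView = (view: "general" | "advanced") =>
		// Spread `prev` so unrelated search params survive the update.
		void navigate({ search: (prev) => ({ ...prev, view }) });

	return (
		<div>
			<button onClick={() => setView("general")}>General</button>
			<button onClick={() => setView("advanced")}>Advanced</button>
			<p>Current view: {view}</p>
		</div>
	);
};
```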
diff --git a/ui-v2/src/components/concurrency/global-concurrency-view/index.tsx b/ui-v2/src/components/concurrency/global-concurrency-view/index.tsx
index 20b5b7b78a63..033d5fb162ed 100644
--- a/ui-v2/src/components/concurrency/global-concurrency-view/index.tsx
+++ b/ui-v2/src/components/concurrency/global-concurrency-view/index.tsx
@@ -1,9 +1,13 @@
+import { useListGlobalConcurrencyLimits } from "@/hooks/global-concurrency-limits";
import { useState } from "react";
+
import { GlobalConcurrencyLimitsHeader } from "./global-concurrency-limits-header";
export const GlobalConcurrencyView = () => {
const [showAddDialog, setShowAddDialog] = useState(false);
+ const { data } = useListGlobalConcurrencyLimits();
+
const openAddDialog = () => setShowAddDialog(true);
const closeAddDialog = () => setShowAddDialog(false);
@@ -12,6 +16,12 @@ export const GlobalConcurrencyView = () => {
 	return (
 		<>
 			<GlobalConcurrencyLimitsHeader onAdd={openAddDialog} />
+			<div>TODO</div>
+			<ul>
+				{data.map((limit) => (
+					<li key={limit.id}>{JSON.stringify(limit)}</li>
+				))}
+			</ul>
 			{showAddDialog && <div>TODO: DIALOG</div>}
 		</>
 	);
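Because `useListGlobalConcurrencyLimits` is now backed by `useSuspenseQuery`, `data` here is never `undefined`; the component suspends while loading, so an ancestor must provide a Suspense boundary (the route supplies one via `wrapInSuspense: true`, as seen in the routes diff below). A minimal sketch of that contract, with a hypothetical stand-in for the real view:

```tsx
import { Suspense } from "react";

// Stand-in for GlobalConcurrencyView: suspense-based hooks throw a
// promise while loading, so this only ever renders with data present.
const LimitsView = () => <ul>{/* rows rendered from query data */}</ul>;

export const Page = () => (
	<Suspense fallback={<p>Loading concurrency limits…</p>}>
		<LimitsView />
	</Suspense>
);
```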
diff --git a/ui-v2/src/hooks/global-concurrency-limits.ts b/ui-v2/src/hooks/global-concurrency-limits.ts
index 1fd78bb4fd23..cf3c5473a62e 100644
--- a/ui-v2/src/hooks/global-concurrency-limits.ts
+++ b/ui-v2/src/hooks/global-concurrency-limits.ts
@@ -1,10 +1,11 @@
import type { components } from "@/api/prefect";
import { getQueryService } from "@/api/service";
import {
+ QueryClient,
queryOptions,
useMutation,
- useQuery,
useQueryClient,
+ useSuspenseQuery,
} from "@tanstack/react-query";
export type GlobalConcurrencyLimit =
@@ -31,7 +32,7 @@ export const queryKeyFactory = {
// ----- 🔑 Queries 🗄️
// ----------------------------
export const buildListGlobalConcurrencyLimitsQuery = (
- filter: GlobalConcurrencyLimitsFilter,
+ filter: GlobalConcurrencyLimitsFilter = { offset: 0 },
) =>
queryOptions({
queryKey: queryKeyFactory.list(filter),
@@ -47,11 +48,19 @@ export const buildListGlobalConcurrencyLimitsQuery = (
/**
*
* @param filter
- * @returns list of global concurrency limits as a QueryResult object
+ * @returns list of global concurrency limits as a SuspenseQueryResult object
*/
+
export const useListGlobalConcurrencyLimits = (
- filter: GlobalConcurrencyLimitsFilter,
-) => useQuery(buildListGlobalConcurrencyLimitsQuery(filter));
+ filter: GlobalConcurrencyLimitsFilter = { offset: 0 },
+) => useSuspenseQuery(buildListGlobalConcurrencyLimitsQuery(filter));
+
+useListGlobalConcurrencyLimits.loader = ({
+ context,
+}: {
+ context: { queryClient: QueryClient };
+}) =>
+ context.queryClient.ensureQueryData(buildListGlobalConcurrencyLimitsQuery());
// ----- ✍🏼 Mutations 🗄️
// ----------------------------
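The loader/hook pair above follows the standard router-plus-query prefetch pattern: the route loader calls `ensureQueryData` to warm the cache before render, and the component reads the same key with `useSuspenseQuery`, which never returns `undefined`. A condensed sketch of the pattern, with a hypothetical `fetchLimits` in place of the generated Prefect API client:

```ts
import {
	QueryClient,
	queryOptions,
	useSuspenseQuery,
} from "@tanstack/react-query";

// Hypothetical fetcher standing in for getQueryService() calls.
const fetchLimits = async (offset: number) =>
	[] as Array<{ id: string; name: string; limit: number }>;

// Shared query definition: loader and hook must agree on the key
// so they read and write the same cache entry.
const buildLimitsQuery = (offset = 0) =>
	queryOptions({
		queryKey: ["global-concurrency-limits", "list", { offset }],
		queryFn: () => fetchLimits(offset),
	});

// Route loader: resolves (or reuses) the data before the page renders.
export const loader = ({
	context,
}: {
	context: { queryClient: QueryClient };
}) => context.queryClient.ensureQueryData(buildLimitsQuery());

// Component hook: suspends instead of exposing an undefined `data`.
export const useLimits = (offset = 0) =>
	useSuspenseQuery(buildLimitsQuery(offset));
```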
diff --git a/ui-v2/src/routes/concurrency-limits.tsx b/ui-v2/src/routes/concurrency-limits.tsx
index e425901c1452..c5bfdaebe741 100644
--- a/ui-v2/src/routes/concurrency-limits.tsx
+++ b/ui-v2/src/routes/concurrency-limits.tsx
@@ -1,11 +1,24 @@
+import { ConcurrencyPage } from "@/components/concurrency/concurrency-page";
+import { useListGlobalConcurrencyLimits } from "@/hooks/global-concurrency-limits";
import { createFileRoute } from "@tanstack/react-router";
+import { zodSearchValidator } from "@tanstack/router-zod-adapter";
+import { z } from "zod";
-import { ConcurrencyPage } from "@/components/concurrency/concurrency-page";
+/**
+ * Schema for validating URL search parameters for the Concurrency Limits page.
+ * @property {'global' | 'task-run'} tab used to designate which tab view to display
+ */
+const searchParams = z
+ .object({
+ tab: z.enum(["global", "task-run"]).default("global"),
+ })
+ .strict();
+
+export type TabOptions = z.infer<typeof searchParams>["tab"];
export const Route = createFileRoute("/concurrency-limits")({
- component: RouteComponent,
+ validateSearch: zodSearchValidator(searchParams),
+ component: ConcurrencyPage,
+ wrapInSuspense: true,
+ loader: useListGlobalConcurrencyLimits.loader,
});
-
-function RouteComponent() {
-	return <ConcurrencyPage />;
-}
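A note on the schema's behavior: `.default("global")` makes `tab` optional on the way in but always present after validation, and `.strict()` rejects unknown search params rather than passing them through. A small sketch of those semantics using only the zod API:

```ts
import { z } from "zod";

const searchParams = z
	.object({ tab: z.enum(["global", "task-run"]).default("global") })
	.strict();

// After parsing, `tab` is always present:
type Search = z.infer<typeof searchParams>; // { tab: "global" | "task-run" }
// On input it may be omitted, thanks to `.default()`:
type SearchInput = z.input<typeof searchParams>; // { tab?: "global" | "task-run" }

searchParams.parse({}); // -> { tab: "global" }
searchParams.parse({ tab: "task-run" }); // -> { tab: "task-run" }
// searchParams.parse({ tab: "global", page: 2 }); // throws: unrecognized key
```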
From 9d92835f79abc536fad0f04ac01e6df2063b7174 Mon Sep 17 00:00:00 2001
From: Devin Villarosa <102188207+devinvillarosa@users.noreply.github.com>
Date: Fri, 6 Dec 2024 11:29:32 -0800
Subject: [PATCH 24/92] [UI v2] experiment: Adds basic spacing utilities as
props and new layout components: Block and Flex (#16249)
---
.../concurrency/concurrency-page.tsx | 15 +-
.../global-concurrency-limits-header.tsx | 5 +-
ui-v2/src/components/ui/block.tsx | 23 +
ui-v2/src/components/ui/flex.tsx | 20 +
.../src/components/ui/utils/spacing-utils.ts | 451 ++++++++++++++++++
5 files changed, 504 insertions(+), 10 deletions(-)
create mode 100644 ui-v2/src/components/ui/block.tsx
create mode 100644 ui-v2/src/components/ui/flex.tsx
create mode 100644 ui-v2/src/components/ui/utils/spacing-utils.ts
diff --git a/ui-v2/src/components/concurrency/concurrency-page.tsx b/ui-v2/src/components/concurrency/concurrency-page.tsx
index d513deec48e7..045a890c6d00 100644
--- a/ui-v2/src/components/concurrency/concurrency-page.tsx
+++ b/ui-v2/src/components/concurrency/concurrency-page.tsx
@@ -1,3 +1,4 @@
+import { Flex } from "@/components/ui/flex";
import { Typography } from "@/components/ui/typography";
import { ConcurrencyTabs } from "./concurrency-tabs";
@@ -6,14 +7,12 @@ import { TaskRunConcurrencyView } from "./task-run-concurrenct-view";
 export const ConcurrencyPage = (): JSX.Element => {
 	return (
-		<div className="flex flex-col gap-4">
+		<Flex flexDirection="column" gap={4}>
 			<Typography variant="h2">Concurrency</Typography>
-			<div className="flex flex-col gap-6">
-				<ConcurrencyTabs
-					globalView={<GlobalConcurrencyView />}
-					taskRunView={<TaskRunConcurrencyView />}
-				/>
-			</div>
-		</div>
+			<ConcurrencyTabs
+				globalView={<GlobalConcurrencyView />}
+				taskRunView={<TaskRunConcurrencyView />}
+			/>
+		</Flex>
 	);
 };
diff --git a/ui-v2/src/components/concurrency/global-concurrency-view/global-concurrency-limits-header.tsx b/ui-v2/src/components/concurrency/global-concurrency-view/global-concurrency-limits-header.tsx
index d64699434c3a..e0b809dd3db3 100644
--- a/ui-v2/src/components/concurrency/global-concurrency-view/global-concurrency-limits-header.tsx
+++ b/ui-v2/src/components/concurrency/global-concurrency-view/global-concurrency-limits-header.tsx
@@ -1,4 +1,5 @@
import { Button } from "@/components/ui/button";
+import { Flex } from "@/components/ui/flex";
import { Icon } from "@/components/ui/icons";
import { Typography } from "@/components/ui/typography";
@@ -8,11 +9,11 @@ type Props = {
 export const GlobalConcurrencyLimitsHeader = ({ onAdd }: Props) => {
 	return (
-		<div className="flex gap-2 items-center">
+		<Flex gap={2} alignItems="center">
 			<Typography variant="h4">Global Concurrency Limits</Typography>
 			<Button onClick={onAdd} size="icon" variant="outline">
 				<Icon id="Plus" />
 			</Button>
-		</div>
+		</Flex>
 	);
 };
diff --git a/ui-v2/src/components/ui/block.tsx b/ui-v2/src/components/ui/block.tsx
new file mode 100644
index 000000000000..adbde80942af
--- /dev/null
+++ b/ui-v2/src/components/ui/block.tsx
@@ -0,0 +1,23 @@
+import { cn } from "@/lib/utils";
+import { createElement, forwardRef } from "react";
+import { UtilityProps, spacingUtiltiesClasses } from "./utils/spacing-utils";
+
+type Props = Omit<
+ UtilityProps,
+ "alignItems" | "alignSelf" | "display" | "flexDirection" | "gap"
+> & {
+ className?: string;
+ children: React.ReactNode;
+};
+
+export const Block = forwardRef<HTMLDivElement, Props>(
+ ({ className, ...props }, ref) => {
+ return createElement("div", {
+ className: cn("block", spacingUtiltiesClasses(props), className),
+ ref,
+ ...props,
+ });
+ },
+);
+
+Block.displayName = "Block";
diff --git a/ui-v2/src/components/ui/flex.tsx b/ui-v2/src/components/ui/flex.tsx
new file mode 100644
index 000000000000..1109db0e6455
--- /dev/null
+++ b/ui-v2/src/components/ui/flex.tsx
@@ -0,0 +1,20 @@
+import { cn } from "@/lib/utils";
+import { createElement, forwardRef } from "react";
+import { UtilityProps, spacingUtiltiesClasses } from "./utils/spacing-utils";
+
+type Props = Omit<UtilityProps, "display"> & {
+ className?: string;
+ children: React.ReactNode;
+};
+
+export const Flex = forwardRef<HTMLDivElement, Props>(
+ ({ className, ...props }, ref) => {
+ return createElement("div", {
+ className: cn("flex", spacingUtiltiesClasses(props), className),
+ ref,
+ ...props,
+ });
+ },
+);
+
+Flex.displayName = "Flex";
diff --git a/ui-v2/src/components/ui/utils/spacing-utils.ts b/ui-v2/src/components/ui/utils/spacing-utils.ts
new file mode 100644
index 000000000000..e6ce04342c20
--- /dev/null
+++ b/ui-v2/src/components/ui/utils/spacing-utils.ts
@@ -0,0 +1,451 @@
+import { cva } from "class-variance-authority";
+
+type Displays =
+ | "hidden"
+ | "block"
+ | "contents"
+ | "flex"
+ | "flow-root"
+ | "grid"
+ | "inline"
+ | "inline-block"
+ | "inline-flex"
+ | "inline-grid"
+ | "list-item";
+
+type FlexDirection = "column" | "column-reverse" | "row-reverse" | "row";
+
+type AlignContent =
+ | "center"
+ | "start"
+ | "end"
+ | "stretch"
+ | "space-between"
+ | "space-around"
+ | "space-evenly";
+type AlignItems = "center" | "start" | "end" | "stretch" | "baseline";
+
+type AlignSelf = "auto" | "center" | "start" | "end" | "stretch" | "baseline";
+
+type Spaces =
+ | 0
+ | 0.5
+ | 1
+ | 1.5
+ | 2
+ | 2.5
+ | 3
+ | 4
+ | 5
+ | 6
+ | 8
+ | 10
+ | 12
+ | 14
+ | 16
+ | 20
+ | 24
+ | 32
+ | 48
+ | 64;
+
+export type UtilityProps = Partial<{
+ alignContent: AlignContent;
+ alignItems: AlignItems;
+ alignSelf: AlignSelf;
+ display: Displays;
+ flexDirection: FlexDirection;
+ gap: Spaces;
+ m: Spaces;
+ mb: Spaces;
+ ml: Spaces;
+ mr: Spaces;
+ mt: Spaces;
+ mx: Spaces;
+ my: Spaces;
+ p: Spaces;
+ pb: Spaces;
+ pl: Spaces;
+ pr: Spaces;
+ pt: Spaces;
+ px: Spaces;
+ py: Spaces;
+}>;
+
+export const spacingUtiltiesClasses = cva("", {
+ variants: {
+ alignContent: {
+ center: "content-center",
+ start: "content-start",
+ end: "content-end",
+ stretch: "content-stretch",
+ ["space-between"]: "content-between",
+ ["space-around"]: "content-around",
+ ["space-evenly"]: "content-evenly",
+ },
+ alignItems: {
+ center: "items-center",
+ start: "items-start",
+ end: "items-end",
+ stretch: "items-stretch",
+ baseline: "items-baseline",
+ },
+ alignSelf: {
+ auto: "self-auto",
+ center: "self-center",
+ start: "self-start",
+ end: "self-end",
+ stretch: "self-stretch",
+ baseline: "self-baseline",
+ },
+ display: {
+ hidden: "hidden",
+ block: "block",
+ contents: "contents",
+ flex: "flex",
+ ["flow-root"]: "flow-root",
+ grid: "grid",
+ inline: "inline",
+ ["inline-block"]: "inline-block",
+ ["inline-flex"]: "inline-flex",
+ ["inline-grid"]: "inline-grid",
+ ["list-item"]: "list-item",
+ },
+ flexDirection: {
+ column: "flex-col",
+ ["column-reverse"]: "flex-col-reverse",
+ ["row-reverse"]: "flex-row-reverse",
+ row: "flex-row",
+ },
+ gap: {
+ 0: "gap-0",
+ 0.5: "gap-0.5",
+ 1: "gap-1",
+ 1.5: "gap-1.5",
+ 2: "gap-2",
+ 2.5: "gap-2.5",
+ 3: "gap-3",
+ 4: "gap-4",
+ 5: "gap-5",
+ 6: "gap-6",
+ 8: "gap-8",
+ 10: "gap-10",
+ 12: "gap-12",
+ 14: "gap-14",
+ 16: "gap-16",
+ 20: "gap-20",
+ 24: "gap-24",
+ 32: "gap-32",
+ 48: "gap-48",
+ 64: "gap-64",
+ },
+ m: {
+ 0: "m-0",
+ 0.5: "m-0.5",
+ 1: "m-1",
+ 1.5: "m-1.5",
+ 2: "m-2",
+ 2.5: "m-2.5",
+ 3: "m-3",
+ 4: "m-4",
+ 5: "m-5",
+ 6: "m-6",
+ 8: "m-8",
+ 10: "m-10",
+ 12: "m-12",
+ 14: "m-14",
+ 16: "m-16",
+ 20: "m-20",
+ 24: "m-24",
+ 32: "m-32",
+ 48: "m-48",
+ 64: "m-64",
+ },
+ mb: {
+ 0: "mb-0",
+ 0.5: "mb-0.5",
+ 1: "mb-1",
+ 1.5: "mb-1.5",
+ 2: "mb-2",
+ 2.5: "mb-2.5",
+ 3: "mb-3",
+ 4: "mb-4",
+ 5: "mb-5",
+ 6: "mb-6",
+ 8: "mb-8",
+ 10: "mb-10",
+ 12: "mb-12",
+ 14: "mb-14",
+ 16: "mb-16",
+ 20: "mb-20",
+ 24: "mb-24",
+ 32: "mb-32",
+ 48: "mb-48",
+ 64: "mb-64",
+ },
+ ml: {
+ 0: "ml-0",
+ 0.5: "ml-0.5",
+ 1: "ml-1",
+ 1.5: "ml-1.5",
+ 2: "ml-2",
+ 2.5: "ml-2.5",
+ 3: "ml-3",
+ 4: "ml-4",
+ 5: "ml-5",
+ 6: "ml-6",
+ 8: "ml-8",
+ 10: "ml-10",
+ 12: "ml-12",
+ 14: "ml-14",
+ 16: "ml-16",
+ 20: "ml-20",
+ 24: "ml-24",
+ 32: "ml-32",
+ 48: "ml-48",
+ 64: "ml-64",
+ },
+ mr: {
+ 0: "mr-0",
+ 0.5: "mr-0.5",
+ 1: "mr-1",
+ 1.5: "mr-1.5",
+ 2: "mr-2",
+ 2.5: "mr-2.5",
+ 3: "mr-3",
+ 4: "mr-4",
+ 5: "mr-5",
+ 6: "mr-6",
+ 8: "mr-8",
+ 10: "mr-10",
+ 12: "mr-12",
+ 14: "mr-14",
+ 16: "mr-16",
+ 20: "mr-20",
+ 24: "mr-24",
+ 32: "mr-32",
+ 48: "mr-48",
+ 64: "mr-64",
+ },
+ mt: {
+ 0: "mt-0",
+ 0.5: "mt-0.5",
+ 1: "mt-1",
+ 1.5: "mt-1.5",
+ 2: "mt-2",
+ 2.5: "mt-2.5",
+ 3: "mt-3",
+ 4: "mt-4",
+ 5: "mt-5",
+ 6: "mt-6",
+ 8: "mt-8",
+ 10: "mt-10",
+ 12: "mt-12",
+ 14: "mt-14",
+ 16: "mt-16",
+ 20: "mt-20",
+ 24: "mt-24",
+ 32: "mt-32",
+ 48: "mt-48",
+ 64: "mt-64",
+ },
+ mx: {
+ 0: "mx-0",
+ 0.5: "mx-0.5",
+ 1: "mx-1",
+ 1.5: "mx-1.5",
+ 2: "mx-2",
+ 2.5: "mx-2.5",
+ 3: "mx-3",
+ 4: "mx-4",
+ 5: "mx-5",
+ 6: "mx-6",
+ 8: "mx-8",
+ 10: "mx-10",
+ 12: "mx-12",
+ 14: "mx-14",
+ 16: "mx-16",
+ 20: "mx-20",
+ 24: "mx-24",
+ 32: "mx-32",
+ 48: "mx-48",
+ 64: "mx-64",
+ },
+ my: {
+ 0: "my-0",
+ 0.5: "my-0.5",
+ 1: "my-1",
+ 1.5: "my-1.5",
+ 2: "my-2",
+ 2.5: "my-2.5",
+ 3: "my-3",
+ 4: "my-4",
+ 5: "my-5",
+ 6: "my-6",
+ 8: "my-8",
+ 10: "my-10",
+ 12: "my-12",
+ 14: "my-14",
+ 16: "my-16",
+ 20: "my-20",
+ 24: "my-24",
+ 32: "my-32",
+ 48: "my-48",
+ 64: "my-64",
+ },
+ p: {
+ 0: "p-0",
+ 0.5: "p-0.5",
+ 1: "p-1",
+ 1.5: "p-1.5",
+ 2: "p-2",
+ 2.5: "p-2.5",
+ 3: "p-3",
+ 4: "p-4",
+ 5: "p-5",
+ 6: "p-6",
+ 8: "p-8",
+ 10: "p-10",
+ 12: "p-12",
+ 14: "p-14",
+ 16: "p-16",
+ 20: "p-20",
+ 24: "p-24",
+ 32: "p-32",
+ 48: "p-48",
+ 64: "p-64",
+ },
+ pb: {
+ 0: "pb-0",
+ 0.5: "pb-0.5",
+ 1: "pb-1",
+ 1.5: "pb-1.5",
+ 2: "pb-2",
+ 2.5: "pb-2.5",
+ 3: "pb-3",
+ 4: "pb-4",
+ 5: "pb-5",
+ 6: "pb-6",
+ 8: "pb-8",
+ 10: "pb-10",
+ 12: "pb-12",
+ 14: "pb-14",
+ 16: "pb-16",
+ 20: "pb-20",
+ 24: "pb-24",
+ 32: "pb-32",
+ 48: "pb-48",
+ 64: "pb-64",
+ },
+ pl: {
+ 0: "pl-0",
+ 0.5: "pl-0.5",
+ 1: "pl-1",
+ 1.5: "pl-1.5",
+ 2: "pl-2",
+ 2.5: "pl-2.5",
+ 3: "pl-3",
+ 4: "pl-4",
+ 5: "pl-5",
+ 6: "pl-6",
+ 8: "pl-8",
+ 10: "pl-10",
+ 12: "pl-12",
+ 14: "pl-14",
+ 16: "pl-16",
+ 20: "pl-20",
+ 24: "pl-24",
+ 32: "pl-32",
+ 48: "pl-48",
+ 64: "pl-64",
+ },
+ pr: {
+ 0: "pr-0",
+ 0.5: "pr-0.5",
+ 1: "pr-1",
+ 1.5: "pr-1.5",
+ 2: "pr-2",
+ 2.5: "pr-2.5",
+ 3: "pr-3",
+ 4: "pr-4",
+ 5: "pr-5",
+ 6: "pr-6",
+ 8: "pr-8",
+ 10: "pr-10",
+ 12: "pr-12",
+ 14: "pr-14",
+ 16: "pr-16",
+ 20: "pr-20",
+ 24: "pr-24",
+ 32: "pr-32",
+ 48: "pr-48",
+ 64: "pr-64",
+ },
+ pt: {
+ 0: "pt-0",
+ 0.5: "pt-0.5",
+ 1: "pt-1",
+ 1.5: "pt-1.5",
+ 2: "pt-2",
+ 2.5: "pt-2.5",
+ 3: "pt-3",
+ 4: "pt-4",
+ 5: "pt-5",
+ 6: "pt-6",
+ 8: "pt-8",
+ 10: "pt-10",
+ 12: "pt-12",
+ 14: "pt-14",
+ 16: "pt-16",
+ 20: "pt-20",
+ 24: "pt-24",
+ 32: "pt-32",
+ 48: "pt-48",
+ 64: "pt-64",
+ },
+ px: {
+ 0: "px-0",
+ 0.5: "px-0.5",
+ 1: "px-1",
+ 1.5: "px-1.5",
+ 2: "px-2",
+ 2.5: "px-2.5",
+ 3: "px-3",
+ 4: "px-4",
+ 5: "px-5",
+ 6: "px-6",
+ 8: "px-8",
+ 10: "px-10",
+ 12: "px-12",
+ 14: "px-14",
+ 16: "px-16",
+ 20: "px-20",
+ 24: "px-24",
+ 32: "px-32",
+ 48: "px-48",
+ 64: "px-64",
+ },
+ py: {
+ 0: "py-0",
+ 0.5: "py-0.5",
+ 1: "py-1",
+ 1.5: "py-1.5",
+ 2: "py-2",
+ 2.5: "py-2.5",
+ 3: "py-3",
+ 4: "py-4",
+ 5: "py-5",
+ 6: "py-6",
+ 8: "py-8",
+ 10: "py-10",
+ 12: "py-12",
+ 14: "py-14",
+ 16: "py-16",
+ 20: "py-20",
+ 24: "py-24",
+ 32: "py-32",
+ 48: "py-48",
+ 64: "py-64",
+ },
+ },
+});
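Taken together, `Flex`, `Block`, and the `cva`-based class map give a prop-driven spacing API in which each prop maps one-to-one to a Tailwind utility (`gap={2}` becomes `gap-2`), so only the classes enumerated above can ever be emitted. A hypothetical usage sketch, assuming the components render children as typed:

```tsx
import { Block } from "@/components/ui/block";
import { Flex } from "@/components/ui/flex";

// Hypothetical card layout exercising the spacing props.
export const ExampleCard = () => (
	<Flex flexDirection="column" gap={2} p={4}>
		<Flex alignItems="center" gap={1}>
			<span>Title</span>
		</Flex>
		{/* Block omits the flex-only props but keeps margins/padding. */}
		<Block mt={2} px={1}>
			Body copy with a top margin and horizontal padding.
		</Block>
	</Flex>
);
```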
From c012b002d41293985eb0fbb39bd0365950c9d45d Mon Sep 17 00:00:00 2001
From: Chris Pickett
Date: Fri, 6 Dec 2024 15:19:23 -0500
Subject: [PATCH 25/92] Add work-pool id/name to labels donated by worker to
flow run (#16252)
---
src/prefect/workers/base.py | 22 +++++++++++++++-------
tests/workers/test_base_worker.py | 7 ++++++-
2 files changed, 21 insertions(+), 8 deletions(-)
diff --git a/src/prefect/workers/base.py b/src/prefect/workers/base.py
index 7220c2e83d23..097352621119 100644
--- a/src/prefect/workers/base.py
+++ b/src/prefect/workers/base.py
@@ -53,6 +53,7 @@
Pending,
exception_to_failed_state,
)
+from prefect.types import KeyValueLabels
from prefect.utilities.dispatch import get_registry_for_type, register_base_type
from prefect.utilities.engine import propose_state
from prefect.utilities.services import critical_service_loop
@@ -1222,13 +1223,20 @@ async def _give_worker_labels_to_flow_run(self, flow_run_id: UUID):
Give this worker's identifying labels to the specified flow run.
"""
if self._cloud_client:
- await self._cloud_client.update_flow_run_labels(
- flow_run_id,
- {
- "prefect.worker.name": self.name,
- "prefect.worker.type": self.type,
- },
- )
+ labels: KeyValueLabels = {
+ "prefect.worker.name": self.name,
+ "prefect.worker.type": self.type,
+ }
+
+ if self._work_pool:
+ labels.update(
+ {
+ "prefect.work-pool.name": self._work_pool.name,
+ "prefect.work-pool.id": str(self._work_pool.id),
+ }
+ )
+
+ await self._cloud_client.update_flow_run_labels(flow_run_id, labels)
async def __aenter__(self):
self._logger.debug("Entering worker context...")
diff --git a/tests/workers/test_base_worker.py b/tests/workers/test_base_worker.py
index dc870a4a07b6..257d61088954 100644
--- a/tests/workers/test_base_worker.py
+++ b/tests/workers/test_base_worker.py
@@ -2109,7 +2109,12 @@ def create_run_with_deployment(state):
CloudClientMock.update_flow_run_labels.assert_awaited_once_with(
flow_run.id,
- {"prefect.worker.name": worker.name, "prefect.worker.type": worker.type},
+ {
+ "prefect.worker.name": worker.name,
+ "prefect.worker.type": worker.type,
+ "prefect.work-pool.name": work_pool.name,
+ "prefect.work-pool.id": str(work_pool.id),
+ },
)
From eb1fd0eaa5660acd5009e640d61ebd2d03bc320d Mon Sep 17 00:00:00 2001
From: nate nowack
Date: Fri, 6 Dec 2024 14:20:29 -0600
Subject: [PATCH 26/92] make `flow` and `task` decorators pass strict typing
(#16251)
---
src/prefect/_internal/concurrency/api.py | 19 ++--
src/prefect/client/schemas/objects.py | 2 +-
src/prefect/context.py | 17 +--
src/prefect/flow_engine.py | 4 +-
src/prefect/flows.py | 126 +++++++++++------------
src/prefect/task_runners.py | 4 +-
src/prefect/tasks.py | 40 +++----
src/prefect/utilities/visualization.py | 28 ++---
8 files changed, 123 insertions(+), 117 deletions(-)
diff --git a/src/prefect/_internal/concurrency/api.py b/src/prefect/_internal/concurrency/api.py
index 6b9eadaa02eb..bcfa6ae189db 100644
--- a/src/prefect/_internal/concurrency/api.py
+++ b/src/prefect/_internal/concurrency/api.py
@@ -7,6 +7,7 @@
import concurrent.futures
import contextlib
from typing import (
+ Any,
Awaitable,
Callable,
ContextManager,
@@ -31,11 +32,11 @@
P = ParamSpec("P")
T = TypeVar("T")
-Future = Union[concurrent.futures.Future, asyncio.Future]
+Future = Union[concurrent.futures.Future[T], asyncio.Future[T]]
def create_call(__fn: Callable[P, T], *args: P.args, **kwargs: P.kwargs) -> Call[T]:
- return Call.new(__fn, *args, **kwargs)
+ return Call[T].new(__fn, *args, **kwargs)
def _cast_to_call(call_like: Union[Callable[[], T], Call[T]]) -> Call[T]:
@@ -48,9 +49,9 @@ def _cast_to_call(call_like: Union[Callable[[], T], Call[T]]) -> Call[T]:
class _base(abc.ABC):
@abc.abstractstaticmethod
def wait_for_call_in_loop_thread(
- __call: Union[Callable[[], T], Call[T]],
+ __call: Union[Callable[[], T], Call[T]], # type: ignore[reportGeneralTypeIssues]
timeout: Optional[float] = None,
- done_callbacks: Optional[Iterable[Call]] = None,
+ done_callbacks: Optional[Iterable[Call[Any]]] = None,
) -> T:
"""
Schedule a function in the global worker thread and wait for completion.
@@ -61,9 +62,9 @@ def wait_for_call_in_loop_thread(
@abc.abstractstaticmethod
def wait_for_call_in_new_thread(
- __call: Union[Callable[[], T], Call[T]],
+ __call: Union[Callable[[], T], Call[T]], # type: ignore[reportGeneralTypeIssues]
timeout: Optional[float] = None,
- done_callbacks: Optional[Iterable[Call]] = None,
+ done_callbacks: Optional[Iterable[Call[Any]]] = None,
) -> T:
"""
Schedule a function in a new worker thread.
@@ -132,8 +133,8 @@ class from_async(_base):
async def wait_for_call_in_loop_thread(
__call: Union[Callable[[], Awaitable[T]], Call[Awaitable[T]]],
timeout: Optional[float] = None,
- done_callbacks: Optional[Iterable[Call]] = None,
- contexts: Optional[Iterable[ContextManager]] = None,
+ done_callbacks: Optional[Iterable[Call[Any]]] = None,
+ contexts: Optional[Iterable[ContextManager[Any]]] = None,
) -> Awaitable[T]:
call = _cast_to_call(__call)
waiter = AsyncWaiter(call)
@@ -150,7 +151,7 @@ async def wait_for_call_in_loop_thread(
async def wait_for_call_in_new_thread(
__call: Union[Callable[[], T], Call[T]],
timeout: Optional[float] = None,
- done_callbacks: Optional[Iterable[Call]] = None,
+ done_callbacks: Optional[Iterable[Call[Any]]] = None,
) -> T:
call = _cast_to_call(__call)
waiter = AsyncWaiter(call=call)
diff --git a/src/prefect/client/schemas/objects.py b/src/prefect/client/schemas/objects.py
index df4661d65d1b..ccd802b3dda4 100644
--- a/src/prefect/client/schemas/objects.py
+++ b/src/prefect/client/schemas/objects.py
@@ -796,7 +796,7 @@ class TaskRun(ObjectBaseModel):
description="A list of tags for the task run.",
examples=[["tag-1", "tag-2"]],
)
- labels: KeyValueLabelsField
+ labels: KeyValueLabelsField = Field(default_factory=dict)
state_id: Optional[UUID] = Field(
default=None, description="The id of the current task run state."
)
diff --git a/src/prefect/context.py b/src/prefect/context.py
index 2b391f57b1b9..675812de85c2 100644
--- a/src/prefect/context.py
+++ b/src/prefect/context.py
@@ -25,7 +25,6 @@
Union,
)
-import pendulum
from pydantic import BaseModel, ConfigDict, Field, PrivateAttr
from pydantic_extra_types.pendulum_dt import DateTime
from typing_extensions import Self
@@ -50,6 +49,8 @@
from prefect.utilities.services import start_client_metrics_server
T = TypeVar("T")
+P = TypeVar("P")
+R = TypeVar("R")
if TYPE_CHECKING:
from prefect.flows import Flow
@@ -121,8 +122,8 @@ class ContextModel(BaseModel):
"""
# The context variable for storing data must be defined by the child class
- __var__: ContextVar
- _token: Optional[Token] = PrivateAttr(None)
+ __var__: ContextVar[Self]
+ _token: Optional[Token[Self]] = PrivateAttr(None)
model_config = ConfigDict(
arbitrary_types_allowed=True,
extra="forbid",
@@ -150,7 +151,7 @@ def get(cls: Type[Self]) -> Optional[Self]:
return cls.__var__.get(None)
def model_copy(
- self: Self, *, update: Optional[Dict[str, Any]] = None, deep: bool = False
+ self: Self, *, update: Optional[Mapping[str, Any]] = None, deep: bool = False
):
"""
Duplicate the context model, optionally choosing which fields to include, exclude, or change.
@@ -310,11 +311,11 @@ def __init__(self, *args, **kwargs):
start_client_metrics_server()
- start_time: DateTime = Field(default_factory=lambda: pendulum.now("UTC"))
+ start_time: DateTime = Field(default_factory=lambda: DateTime.now("UTC"))
input_keyset: Optional[Dict[str, Dict[str, str]]] = None
client: Union[PrefectClient, SyncPrefectClient]
- def serialize(self):
+ def serialize(self: Self) -> Dict[str, Any]:
return self.model_dump(
include={"start_time", "input_keyset"},
exclude_unset=True,
@@ -336,9 +337,9 @@ class EngineContext(RunContext):
flow_run_states: A list of states for flow runs created within this flow run
"""
- flow: Optional["Flow"] = None
+ flow: Optional["Flow[Any, Any]"] = None
flow_run: Optional[FlowRun] = None
- task_runner: TaskRunner
+ task_runner: TaskRunner[Any]
log_prints: bool = False
parameters: Optional[Dict[str, Any]] = None
diff --git a/src/prefect/flow_engine.py b/src/prefect/flow_engine.py
index 239d9306ffb1..fb5ec172ab4e 100644
--- a/src/prefect/flow_engine.py
+++ b/src/prefect/flow_engine.py
@@ -1392,7 +1392,7 @@ async def run_generator_flow_async(
flow: Flow[P, R],
flow_run: Optional[FlowRun] = None,
parameters: Optional[Dict[str, Any]] = None,
- wait_for: Optional[Iterable[PrefectFuture]] = None,
+ wait_for: Optional[Iterable[PrefectFuture[R]]] = None,
return_type: Literal["state", "result"] = "result",
) -> AsyncGenerator[R, None]:
if return_type != "result":
@@ -1430,7 +1430,7 @@ def run_flow(
flow: Flow[P, R],
flow_run: Optional[FlowRun] = None,
parameters: Optional[Dict[str, Any]] = None,
- wait_for: Optional[Iterable[PrefectFuture]] = None,
+ wait_for: Optional[Iterable[PrefectFuture[R]]] = None,
return_type: Literal["state", "result"] = "result",
) -> Union[R, State, None]:
kwargs = dict(
diff --git a/src/prefect/flows.py b/src/prefect/flows.py
index fcc4e675bda6..a58d1db716c8 100644
--- a/src/prefect/flows.py
+++ b/src/prefect/flows.py
@@ -23,13 +23,11 @@
Awaitable,
Callable,
Coroutine,
- Dict,
Generic,
+ Hashable,
Iterable,
- List,
NoReturn,
Optional,
- Set,
Tuple,
Type,
TypeVar,
@@ -186,7 +184,9 @@ def __init__(
flow_run_name: Optional[Union[Callable[[], str], str]] = None,
retries: Optional[int] = None,
retry_delay_seconds: Optional[Union[int, float]] = None,
- task_runner: Union[Type[TaskRunner], TaskRunner, None] = None,
+ task_runner: Union[
+ Type[TaskRunner[PrefectFuture[R]]], TaskRunner[PrefectFuture[R]], None
+ ] = None,
description: Optional[str] = None,
timeout_seconds: Union[int, float, None] = None,
validate_parameters: bool = True,
@@ -196,14 +196,14 @@ def __init__(
cache_result_in_memory: bool = True,
log_prints: Optional[bool] = None,
on_completion: Optional[
- List[Callable[[FlowSchema, FlowRun, State], None]]
+ list[Callable[[FlowSchema, FlowRun, State], None]]
] = None,
- on_failure: Optional[List[Callable[[FlowSchema, FlowRun, State], None]]] = None,
+ on_failure: Optional[list[Callable[[FlowSchema, FlowRun, State], None]]] = None,
on_cancellation: Optional[
- List[Callable[[FlowSchema, FlowRun, State], None]]
+ list[Callable[[FlowSchema, FlowRun, State], None]]
] = None,
- on_crashed: Optional[List[Callable[[FlowSchema, FlowRun, State], None]]] = None,
- on_running: Optional[List[Callable[[FlowSchema, FlowRun, State], None]]] = None,
+ on_crashed: Optional[list[Callable[[FlowSchema, FlowRun, State], None]]] = None,
+ on_running: Optional[list[Callable[[FlowSchema, FlowRun, State], None]]] = None,
):
if name is not None and not isinstance(name, str):
raise TypeError(
@@ -411,14 +411,14 @@ def with_options(
cache_result_in_memory: Optional[bool] = None,
log_prints: Optional[bool] = NotSet, # type: ignore
on_completion: Optional[
- List[Callable[[FlowSchema, FlowRun, State], None]]
+ list[Callable[[FlowSchema, FlowRun, State], None]]
] = None,
- on_failure: Optional[List[Callable[[FlowSchema, FlowRun, State], None]]] = None,
+ on_failure: Optional[list[Callable[[FlowSchema, FlowRun, State], None]]] = None,
on_cancellation: Optional[
- List[Callable[[FlowSchema, FlowRun, State], None]]
+ list[Callable[[FlowSchema, FlowRun, State], None]]
] = None,
- on_crashed: Optional[List[Callable[[FlowSchema, FlowRun, State], None]]] = None,
- on_running: Optional[List[Callable[[FlowSchema, FlowRun, State], None]]] = None,
+ on_crashed: Optional[list[Callable[[FlowSchema, FlowRun, State], None]]] = None,
+ on_running: Optional[list[Callable[[FlowSchema, FlowRun, State], None]]] = None,
) -> Self:
"""
Create a new flow from the current object, updating provided options.
@@ -522,7 +522,7 @@ def with_options(
new_flow._entrypoint = self._entrypoint
return new_flow
- def validate_parameters(self, parameters: Dict[str, Any]) -> Dict[str, Any]:
+ def validate_parameters(self, parameters: dict[str, Any]) -> dict[str, Any]:
"""
Validate parameters for compatibility with the flow by attempting to cast the inputs to the
associated types specified by the function's type annotations.
@@ -599,7 +599,7 @@ def resolve_block_reference(data: Any) -> Any:
}
return cast_parameters
- def serialize_parameters(self, parameters: Dict[str, Any]) -> Dict[str, Any]:
+ def serialize_parameters(self, parameters: dict[str, Any]) -> dict[str, Any]:
"""
Convert parameters to a serializable form.
@@ -646,14 +646,14 @@ async def to_deployment(
schedules: Optional["FlexibleScheduleList"] = None,
concurrency_limit: Optional[Union[int, ConcurrencyLimitConfig, None]] = None,
parameters: Optional[dict] = None,
- triggers: Optional[List[Union[DeploymentTriggerTypes, TriggerTypes]]] = None,
+ triggers: Optional[list[Union[DeploymentTriggerTypes, TriggerTypes]]] = None,
description: Optional[str] = None,
- tags: Optional[List[str]] = None,
+ tags: Optional[list[str]] = None,
version: Optional[str] = None,
enforce_parameter_schema: bool = True,
work_pool_name: Optional[str] = None,
work_queue_name: Optional[str] = None,
- job_variables: Optional[Dict[str, Any]] = None,
+ job_variables: Optional[dict[str, Any]] = None,
entrypoint_type: EntrypointType = EntrypointType.FILE_PATH,
) -> "RunnerDeployment":
"""
@@ -756,32 +756,32 @@ def my_other_flow(name):
)
def on_completion(
- self, fn: Callable[["Flow", FlowRun, State], None]
- ) -> Callable[["Flow", FlowRun, State], None]:
+ self, fn: Callable[[FlowSchema, FlowRun, State], None]
+ ) -> Callable[[FlowSchema, FlowRun, State], None]:
self.on_completion_hooks.append(fn)
return fn
def on_cancellation(
- self, fn: Callable[["Flow", FlowRun, State], None]
- ) -> Callable[["Flow", FlowRun, State], None]:
+ self, fn: Callable[[FlowSchema, FlowRun, State], None]
+ ) -> Callable[[FlowSchema, FlowRun, State], None]:
self.on_cancellation_hooks.append(fn)
return fn
def on_crashed(
- self, fn: Callable[["Flow", FlowRun, State], None]
- ) -> Callable[["Flow", FlowRun, State], None]:
+ self, fn: Callable[[FlowSchema, FlowRun, State], None]
+ ) -> Callable[[FlowSchema, FlowRun, State], None]:
self.on_crashed_hooks.append(fn)
return fn
def on_running(
- self, fn: Callable[["Flow", FlowRun, State], None]
- ) -> Callable[["Flow", FlowRun, State], None]:
+ self, fn: Callable[[FlowSchema, FlowRun, State], None]
+ ) -> Callable[[FlowSchema, FlowRun, State], None]:
self.on_running_hooks.append(fn)
return fn
def on_failure(
- self, fn: Callable[["Flow", FlowRun, State], None]
- ) -> Callable[["Flow", FlowRun, State], None]:
+ self, fn: Callable[[FlowSchema, FlowRun, State], None]
+ ) -> Callable[[FlowSchema, FlowRun, State], None]:
self.on_failure_hooks.append(fn)
return fn
@@ -801,10 +801,10 @@ def serve(
paused: Optional[bool] = None,
schedules: Optional["FlexibleScheduleList"] = None,
global_limit: Optional[Union[int, ConcurrencyLimitConfig, None]] = None,
- triggers: Optional[List[Union[DeploymentTriggerTypes, TriggerTypes]]] = None,
- parameters: Optional[dict] = None,
+ triggers: Optional[list[Union[DeploymentTriggerTypes, TriggerTypes]]] = None,
+ parameters: Optional[dict[str, Any]] = None,
description: Optional[str] = None,
- tags: Optional[List[str]] = None,
+ tags: Optional[list[str]] = None,
version: Optional[str] = None,
enforce_parameter_schema: bool = True,
pause_on_shutdown: bool = True,
@@ -1056,17 +1056,17 @@ async def deploy(
build: bool = True,
push: bool = True,
work_queue_name: Optional[str] = None,
- job_variables: Optional[dict] = None,
+ job_variables: Optional[dict[str, Any]] = None,
interval: Optional[Union[int, float, datetime.timedelta]] = None,
cron: Optional[str] = None,
rrule: Optional[str] = None,
paused: Optional[bool] = None,
- schedules: Optional[List[DeploymentScheduleCreate]] = None,
+ schedules: Optional[list[DeploymentScheduleCreate]] = None,
concurrency_limit: Optional[Union[int, ConcurrencyLimitConfig, None]] = None,
- triggers: Optional[List[Union[DeploymentTriggerTypes, TriggerTypes]]] = None,
- parameters: Optional[dict] = None,
+ triggers: Optional[list[Union[DeploymentTriggerTypes, TriggerTypes]]] = None,
+ parameters: Optional[dict[str, Any]] = None,
description: Optional[str] = None,
- tags: Optional[List[str]] = None,
+ tags: Optional[list[str]] = None,
version: Optional[str] = None,
enforce_parameter_schema: bool = True,
entrypoint_type: EntrypointType = EntrypointType.FILE_PATH,
@@ -1289,7 +1289,7 @@ def __call__(
self,
*args: "P.args",
return_state: bool = False,
- wait_for: Optional[Iterable[PrefectFuture]] = None,
+ wait_for: Optional[Iterable[PrefectFuture[Any]]] = None,
**kwargs: "P.kwargs",
):
"""
@@ -1361,7 +1361,7 @@ def __call__(
)
@sync_compatible
- async def visualize(self, *args, **kwargs):
+ async def visualize(self, *args: "P.args", **kwargs: "P.kwargs"):
"""
Generates a graphviz object representing the current flow. In IPython notebooks,
it's rendered inline, otherwise in a new window as a PNG.
@@ -1390,7 +1390,7 @@ async def visualize(self, *args, **kwargs):
try:
with TaskVizTracker() as tracker:
if self.isasync:
- await self.fn(*args, **kwargs)
+ await self.fn(*args, **kwargs) # type: ignore[reportGeneralTypeIssues]
else:
self.fn(*args, **kwargs)
@@ -1433,7 +1433,7 @@ def flow(
flow_run_name: Optional[Union[Callable[[], str], str]] = None,
retries: Optional[int] = None,
retry_delay_seconds: Optional[Union[int, float]] = None,
- task_runner: Optional[TaskRunner] = None,
+ task_runner: Optional[TaskRunner[PrefectFuture[R]]] = None,
description: Optional[str] = None,
timeout_seconds: Union[int, float, None] = None,
validate_parameters: bool = True,
@@ -1443,29 +1443,29 @@ def flow(
cache_result_in_memory: bool = True,
log_prints: Optional[bool] = None,
on_completion: Optional[
- List[Callable[[FlowSchema, FlowRun, State], Union[Awaitable[None], None]]]
+ list[Callable[[FlowSchema, FlowRun, State], Union[Awaitable[None], None]]]
] = None,
on_failure: Optional[
- List[Callable[[FlowSchema, FlowRun, State], Union[Awaitable[None], None]]]
+ list[Callable[[FlowSchema, FlowRun, State], Union[Awaitable[None], None]]]
] = None,
on_cancellation: Optional[
- List[Callable[[FlowSchema, FlowRun, State], None]]
+ list[Callable[[FlowSchema, FlowRun, State], None]]
] = None,
- on_crashed: Optional[List[Callable[[FlowSchema, FlowRun, State], None]]] = None,
- on_running: Optional[List[Callable[[FlowSchema, FlowRun, State], None]]] = None,
+ on_crashed: Optional[list[Callable[[FlowSchema, FlowRun, State], None]]] = None,
+ on_running: Optional[list[Callable[[FlowSchema, FlowRun, State], None]]] = None,
) -> Callable[[Callable[P, R]], Flow[P, R]]:
...
def flow(
- __fn=None,
+ __fn: Optional[Callable[P, R]] = None,
*,
name: Optional[str] = None,
version: Optional[str] = None,
flow_run_name: Optional[Union[Callable[[], str], str]] = None,
retries: Optional[int] = None,
retry_delay_seconds: Union[int, float, None] = None,
- task_runner: Optional[TaskRunner] = None,
+ task_runner: Optional[TaskRunner[PrefectFuture[R]]] = None,
description: Optional[str] = None,
timeout_seconds: Union[int, float, None] = None,
validate_parameters: bool = True,
@@ -1475,16 +1475,16 @@ def flow(
cache_result_in_memory: bool = True,
log_prints: Optional[bool] = None,
on_completion: Optional[
- List[Callable[[FlowSchema, FlowRun, State], Union[Awaitable[None], None]]]
+ list[Callable[[FlowSchema, FlowRun, State], Union[Awaitable[None], None]]]
] = None,
on_failure: Optional[
- List[Callable[[FlowSchema, FlowRun, State], Union[Awaitable[None], None]]]
+ list[Callable[[FlowSchema, FlowRun, State], Union[Awaitable[None], None]]]
] = None,
on_cancellation: Optional[
- List[Callable[[FlowSchema, FlowRun, State], None]]
+ list[Callable[[FlowSchema, FlowRun, State], None]]
] = None,
- on_crashed: Optional[List[Callable[[FlowSchema, FlowRun, State], None]]] = None,
- on_running: Optional[List[Callable[[FlowSchema, FlowRun, State], None]]] = None,
+ on_crashed: Optional[list[Callable[[FlowSchema, FlowRun, State], None]]] = None,
+ on_running: Optional[list[Callable[[FlowSchema, FlowRun, State], None]]] = None,
):
"""
Decorator to designate a function as a Prefect workflow.
@@ -2041,10 +2041,10 @@ def load_placeholder_flow(entrypoint: str, raises: Exception):
def _base_placeholder():
raise raises
- def sync_placeholder_flow(*args, **kwargs):
+ def sync_placeholder_flow(*args: "P.args", **kwargs: "P.kwargs"):
_base_placeholder()
- async def async_placeholder_flow(*args, **kwargs):
+ async def async_placeholder_flow(*args: "P.args", **kwargs: "P.kwargs"):
_base_placeholder()
placeholder_flow = (
@@ -2059,7 +2059,7 @@ async def async_placeholder_flow(*args, **kwargs):
return Flow(**arguments)
-def safe_load_flow_from_entrypoint(entrypoint: str) -> Optional[Flow]:
+def safe_load_flow_from_entrypoint(entrypoint: str) -> Optional[Flow[P, Any]]:
"""
Load a flow from an entrypoint and return None if an exception is raised.
@@ -2084,8 +2084,8 @@ def safe_load_flow_from_entrypoint(entrypoint: str) -> Optional[Flow]:
def _sanitize_and_load_flow(
- func_def: Union[ast.FunctionDef, ast.AsyncFunctionDef], namespace: Dict[str, Any]
-) -> Optional[Flow]:
+ func_def: Union[ast.FunctionDef, ast.AsyncFunctionDef], namespace: dict[str, Any]
+) -> Optional[Flow[P, Any]]:
"""
Attempt to load a flow from the function definition after sanitizing the annotations
and defaults that can't be compiled.
@@ -2122,7 +2122,7 @@ def _sanitize_and_load_flow(
arg.annotation = None
# Remove defaults that can't be compiled
- new_defaults = []
+ new_defaults: list[Any] = []
for default in func_def.args.defaults:
try:
code = compile(ast.Expression(default), "", "eval")
@@ -2142,7 +2142,7 @@ def _sanitize_and_load_flow(
func_def.args.defaults = new_defaults
# Remove kw_defaults that can't be compiled
- new_kw_defaults = []
+ new_kw_defaults: list[Any] = []
for default in func_def.args.kw_defaults:
if default is not None:
try:
@@ -2201,8 +2201,8 @@ def _sanitize_and_load_flow(
def load_flow_arguments_from_entrypoint(
- entrypoint: str, arguments: Optional[Union[List[str], Set[str]]] = None
-) -> dict[str, Any]:
+ entrypoint: str, arguments: Optional[Union[list[str], set[str]]] = None
+) -> dict[Hashable, Any]:
"""
Extract flow arguments from an entrypoint string.
@@ -2235,7 +2235,7 @@ def load_flow_arguments_from_entrypoint(
"log_prints",
}
- result = {}
+ result: dict[Hashable, Any] = {}
for decorator in func_def.decorator_list:
if (
diff --git a/src/prefect/task_runners.py b/src/prefect/task_runners.py
index 734bcf73b560..497e34c1fbf5 100644
--- a/src/prefect/task_runners.py
+++ b/src/prefect/task_runners.py
@@ -97,9 +97,9 @@ def submit(
def map(
self,
- task: "Task",
+ task: "Task[P, R]",
parameters: Dict[str, Any],
- wait_for: Optional[Iterable[PrefectFuture]] = None,
+ wait_for: Optional[Iterable[PrefectFuture[R]]] = None,
) -> PrefectFutureList[F]:
"""
Submit multiple tasks to the task run engine.
diff --git a/src/prefect/tasks.py b/src/prefect/tasks.py
index cc271ec226c2..784deec2813d 100644
--- a/src/prefect/tasks.py
+++ b/src/prefect/tasks.py
@@ -1179,7 +1179,7 @@ def map(
self: "Task[P, R]",
*args: Any,
return_state: Literal[True],
- wait_for: Optional[Iterable[Union[PrefectFuture[T], T]]] = ...,
+ wait_for: Optional[Iterable[Union[PrefectFuture[R], R]]] = ...,
deferred: bool = ...,
**kwargs: Any,
) -> List[State[R]]:
@@ -1189,7 +1189,7 @@ def map(
def map(
self: "Task[P, R]",
*args: Any,
- wait_for: Optional[Iterable[Union[PrefectFuture[T], T]]] = ...,
+ wait_for: Optional[Iterable[Union[PrefectFuture[R], R]]] = ...,
deferred: bool = ...,
**kwargs: Any,
) -> PrefectFutureList[R]:
@@ -1200,7 +1200,7 @@ def map(
self: "Task[P, R]",
*args: Any,
return_state: Literal[True],
- wait_for: Optional[Iterable[Union[PrefectFuture[T], T]]] = ...,
+ wait_for: Optional[Iterable[Union[PrefectFuture[R], R]]] = ...,
deferred: bool = ...,
**kwargs: Any,
) -> List[State[R]]:
@@ -1210,7 +1210,7 @@ def map(
def map(
self: "Task[P, R]",
*args: Any,
- wait_for: Optional[Iterable[Union[PrefectFuture[T], T]]] = ...,
+ wait_for: Optional[Iterable[Union[PrefectFuture[R], R]]] = ...,
deferred: bool = ...,
**kwargs: Any,
) -> PrefectFutureList[R]:
@@ -1221,7 +1221,7 @@ def map(
self: "Task[P, Coroutine[Any, Any, R]]",
*args: Any,
return_state: Literal[True],
- wait_for: Optional[Iterable[Union[PrefectFuture[T], T]]] = ...,
+ wait_for: Optional[Iterable[Union[PrefectFuture[R], R]]] = ...,
deferred: bool = ...,
**kwargs: Any,
) -> List[State[R]]:
@@ -1232,7 +1232,7 @@ def map(
self: "Task[P, Coroutine[Any, Any, R]]",
*args: Any,
return_state: Literal[False],
- wait_for: Optional[Iterable[Union[PrefectFuture[T], T]]] = ...,
+ wait_for: Optional[Iterable[Union[PrefectFuture[R], R]]] = ...,
deferred: bool = ...,
**kwargs: Any,
) -> PrefectFutureList[R]:
@@ -1242,10 +1242,10 @@ def map(
self,
*args: Any,
return_state: bool = False,
- wait_for: Optional[Iterable[Union[PrefectFuture[T], T]]] = None,
+ wait_for: Optional[Iterable[Union[PrefectFuture[R], R]]] = None,
deferred: bool = False,
**kwargs: Any,
- ):
+ ) -> Union[List[State[R]], PrefectFutureList[R]]:
"""
Submit a mapped run of the task to a worker.
@@ -1394,7 +1394,7 @@ def map(
" execution."
)
if return_state:
- states = []
+ states: list[State[R]] = []
for future in futures:
future.wait()
states.append(future.state)
@@ -1406,9 +1406,9 @@ def apply_async(
self,
args: Optional[Tuple[Any, ...]] = None,
kwargs: Optional[Dict[str, Any]] = None,
- wait_for: Optional[Iterable[PrefectFuture]] = None,
+ wait_for: Optional[Iterable[PrefectFuture[R]]] = None,
dependencies: Optional[Dict[str, Set[TaskRunInput]]] = None,
- ) -> PrefectDistributedFuture:
+ ) -> PrefectDistributedFuture[R]:
"""
Create a pending task run for a task worker to execute.
@@ -1606,16 +1606,18 @@ def task(
timeout_seconds: Union[int, float, None] = None,
log_prints: Optional[bool] = None,
refresh_cache: Optional[bool] = None,
- on_completion: Optional[List[Callable[["Task", TaskRun, State], None]]] = None,
- on_failure: Optional[List[Callable[["Task", TaskRun, State], None]]] = None,
- retry_condition_fn: Optional[Callable[["Task", TaskRun, State], bool]] = None,
+ on_completion: Optional[
+ List[Callable[["Task[P, R]", TaskRun, State], None]]
+ ] = None,
+ on_failure: Optional[List[Callable[["Task[P, R]", TaskRun, State], None]]] = None,
+ retry_condition_fn: Optional[Callable[["Task[P, R]", TaskRun, State], bool]] = None,
viz_return_value: Any = None,
) -> Callable[[Callable[P, R]], Task[P, R]]:
...
def task(
- __fn=None,
+ __fn: Optional[Callable[P, R]] = None,
*,
name: Optional[str] = None,
description: Optional[str] = None,
@@ -1642,9 +1644,11 @@ def task(
timeout_seconds: Union[int, float, None] = None,
log_prints: Optional[bool] = None,
refresh_cache: Optional[bool] = None,
- on_completion: Optional[List[Callable[["Task", TaskRun, State], None]]] = None,
- on_failure: Optional[List[Callable[["Task", TaskRun, State], None]]] = None,
- retry_condition_fn: Optional[Callable[["Task", TaskRun, State], bool]] = None,
+ on_completion: Optional[
+ List[Callable[["Task[P, R]", TaskRun, State], None]]
+ ] = None,
+ on_failure: Optional[List[Callable[["Task[P, R]", TaskRun, State], None]]] = None,
+ retry_condition_fn: Optional[Callable[["Task[P, R]", TaskRun, State], bool]] = None,
viz_return_value: Any = None,
):
"""
diff --git a/src/prefect/utilities/visualization.py b/src/prefect/utilities/visualization.py
index 0a6bd73b2774..29349ac1c006 100644
--- a/src/prefect/utilities/visualization.py
+++ b/src/prefect/utilities/visualization.py
@@ -19,7 +19,7 @@ class VisualizationUnsupportedError(Exception):
class TaskVizTrackerState:
- current = None
+ current: Optional["TaskVizTracker"] = None
class GraphvizImportError(Exception):
@@ -37,7 +37,7 @@ def get_task_viz_tracker():
def track_viz_task(
is_async: bool,
task_name: str,
- parameters: dict,
+ parameters: dict[str, Any],
viz_return_value: Optional[Any] = None,
):
"""Return a result if sync otherwise return a coroutine that returns the result"""
@@ -50,14 +50,14 @@ def track_viz_task(
def _track_viz_task(
- task_name,
- parameters,
- viz_return_value=None,
+ task_name: str,
+ parameters: dict[str, Any],
+ viz_return_value: Optional[Any] = None,
) -> Any:
task_run_tracker = get_task_viz_tracker()
if task_run_tracker:
- upstream_tasks = []
- for k, v in parameters.items():
+ upstream_tasks: list[VizTask] = []
+ for _, v in parameters.items():
if isinstance(v, VizTask):
upstream_tasks.append(v)
# if it's an object that we've already seen,
@@ -93,9 +93,9 @@ def __init__(
class TaskVizTracker:
def __init__(self):
- self.tasks = []
- self.dynamic_task_counter = {}
- self.object_id_to_task = {}
+ self.tasks: list[VizTask] = []
+ self.dynamic_task_counter: dict[str, int] = {}
+ self.object_id_to_task: dict[int, VizTask] = {}
def add_task(self, task: VizTask):
if task.name not in self.dynamic_task_counter:
@@ -110,7 +110,7 @@ def __enter__(self):
TaskVizTrackerState.current = self
return self
- def __exit__(self, exc_type, exc_val, exc_tb):
+ def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any):
TaskVizTrackerState.current = None
def link_viz_return_value_to_viz_task(
@@ -151,9 +151,9 @@ def build_task_dependencies(task_run_tracker: TaskVizTracker):
try:
g = graphviz.Digraph()
for task in task_run_tracker.tasks:
- g.node(task.name)
+ g.node(task.name) # type: ignore[reportUnknownMemberType]
for upstream in task.upstream_tasks:
- g.edge(upstream.name, task.name)
+ g.edge(upstream.name, task.name) # type: ignore[reportUnknownMemberType]
return g
except ImportError as exc:
raise GraphvizImportError from exc
@@ -184,7 +184,7 @@ def visualize_task_dependencies(graph: graphviz.Digraph, flow_run_name: str):
specifying a `viz_return_value`.
"""
try:
- graph.render(filename=flow_run_name, view=True, format="png", cleanup=True)
+ graph.render(filename=flow_run_name, view=True, format="png", cleanup=True) # type: ignore[reportUnknownMemberType]
except graphviz.backend.ExecutableNotFound as exc:
msg = (
"It appears you do not have Graphviz installed, or it is not on your "
From 64879322ee7a343fec91315668ee5bec6f75d7a7 Mon Sep 17 00:00:00 2001
From: nate nowack
Date: Fri, 6 Dec 2024 14:20:37 -0600
Subject: [PATCH 27/92] use `uv` to install dependencies in the `Dockerfile`
(#16235)
---
Dockerfile | 54 ++++++++++++++++++++++++++++--------------------------
1 file changed, 28 insertions(+), 26 deletions(-)
diff --git a/Dockerfile b/Dockerfile
index 0f6b678dfaf7..75abd96e188b 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -14,7 +14,7 @@ ARG NODE_VERSION=16.15
ARG EXTRA_PIP_PACKAGES=""
# Build the UI distributable.
-FROM node:${NODE_VERSION}-bullseye-slim as ui-builder
+FROM node:${NODE_VERSION}-bullseye-slim AS ui-builder
WORKDIR /opt/ui
@@ -61,7 +61,7 @@ RUN mv "dist/$(python setup.py --fullname).tar.gz" "dist/prefect.tar.gz"
# Setup a base final image from miniconda
-FROM continuumio/miniconda3 as prefect-conda
+FROM continuumio/miniconda3 AS prefect-conda
# Create a new conda environment with our required Python version
ARG PYTHON_VERSION
@@ -76,49 +76,51 @@ SHELL ["/bin/bash", "--login", "-c"]
# Build the final image with Prefect installed and our entrypoint configured
-FROM ${BASE_IMAGE} as final
+FROM ${BASE_IMAGE} AS final
-ENV LC_ALL C.UTF-8
-ENV LANG C.UTF-8
+ENV LC_ALL=C.UTF-8
+ENV LANG=C.UTF-8
-LABEL maintainer="help@prefect.io"
-LABEL io.prefect.python-version=${PYTHON_VERSION}
-LABEL org.label-schema.schema-version = "1.0"
-LABEL org.label-schema.name="prefect"
-LABEL org.label-schema.url="https://www.prefect.io/"
+LABEL maintainer="help@prefect.io" \
+ io.prefect.python-version=${PYTHON_VERSION} \
+ org.label-schema.schema-version="1.0" \
+ org.label-schema.name="prefect" \
+ org.label-schema.url="https://www.prefect.io/"
WORKDIR /opt/prefect
-# Install requirements
-# - tini: Used in the entrypoint
-# - build-essential: Required for Python dependencies without wheels
-# - git: Required for retrieving workflows from git sources
+# Install system requirements
RUN apt-get update && \
apt-get install --no-install-recommends -y \
tini=0.19.* \
build-essential \
git=1:2.* \
+ curl \
+ ca-certificates \
&& apt-get clean && rm -rf /var/lib/apt/lists/*
-# Pin the pip version
-RUN python -m pip install --no-cache-dir pip==24.2
+# Install UV from official image
+COPY --from=ghcr.io/astral-sh/uv:latest /uv /uvx /bin/
-# Install the base requirements separately so they cache
-COPY requirements-client.txt requirements.txt ./
-RUN pip install --upgrade --upgrade-strategy eager --no-cache-dir -r requirements.txt
+# Install dependencies using a temporary mount for requirements files
+RUN --mount=type=bind,source=requirements-client.txt,target=/tmp/requirements-client.txt \
+ --mount=type=bind,source=requirements.txt,target=/tmp/requirements.txt \
+ uv pip install --system -r /tmp/requirements.txt
# Install prefect from the sdist
COPY --from=python-builder /opt/prefect/dist ./dist
-# Extras to include during `pip install`. Must be wrapped in brackets, e.g. "[dev]"
-ARG PREFECT_EXTRAS=${PREFECT_EXTRAS:-""}
-RUN pip install --no-cache-dir "./dist/prefect.tar.gz${PREFECT_EXTRAS}"
+# Extras to include during installation
+ARG PREFECT_EXTRAS
+RUN uv pip install --system "./dist/prefect.tar.gz${PREFECT_EXTRAS:-""}" && \
+ rm -rf dist/
-# Remove setuptools from the image
-RUN pip uninstall -y setuptools
+# Remove setuptools
+RUN uv pip uninstall --system setuptools
-ARG EXTRA_PIP_PACKAGES=${EXTRA_PIP_PACKAGES:-""}
-RUN [ -z "${EXTRA_PIP_PACKAGES}" ] || pip install --no-cache-dir "${EXTRA_PIP_PACKAGES}"
+# Install any extra packages
+ARG EXTRA_PIP_PACKAGES
+RUN [ -z "${EXTRA_PIP_PACKAGES:-""}" ] || uv pip install --system "${EXTRA_PIP_PACKAGES}"
# Smoke test
RUN prefect version
From 37baf01c28f8e49bd947f083c53beb7fd91e0b55 Mon Sep 17 00:00:00 2001
From: Chris Pickett
Date: Fri, 6 Dec 2024 15:40:15 -0500
Subject: [PATCH 28/92] Raise informative errors when failing to bootstrap
telemetry (#16253)
---
src/prefect/telemetry/bootstrap.py | 17 +++++++++++++++--
1 file changed, 15 insertions(+), 2 deletions(-)
diff --git a/src/prefect/telemetry/bootstrap.py b/src/prefect/telemetry/bootstrap.py
index 89aedad095f3..0f6fe656cf48 100644
--- a/src/prefect/telemetry/bootstrap.py
+++ b/src/prefect/telemetry/bootstrap.py
@@ -23,10 +23,23 @@ def setup_telemetry() -> (
if server_type != ServerType.CLOUD:
return None, None, None
- assert settings.api.key
+ if not settings.api.key:
+ raise ValueError(
+ "A Prefect Cloud API key is required to enable telemetry. Please set "
+ "the `PREFECT_API_KEY` environment variable or authenticate with "
+ "Prefect Cloud via the `prefect cloud login` command."
+ )
+
assert settings.api.url
# This import is here to defer importing of the `opentelemetry` packages.
- from .instrumentation import setup_exporters
+ try:
+ from .instrumentation import setup_exporters
+ except ImportError as exc:
+ raise ValueError(
+ "Unable to import OpenTelemetry instrumentation libraries. Please "
+ "ensure you have installed the `otel` extra when installing Prefect: "
+ "`pip install 'prefect[otel]'`"
+ ) from exc
return setup_exporters(settings.api.url, settings.api.key.get_secret_value())
From 745b4d2762dbff1b3f447f6c1457024fe8ab2f6f Mon Sep 17 00:00:00 2001
From: nate nowack
Date: Fri, 6 Dec 2024 16:25:17 -0600
Subject: [PATCH 29/92] fruitful typing scouring (#16255)
---
src/prefect/automations.py | 100 ++++++++++++++++--------------
src/prefect/cache_policies.py | 22 +++----
src/prefect/context.py | 49 ++++++++-------
src/prefect/settings/constants.py | 4 +-
src/prefect/settings/legacy.py | 2 +-
src/prefect/utilities/hashing.py | 13 ++--
tests/test_automations.py | 10 +++
7 files changed, 112 insertions(+), 88 deletions(-)
diff --git a/src/prefect/automations.py b/src/prefect/automations.py
index 86799ec59cd6..a37c5a3a45dd 100644
--- a/src/prefect/automations.py
+++ b/src/prefect/automations.py
@@ -1,10 +1,10 @@
-from typing import Optional
+from typing import Optional, Type
from uuid import UUID
from pydantic import Field
from typing_extensions import Self
-from prefect.client.utilities import get_or_create_client
+from prefect.client.orchestration import get_client
from prefect.events.actions import (
CallWebhook,
CancelFlowRun,
@@ -99,10 +99,10 @@ async def create(self: Self) -> Self:
)
created_automation = auto_to_create.create()
"""
- client, _ = get_or_create_client()
- automation = AutomationCore(**self.model_dump(exclude={"id"}))
- self.id = await client.create_automation(automation=automation)
- return self
+ async with get_client() as client:
+ automation = AutomationCore(**self.model_dump(exclude={"id"}))
+ self.id = await client.create_automation(automation=automation)
+ return self
@sync_compatible
async def update(self: Self):
@@ -112,15 +112,16 @@ async def update(self: Self):
auto.name = "new name"
auto.update()
"""
-
- client, _ = get_or_create_client()
- automation = AutomationCore(**self.model_dump(exclude={"id", "owner_resource"}))
- await client.update_automation(automation_id=self.id, automation=automation)
+ async with get_client() as client:
+ automation = AutomationCore(
+ **self.model_dump(exclude={"id", "owner_resource"})
+ )
+ await client.update_automation(automation_id=self.id, automation=automation)
@classmethod
@sync_compatible
async def read(
- cls: Self, id: Optional[UUID] = None, name: Optional[str] = None
+ cls: Type[Self], id: Optional[UUID] = None, name: Optional[str] = None
) -> Self:
"""
Read an automation by ID or name.
@@ -134,20 +135,25 @@ async def read(
raise ValueError("Only one of id or name can be provided")
if not id and not name:
raise ValueError("One of id or name must be provided")
- client, _ = get_or_create_client()
- if id:
- try:
- automation = await client.read_automation(automation_id=id)
- except PrefectHTTPStatusError as exc:
- if exc.response.status_code == 404:
+ async with get_client() as client:
+ if id:
+ try:
+ automation = await client.read_automation(automation_id=id)
+ except PrefectHTTPStatusError as exc:
+ if exc.response.status_code == 404:
+ raise ValueError(f"Automation with ID {id!r} not found")
+ raise
+ if automation is None:
raise ValueError(f"Automation with ID {id!r} not found")
- return Automation(**automation.model_dump())
- else:
- automation = await client.read_automations_by_name(name=name)
- if len(automation) > 0:
- return Automation(**automation[0].model_dump()) if automation else None
+ return Automation(**automation.model_dump())
else:
- raise ValueError(f"Automation with name {name!r} not found")
+ automation = await client.read_automations_by_name(name=name)
+ if len(automation) > 0:
+ return (
+ Automation(**automation[0].model_dump()) if automation else None
+ )
+ else:
+ raise ValueError(f"Automation with name {name!r} not found")
@sync_compatible
async def delete(self: Self) -> bool:
@@ -155,14 +161,14 @@ async def delete(self: Self) -> bool:
auto = Automation.read(id = 123)
auto.delete()
"""
- try:
- client, _ = get_or_create_client()
- await client.delete_automation(self.id)
- return True
- except PrefectHTTPStatusError as exc:
- if exc.response.status_code == 404:
- return False
- raise
+ async with get_client() as client:
+ try:
+ await client.delete_automation(self.id)
+ return True
+ except PrefectHTTPStatusError as exc:
+ if exc.response.status_code == 404:
+ return False
+ raise
@sync_compatible
async def disable(self: Self) -> bool:
@@ -171,14 +177,14 @@ async def disable(self: Self) -> bool:
auto = Automation.read(id = 123)
auto.disable()
"""
- try:
- client, _ = get_or_create_client()
- await client.pause_automation(self.id)
- return True
- except PrefectHTTPStatusError as exc:
- if exc.response.status_code == 404:
- return False
- raise
+ async with get_client() as client:
+ try:
+ await client.pause_automation(self.id)
+ return True
+ except PrefectHTTPStatusError as exc:
+ if exc.response.status_code == 404:
+ return False
+ raise
@sync_compatible
async def enable(self: Self) -> bool:
@@ -187,11 +193,11 @@ async def enable(self: Self) -> bool:
auto = Automation.read(id = 123)
auto.enable()
"""
- try:
- client, _ = get_or_create_client()
- await client.resume_automation("asd")
- return True
- except PrefectHTTPStatusError as exc:
- if exc.response.status_code == 404:
- return False
- raise
+ async with get_client() as client:
+ try:
+ await client.resume_automation(self.id)
+ return True
+ except PrefectHTTPStatusError as exc:
+ if exc.response.status_code == 404:
+ return False
+ raise
diff --git a/src/prefect/cache_policies.py b/src/prefect/cache_policies.py
index 50717e5ceaea..746f8561cdfa 100644
--- a/src/prefect/cache_policies.py
+++ b/src/prefect/cache_policies.py
@@ -75,12 +75,12 @@ def compute_key(
task_ctx: TaskRunContext,
inputs: Dict[str, Any],
flow_parameters: Dict[str, Any],
- **kwargs,
+ **kwargs: Any,
) -> Optional[str]:
raise NotImplementedError
def __sub__(self, other: str) -> "CachePolicy":
- if not isinstance(other, str):
+ if not isinstance(other, str): # type: ignore[reportUnnecessaryIsInstance]
raise TypeError("Can only subtract strings from key policies.")
new = Inputs(exclude=[other])
return CompoundCachePolicy(policies=[self, new])
@@ -140,7 +140,7 @@ def compute_key(
task_ctx: TaskRunContext,
inputs: Dict[str, Any],
flow_parameters: Dict[str, Any],
- **kwargs,
+ **kwargs: Any,
) -> Optional[str]:
if self.cache_key_fn:
return self.cache_key_fn(task_ctx, inputs)
@@ -162,9 +162,9 @@ def compute_key(
task_ctx: TaskRunContext,
inputs: Dict[str, Any],
flow_parameters: Dict[str, Any],
- **kwargs,
+ **kwargs: Any,
) -> Optional[str]:
- keys = []
+ keys: list[str] = []
for policy in self.policies:
policy_key = policy.compute_key(
task_ctx=task_ctx,
@@ -191,7 +191,7 @@ def compute_key(
task_ctx: TaskRunContext,
inputs: Dict[str, Any],
flow_parameters: Dict[str, Any],
- **kwargs,
+ **kwargs: Any,
) -> Optional[str]:
return None
@@ -211,7 +211,7 @@ def compute_key(
task_ctx: TaskRunContext,
inputs: Optional[Dict[str, Any]],
flow_parameters: Optional[Dict[str, Any]],
- **kwargs,
+ **kwargs: Any,
) -> Optional[str]:
if not task_ctx:
return None
@@ -238,7 +238,7 @@ def compute_key(
task_ctx: TaskRunContext,
inputs: Dict[str, Any],
flow_parameters: Dict[str, Any],
- **kwargs,
+ **kwargs: Any,
) -> Optional[str]:
if not flow_parameters:
return None
@@ -257,7 +257,7 @@ def compute_key(
task_ctx: TaskRunContext,
inputs: Dict[str, Any],
flow_parameters: Dict[str, Any],
- **kwargs,
+ **kwargs: Any,
) -> Optional[str]:
if not task_ctx:
return None
@@ -280,7 +280,7 @@ def compute_key(
task_ctx: TaskRunContext,
inputs: Dict[str, Any],
flow_parameters: Dict[str, Any],
- **kwargs,
+ **kwargs: Any,
) -> Optional[str]:
hashed_inputs = {}
inputs = inputs or {}
@@ -307,7 +307,7 @@ def compute_key(
raise ValueError(msg) from exc
def __sub__(self, other: str) -> "CachePolicy":
- if not isinstance(other, str):
+ if not isinstance(other, str): # type: ignore[reportUnnecessaryIsInstance]
raise TypeError("Can only subtract strings from key policies.")
return Inputs(exclude=self.exclude + [other])
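Note on the `__sub__` overloads annotated above: they are what power Prefect's cache-policy arithmetic, where subtracting a key name excludes that input from the computed cache key and adding policies composes them into a CompoundCachePolicy. A minimal usage sketch, assuming the documented INPUTS and TASK_SOURCE policy constants:

    from prefect.cache_policies import INPUTS, TASK_SOURCE

    # Subtracting a key name returns a CompoundCachePolicy that excludes
    # that input from the computed cache key.
    policy = INPUTS - "password"

    # Adding policies also composes them; compute_key() combines each
    # member policy's key before hashing.
    policy = policy + TASK_SOURCE

The `# type: ignore[reportUnnecessaryIsInstance]` comments keep the runtime guard in place for callers who do not run a type checker.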
diff --git a/src/prefect/context.py b/src/prefect/context.py
index 675812de85c2..69c14ce4fdb0 100644
--- a/src/prefect/context.py
+++ b/src/prefect/context.py
@@ -43,7 +43,9 @@
get_default_persist_setting_for_tasks,
)
from prefect.settings import Profile, Settings
-from prefect.settings.legacy import _get_settings_fields
+from prefect.settings.legacy import (
+ _get_settings_fields, # type: ignore[reportPrivateUsage]
+)
from prefect.states import State
from prefect.task_runners import TaskRunner
from prefect.utilities.services import start_client_metrics_server
@@ -200,14 +202,14 @@ class SyncClientContext(ContextModel):
assert c1 is ctx.client
"""
- __var__ = ContextVar("sync-client-context")
+ __var__: ContextVar[Self] = ContextVar("sync-client-context")
client: SyncPrefectClient
_httpx_settings: Optional[dict[str, Any]] = PrivateAttr(None)
_context_stack: int = PrivateAttr(0)
def __init__(self, httpx_settings: Optional[dict[str, Any]] = None):
super().__init__(
- client=get_client(sync_client=True, httpx_settings=httpx_settings),
+ client=get_client(sync_client=True, httpx_settings=httpx_settings), # type: ignore[reportCallIssue]
)
self._httpx_settings = httpx_settings
self._context_stack = 0
@@ -221,11 +223,11 @@ def __enter__(self):
else:
return self
- def __exit__(self, *exc_info):
+ def __exit__(self, *exc_info: Any):
self._context_stack -= 1
if self._context_stack == 0:
- self.client.__exit__(*exc_info)
- return super().__exit__(*exc_info)
+ self.client.__exit__(*exc_info) # type: ignore[reportUnknownMemberType]
+ return super().__exit__(*exc_info) # type: ignore[reportUnknownMemberType]
@classmethod
@contextmanager
@@ -265,12 +267,12 @@ class AsyncClientContext(ContextModel):
def __init__(self, httpx_settings: Optional[dict[str, Any]] = None):
super().__init__(
- client=get_client(sync_client=False, httpx_settings=httpx_settings),
+ client=get_client(sync_client=False, httpx_settings=httpx_settings), # type: ignore[reportCallIssue]
)
self._httpx_settings = httpx_settings
self._context_stack = 0
- async def __aenter__(self):
+ async def __aenter__(self: Self) -> Self:
self._context_stack += 1
if self._context_stack == 1:
await self.client.__aenter__()
@@ -279,11 +281,11 @@ async def __aenter__(self):
else:
return self
- async def __aexit__(self, *exc_info):
+ async def __aexit__(self: Self, *exc_info: Any) -> None:
self._context_stack -= 1
if self._context_stack == 0:
- await self.client.__aexit__(*exc_info)
- return super().__exit__(*exc_info)
+ await self.client.__aexit__(*exc_info) # type: ignore[reportUnknownMemberType]
+ return super().__exit__(*exc_info) # type: ignore[reportUnknownMemberType]
@classmethod
@asynccontextmanager
@@ -306,7 +308,7 @@ class RunContext(ContextModel):
client: The Prefect client instance being used for API communication
"""
- def __init__(self, *args, **kwargs):
+ def __init__(self, *args: Any, **kwargs: Any):
super().__init__(*args, **kwargs)
start_client_metrics_server()
@@ -315,10 +317,11 @@ def __init__(self, *args, **kwargs):
input_keyset: Optional[Dict[str, Dict[str, str]]] = None
client: Union[PrefectClient, SyncPrefectClient]
- def serialize(self: Self) -> Dict[str, Any]:
+ def serialize(self: Self, include_secrets: bool = True) -> Dict[str, Any]:
return self.model_dump(
include={"start_time", "input_keyset"},
exclude_unset=True,
+ context={"include_secrets": include_secrets},
)
@@ -364,9 +367,9 @@ class EngineContext(RunContext):
# Events worker to emit events
events: Optional[EventsWorker] = None
- __var__: ContextVar = ContextVar("flow_run")
+ __var__: ContextVar[Self] = ContextVar("flow_run")
- def serialize(self):
+ def serialize(self: Self, include_secrets: bool = True) -> Dict[str, Any]:
return self.model_dump(
include={
"flow_run",
@@ -380,6 +383,7 @@ def serialize(self):
},
exclude_unset=True,
serialize_as_any=True,
+ context={"include_secrets": include_secrets},
)
@@ -396,7 +400,7 @@ class TaskRunContext(RunContext):
task_run: The API metadata for this task run
"""
- task: "Task"
+ task: "Task[Any, Any]"
task_run: TaskRun
log_prints: bool = False
parameters: Dict[str, Any]
@@ -407,7 +411,7 @@ class TaskRunContext(RunContext):
__var__ = ContextVar("task_run")
- def serialize(self):
+ def serialize(self: Self, include_secrets: bool = True) -> Dict[str, Any]:
return self.model_dump(
include={
"task_run",
@@ -421,6 +425,7 @@ def serialize(self):
},
exclude_unset=True,
serialize_as_any=True,
+ context={"include_secrets": include_secrets},
)
@@ -439,7 +444,7 @@ def get(cls) -> "TagsContext":
# Return an empty `TagsContext` instead of `None` if no context exists
return cls.__var__.get(TagsContext())
- __var__: ContextVar = ContextVar("tags")
+ __var__: ContextVar[Self] = ContextVar("tags")
class SettingsContext(ContextModel):
@@ -456,9 +461,9 @@ class SettingsContext(ContextModel):
profile: Profile
settings: Settings
- __var__: ContextVar = ContextVar("settings")
+ __var__: ContextVar[Self] = ContextVar("settings")
- def __hash__(self) -> int:
+ def __hash__(self: Self) -> int:
return hash(self.settings)
@classmethod
@@ -565,7 +570,7 @@ def tags(*new_tags: str) -> Generator[Set[str], None, None]:
@contextmanager
def use_profile(
- profile: Union[Profile, str],
+ profile: Union[Profile, str, Any],
override_environment_variables: bool = False,
include_current_context: bool = True,
):
@@ -665,7 +670,7 @@ def root_settings_context():
# an override in the `SettingsContext.get` method.
-GLOBAL_SETTINGS_CONTEXT: SettingsContext = root_settings_context()
+GLOBAL_SETTINGS_CONTEXT: SettingsContext = root_settings_context() # type: ignore[reportConstantRedefinition]
# 2024-07-02: This surfaces an actionable error message for removed objects
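The recurring `__var__: ContextVar[Self]` annotations above pin down a previously untyped pattern: each context class owns a class-level ContextVar holding the active instance. A self-contained sketch of that pattern (not Prefect's actual ContextModel, which is a Pydantic model):

    from contextvars import ContextVar, Token
    from typing import Any, Optional

    class MiniContext:
        # Class-level ContextVar holding the active instance; typing it as
        # ContextVar["MiniContext"] lets checkers infer get() correctly.
        __var__: ContextVar["MiniContext"] = ContextVar("mini-context")

        def __enter__(self) -> "MiniContext":
            self._token: Token["MiniContext"] = self.__var__.set(self)
            return self

        def __exit__(self, *exc_info: Any) -> None:
            self.__var__.reset(self._token)

        @classmethod
        def get(cls) -> Optional["MiniContext"]:
            return cls.__var__.get(None)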
diff --git a/src/prefect/settings/constants.py b/src/prefect/settings/constants.py
index ac7520492b61..70d00ccd9394 100644
--- a/src/prefect/settings/constants.py
+++ b/src/prefect/settings/constants.py
@@ -1,8 +1,8 @@
from pathlib import Path
-from typing import Tuple, Type
+from typing import Any, Tuple, Type
from pydantic import Secret, SecretStr
DEFAULT_PREFECT_HOME = Path.home() / ".prefect"
DEFAULT_PROFILES_PATH = Path(__file__).parent.joinpath("profiles.toml")
-_SECRET_TYPES: Tuple[Type, ...] = (Secret, SecretStr)
+_SECRET_TYPES: Tuple[Type[Any], ...] = (Secret, SecretStr)
diff --git a/src/prefect/settings/legacy.py b/src/prefect/settings/legacy.py
index 17f76e3f1404..6bc496fe1aee 100644
--- a/src/prefect/settings/legacy.py
+++ b/src/prefect/settings/legacy.py
@@ -8,7 +8,7 @@
from typing_extensions import Self
from prefect.settings.base import PrefectBaseSettings
-from prefect.settings.constants import _SECRET_TYPES
+from prefect.settings.constants import _SECRET_TYPES # type: ignore[reportPrivateUsage]
from prefect.settings.context import get_current_settings
from prefect.settings.models.root import Settings
diff --git a/src/prefect/utilities/hashing.py b/src/prefect/utilities/hashing.py
index 2724cb38c3f4..b31a60609164 100644
--- a/src/prefect/utilities/hashing.py
+++ b/src/prefect/utilities/hashing.py
@@ -2,7 +2,7 @@
import sys
from functools import partial
from pathlib import Path
-from typing import Optional, Union
+from typing import Any, Callable, Optional, Union
import cloudpickle
@@ -15,7 +15,7 @@
_md5 = hashlib.md5
-def stable_hash(*args: Union[str, bytes], hash_algo=_md5) -> str:
+def stable_hash(*args: Union[str, bytes], hash_algo: Callable[..., Any] = _md5) -> str:
"""Given some arguments, produces a stable 64-bit hash of their contents.
Supports bytes and strings. Strings will be UTF-8 encoded.
@@ -35,7 +35,7 @@ def stable_hash(*args: Union[str, bytes], hash_algo=_md5) -> str:
return h.hexdigest()
-def file_hash(path: str, hash_algo=_md5) -> str:
+def file_hash(path: str, hash_algo: Callable[..., Any] = _md5) -> str:
"""Given a path to a file, produces a stable hash of the file contents.
Args:
@@ -50,7 +50,10 @@ def file_hash(path: str, hash_algo=_md5) -> str:
def hash_objects(
- *args, hash_algo=_md5, raise_on_failure: bool = False, **kwargs
+ *args: Any,
+ hash_algo: Callable[..., Any] = _md5,
+ raise_on_failure: bool = False,
+ **kwargs: Any,
) -> Optional[str]:
"""
Attempt to hash objects by dumping to JSON or serializing with cloudpickle.
@@ -77,7 +80,7 @@ def hash_objects(
json_error = str(e)
try:
- return stable_hash(cloudpickle.dumps((args, kwargs)), hash_algo=hash_algo)
+ return stable_hash(cloudpickle.dumps((args, kwargs)), hash_algo=hash_algo) # type: ignore[reportUnknownMemberType]
except Exception as e:
pickle_error = str(e)
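The loosened `hash_algo: Callable[..., Any]` annotations match how these helpers are actually called: with any hashlib-style constructor. A short usage sketch based on the signatures above:

    import hashlib

    from prefect.utilities.hashing import hash_objects, stable_hash

    # Strings are UTF-8 encoded before hashing; bytes pass through as-is.
    key = stable_hash("task-name", b"\x00\x01")

    # hash_objects tries JSON first, then falls back to cloudpickle; any
    # hashlib-style callable can be passed as hash_algo.
    key2 = hash_objects({"x": 1}, [1, 2, 3], hash_algo=hashlib.sha256)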
diff --git a/tests/test_automations.py b/tests/test_automations.py
index c9efa5e45eb2..96781cf99e18 100644
--- a/tests/test_automations.py
+++ b/tests/test_automations.py
@@ -183,3 +183,13 @@ async def test_nonexistent_id_raises_value_error():
async def test_nonexistent_name_raises_value_error():
with pytest.raises(ValueError):
await Automation.read(name="nonexistent_name")
+
+
+async def test_disabled_automation_can_be_enabled(
+ prefect_client, automation: Automation
+):
+ await automation.disable()
+ await automation.enable()
+
+ updated_automation = await Automation.read(id=automation.id)
+ assert updated_automation.enabled is True
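Taken together, this patch replaces the leaked `get_or_create_client()` handle with a scoped `async with get_client()` block, so the underlying HTTP client is always closed, and fixes the hard-coded `"asd"` argument in `enable` to use `self.id`. Because these methods are `@sync_compatible`, callers can stay synchronous; a brief sketch, assuming the public prefect.automations module path:

    from prefect.automations import Automation

    auto = Automation.read(name="my-automation")  # sync call into async code
    auto.disable()  # returns False instead of raising if the automation is gone
    auto.enable()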
From 3b5bbea9f6a2740ce7ef3481f07666d54cdbab9b Mon Sep 17 00:00:00 2001
From: Devin Villarosa <102188207+devinvillarosa@users.noreply.github.com>
Date: Fri, 6 Dec 2024 14:34:22 -0800
Subject: [PATCH 30/92] [UI v2] Adds Create or edit dialog for concurrency
limit (#16248)
---
ui-v2/package-lock.json | 30 ++++
ui-v2/package.json | 1 +
.../concurrency/concurrency-tabs.tsx | 1 -
.../create-or-edit-limit-dialog/index.tsx | 128 ++++++++++++++++++
.../use-create-or-edit-limit-form.ts | 121 +++++++++++++++++
.../global-concurrency-view/index.tsx | 39 ++++--
ui-v2/src/components/ui/switch.tsx | 29 ++++
7 files changed, 334 insertions(+), 15 deletions(-)
create mode 100644 ui-v2/src/components/concurrency/global-concurrency-view/create-or-edit-limit-dialog/index.tsx
create mode 100644 ui-v2/src/components/concurrency/global-concurrency-view/create-or-edit-limit-dialog/use-create-or-edit-limit-form.ts
create mode 100644 ui-v2/src/components/ui/switch.tsx
diff --git a/ui-v2/package-lock.json b/ui-v2/package-lock.json
index ba67571b4e1f..97803c3c4501 100644
--- a/ui-v2/package-lock.json
+++ b/ui-v2/package-lock.json
@@ -22,6 +22,7 @@
"@radix-ui/react-select": "^2.1.2",
"@radix-ui/react-separator": "^1.1.0",
"@radix-ui/react-slot": "^1.1.0",
+ "@radix-ui/react-switch": "^1.1.1",
"@radix-ui/react-tabs": "^1.1.1",
"@radix-ui/react-toast": "^1.2.2",
"@radix-ui/react-tooltip": "^1.1.3",
@@ -2377,6 +2378,35 @@
}
}
},
+ "node_modules/@radix-ui/react-switch": {
+ "version": "1.1.1",
+ "resolved": "https://registry.npmjs.org/@radix-ui/react-switch/-/react-switch-1.1.1.tgz",
+ "integrity": "sha512-diPqDDoBcZPSicYoMWdWx+bCPuTRH4QSp9J+65IvtdS0Kuzt67bI6n32vCj8q6NZmYW/ah+2orOtMwcX5eQwIg==",
+ "license": "MIT",
+ "dependencies": {
+ "@radix-ui/primitive": "1.1.0",
+ "@radix-ui/react-compose-refs": "1.1.0",
+ "@radix-ui/react-context": "1.1.1",
+ "@radix-ui/react-primitive": "2.0.0",
+ "@radix-ui/react-use-controllable-state": "1.1.0",
+ "@radix-ui/react-use-previous": "1.1.0",
+ "@radix-ui/react-use-size": "1.1.0"
+ },
+ "peerDependencies": {
+ "@types/react": "*",
+ "@types/react-dom": "*",
+ "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc",
+ "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc"
+ },
+ "peerDependenciesMeta": {
+ "@types/react": {
+ "optional": true
+ },
+ "@types/react-dom": {
+ "optional": true
+ }
+ }
+ },
"node_modules/@radix-ui/react-tabs": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/@radix-ui/react-tabs/-/react-tabs-1.1.1.tgz",
diff --git a/ui-v2/package.json b/ui-v2/package.json
index 46fe5dbd6922..43c190b4969b 100644
--- a/ui-v2/package.json
+++ b/ui-v2/package.json
@@ -31,6 +31,7 @@
"@radix-ui/react-select": "^2.1.2",
"@radix-ui/react-separator": "^1.1.0",
"@radix-ui/react-slot": "^1.1.0",
+ "@radix-ui/react-switch": "^1.1.1",
"@radix-ui/react-tabs": "^1.1.1",
"@radix-ui/react-toast": "^1.2.2",
"@radix-ui/react-tooltip": "^1.1.3",
diff --git a/ui-v2/src/components/concurrency/concurrency-tabs.tsx b/ui-v2/src/components/concurrency/concurrency-tabs.tsx
index 2c7fdd94d6b9..004ed63ab452 100644
--- a/ui-v2/src/components/concurrency/concurrency-tabs.tsx
+++ b/ui-v2/src/components/concurrency/concurrency-tabs.tsx
@@ -54,7 +54,6 @@ export const ConcurrencyTabs = ({
>
{TAB_OPTIONS.global.displayValue}
-
{
diff --git a/ui-v2/src/components/concurrency/global-concurrency-view/create-or-edit-limit-dialog/index.tsx b/ui-v2/src/components/concurrency/global-concurrency-view/create-or-edit-limit-dialog/index.tsx
new file mode 100644
index 000000000000..5e737dda25d3
--- /dev/null
+++ b/ui-v2/src/components/concurrency/global-concurrency-view/create-or-edit-limit-dialog/index.tsx
@@ -0,0 +1,128 @@
+import { Button } from "@/components/ui/button";
+import {
+ Dialog,
+ DialogContent,
+ DialogFooter,
+ DialogHeader,
+ DialogTitle,
+ DialogTrigger,
+} from "@/components/ui/dialog";
+import {
+ Form,
+ FormControl,
+ FormField,
+ FormItem,
+ FormLabel,
+ FormMessage,
+} from "@/components/ui/form";
+import { Input } from "@/components/ui/input";
+import { Switch } from "@/components/ui/switch";
+import { type GlobalConcurrencyLimit } from "@/hooks/global-concurrency-limits";
+
+import { useCreateOrEditLimitForm } from "./use-create-or-edit-limit-form";
+
+type Props = {
+ limitToUpdate: undefined | GlobalConcurrencyLimit;
+ onOpenChange: (open: boolean) => void;
+ onSubmit: () => void;
+ open: boolean;
+};
+
+export const CreateOrEditLimitDialog = ({
+ limitToUpdate,
+ onOpenChange,
+ onSubmit,
+ open,
+}: Props) => {
+ const { form, isLoading, saveOrUpdate } = useCreateOrEditLimitForm({
+ limitToUpdate,
+ onSubmit,
+ });
+
+ const dialogTitle = limitToUpdate
+ ? `Update ${limitToUpdate.name}`
+ : "Add Concurrency Limit";
+
+ return (
+ <Dialog open={open} onOpenChange={onOpenChange}>
+ {/* dialog body elided in source: header, form fields for the limit, and footer actions */}
+ </Dialog>
+ );
+};
diff --git a/ui-v2/src/components/concurrency/global-concurrency-view/create-or-edit-limit-dialog/use-create-or-edit-limit-form.ts b/ui-v2/src/components/concurrency/global-concurrency-view/create-or-edit-limit-dialog/use-create-or-edit-limit-form.ts
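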
new file mode 100644
index 000000000000..a55ec64fb518
--- /dev/null
+++ b/ui-v2/src/components/concurrency/global-concurrency-view/create-or-edit-limit-dialog/use-create-or-edit-limit-form.ts
@@ -0,0 +1,121 @@
+import {
+ GlobalConcurrencyLimit,
+ useCreateGlobalConcurrencyLimit,
+ useUpdateGlobalConcurrencyLimit,
+} from "@/hooks/global-concurrency-limits";
+import { useToast } from "@/hooks/use-toast";
+import { zodResolver } from "@hookform/resolvers/zod";
+import { useEffect } from "react";
+import { useForm } from "react-hook-form";
+import { z } from "zod";
+
+const formSchema = z.object({
+ active: z.boolean().default(true),
+ /** Coerce to solve common issue of transforming a string number to a number type */
+ denied_slots: z.number().default(0).or(z.string()).pipe(z.coerce.number()),
+ /** Coerce to solve common issue of transforming a string number to a number type */
+ limit: z.number().default(0).or(z.string()).pipe(z.coerce.number()),
+ name: z
+ .string()
+ .min(2, { message: "Name must be at least 2 characters" })
+ .default(""),
+ /** Coerce to solve common issue of transforming a string number to a number type */
+ slot_decay_per_second: z
+ .number()
+ .default(0)
+ .or(z.string())
+ .pipe(z.coerce.number()),
+ /** Additional fields post creation. Coerce to solve common issue of transforming a string number to a number type */
+ active_slots: z.number().default(0).or(z.string()).pipe(z.coerce.number()),
+});
+
+const DEFAULT_VALUES = {
+ active: true,
+ name: "",
+ limit: 0,
+ slot_decay_per_second: 0,
+ denied_slots: 0,
+ active_slots: 0,
+} as const;
+
+type useCreateOrEditLimitForm = {
+ /** Limit to edit. Pass undefined if creating a new limit */
+ limitToUpdate: GlobalConcurrencyLimit | undefined;
+ /** Callback after hitting Save or Update */
+ onSubmit: () => void;
+};
+
+export const useCreateOrEditLimitForm = ({
+ limitToUpdate,
+ onSubmit,
+}: useCreateOrEditLimitForm) => {
+ const { toast } = useToast();
+
+ const { createGlobalConcurrencyLimit, status: createStatus } =
+ useCreateGlobalConcurrencyLimit();
+ const { updateGlobalConcurrencyLimit, status: updateStatus } =
+ useUpdateGlobalConcurrencyLimit();
+
+ const form = useForm<z.infer<typeof formSchema>>({
+ resolver: zodResolver(formSchema),
+ defaultValues: DEFAULT_VALUES,
+ });
+
+ // Sync form data with limit-to-edit data
+ useEffect(() => {
+ if (limitToUpdate) {
+ const { active, name, limit, slot_decay_per_second, active_slots } =
+ limitToUpdate;
+ form.reset({ active, name, limit, slot_decay_per_second, active_slots });
+ } else {
+ form.reset(DEFAULT_VALUES);
+ }
+ }, [form, limitToUpdate]);
+
+ const saveOrUpdate = (values: z.infer<typeof formSchema>) => {
+ const onSettled = () => {
+ form.reset(DEFAULT_VALUES);
+ onSubmit();
+ };
+
+ if (limitToUpdate?.id) {
+ updateGlobalConcurrencyLimit(
+ {
+ id_or_name: limitToUpdate.id,
+ ...values,
+ },
+ {
+ onSuccess: () => {
+ toast({ title: "Limit updated" });
+ },
+ onError: (error) => {
+ const message =
+ error.message || "Unknown error while updating limit.";
+ form.setError("root", { message });
+ },
+ onSettled,
+ },
+ );
+ } else {
+ createGlobalConcurrencyLimit(values, {
+ onSuccess: () => {
+ toast({ title: "Limit created" });
+ },
+ onError: (error) => {
+ const message =
+ error.message || "Unknown error while creating limit.";
+ form.setError("root", {
+ message,
+ });
+ },
+ onSettled,
+ });
+ }
+ };
+
+ return {
+ form,
+ saveOrUpdate,
+ isLoading: createStatus === "pending" || updateStatus === "pending",
+ };
+};
diff --git a/ui-v2/src/components/concurrency/global-concurrency-view/index.tsx b/ui-v2/src/components/concurrency/global-concurrency-view/index.tsx
index 033d5fb162ed..4f14513692f3 100644
--- a/ui-v2/src/components/concurrency/global-concurrency-view/index.tsx
+++ b/ui-v2/src/components/concurrency/global-concurrency-view/index.tsx
@@ -1,28 +1,39 @@
+import { Flex } from "@/components/ui/flex";
import { useListGlobalConcurrencyLimits } from "@/hooks/global-concurrency-limits";
import { useState } from "react";
-
+import { CreateOrEditLimitDialog } from "./create-or-edit-limit-dialog";
+import { GlobalConcurrencyLimitEmptyState } from "./global-concurrency-limit-empty-state";
import { GlobalConcurrencyLimitsHeader } from "./global-concurrency-limits-header";
export const GlobalConcurrencyView = () => {
- const [showAddDialog, setShowAddDialog] = useState(false);
+ const [openDialog, setOpenDialog] = useState(false);
const { data } = useListGlobalConcurrencyLimits();
- const openAddDialog = () => setShowAddDialog(true);
- const closeAddDialog = () => setShowAddDialog(false);
+ const openAddDialog = () => setOpenDialog(true);
+ const closeAddDialog = () => setOpenDialog(false);
return (
<>
-
-
-
- TODO
-
- {data.map((limit) => (
- - {JSON.stringify(limit)}
- ))}
-
- {showAddDialog && TODO: DIALOG
}
+ {data.length === 0 ? (
+
+ ) : (
+
+
+ TODO
+
+ {data.map((limit) => (
+ - {JSON.stringify(limit)}
+ ))}
+
+
+ )}
+
>
);
};
diff --git a/ui-v2/src/components/ui/switch.tsx b/ui-v2/src/components/ui/switch.tsx
new file mode 100644
index 000000000000..e20fb378e4ed
--- /dev/null
+++ b/ui-v2/src/components/ui/switch.tsx
@@ -0,0 +1,29 @@
+import * as SwitchPrimitives from "@radix-ui/react-switch";
+import * as React from "react";
+
+import { cn } from "@/lib/utils";
+
+const Switch = React.forwardRef<
+ React.ElementRef<typeof SwitchPrimitives.Root>,
+ React.ComponentPropsWithoutRef<typeof SwitchPrimitives.Root> & {
+ className: string;
+ }
+>(({ className, ...props }, ref) => (
+ <SwitchPrimitives.Root className={cn(className)} {...props} ref={ref}>
+ {/* styling class strings elided in source */}
+ <SwitchPrimitives.Thumb />
+ </SwitchPrimitives.Root>
+));
+Switch.displayName = SwitchPrimitives.Root.displayName;
+
+export { Switch };
From 794054aabe747778fa59fa3bdac4e977112bc999 Mon Sep 17 00:00:00 2001
From: Adam Azzam <33043305+aaazzam@users.noreply.github.com>
Date: Fri, 6 Dec 2024 18:40:16 -0500
Subject: [PATCH 31/92] [typing] highlighters.py (#16259)
---
src/prefect/logging/highlighters.py | 3 +--
1 file changed, 1 insertion(+), 2 deletions(-)
diff --git a/src/prefect/logging/highlighters.py b/src/prefect/logging/highlighters.py
index 7b4bd1da2752..b842f7c95240 100644
--- a/src/prefect/logging/highlighters.py
+++ b/src/prefect/logging/highlighters.py
@@ -45,8 +45,7 @@ class StateHighlighter(RegexHighlighter):
base_style = "state."
highlights = [
- rf"(?P<{state.value.lower()}_state>{state.value.title()})"
- for state in StateType
+ rf"(?P<{state.lower()}_state>{state.title()})" for state in StateType
] + [
r"(?PCached)(?=\(type=COMPLETED\))" # Highlight only "Cached"
]
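The simplification works because StateType is a string-backed enum, so `state.lower()` and `state.title()` operate on the member value directly. For reference, a self-contained sketch of the Rich highlighter pattern being generated here (illustrative group names and theme, not Prefect's exact ones):

    from rich.console import Console
    from rich.highlighter import RegexHighlighter
    from rich.theme import Theme

    class StateLikeHighlighter(RegexHighlighter):
        base_style = "state."  # each named group maps to style "state.<group>"
        highlights = [
            r"(?P<completed_state>Completed)",
            r"(?P<failed_state>Failed)",
        ]

    console = Console(
        highlighter=StateLikeHighlighter(),
        theme=Theme({"state.completed_state": "green", "state.failed_state": "red"}),
    )
    console.print("Run Completed; retry Failed")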
From 87fb87f37de626ef28f5b8b31438a26e1dea9b50 Mon Sep 17 00:00:00 2001
From: Adam Azzam <33043305+aaazzam@users.noreply.github.com>
Date: Fri, 6 Dec 2024 18:40:50 -0500
Subject: [PATCH 32/92] [typing] Update migration.py (#16261)
---
src/prefect/_internal/compatibility/migration.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/src/prefect/_internal/compatibility/migration.py b/src/prefect/_internal/compatibility/migration.py
index f160990d77dc..f39739f2c9df 100644
--- a/src/prefect/_internal/compatibility/migration.py
+++ b/src/prefect/_internal/compatibility/migration.py
@@ -86,7 +86,7 @@
# See src/prefect/filesystems.py for an example
-def import_string_class_method(new_location: str) -> Callable:
+def import_string_class_method(new_location: str) -> Callable[..., Any]:
"""
Handle moved class methods.
From 1c30778b15b89a6da1a7efa17e26cd3cfedee610 Mon Sep 17 00:00:00 2001
From: Devin Villarosa <102188207+devinvillarosa@users.noreply.github.com>
Date: Sat, 7 Dec 2024 12:50:39 -0800
Subject: [PATCH 33/92] [UI v2] chore: Running react-19 codemods (#16264)
---
ui-v2/src/components/concurrency/concurrency-page.tsx | 2 ++
ui-v2/src/components/concurrency/concurrency-tabs.tsx | 1 +
ui-v2/src/components/flows/detail/index.tsx | 5 +++--
ui-v2/src/components/ui/docs-link.tsx | 2 ++
ui-v2/src/components/ui/empty-state.stories.tsx | 2 ++
ui-v2/src/components/ui/empty-state.tsx | 1 +
ui-v2/src/components/ui/icons/icons.stories.tsx | 1 +
ui-v2/src/hooks/use-is-overflowing.ts | 2 +-
8 files changed, 13 insertions(+), 3 deletions(-)
diff --git a/ui-v2/src/components/concurrency/concurrency-page.tsx b/ui-v2/src/components/concurrency/concurrency-page.tsx
index 045a890c6d00..62b1826925f5 100644
--- a/ui-v2/src/components/concurrency/concurrency-page.tsx
+++ b/ui-v2/src/components/concurrency/concurrency-page.tsx
@@ -1,3 +1,5 @@
+import type { JSX } from "react";
+
import { Flex } from "@/components/ui/flex";
import { Typography } from "@/components/ui/typography";
diff --git a/ui-v2/src/components/concurrency/concurrency-tabs.tsx b/ui-v2/src/components/concurrency/concurrency-tabs.tsx
index 004ed63ab452..49fa4d3b9760 100644
--- a/ui-v2/src/components/concurrency/concurrency-tabs.tsx
+++ b/ui-v2/src/components/concurrency/concurrency-tabs.tsx
@@ -1,6 +1,7 @@
import { Tabs, TabsContent, TabsList, TabsTrigger } from "@/components/ui/tabs";
import { TabOptions } from "@/routes/concurrency-limits";
import { getRouteApi } from "@tanstack/react-router";
+import type { JSX } from "react";
const routeApi = getRouteApi("/concurrency-limits");
diff --git a/ui-v2/src/components/flows/detail/index.tsx b/ui-v2/src/components/flows/detail/index.tsx
index 89ffdd99ba1b..6708d891cb51 100644
--- a/ui-v2/src/components/flows/detail/index.tsx
+++ b/ui-v2/src/components/flows/detail/index.tsx
@@ -1,7 +1,7 @@
-import { components } from "@/api/prefect";
import { DataTable } from "@/components/ui/data-table";
import { Tabs, TabsContent, TabsList, TabsTrigger } from "@/components/ui/tabs";
import { useNavigate } from "@tanstack/react-router";
+import { JSX } from "react";
import { columns as deploymentColumns } from "./deployment-columns";
import {
getFlowMetadata,
@@ -9,6 +9,7 @@ import {
} from "./metadata-columns";
import { columns as flowRunColumns } from "./runs-columns";
+import { components } from "@/api/prefect";
import { Button } from "@/components/ui/button";
import {
DropdownMenu,
@@ -99,7 +100,7 @@ export default function FlowDetail({
activity: components["schemas"]["FlowRun"][];
deployments: components["schemas"]["DeploymentResponse"][];
tab: "runs" | "deployments" | "details";
-}): React.ReactElement {
+}): JSX.Element {
const navigate = useNavigate();
console.log(activity);
diff --git a/ui-v2/src/components/ui/docs-link.tsx b/ui-v2/src/components/ui/docs-link.tsx
index 48cb63576219..9d37b979c837 100644
--- a/ui-v2/src/components/ui/docs-link.tsx
+++ b/ui-v2/src/components/ui/docs-link.tsx
@@ -1,3 +1,5 @@
+import type { JSX } from "react";
+
import { Icon } from "./icons";
import { Button } from "./button";
diff --git a/ui-v2/src/components/ui/empty-state.stories.tsx b/ui-v2/src/components/ui/empty-state.stories.tsx
index dcbe9ea20647..8627903be026 100644
--- a/ui-v2/src/components/ui/empty-state.stories.tsx
+++ b/ui-v2/src/components/ui/empty-state.stories.tsx
@@ -1,3 +1,5 @@
+import type { JSX } from "react";
+
import type { Meta, StoryObj } from "@storybook/react";
import { Icon } from "@/components/ui/icons";
diff --git a/ui-v2/src/components/ui/empty-state.tsx b/ui-v2/src/components/ui/empty-state.tsx
index 65ec02571350..cedab6cc58be 100644
--- a/ui-v2/src/components/ui/empty-state.tsx
+++ b/ui-v2/src/components/ui/empty-state.tsx
@@ -1,4 +1,5 @@
import { Card, CardContent } from "@/components/ui/card";
+import type { JSX } from "react";
import { Icon, type IconId } from "./icons";
const EmptyStateIcon = ({ id }: { id: IconId }): JSX.Element => {
diff --git a/ui-v2/src/components/ui/icons/icons.stories.tsx b/ui-v2/src/components/ui/icons/icons.stories.tsx
index fb9d144b7f89..ccd653d40b82 100644
--- a/ui-v2/src/components/ui/icons/icons.stories.tsx
+++ b/ui-v2/src/components/ui/icons/icons.stories.tsx
@@ -1,5 +1,6 @@
import { Label } from "@/components/ui/label";
import type { Meta, StoryObj } from "@storybook/react";
+import type { JSX } from "react";
import { ICONS, type IconId } from "./constants";
import { Icon } from "./icon";
diff --git a/ui-v2/src/hooks/use-is-overflowing.ts b/ui-v2/src/hooks/use-is-overflowing.ts
index a2890c14f70b..6b999659c613 100644
--- a/ui-v2/src/hooks/use-is-overflowing.ts
+++ b/ui-v2/src/hooks/use-is-overflowing.ts
@@ -1,6 +1,6 @@
import { type RefObject, useState, useEffect } from "react";
-export const useIsOverflowing = (ref: RefObject<HTMLElement>) => {
+export const useIsOverflowing = (ref: RefObject<HTMLElement | null>) => {
const [isOverflowing, setIsOverflowing] = useState(false);
useEffect(() => {
if (ref.current) {
From e80e8a9cbcda5986c0292d6bc5f9fc34b2538517 Mon Sep 17 00:00:00 2001
From: Devin Villarosa <102188207+devinvillarosa@users.noreply.github.com>
Date: Sat, 7 Dec 2024 18:12:25 -0800
Subject: [PATCH 34/92] [UI v2] feat: Adding basic table for global concurrency
limit (#16266)
---
.../data-table/data-table.tsx | 37 +++++++++++++++++++
.../data-table/index.ts | 1 +
.../global-concurrency-view/index.tsx | 13 ++-----
ui-v2/src/routes/concurrency-limits.tsx | 8 ++--
4 files changed, 45 insertions(+), 14 deletions(-)
create mode 100644 ui-v2/src/components/concurrency/global-concurrency-view/data-table/data-table.tsx
create mode 100644 ui-v2/src/components/concurrency/global-concurrency-view/data-table/index.ts
diff --git a/ui-v2/src/components/concurrency/global-concurrency-view/data-table/data-table.tsx b/ui-v2/src/components/concurrency/global-concurrency-view/data-table/data-table.tsx
new file mode 100644
index 000000000000..4645d6c5e4bf
--- /dev/null
+++ b/ui-v2/src/components/concurrency/global-concurrency-view/data-table/data-table.tsx
@@ -0,0 +1,37 @@
+import { DataTable } from "@/components/ui/data-table";
+import { type GlobalConcurrencyLimit } from "@/hooks/global-concurrency-limits";
+import {
+ createColumnHelper,
+ getCoreRowModel,
+ useReactTable,
+} from "@tanstack/react-table";
+
+const columnHelper = createColumnHelper<GlobalConcurrencyLimit>();
+const columns = [
+ columnHelper.accessor("name", {
+ header: "Name",
+ }),
+ columnHelper.accessor("limit", {
+ header: "Limit",
+ }),
+ columnHelper.accessor("active_slots", {
+ header: "Active Slots",
+ }),
+ columnHelper.accessor("slot_decay_per_second", {
+ header: "Slots Decay Per Second",
+ }),
+];
+
+type Props = {
+ data: Array<GlobalConcurrencyLimit>;
+};
+
+export const GlobalConcurrencyDataTable = ({ data }: Props) => {
+ const table = useReactTable({
+ data,
+ columns,
+ getCoreRowModel: getCoreRowModel(),
+ });
+
+ return <DataTable table={table} />;
+};
diff --git a/ui-v2/src/components/concurrency/global-concurrency-view/data-table/index.ts b/ui-v2/src/components/concurrency/global-concurrency-view/data-table/index.ts
new file mode 100644
index 000000000000..efe2adf76286
--- /dev/null
+++ b/ui-v2/src/components/concurrency/global-concurrency-view/data-table/index.ts
@@ -0,0 +1 @@
+export { GlobalConcurrencyDataTable } from "./data-table";
diff --git a/ui-v2/src/components/concurrency/global-concurrency-view/index.tsx b/ui-v2/src/components/concurrency/global-concurrency-view/index.tsx
index 4f14513692f3..53174ee0ad09 100644
--- a/ui-v2/src/components/concurrency/global-concurrency-view/index.tsx
+++ b/ui-v2/src/components/concurrency/global-concurrency-view/index.tsx
@@ -1,7 +1,7 @@
-import { Flex } from "@/components/ui/flex";
import { useListGlobalConcurrencyLimits } from "@/hooks/global-concurrency-limits";
import { useState } from "react";
import { CreateOrEditLimitDialog } from "./create-or-edit-limit-dialog";
+import { GlobalConcurrencyDataTable } from "./data-table";
import { GlobalConcurrencyLimitEmptyState } from "./global-concurrency-limit-empty-state";
import { GlobalConcurrencyLimitsHeader } from "./global-concurrency-limits-header";
@@ -18,15 +18,10 @@ export const GlobalConcurrencyView = () => {
{data.length === 0 ? (
) : (
-
+
-
TODO
-
- {data.map((limit) => (
- - {JSON.stringify(limit)}
- ))}
-
-
+
+
)}
["tab"];
From 24c7f082dbe99fb09e56349473320f1828dae1c3 Mon Sep 17 00:00:00 2001
From: Martijn Pieters
Date: Sun, 8 Dec 2024 02:24:58 +0000
Subject: [PATCH 35/92] [typing] prefect.client (#16265)
---
src/prefect/_internal/schemas/validators.py | 3 +-
src/prefect/client/__init__.py | 4 +-
src/prefect/client/base.py | 55 +-
src/prefect/client/cloud.py | 36 +-
src/prefect/client/orchestration.py | 572 ++++++++++----------
src/prefect/client/schemas/__init__.py | 24 +
src/prefect/client/schemas/actions.py | 246 +++++----
src/prefect/client/schemas/objects.py | 187 ++++---
src/prefect/client/schemas/responses.py | 36 +-
src/prefect/client/schemas/schedules.py | 229 ++++----
src/prefect/client/subscriptions.py | 16 +-
src/prefect/client/utilities.py | 72 ++-
src/prefect/main.py | 33 +-
13 files changed, 816 insertions(+), 697 deletions(-)
diff --git a/src/prefect/_internal/schemas/validators.py b/src/prefect/_internal/schemas/validators.py
index 9bda7fc5edff..cff72820e19b 100644
--- a/src/prefect/_internal/schemas/validators.py
+++ b/src/prefect/_internal/schemas/validators.py
@@ -13,6 +13,7 @@
from copy import copy
from pathlib import Path
from typing import TYPE_CHECKING, Any, Dict, List, Mapping, Optional, Tuple, Union
+from uuid import UUID
import jsonschema
import pendulum
@@ -653,7 +654,7 @@ def validate_message_template_variables(v: Optional[str]) -> Optional[str]:
return v
-def validate_default_queue_id_not_none(v: Optional[str]) -> Optional[str]:
+def validate_default_queue_id_not_none(v: Optional[UUID]) -> UUID:
if v is None:
raise ValueError(
"`default_queue_id` is a required field. If you are "
diff --git a/src/prefect/client/__init__.py b/src/prefect/client/__init__.py
index 5d2fc25a2a9f..df0bfd34dcab 100644
--- a/src/prefect/client/__init__.py
+++ b/src/prefect/client/__init__.py
@@ -16,6 +16,8 @@
"""
+from collections.abc import Callable
+from typing import Any
from prefect._internal.compatibility.migration import getattr_migration
-__getattr__ = getattr_migration(__name__)
+__getattr__: Callable[[str], Any] = getattr_migration(__name__)
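`getattr_migration` installs a module-level `__getattr__` (PEP 562) so that lookups of moved or removed names resolve to a redirect or an actionable error instead of an ImportError. A minimal sketch of the mechanism with a hypothetical mapping (not Prefect's actual table):

    import importlib
    from typing import Any

    _MOVED = {"OldClient": "prefect.client.orchestration:PrefectClient"}

    def __getattr__(name: str) -> Any:
        if name in _MOVED:
            module_path, _, attr = _MOVED[name].partition(":")
            return getattr(importlib.import_module(module_path), attr)
        raise AttributeError(f"module {__name__!r} has no attribute {name!r}")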
diff --git a/src/prefect/client/base.py b/src/prefect/client/base.py
index 5071387668aa..7eed8a92a497 100644
--- a/src/prefect/client/base.py
+++ b/src/prefect/client/base.py
@@ -4,22 +4,11 @@
import time
import uuid
from collections import defaultdict
+from collections.abc import AsyncGenerator, Awaitable, MutableMapping
from contextlib import asynccontextmanager
from datetime import datetime, timezone
-from typing import (
- Any,
- AsyncGenerator,
- Awaitable,
- Callable,
- Dict,
- MutableMapping,
- Optional,
- Protocol,
- Set,
- Tuple,
- Type,
- runtime_checkable,
-)
+from logging import Logger
+from typing import TYPE_CHECKING, Any, Callable, Optional, Protocol, runtime_checkable
import anyio
import httpx
@@ -46,14 +35,14 @@
# Datastores for lifespan management, keys should be a tuple of thread and app
# identities.
-APP_LIFESPANS: Dict[Tuple[int, int], LifespanManager] = {}
-APP_LIFESPANS_REF_COUNTS: Dict[Tuple[int, int], int] = {}
+APP_LIFESPANS: dict[tuple[int, int], LifespanManager] = {}
+APP_LIFESPANS_REF_COUNTS: dict[tuple[int, int], int] = {}
# Blocks concurrent access to the above dicts per thread. The index should be the thread
# identity.
-APP_LIFESPANS_LOCKS: Dict[int, anyio.Lock] = defaultdict(anyio.Lock)
+APP_LIFESPANS_LOCKS: dict[int, anyio.Lock] = defaultdict(anyio.Lock)
-logger = get_logger("client")
+logger: Logger = get_logger("client")
# Define ASGI application types for type checking
@@ -174,9 +163,9 @@ def raise_for_status(self) -> Response:
raise PrefectHTTPStatusError.from_httpx_error(exc) from exc.__cause__
@classmethod
- def from_httpx_response(cls: Type[Self], response: httpx.Response) -> Response:
+ def from_httpx_response(cls: type[Self], response: httpx.Response) -> Response:
"""
- Create a `PrefectReponse` from an `httpx.Response`.
+ Create a `PrefectResponse` from an `httpx.Response`.
By changing the `__class__` attribute of the Response, we change the method
resolution order to look for methods defined in PrefectResponse, while leaving
@@ -222,10 +211,10 @@ async def _send_with_retry(
self,
request: Request,
send: Callable[[Request], Awaitable[Response]],
- send_args: Tuple[Any, ...],
- send_kwargs: Dict[str, Any],
- retry_codes: Set[int] = set(),
- retry_exceptions: Tuple[Type[Exception], ...] = tuple(),
+ send_args: tuple[Any, ...],
+ send_kwargs: dict[str, Any],
+ retry_codes: set[int] = set(),
+ retry_exceptions: tuple[type[Exception], ...] = tuple(),
):
"""
Send a request and retry it if it fails.
@@ -240,6 +229,11 @@ async def _send_with_retry(
try_count = 0
response = None
+ if TYPE_CHECKING:
+ # older httpx versions type method as str | bytes | Unknown
+ # but in reality it is always a string.
+ assert isinstance(request.method, str) # type: ignore
+
is_change_request = request.method.lower() in {"post", "put", "patch", "delete"}
if self.enable_csrf_support and is_change_request:
@@ -436,10 +430,10 @@ def _send_with_retry(
self,
request: Request,
send: Callable[[Request], Response],
- send_args: Tuple[Any, ...],
- send_kwargs: Dict[str, Any],
- retry_codes: Set[int] = set(),
- retry_exceptions: Tuple[Type[Exception], ...] = tuple(),
+ send_args: tuple[Any, ...],
+ send_kwargs: dict[str, Any],
+ retry_codes: set[int] = set(),
+ retry_exceptions: tuple[type[Exception], ...] = tuple(),
):
"""
Send a request and retry it if it fails.
@@ -454,6 +448,11 @@ def _send_with_retry(
try_count = 0
response = None
+ if TYPE_CHECKING:
+ # older httpx versions type method as str | bytes | Unknown
+ # but in reality it is always a string.
+ assert isinstance(request.method, str) # type: ignore
+
is_change_request = request.method.lower() in {"post", "put", "patch", "delete"}
if self.enable_csrf_support and is_change_request:
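The `if TYPE_CHECKING: assert isinstance(...)` blocks added above are a zero-cost narrowing trick: the block never runs, but type checkers evaluate it and narrow the type accordingly. A standalone sketch:

    from typing import TYPE_CHECKING, Union

    def method_lower(method: Union[str, bytes]) -> str:
        if TYPE_CHECKING:
            # Seen by the checker only; narrows `method` to str without
            # paying for an isinstance check at runtime.
            assert isinstance(method, str)
        return method.lower()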
diff --git a/src/prefect/client/cloud.py b/src/prefect/client/cloud.py
index 6542393ed4b7..90cae81b0f51 100644
--- a/src/prefect/client/cloud.py
+++ b/src/prefect/client/cloud.py
@@ -1,11 +1,12 @@
import re
-from typing import Any, Dict, List, Optional, cast
+from typing import Any, NoReturn, Optional, cast
from uuid import UUID
import anyio
import httpx
import pydantic
from starlette import status
+from typing_extensions import Self
import prefect.context
import prefect.settings
@@ -30,7 +31,7 @@
def get_cloud_client(
host: Optional[str] = None,
api_key: Optional[str] = None,
- httpx_settings: Optional[Dict[str, Any]] = None,
+ httpx_settings: Optional[dict[str, Any]] = None,
infer_cloud_url: bool = False,
) -> "CloudClient":
"""
@@ -62,11 +63,14 @@ class CloudUnauthorizedError(PrefectException):
class CloudClient:
+ account_id: Optional[str] = None
+ workspace_id: Optional[str] = None
+
def __init__(
self,
host: str,
api_key: str,
- httpx_settings: Optional[Dict[str, Any]] = None,
+ httpx_settings: Optional[dict[str, Any]] = None,
) -> None:
httpx_settings = httpx_settings or dict()
httpx_settings.setdefault("headers", dict())
@@ -79,7 +83,7 @@ def __init__(
**httpx_settings, enable_csrf_support=False
)
- api_url = prefect.settings.PREFECT_API_URL.value() or ""
+ api_url: str = prefect.settings.PREFECT_API_URL.value() or ""
if match := (
re.search(PARSE_API_URL_REGEX, host)
or re.search(PARSE_API_URL_REGEX, api_url)
@@ -100,7 +104,7 @@ def workspace_base_url(self) -> str:
return f"{self.account_base_url}/workspaces/{self.workspace_id}"
- async def api_healthcheck(self):
+ async def api_healthcheck(self) -> None:
"""
Attempts to connect to the Cloud API and raises the encountered exception if not
successful.
@@ -110,8 +114,8 @@ async def api_healthcheck(self):
with anyio.fail_after(10):
await self.read_workspaces()
- async def read_workspaces(self) -> List[Workspace]:
- workspaces = pydantic.TypeAdapter(List[Workspace]).validate_python(
+ async def read_workspaces(self) -> list[Workspace]:
+ workspaces = pydantic.TypeAdapter(list[Workspace]).validate_python(
await self.get("/me/workspaces")
)
return workspaces
@@ -124,17 +128,17 @@ async def read_current_workspace(self) -> Workspace:
return workspace
raise ValueError("Current workspace not found")
- async def read_worker_metadata(self) -> Dict[str, Any]:
+ async def read_worker_metadata(self) -> dict[str, Any]:
response = await self.get(
f"{self.workspace_base_url}/collections/work_pool_types"
)
- return cast(Dict[str, Any], response)
+ return cast(dict[str, Any], response)
- async def read_account_settings(self) -> Dict[str, Any]:
+ async def read_account_settings(self) -> dict[str, Any]:
response = await self.get(f"{self.account_base_url}/settings")
- return cast(Dict[str, Any], response)
+ return cast(dict[str, Any], response)
- async def update_account_settings(self, settings: Dict[str, Any]):
+ async def update_account_settings(self, settings: dict[str, Any]) -> None:
await self.request(
"PATCH",
f"{self.account_base_url}/settings",
@@ -145,7 +149,7 @@ async def read_account_ip_allowlist(self) -> IPAllowlist:
response = await self.get(f"{self.account_base_url}/ip_allowlist")
return IPAllowlist.model_validate(response)
- async def update_account_ip_allowlist(self, updated_allowlist: IPAllowlist):
+ async def update_account_ip_allowlist(self, updated_allowlist: IPAllowlist) -> None:
await self.request(
"PUT",
f"{self.account_base_url}/ip_allowlist",
@@ -175,20 +179,20 @@ async def update_flow_run_labels(
json=labels,
)
- async def __aenter__(self):
+ async def __aenter__(self) -> Self:
await self._client.__aenter__()
return self
async def __aexit__(self, *exc_info: Any) -> None:
return await self._client.__aexit__(*exc_info)
- def __enter__(self):
+ def __enter__(self) -> NoReturn:
raise RuntimeError(
"The `CloudClient` must be entered with an async context. Use 'async "
"with CloudClient(...)' not 'with CloudClient(...)'"
)
- def __exit__(self, *_):
+ def __exit__(self, *_: object) -> NoReturn:
assert False, "This should never be called but must be defined for __enter__"
async def get(self, route: str, **kwargs: Any) -> Any:
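`CloudClient` is async-only, and this patch types that contract explicitly: `__enter__` returns `NoReturn` because it always raises, and `__exit__` is unreachable but must exist for `__enter__` to be considered. A minimal sketch of the guard pattern:

    from typing import Any, NoReturn

    class AsyncOnlyClient:
        async def __aenter__(self) -> "AsyncOnlyClient":
            return self

        async def __aexit__(self, *exc_info: Any) -> None:
            return None

        def __enter__(self) -> NoReturn:
            # Fail fast with a helpful message if used synchronously.
            raise RuntimeError(
                "AsyncOnlyClient must be entered with 'async with', not 'with'."
            )

        def __exit__(self, *_: object) -> NoReturn:
            assert False, "unreachable; __enter__ always raises"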
diff --git a/src/prefect/client/orchestration.py b/src/prefect/client/orchestration.py
index ab83a4dcbeb9..f244ccde5708 100644
--- a/src/prefect/client/orchestration.py
+++ b/src/prefect/client/orchestration.py
@@ -2,21 +2,10 @@
import datetime
import ssl
import warnings
+from collections.abc import Iterable
from contextlib import AsyncExitStack
-from typing import (
- TYPE_CHECKING,
- Any,
- Dict,
- Iterable,
- List,
- Literal,
- Optional,
- Set,
- Tuple,
- TypeVar,
- Union,
- overload,
-)
+from logging import Logger
+from typing import TYPE_CHECKING, Any, Literal, NoReturn, Optional, Union, overload
from uuid import UUID, uuid4
import certifi
@@ -27,7 +16,7 @@
from asgi_lifespan import LifespanManager
from packaging import version
from starlette import status
-from typing_extensions import ParamSpec
+from typing_extensions import ParamSpec, Self, TypeVar
import prefect
import prefect.exceptions
@@ -152,26 +141,29 @@
)
P = ParamSpec("P")
-R = TypeVar("R")
+R = TypeVar("R", infer_variance=True)
+T = TypeVar("T")
@overload
def get_client(
- httpx_settings: Optional[Dict[str, Any]] = None, sync_client: Literal[False] = False
+ *,
+ httpx_settings: Optional[dict[str, Any]] = ...,
+ sync_client: Literal[False] = False,
) -> "PrefectClient":
...
@overload
def get_client(
- httpx_settings: Optional[Dict[str, Any]] = None, sync_client: Literal[True] = True
+ *, httpx_settings: Optional[dict[str, Any]] = ..., sync_client: Literal[True] = ...
) -> "SyncPrefectClient":
...
def get_client(
- httpx_settings: Optional[Dict[str, Any]] = None, sync_client: bool = False
-):
+ httpx_settings: Optional[dict[str, Any]] = None, sync_client: bool = False
+) -> Union["SyncPrefectClient", "PrefectClient"]:
"""
Retrieve a HTTP client for communicating with the Prefect REST API.
@@ -200,18 +192,21 @@ def get_client(
if sync_client:
if client_ctx := prefect.context.SyncClientContext.get():
- if client_ctx.client and client_ctx._httpx_settings == httpx_settings:
+ if (
+ client_ctx.client
+ and getattr(client_ctx, "_httpx_settings", None) == httpx_settings
+ ):
return client_ctx.client
else:
if client_ctx := prefect.context.AsyncClientContext.get():
if (
client_ctx.client
- and client_ctx._httpx_settings == httpx_settings
- and loop in (client_ctx.client._loop, None)
+ and getattr(client_ctx, "_httpx_settings", None) == httpx_settings
+ and loop in (getattr(client_ctx.client, "_loop", None), None)
):
return client_ctx.client
- api = PREFECT_API_URL.value()
+ api: str = PREFECT_API_URL.value()
server_type = None
if not api and PREFECT_SERVER_ALLOW_EPHEMERAL_MODE:
@@ -277,7 +272,7 @@ def __init__(
*,
api_key: Optional[str] = None,
api_version: Optional[str] = None,
- httpx_settings: Optional[Dict[str, Any]] = None,
+ httpx_settings: Optional[dict[str, Any]] = None,
server_type: Optional[ServerType] = None,
) -> None:
httpx_settings = httpx_settings.copy() if httpx_settings else {}
@@ -357,7 +352,7 @@ def __init__(
)
# Connect to an in-process application
- elif isinstance(api, ASGIApp):
+ else:
self._ephemeral_app = api
self.server_type = ServerType.EPHEMERAL
@@ -377,12 +372,6 @@ def __init__(
)
httpx_settings.setdefault("base_url", "http://ephemeral-prefect/api")
- else:
- raise TypeError(
- f"Unexpected type {type(api).__name__!r} for argument `api`. Expected"
- " 'str' or 'ASGIApp/FastAPI'"
- )
-
# See https://www.python-httpx.org/advanced/#timeout-configuration
httpx_settings.setdefault(
"timeout",
@@ -426,9 +415,9 @@ def __init__(
if isinstance(server_transport, httpx.AsyncHTTPTransport):
pool = getattr(server_transport, "_pool", None)
if isinstance(pool, httpcore.AsyncConnectionPool):
- pool._retries = 3
+ setattr(pool, "_retries", 3)
- self.logger = get_logger("client")
+ self.logger: Logger = get_logger("client")
@property
def api_url(self) -> httpx.URL:
@@ -458,7 +447,7 @@ async def hello(self) -> httpx.Response:
"""
return await self._client.get("/hello")
- async def create_flow(self, flow: "FlowObject") -> UUID:
+ async def create_flow(self, flow: "FlowObject[Any, Any]") -> UUID:
"""
Create a flow in the Prefect API.
@@ -514,16 +503,16 @@ async def read_flow(self, flow_id: UUID) -> Flow:
async def read_flows(
self,
*,
- flow_filter: FlowFilter = None,
- flow_run_filter: FlowRunFilter = None,
- task_run_filter: TaskRunFilter = None,
- deployment_filter: DeploymentFilter = None,
- work_pool_filter: WorkPoolFilter = None,
- work_queue_filter: WorkQueueFilter = None,
- sort: FlowSort = None,
+ flow_filter: Optional[FlowFilter] = None,
+ flow_run_filter: Optional[FlowRunFilter] = None,
+ task_run_filter: Optional[TaskRunFilter] = None,
+ deployment_filter: Optional[DeploymentFilter] = None,
+ work_pool_filter: Optional[WorkPoolFilter] = None,
+ work_queue_filter: Optional[WorkQueueFilter] = None,
+ sort: Optional[FlowSort] = None,
limit: Optional[int] = None,
offset: int = 0,
- ) -> List[Flow]:
+ ) -> list[Flow]:
"""
Query the Prefect API for flows. Only flows matching all criteria will
be returned.
@@ -542,7 +531,7 @@ async def read_flows(
Returns:
a list of Flow model representations of the flows
"""
- body = {
+ body: dict[str, Any] = {
"flows": flow_filter.model_dump(mode="json") if flow_filter else None,
"flow_runs": (
flow_run_filter.model_dump(mode="json", exclude_unset=True)
@@ -567,7 +556,7 @@ async def read_flows(
}
response = await self._client.post("/flows/filter", json=body)
- return pydantic.TypeAdapter(List[Flow]).validate_python(response.json())
+ return pydantic.TypeAdapter(list[Flow]).validate_python(response.json())
async def read_flow_by_name(
self,
@@ -589,15 +578,15 @@ async def create_flow_run_from_deployment(
self,
deployment_id: UUID,
*,
- parameters: Optional[Dict[str, Any]] = None,
- context: Optional[Dict[str, Any]] = None,
- state: Optional[prefect.states.State] = None,
+ parameters: Optional[dict[str, Any]] = None,
+ context: Optional[dict[str, Any]] = None,
+ state: Optional[prefect.states.State[Any]] = None,
name: Optional[str] = None,
tags: Optional[Iterable[str]] = None,
idempotency_key: Optional[str] = None,
parent_task_run_id: Optional[UUID] = None,
work_queue_name: Optional[str] = None,
- job_variables: Optional[Dict[str, Any]] = None,
+ job_variables: Optional[dict[str, Any]] = None,
) -> FlowRun:
"""
Create a flow run for a deployment.
@@ -638,7 +627,7 @@ async def create_flow_run_from_deployment(
parameters=parameters,
context=context,
state=state.to_state_create(),
- tags=tags,
+ tags=list(tags),
name=name,
idempotency_key=idempotency_key,
parent_task_run_id=parent_task_run_id,
@@ -657,13 +646,13 @@ async def create_flow_run_from_deployment(
async def create_flow_run(
self,
- flow: "FlowObject",
+ flow: "FlowObject[Any, R]",
name: Optional[str] = None,
- parameters: Optional[Dict[str, Any]] = None,
- context: Optional[Dict[str, Any]] = None,
+ parameters: Optional[dict[str, Any]] = None,
+ context: Optional[dict[str, Any]] = None,
tags: Optional[Iterable[str]] = None,
parent_task_run_id: Optional[UUID] = None,
- state: Optional["prefect.states.State"] = None,
+ state: Optional["prefect.states.State[R]"] = None,
) -> FlowRun:
"""
Create a flow run for a flow.
@@ -705,7 +694,7 @@ async def create_flow_run(
state=state.to_state_create(),
empirical_policy=FlowRunPolicy(
retries=flow.retries,
- retry_delay=flow.retry_delay_seconds,
+ retry_delay=int(flow.retry_delay_seconds or 0),
),
)
@@ -723,12 +712,12 @@ async def update_flow_run(
self,
flow_run_id: UUID,
flow_version: Optional[str] = None,
- parameters: Optional[dict] = None,
+ parameters: Optional[dict[str, Any]] = None,
name: Optional[str] = None,
tags: Optional[Iterable[str]] = None,
empirical_policy: Optional[FlowRunPolicy] = None,
infrastructure_pid: Optional[str] = None,
- job_variables: Optional[dict] = None,
+ job_variables: Optional[dict[str, Any]] = None,
) -> httpx.Response:
"""
Update a flow run's details.
@@ -749,7 +738,7 @@ async def update_flow_run(
Returns:
an `httpx.Response` object from the PATCH request
"""
- params = {}
+ params: dict[str, Any] = {}
if flow_version is not None:
params["flow_version"] = flow_version
if parameters is not None:
@@ -832,7 +821,7 @@ async def create_concurrency_limit(
async def read_concurrency_limit_by_tag(
self,
tag: str,
- ):
+ ) -> ConcurrencyLimit:
"""
Read the concurrency limit set on a specific tag.
@@ -868,7 +857,7 @@ async def read_concurrency_limits(
self,
limit: int,
offset: int,
- ):
+ ) -> list[ConcurrencyLimit]:
"""
Lists concurrency limits set on task run tags.
@@ -886,15 +875,15 @@ async def read_concurrency_limits(
}
response = await self._client.post("/concurrency_limits/filter", json=body)
- return pydantic.TypeAdapter(List[ConcurrencyLimit]).validate_python(
+ return pydantic.TypeAdapter(list[ConcurrencyLimit]).validate_python(
response.json()
)
async def reset_concurrency_limit_by_tag(
self,
tag: str,
- slot_override: Optional[List[Union[UUID, str]]] = None,
- ):
+ slot_override: Optional[list[Union[UUID, str]]] = None,
+ ) -> None:
"""
Resets the concurrency limit slots set on a specific tag.
@@ -927,7 +916,7 @@ async def reset_concurrency_limit_by_tag(
async def delete_concurrency_limit_by_tag(
self,
tag: str,
- ):
+ ) -> None:
"""
Delete the concurrency limit set on a specific tag.
@@ -951,7 +940,7 @@ async def delete_concurrency_limit_by_tag(
async def increment_v1_concurrency_slots(
self,
- names: List[str],
+ names: list[str],
task_run_id: UUID,
) -> httpx.Response:
"""
@@ -961,7 +950,7 @@ async def increment_v1_concurrency_slots(
names (List[str]): A list of limit names for which to increment limits.
task_run_id (UUID): The task run ID incrementing the limits.
"""
- data = {
+ data: dict[str, Any] = {
"names": names,
"task_run_id": str(task_run_id),
}
@@ -973,7 +962,7 @@ async def increment_v1_concurrency_slots(
async def decrement_v1_concurrency_slots(
self,
- names: List[str],
+ names: list[str],
task_run_id: UUID,
occupancy_seconds: float,
) -> httpx.Response:
@@ -989,7 +978,7 @@ async def decrement_v1_concurrency_slots(
Returns:
httpx.Response: The HTTP response from the server.
"""
- data = {
+ data: dict[str, Any] = {
"names": names,
"task_run_id": str(task_run_id),
"occupancy_seconds": occupancy_seconds,
@@ -1089,7 +1078,7 @@ async def read_work_queue_by_name(
return WorkQueue.model_validate(response.json())
- async def update_work_queue(self, id: UUID, **kwargs):
+ async def update_work_queue(self, id: UUID, **kwargs: Any) -> None:
"""
Update properties of a work queue.
@@ -1119,8 +1108,8 @@ async def get_runs_in_work_queue(
self,
id: UUID,
limit: int = 10,
- scheduled_before: datetime.datetime = None,
- ) -> List[FlowRun]:
+ scheduled_before: Optional[datetime.datetime] = None,
+ ) -> list[FlowRun]:
"""
Read flow runs off a work queue.
@@ -1153,7 +1142,7 @@ async def get_runs_in_work_queue(
raise prefect.exceptions.ObjectNotFound(http_exc=e) from e
else:
raise
- return pydantic.TypeAdapter(List[FlowRun]).validate_python(response.json())
+ return pydantic.TypeAdapter(list[FlowRun]).validate_python(response.json())
async def read_work_queue(
self,
@@ -1209,9 +1198,9 @@ async def read_work_queue_status(
async def match_work_queues(
self,
- prefixes: List[str],
+ prefixes: list[str],
work_pool_name: Optional[str] = None,
- ) -> List[WorkQueue]:
+ ) -> list[WorkQueue]:
"""
Query the Prefect API for work queues with names with a specific prefix.
@@ -1225,7 +1214,7 @@ async def match_work_queues(
"""
page_length = 100
current_page = 0
- work_queues = []
+ work_queues: list[WorkQueue] = []
while True:
new_queues = await self.read_work_queues(
@@ -1246,7 +1235,7 @@ async def match_work_queues(
async def delete_work_queue_by_id(
self,
id: UUID,
- ):
+ ) -> None:
"""
Delete a work queue by its ID.
@@ -1343,7 +1332,7 @@ async def update_block_document(
self,
block_document_id: UUID,
block_document: BlockDocumentUpdate,
- ):
+ ) -> None:
"""
Update a block document in the Prefect API.
"""
@@ -1362,7 +1351,7 @@ async def update_block_document(
else:
raise
- async def delete_block_document(self, block_document_id: UUID):
+ async def delete_block_document(self, block_document_id: UUID) -> None:
"""
Delete a block document.
"""
@@ -1405,7 +1394,9 @@ async def read_block_schema_by_checksum(
raise
return BlockSchema.model_validate(response.json())
- async def update_block_type(self, block_type_id: UUID, block_type: BlockTypeUpdate):
+ async def update_block_type(
+ self, block_type_id: UUID, block_type: BlockTypeUpdate
+ ) -> None:
"""
Update a block document in the Prefect API.
"""
@@ -1424,7 +1415,7 @@ async def update_block_type(self, block_type_id: UUID, block_type: BlockTypeUpda
else:
raise
- async def delete_block_type(self, block_type_id: UUID):
+ async def delete_block_type(self, block_type_id: UUID) -> None:
"""
Delete a block type.
"""
@@ -1444,7 +1435,7 @@ async def delete_block_type(self, block_type_id: UUID):
else:
raise
- async def read_block_types(self) -> List[BlockType]:
+ async def read_block_types(self) -> list[BlockType]:
"""
Read all block types
Raises:
@@ -1454,9 +1445,9 @@ async def read_block_types(self) -> List[BlockType]:
List of BlockTypes.
"""
response = await self._client.post("/block_types/filter", json={})
- return pydantic.TypeAdapter(List[BlockType]).validate_python(response.json())
+ return pydantic.TypeAdapter(list[BlockType]).validate_python(response.json())
- async def read_block_schemas(self) -> List[BlockSchema]:
+ async def read_block_schemas(self) -> list[BlockSchema]:
"""
Read all block schemas
Raises:
@@ -1466,7 +1457,7 @@ async def read_block_schemas(self) -> List[BlockSchema]:
A BlockSchema.
"""
response = await self._client.post("/block_schemas/filter", json={})
- return pydantic.TypeAdapter(List[BlockSchema]).validate_python(response.json())
+ return pydantic.TypeAdapter(list[BlockSchema]).validate_python(response.json())
async def get_most_recent_block_schema_for_block_type(
self,
@@ -1502,7 +1493,7 @@ async def read_block_document(
self,
block_document_id: UUID,
include_secrets: bool = True,
- ):
+ ) -> BlockDocument:
"""
Read the block document with the specified ID.
@@ -1580,7 +1571,7 @@ async def read_block_documents(
offset: Optional[int] = None,
limit: Optional[int] = None,
include_secrets: bool = True,
- ):
+ ) -> list[BlockDocument]:
"""
Read block documents
@@ -1607,7 +1598,7 @@ async def read_block_documents(
include_secrets=include_secrets,
),
)
- return pydantic.TypeAdapter(List[BlockDocument]).validate_python(
+ return pydantic.TypeAdapter(list[BlockDocument]).validate_python(
response.json()
)
@@ -1617,7 +1608,7 @@ async def read_block_documents_by_type(
offset: Optional[int] = None,
limit: Optional[int] = None,
include_secrets: bool = True,
- ) -> List[BlockDocument]:
+ ) -> list[BlockDocument]:
"""Retrieve block documents by block type slug.
Args:
@@ -1638,7 +1629,7 @@ async def read_block_documents_by_type(
),
)
- return pydantic.TypeAdapter(List[BlockDocument]).validate_python(
+ return pydantic.TypeAdapter(list[BlockDocument]).validate_python(
response.json()
)
@@ -1647,23 +1638,23 @@ async def create_deployment(
flow_id: UUID,
name: str,
version: Optional[str] = None,
- schedules: Optional[List[DeploymentScheduleCreate]] = None,
+ schedules: Optional[list[DeploymentScheduleCreate]] = None,
concurrency_limit: Optional[int] = None,
concurrency_options: Optional[ConcurrencyOptions] = None,
- parameters: Optional[Dict[str, Any]] = None,
+ parameters: Optional[dict[str, Any]] = None,
description: Optional[str] = None,
work_queue_name: Optional[str] = None,
work_pool_name: Optional[str] = None,
- tags: Optional[List[str]] = None,
+ tags: Optional[list[str]] = None,
storage_document_id: Optional[UUID] = None,
path: Optional[str] = None,
entrypoint: Optional[str] = None,
infrastructure_document_id: Optional[UUID] = None,
- parameter_openapi_schema: Optional[Dict[str, Any]] = None,
+ parameter_openapi_schema: Optional[dict[str, Any]] = None,
paused: Optional[bool] = None,
- pull_steps: Optional[List[dict]] = None,
+ pull_steps: Optional[list[dict[str, Any]]] = None,
enforce_parameter_schema: Optional[bool] = None,
- job_variables: Optional[Dict[str, Any]] = None,
+ job_variables: Optional[dict[str, Any]] = None,
) -> UUID:
"""
Create a deployment.
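
Not part of the diff: a sketch of calling create_deployment with the modernized builtin-generic annotations; the flow_id and values are illustrative.

from uuid import UUID

from prefect.client.orchestration import get_client


async def deploy(flow_id: UUID) -> UUID:
    async with get_client() as client:
        return await client.create_deployment(
            flow_id=flow_id,
            name="nightly",
            tags=["example"],          # list[str]
            parameters={"limit": 10},  # dict[str, Any]
            paused=False,
        )
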
@@ -1743,7 +1734,9 @@ async def create_deployment(
return UUID(deployment_id)
- async def set_deployment_paused_state(self, deployment_id: UUID, paused: bool):
+ async def set_deployment_paused_state(
+ self, deployment_id: UUID, paused: bool
+ ) -> None:
await self._client.patch(
f"/deployments/{deployment_id}", json={"paused": paused}
)
@@ -1752,7 +1745,7 @@ async def update_deployment(
self,
deployment_id: UUID,
deployment: DeploymentUpdate,
- ):
+ ) -> None:
await self._client.patch(
f"/deployments/{deployment_id}",
json=deployment.model_dump(mode="json", exclude_unset=True),
@@ -1775,7 +1768,7 @@ async def _create_deployment_from_schema(self, schema: DeploymentCreate) -> UUID
async def read_deployment(
self,
- deployment_id: UUID,
+ deployment_id: Union[UUID, str],
) -> DeploymentResponse:
"""
Query the Prefect API for a deployment by id.
@@ -1868,7 +1861,7 @@ async def read_deployments(
limit: Optional[int] = None,
sort: Optional[DeploymentSort] = None,
offset: int = 0,
- ) -> List[DeploymentResponse]:
+ ) -> list[DeploymentResponse]:
"""
Query the Prefect API for deployments. Only deployments matching all
the provided criteria will be returned.
@@ -1887,7 +1880,7 @@ async def read_deployments(
a list of Deployment model representations
of the deployments
"""
- body = {
+ body: dict[str, Any] = {
"flows": flow_filter.model_dump(mode="json") if flow_filter else None,
"flow_runs": (
flow_run_filter.model_dump(mode="json", exclude_unset=True)
@@ -1912,14 +1905,14 @@ async def read_deployments(
}
response = await self._client.post("/deployments/filter", json=body)
- return pydantic.TypeAdapter(List[DeploymentResponse]).validate_python(
+ return pydantic.TypeAdapter(list[DeploymentResponse]).validate_python(
response.json()
)
async def delete_deployment(
self,
deployment_id: UUID,
- ):
+ ) -> None:
"""
Delete deployment by id.
@@ -1940,8 +1933,8 @@ async def delete_deployment(
async def create_deployment_schedules(
self,
deployment_id: UUID,
- schedules: List[Tuple[SCHEDULE_TYPES, bool]],
- ) -> List[DeploymentSchedule]:
+ schedules: list[tuple[SCHEDULE_TYPES, bool]],
+ ) -> list[DeploymentSchedule]:
"""
Create deployment schedules.
@@ -1968,14 +1961,14 @@ async def create_deployment_schedules(
response = await self._client.post(
f"/deployments/{deployment_id}/schedules", json=json
)
- return pydantic.TypeAdapter(List[DeploymentSchedule]).validate_python(
+ return pydantic.TypeAdapter(list[DeploymentSchedule]).validate_python(
response.json()
)
async def read_deployment_schedules(
self,
deployment_id: UUID,
- ) -> List[DeploymentSchedule]:
+ ) -> list[DeploymentSchedule]:
"""
Query the Prefect API for a deployment's schedules.
@@ -1992,7 +1985,7 @@ async def read_deployment_schedules(
raise prefect.exceptions.ObjectNotFound(http_exc=e) from e
else:
raise
- return pydantic.TypeAdapter(List[DeploymentSchedule]).validate_python(
+ return pydantic.TypeAdapter(list[DeploymentSchedule]).validate_python(
response.json()
)
@@ -2002,7 +1995,7 @@ async def update_deployment_schedule(
schedule_id: UUID,
active: Optional[bool] = None,
schedule: Optional[SCHEDULE_TYPES] = None,
- ):
+ ) -> None:
"""
Update a deployment schedule by ID.
@@ -2012,7 +2005,7 @@ async def update_deployment_schedule(
active: whether or not the schedule should be active
schedule: the cron, rrule, or interval schedule this deployment schedule should use
"""
- kwargs = {}
+ kwargs: dict[str, Any] = {}
if active is not None:
kwargs["active"] = active
if schedule is not None:
@@ -2076,8 +2069,8 @@ async def read_flow_run(self, flow_run_id: UUID) -> FlowRun:
return FlowRun.model_validate(response.json())
async def resume_flow_run(
- self, flow_run_id: UUID, run_input: Optional[Dict] = None
- ) -> OrchestrationResult:
+ self, flow_run_id: UUID, run_input: Optional[dict[str, Any]] = None
+ ) -> OrchestrationResult[Any]:
"""
Resumes a paused flow run.
@@ -2095,21 +2088,24 @@ async def resume_flow_run(
except httpx.HTTPStatusError:
raise
- return OrchestrationResult.model_validate(response.json())
+ result: OrchestrationResult[Any] = OrchestrationResult.model_validate(
+ response.json()
+ )
+ return result
async def read_flow_runs(
self,
*,
- flow_filter: FlowFilter = None,
- flow_run_filter: FlowRunFilter = None,
- task_run_filter: TaskRunFilter = None,
- deployment_filter: DeploymentFilter = None,
- work_pool_filter: WorkPoolFilter = None,
- work_queue_filter: WorkQueueFilter = None,
- sort: FlowRunSort = None,
+ flow_filter: Optional[FlowFilter] = None,
+ flow_run_filter: Optional[FlowRunFilter] = None,
+ task_run_filter: Optional[TaskRunFilter] = None,
+ deployment_filter: Optional[DeploymentFilter] = None,
+ work_pool_filter: Optional[WorkPoolFilter] = None,
+ work_queue_filter: Optional[WorkQueueFilter] = None,
+ sort: Optional[FlowRunSort] = None,
limit: Optional[int] = None,
offset: int = 0,
- ) -> List[FlowRun]:
+ ) -> list[FlowRun]:
"""
Query the Prefect API for flow runs. Only flow runs matching all criteria will
be returned.
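
Not part of the diff: with the filter kwargs now Optional[...] = None, omitting a filter (or passing None explicitly) is type-safe. A sketch, assuming the typed filter models are importable as named here from prefect.client.schemas.filters:

from prefect.client.orchestration import get_client
from prefect.client.schemas.filters import (
    FlowRunFilter,
    FlowRunFilterState,
    FlowRunFilterStateType,
)
from prefect.client.schemas.objects import StateType


async def failed_runs():
    async with get_client() as client:
        return await client.read_flow_runs(
            flow_run_filter=FlowRunFilter(
                state=FlowRunFilterState(
                    type=FlowRunFilterStateType(any_=[StateType.FAILED])
                )
            ),
            limit=10,
        )
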
@@ -2129,7 +2125,7 @@ async def read_flow_runs(
a list of Flow Run model representations
of the flow runs
"""
- body = {
+ body: dict[str, Any] = {
"flows": flow_filter.model_dump(mode="json") if flow_filter else None,
"flow_runs": (
flow_run_filter.model_dump(mode="json", exclude_unset=True)
@@ -2154,14 +2150,14 @@ async def read_flow_runs(
}
response = await self._client.post("/flow_runs/filter", json=body)
- return pydantic.TypeAdapter(List[FlowRun]).validate_python(response.json())
+ return pydantic.TypeAdapter(list[FlowRun]).validate_python(response.json())
async def set_flow_run_state(
self,
- flow_run_id: UUID,
- state: "prefect.states.State",
+ flow_run_id: Union[UUID, str],
+ state: "prefect.states.State[T]",
force: bool = False,
- ) -> OrchestrationResult:
+ ) -> OrchestrationResult[T]:
"""
Set the state of a flow run.
@@ -2194,11 +2190,14 @@ async def set_flow_run_state(
else:
raise
- return OrchestrationResult.model_validate(response.json())
+ result: OrchestrationResult[T] = OrchestrationResult.model_validate(
+ response.json()
+ )
+ return result
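
Not part of the diff: set_flow_run_state is now generic over the state's result type, and the explicit `result` local gives the validated OrchestrationResult that parameter. A sketch with an illustrative flow run id:

from uuid import UUID

import prefect.states
from prefect.client.orchestration import get_client


async def cancel(flow_run_id: UUID) -> None:
    async with get_client() as client:
        result = await client.set_flow_run_state(
            flow_run_id, state=prefect.states.Cancelled(), force=True
        )
        print(result.status)  # a SetStateStatus value
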
async def read_flow_run_states(
self, flow_run_id: UUID
- ) -> List[prefect.states.State]:
+ ) -> list[prefect.states.State]:
"""
Query for the states of a flow run
@@ -2212,18 +2211,18 @@ async def read_flow_run_states(
response = await self._client.get(
"/flow_run_states/", params=dict(flow_run_id=str(flow_run_id))
)
- return pydantic.TypeAdapter(List[prefect.states.State]).validate_python(
+ return pydantic.TypeAdapter(list[prefect.states.State]).validate_python(
response.json()
)
- async def set_flow_run_name(self, flow_run_id: UUID, name: str):
+ async def set_flow_run_name(self, flow_run_id: UUID, name: str) -> httpx.Response:
flow_run_data = FlowRunUpdate(name=name)
return await self._client.patch(
f"/flow_runs/{flow_run_id}",
json=flow_run_data.model_dump(mode="json", exclude_unset=True),
)
- async def set_task_run_name(self, task_run_id: UUID, name: str):
+ async def set_task_run_name(self, task_run_id: UUID, name: str) -> httpx.Response:
task_run_data = TaskRunUpdate(name=name)
return await self._client.patch(
f"/task_runs/{task_run_id}",
@@ -2240,9 +2239,9 @@ async def create_task_run(
extra_tags: Optional[Iterable[str]] = None,
state: Optional[prefect.states.State[R]] = None,
task_inputs: Optional[
- Dict[
+ dict[
str,
- List[
+ list[
Union[
TaskRunResult,
Parameter,
@@ -2276,6 +2275,12 @@ async def create_task_run(
if state is None:
state = prefect.states.Pending()
+ retry_delay = task.retry_delay_seconds
+ if isinstance(retry_delay, list):
+ retry_delay = [int(rd) for rd in retry_delay]
+ elif isinstance(retry_delay, float):
+ retry_delay = int(retry_delay)
+
task_run_data = TaskRunCreate(
id=id,
name=name,
@@ -2286,7 +2291,7 @@ async def create_task_run(
task_version=task.version,
empirical_policy=TaskRunPolicy(
retries=task.retries,
- retry_delay=task.retry_delay_seconds,
+ retry_delay=retry_delay,
retry_jitter_factor=task.retry_jitter_factor,
),
state=state.to_state_create(),
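
The block above normalizes retry delays, presumably because TaskRunPolicy.retry_delay is typed as integer seconds while tasks may carry a float or a list of floats. The same logic in isolation:

from typing import Optional, Union


def normalize_retry_delay(
    retry_delay: Optional[Union[int, float, list[float]]],
) -> Optional[Union[int, list[int]]]:
    if isinstance(retry_delay, list):
        return [int(rd) for rd in retry_delay]
    if isinstance(retry_delay, float):
        return int(retry_delay)
    return retry_delay  # ints and None pass through unchanged


assert normalize_retry_delay(2.9) == 2
assert normalize_retry_delay([1.0, 2.9]) == [1, 2]
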
@@ -2319,14 +2324,14 @@ async def read_task_run(self, task_run_id: UUID) -> TaskRun:
async def read_task_runs(
self,
*,
- flow_filter: FlowFilter = None,
- flow_run_filter: FlowRunFilter = None,
- task_run_filter: TaskRunFilter = None,
- deployment_filter: DeploymentFilter = None,
- sort: TaskRunSort = None,
+ flow_filter: Optional[FlowFilter] = None,
+ flow_run_filter: Optional[FlowRunFilter] = None,
+ task_run_filter: Optional[TaskRunFilter] = None,
+ deployment_filter: Optional[DeploymentFilter] = None,
+ sort: Optional[TaskRunSort] = None,
limit: Optional[int] = None,
offset: int = 0,
- ) -> List[TaskRun]:
+ ) -> list[TaskRun]:
"""
Query the Prefect API for task runs. Only task runs matching all criteria will
be returned.
@@ -2344,7 +2349,7 @@ async def read_task_runs(
a list of Task Run model representations
of the task runs
"""
- body = {
+ body: dict[str, Any] = {
"flows": flow_filter.model_dump(mode="json") if flow_filter else None,
"flow_runs": (
flow_run_filter.model_dump(mode="json", exclude_unset=True)
@@ -2362,7 +2367,7 @@ async def read_task_runs(
"offset": offset,
}
response = await self._client.post("/task_runs/filter", json=body)
- return pydantic.TypeAdapter(List[TaskRun]).validate_python(response.json())
+ return pydantic.TypeAdapter(list[TaskRun]).validate_python(response.json())
async def delete_task_run(self, task_run_id: UUID) -> None:
"""
@@ -2385,9 +2390,9 @@ async def delete_task_run(self, task_run_id: UUID) -> None:
async def set_task_run_state(
self,
task_run_id: UUID,
- state: prefect.states.State,
+ state: prefect.states.State[T],
force: bool = False,
- ) -> OrchestrationResult:
+ ) -> OrchestrationResult[T]:
"""
Set the state of a task run.
@@ -2406,11 +2411,14 @@ async def set_task_run_state(
f"/task_runs/{task_run_id}/set_state",
json=dict(state=state_create.model_dump(mode="json"), force=force),
)
- return OrchestrationResult.model_validate(response.json())
+ result: OrchestrationResult[T] = OrchestrationResult.model_validate(
+ response.json()
+ )
+ return result
async def read_task_run_states(
self, task_run_id: UUID
- ) -> List[prefect.states.State]:
+ ) -> list[prefect.states.State]:
"""
Query for the states of a task run
@@ -2423,11 +2431,13 @@ async def read_task_run_states(
response = await self._client.get(
"/task_run_states/", params=dict(task_run_id=str(task_run_id))
)
- return pydantic.TypeAdapter(List[prefect.states.State]).validate_python(
+ return pydantic.TypeAdapter(list[prefect.states.State]).validate_python(
response.json()
)
- async def create_logs(self, logs: Iterable[Union[LogCreate, dict]]) -> None:
+ async def create_logs(
+ self, logs: Iterable[Union[LogCreate, dict[str, Any]]]
+ ) -> None:
"""
Create logs for a flow or task run
@@ -2444,8 +2454,8 @@ async def create_flow_run_notification_policy(
self,
block_document_id: UUID,
is_active: bool = True,
- tags: List[str] = None,
- state_names: List[str] = None,
+ tags: Optional[list[str]] = None,
+ state_names: Optional[list[str]] = None,
message_template: Optional[str] = None,
) -> UUID:
"""
@@ -2507,8 +2517,8 @@ async def update_flow_run_notification_policy(
id: UUID,
block_document_id: Optional[UUID] = None,
is_active: Optional[bool] = None,
- tags: Optional[List[str]] = None,
- state_names: Optional[List[str]] = None,
+ tags: Optional[list[str]] = None,
+ state_names: Optional[list[str]] = None,
message_template: Optional[str] = None,
) -> None:
"""
@@ -2525,7 +2535,7 @@ async def update_flow_run_notification_policy(
prefect.exceptions.ObjectNotFound: If request returns 404
httpx.RequestError: If the request fails
"""
- params = {}
+ params: dict[str, Any] = {}
if block_document_id is not None:
params["block_document_id"] = block_document_id
if is_active is not None:
@@ -2555,7 +2565,7 @@ async def read_flow_run_notification_policies(
flow_run_notification_policy_filter: FlowRunNotificationPolicyFilter,
limit: Optional[int] = None,
offset: int = 0,
- ) -> List[FlowRunNotificationPolicy]:
+ ) -> list[FlowRunNotificationPolicy]:
"""
Query the Prefect API for flow run notification policies. Only policies matching all criteria will
be returned.
@@ -2569,7 +2579,7 @@ async def read_flow_run_notification_policies(
a list of FlowRunNotificationPolicy model representations
of the notification policies
"""
- body = {
+ body: dict[str, Any] = {
"flow_run_notification_policy_filter": (
flow_run_notification_policy_filter.model_dump(mode="json")
if flow_run_notification_policy_filter
@@ -2581,7 +2591,7 @@ async def read_flow_run_notification_policies(
response = await self._client.post(
"/flow_run_notification_policies/filter", json=body
)
- return pydantic.TypeAdapter(List[FlowRunNotificationPolicy]).validate_python(
+ return pydantic.TypeAdapter(list[FlowRunNotificationPolicy]).validate_python(
response.json()
)
@@ -2591,11 +2601,11 @@ async def read_logs(
limit: Optional[int] = None,
offset: Optional[int] = None,
sort: LogSort = LogSort.TIMESTAMP_ASC,
- ) -> List[Log]:
+ ) -> list[Log]:
"""
Read flow and task run logs.
"""
- body = {
+ body: dict[str, Any] = {
"logs": log_filter.model_dump(mode="json") if log_filter else None,
"limit": limit,
"offset": offset,
@@ -2603,7 +2613,7 @@ async def read_logs(
}
response = await self._client.post("/logs/filter", json=body)
- return pydantic.TypeAdapter(List[Log]).validate_python(response.json())
+ return pydantic.TypeAdapter(list[Log]).validate_python(response.json())
async def send_worker_heartbeat(
self,
@@ -2622,7 +2632,7 @@ async def send_worker_heartbeat(
return_id: Whether to return the worker ID. Note: will return `None` if the connected server does not support returning worker IDs, even if `return_id` is `True`.
worker_metadata: Metadata about the worker to send to the server.
"""
- params = {
+ params: dict[str, Any] = {
"name": worker_name,
"heartbeat_interval_seconds": heartbeat_interval_seconds,
}
@@ -2654,7 +2664,7 @@ async def read_workers_for_work_pool(
worker_filter: Optional[WorkerFilter] = None,
offset: Optional[int] = None,
limit: Optional[int] = None,
- ) -> List[Worker]:
+ ) -> list[Worker]:
"""
Reads workers for a given work pool.
@@ -2678,7 +2688,7 @@ async def read_workers_for_work_pool(
},
)
- return pydantic.TypeAdapter(List[Worker]).validate_python(response.json())
+ return pydantic.TypeAdapter(list[Worker]).validate_python(response.json())
async def read_work_pool(self, work_pool_name: str) -> WorkPool:
"""
@@ -2705,7 +2715,7 @@ async def read_work_pools(
limit: Optional[int] = None,
offset: int = 0,
work_pool_filter: Optional[WorkPoolFilter] = None,
- ) -> List[WorkPool]:
+ ) -> list[WorkPool]:
"""
Reads work pools.
@@ -2718,7 +2728,7 @@ async def read_work_pools(
A list of work pools.
"""
- body = {
+ body: dict[str, Any] = {
"limit": limit,
"offset": offset,
"work_pools": (
@@ -2726,7 +2736,7 @@ async def read_work_pools(
),
}
response = await self._client.post("/work_pools/filter", json=body)
- return pydantic.TypeAdapter(List[WorkPool]).validate_python(response.json())
+ return pydantic.TypeAdapter(list[WorkPool]).validate_python(response.json())
async def create_work_pool(
self,
@@ -2776,7 +2786,7 @@ async def update_work_pool(
self,
work_pool_name: str,
work_pool: WorkPoolUpdate,
- ):
+ ) -> None:
"""
Updates a work pool.
@@ -2798,7 +2808,7 @@ async def update_work_pool(
async def delete_work_pool(
self,
work_pool_name: str,
- ):
+ ) -> None:
"""
Deletes a work pool.
@@ -2819,7 +2829,7 @@ async def read_work_queues(
work_queue_filter: Optional[WorkQueueFilter] = None,
limit: Optional[int] = None,
offset: Optional[int] = None,
- ) -> List[WorkQueue]:
+ ) -> list[WorkQueue]:
"""
Retrieves queues for a work pool.
@@ -2832,7 +2842,7 @@ async def read_work_queues(
Returns:
List of queues for the specified work pool.
"""
- json = {
+ json: dict[str, Any] = {
"work_queues": (
work_queue_filter.model_dump(mode="json", exclude_unset=True)
if work_queue_filter
@@ -2856,15 +2866,15 @@ async def read_work_queues(
else:
response = await self._client.post("/work_queues/filter", json=json)
- return pydantic.TypeAdapter(List[WorkQueue]).validate_python(response.json())
+ return pydantic.TypeAdapter(list[WorkQueue]).validate_python(response.json())
async def get_scheduled_flow_runs_for_deployments(
self,
- deployment_ids: List[UUID],
+ deployment_ids: list[UUID],
scheduled_before: Optional[datetime.datetime] = None,
limit: Optional[int] = None,
- ) -> List[FlowRunResponse]:
- body: Dict[str, Any] = dict(deployment_ids=[str(id) for id in deployment_ids])
+ ) -> list[FlowRunResponse]:
+ body: dict[str, Any] = dict(deployment_ids=[str(id) for id in deployment_ids])
if scheduled_before:
body["scheduled_before"] = str(scheduled_before)
if limit:
@@ -2875,16 +2885,16 @@ async def get_scheduled_flow_runs_for_deployments(
json=body,
)
- return pydantic.TypeAdapter(List[FlowRunResponse]).validate_python(
+ return pydantic.TypeAdapter(list[FlowRunResponse]).validate_python(
response.json()
)
async def get_scheduled_flow_runs_for_work_pool(
self,
work_pool_name: str,
- work_queue_names: Optional[List[str]] = None,
+ work_queue_names: Optional[list[str]] = None,
scheduled_before: Optional[datetime.datetime] = None,
- ) -> List[WorkerFlowRunResponse]:
+ ) -> list[WorkerFlowRunResponse]:
"""
Retrieves scheduled flow runs for the provided set of work pool queues.
@@ -2900,7 +2910,7 @@ async def get_scheduled_flow_runs_for_work_pool(
A list of worker flow run responses containing information about the
retrieved flow runs.
"""
- body: Dict[str, Any] = {}
+ body: dict[str, Any] = {}
if work_queue_names is not None:
body["work_queue_names"] = list(work_queue_names)
if scheduled_before:
@@ -2910,7 +2920,7 @@ async def get_scheduled_flow_runs_for_work_pool(
f"/work_pools/{work_pool_name}/get_scheduled_flow_runs",
json=body,
)
- return pydantic.TypeAdapter(List[WorkerFlowRunResponse]).validate_python(
+ return pydantic.TypeAdapter(list[WorkerFlowRunResponse]).validate_python(
response.json()
)
@@ -2956,13 +2966,13 @@ async def update_artifact(
async def read_artifacts(
self,
*,
- artifact_filter: ArtifactFilter = None,
- flow_run_filter: FlowRunFilter = None,
- task_run_filter: TaskRunFilter = None,
- sort: ArtifactSort = None,
+ artifact_filter: Optional[ArtifactFilter] = None,
+ flow_run_filter: Optional[FlowRunFilter] = None,
+ task_run_filter: Optional[TaskRunFilter] = None,
+ sort: Optional[ArtifactSort] = None,
limit: Optional[int] = None,
offset: int = 0,
- ) -> List[Artifact]:
+ ) -> list[Artifact]:
"""
Query the Prefect API for artifacts. Only artifacts matching all criteria will
be returned.
@@ -2976,7 +2986,7 @@ async def read_artifacts(
Returns:
a list of Artifact model representations of the artifacts
"""
- body = {
+ body: dict[str, Any] = {
"artifacts": (
artifact_filter.model_dump(mode="json") if artifact_filter else None
),
@@ -2991,18 +3001,18 @@ async def read_artifacts(
"offset": offset,
}
response = await self._client.post("/artifacts/filter", json=body)
- return pydantic.TypeAdapter(List[Artifact]).validate_python(response.json())
+ return pydantic.TypeAdapter(list[Artifact]).validate_python(response.json())
async def read_latest_artifacts(
self,
*,
- artifact_filter: ArtifactCollectionFilter = None,
- flow_run_filter: FlowRunFilter = None,
- task_run_filter: TaskRunFilter = None,
- sort: ArtifactCollectionSort = None,
+ artifact_filter: Optional[ArtifactCollectionFilter] = None,
+ flow_run_filter: Optional[FlowRunFilter] = None,
+ task_run_filter: Optional[TaskRunFilter] = None,
+ sort: Optional[ArtifactCollectionSort] = None,
limit: Optional[int] = None,
offset: int = 0,
- ) -> List[ArtifactCollection]:
+ ) -> list[ArtifactCollection]:
"""
Query the Prefect API for artifacts. Only artifacts matching all criteria will
be returned.
@@ -3016,7 +3026,7 @@ async def read_latest_artifacts(
Returns:
a list of Artifact model representations of the artifacts
"""
- body = {
+ body: dict[str, Any] = {
"artifacts": (
artifact_filter.model_dump(mode="json") if artifact_filter else None
),
@@ -3031,7 +3041,7 @@ async def read_latest_artifacts(
"offset": offset,
}
response = await self._client.post("/artifacts/latest/filter", json=body)
- return pydantic.TypeAdapter(List[ArtifactCollection]).validate_python(
+ return pydantic.TypeAdapter(list[ArtifactCollection]).validate_python(
response.json()
)
@@ -3090,7 +3100,7 @@ async def read_variable_by_name(self, name: str) -> Optional[Variable]:
else:
raise
- async def delete_variable_by_name(self, name: str):
+ async def delete_variable_by_name(self, name: str) -> None:
"""Deletes a variable by name."""
try:
await self._client.delete(f"/variables/name/{name}")
@@ -3100,12 +3110,12 @@ async def delete_variable_by_name(self, name: str):
else:
raise
- async def read_variables(self, limit: Optional[int] = None) -> List[Variable]:
+ async def read_variables(self, limit: Optional[int] = None) -> list[Variable]:
"""Reads all variables."""
response = await self._client.post("/variables/filter", json={"limit": limit})
- return pydantic.TypeAdapter(List[Variable]).validate_python(response.json())
+ return pydantic.TypeAdapter(list[Variable]).validate_python(response.json())
- async def read_worker_metadata(self) -> Dict[str, Any]:
+ async def read_worker_metadata(self) -> dict[str, Any]:
"""Reads worker metadata stored in Prefect collection registry."""
response = await self._client.get("collections/views/aggregate-worker-metadata")
response.raise_for_status()
@@ -3113,7 +3123,7 @@ async def read_worker_metadata(self) -> Dict[str, Any]:
async def increment_concurrency_slots(
self,
- names: List[str],
+ names: list[str],
slots: int,
mode: str,
create_if_missing: Optional[bool] = None,
@@ -3129,7 +3139,7 @@ async def increment_concurrency_slots(
)
async def release_concurrency_slots(
- self, names: List[str], slots: int, occupancy_seconds: float
+ self, names: list[str], slots: int, occupancy_seconds: float
) -> httpx.Response:
"""
Release concurrency slots for the specified limits.
@@ -3201,7 +3211,9 @@ async def read_global_concurrency_limit_by_name(
else:
raise
- async def upsert_global_concurrency_limit_by_name(self, name: str, limit: int):
+ async def upsert_global_concurrency_limit_by_name(
+ self, name: str, limit: int
+ ) -> None:
"""Creates a global concurrency limit with the given name and limit if one does not already exist.
If one already exists with a matching name, update its limit if it differs.
@@ -3227,7 +3239,7 @@ async def upsert_global_concurrency_limit_by_name(self, name: str, limit: int):
async def read_global_concurrency_limits(
self, limit: int = 10, offset: int = 0
- ) -> List[GlobalConcurrencyLimitResponse]:
+ ) -> list[GlobalConcurrencyLimitResponse]:
response = await self._client.post(
"/v2/concurrency_limits/filter",
json={
@@ -3236,12 +3248,12 @@ async def read_global_concurrency_limits(
},
)
return pydantic.TypeAdapter(
- List[GlobalConcurrencyLimitResponse]
+ list[GlobalConcurrencyLimitResponse]
).validate_python(response.json())
async def create_flow_run_input(
self, flow_run_id: UUID, key: str, value: str, sender: Optional[str] = None
- ):
+ ) -> None:
"""
Creates a flow run input.
@@ -3262,8 +3274,8 @@ async def create_flow_run_input(
response.raise_for_status()
async def filter_flow_run_input(
- self, flow_run_id: UUID, key_prefix: str, limit: int, exclude_keys: Set[str]
- ) -> List[FlowRunInput]:
+ self, flow_run_id: UUID, key_prefix: str, limit: int, exclude_keys: set[str]
+ ) -> list[FlowRunInput]:
response = await self._client.post(
f"/flow_runs/{flow_run_id}/input/filter",
json={
@@ -3273,7 +3285,7 @@ async def filter_flow_run_input(
},
)
response.raise_for_status()
- return pydantic.TypeAdapter(List[FlowRunInput]).validate_python(response.json())
+ return pydantic.TypeAdapter(list[FlowRunInput]).validate_python(response.json())
async def read_flow_run_input(self, flow_run_id: UUID, key: str) -> str:
"""
@@ -3287,7 +3299,7 @@ async def read_flow_run_input(self, flow_run_id: UUID, key: str) -> str:
response.raise_for_status()
return response.content.decode()
- async def delete_flow_run_input(self, flow_run_id: UUID, key: str):
+ async def delete_flow_run_input(self, flow_run_id: UUID, key: str) -> None:
"""
Deletes a flow run input.
@@ -3307,7 +3319,9 @@ async def create_automation(self, automation: AutomationCore) -> UUID:
return UUID(response.json()["id"])
- async def update_automation(self, automation_id: UUID, automation: AutomationCore):
+ async def update_automation(
+ self, automation_id: UUID, automation: AutomationCore
+ ) -> None:
"""Updates an automation in Prefect Cloud."""
response = await self._client.put(
f"/automations/{automation_id}",
@@ -3315,21 +3329,23 @@ async def update_automation(self, automation_id: UUID, automation: AutomationCor
)
response.raise_for_status()
- async def read_automations(self) -> List[Automation]:
+ async def read_automations(self) -> list[Automation]:
response = await self._client.post("/automations/filter")
response.raise_for_status()
- return pydantic.TypeAdapter(List[Automation]).validate_python(response.json())
+ return pydantic.TypeAdapter(list[Automation]).validate_python(response.json())
async def find_automation(
self, id_or_name: Union[str, UUID]
) -> Optional[Automation]:
if isinstance(id_or_name, str):
+ name = id_or_name
try:
id = UUID(id_or_name)
except ValueError:
id = None
- elif isinstance(id_or_name, UUID):
+ else:
id = id_or_name
+ name = str(id)
if id:
try:
@@ -3343,24 +3359,26 @@ async def find_automation(
# Look for it by an exact name
for automation in automations:
- if automation.name == id_or_name:
+ if automation.name == name:
return automation
# Look for it by a case-insensitive name
for automation in automations:
- if automation.name.lower() == id_or_name.lower():
+ if automation.name.lower() == name.lower():
return automation
return None
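
Not part of the diff: with the rewritten branch, a non-UUID string falls through to exact and then case-insensitive name matching, while UUIDs (or UUID strings) are tried as ids first. A sketch with an illustrative automation name:

import asyncio

from prefect.client.orchestration import get_client


async def main() -> None:
    async with get_client() as client:
        # not parseable as a UUID, so it is resolved by name
        automation = await client.find_automation("daily-cleanup")
        print(automation)


asyncio.run(main())
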
- async def read_automation(self, automation_id: UUID) -> Optional[Automation]:
+ async def read_automation(
+ self, automation_id: Union[UUID, str]
+ ) -> Optional[Automation]:
response = await self._client.get(f"/automations/{automation_id}")
if response.status_code == 404:
return None
response.raise_for_status()
return Automation.model_validate(response.json())
- async def read_automations_by_name(self, name: str) -> List[Automation]:
+ async def read_automations_by_name(self, name: str) -> list[Automation]:
"""
Query the Prefect API for an automation by name. Only automations matching the provided name will be returned.
@@ -3370,7 +3388,9 @@ async def read_automations_by_name(self, name: str) -> List[Automation]:
Returns:
a list of Automation model representations of the automations
"""
- automation_filter = filters.AutomationFilter(name=dict(any_=[name]))
+ automation_filter = filters.AutomationFilter(
+ name=filters.AutomationFilterName(any_=[name])
+ )
response = await self._client.post(
"/automations/filter",
@@ -3384,21 +3404,21 @@ async def read_automations_by_name(self, name: str) -> List[Automation]:
response.raise_for_status()
- return pydantic.TypeAdapter(List[Automation]).validate_python(response.json())
+ return pydantic.TypeAdapter(list[Automation]).validate_python(response.json())
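
Not part of the diff: the typed filter built above, constructed standalone; AutomationFilterName comes from the same filters module the client already uses.

from prefect.client.schemas import filters

automation_filter = filters.AutomationFilter(
    name=filters.AutomationFilterName(any_=["daily-cleanup"])
)
print(automation_filter.model_dump(mode="json"))
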
- async def pause_automation(self, automation_id: UUID):
+ async def pause_automation(self, automation_id: UUID) -> None:
response = await self._client.patch(
f"/automations/{automation_id}", json={"enabled": False}
)
response.raise_for_status()
- async def resume_automation(self, automation_id: UUID):
+ async def resume_automation(self, automation_id: UUID) -> None:
response = await self._client.patch(
f"/automations/{automation_id}", json={"enabled": True}
)
response.raise_for_status()
- async def delete_automation(self, automation_id: UUID):
+ async def delete_automation(self, automation_id: UUID) -> None:
response = await self._client.delete(f"/automations/{automation_id}")
if response.status_code == 404:
return
@@ -3407,12 +3427,12 @@ async def delete_automation(self, automation_id: UUID):
async def read_resource_related_automations(
self, resource_id: str
- ) -> List[Automation]:
+ ) -> list[Automation]:
response = await self._client.get(f"/automations/related-to/{resource_id}")
response.raise_for_status()
- return pydantic.TypeAdapter(List[Automation]).validate_python(response.json())
+ return pydantic.TypeAdapter(list[Automation]).validate_python(response.json())
- async def delete_resource_owned_automations(self, resource_id: str):
+ async def delete_resource_owned_automations(self, resource_id: str) -> None:
await self._client.delete(f"/automations/owned-by/{resource_id}")
async def api_version(self) -> str:
@@ -3422,7 +3442,7 @@ async def api_version(self) -> str:
def client_version(self) -> str:
return prefect.__version__
- async def raise_for_api_version_mismatch(self):
+ async def raise_for_api_version_mismatch(self) -> None:
# Cloud is always compatible as a server
if self.server_type == ServerType.CLOUD:
return
@@ -3441,7 +3461,7 @@ async def raise_for_api_version_mismatch(self):
f"Major versions must match."
)
- async def __aenter__(self):
+ async def __aenter__(self) -> Self:
"""
Start the client.
@@ -3488,7 +3508,7 @@ async def __aenter__(self):
return self
- async def __aexit__(self, *exc_info):
+ async def __aexit__(self, *exc_info: Any) -> Optional[bool]:
"""
Shutdown the client.
"""
@@ -3499,13 +3519,13 @@ async def __aexit__(self, *exc_info):
self._closed = True
return await self._exit_stack.__aexit__(*exc_info)
- def __enter__(self):
+ def __enter__(self) -> NoReturn:
raise RuntimeError(
"The `PrefectClient` must be entered with an async context. Use 'async "
"with PrefectClient(...)' not 'with PrefectClient(...)'"
)
- def __exit__(self, *_):
+ def __exit__(self, *_: object) -> NoReturn:
assert False, "This should never be called but must be defined for __enter__"
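
Not part of the diff: annotating __aenter__ -> Self keeps the concrete client type through `async with`, including for subclasses. A minimal sketch using typing_extensions:

from typing_extensions import Self


class Client:
    async def __aenter__(self) -> Self:
        return self

    async def __aexit__(self, *exc_info: object) -> None: ...


class CloudClient(Client): ...

# `async with CloudClient() as c:` now infers c as CloudClient, not Client.
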
@@ -3541,7 +3561,7 @@ def __init__(
*,
api_key: Optional[str] = None,
api_version: Optional[str] = None,
- httpx_settings: Optional[Dict[str, Any]] = None,
+ httpx_settings: Optional[dict[str, Any]] = None,
server_type: Optional[ServerType] = None,
) -> None:
httpx_settings = httpx_settings.copy() if httpx_settings else {}
@@ -3617,16 +3637,10 @@ def __init__(
)
# Connect to an in-process application
- elif isinstance(api, ASGIApp):
+ else:
self._ephemeral_app = api
self.server_type = ServerType.EPHEMERAL
- else:
- raise TypeError(
- f"Unexpected type {type(api).__name__!r} for argument `api`. Expected"
- " 'str' or 'ASGIApp/FastAPI'"
- )
-
# See https://www.python-httpx.org/advanced/#timeout-configuration
httpx_settings.setdefault(
"timeout",
@@ -3669,9 +3683,9 @@ def __init__(
if isinstance(server_transport, httpx.HTTPTransport):
pool = getattr(server_transport, "_pool", None)
if isinstance(pool, httpcore.ConnectionPool):
- pool._retries = 3
+ setattr(pool, "_retries", 3)
- self.logger = get_logger("client")
+ self.logger: Logger = get_logger("client")
@property
def api_url(self) -> httpx.URL:
@@ -3709,7 +3723,7 @@ def __enter__(self) -> "SyncPrefectClient":
return self
- def __exit__(self, *exc_info) -> None:
+ def __exit__(self, *exc_info: Any) -> None:
"""
Shutdown the client.
"""
@@ -3747,7 +3761,7 @@ def api_version(self) -> str:
def client_version(self) -> str:
return prefect.__version__
- def raise_for_api_version_mismatch(self):
+ def raise_for_api_version_mismatch(self) -> None:
# Cloud is always compatible as a server
if self.server_type == ServerType.CLOUD:
return
@@ -3766,7 +3780,7 @@ def raise_for_api_version_mismatch(self):
f"Major versions must match."
)
- def create_flow(self, flow: "FlowObject") -> UUID:
+ def create_flow(self, flow: "FlowObject[Any, Any]") -> UUID:
"""
Create a flow in the Prefect API.
@@ -3806,13 +3820,13 @@ def create_flow_from_name(self, flow_name: str) -> UUID:
def create_flow_run(
self,
- flow: "FlowObject",
+ flow: "FlowObject[Any, R]",
name: Optional[str] = None,
- parameters: Optional[Dict[str, Any]] = None,
- context: Optional[Dict[str, Any]] = None,
+ parameters: Optional[dict[str, Any]] = None,
+ context: Optional[dict[str, Any]] = None,
tags: Optional[Iterable[str]] = None,
parent_task_run_id: Optional[UUID] = None,
- state: Optional["prefect.states.State"] = None,
+ state: Optional["prefect.states.State[R]"] = None,
) -> FlowRun:
"""
Create a flow run for a flow.
@@ -3854,7 +3868,7 @@ def create_flow_run(
state=state.to_state_create(),
empirical_policy=FlowRunPolicy(
retries=flow.retries,
- retry_delay=flow.retry_delay_seconds,
+ retry_delay=int(flow.retry_delay_seconds or 0),
),
)
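
The `int(flow.retry_delay_seconds or 0)` coercion above, in isolation; the `or 0` also covers a None retry_delay_seconds:

from typing import Optional, Union


def coerce_retry_delay(seconds: Optional[Union[int, float]]) -> int:
    return int(seconds or 0)


assert coerce_retry_delay(None) == 0
assert coerce_retry_delay(1.5) == 1
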
@@ -3872,12 +3886,12 @@ def update_flow_run(
self,
flow_run_id: UUID,
flow_version: Optional[str] = None,
- parameters: Optional[dict] = None,
+ parameters: Optional[dict[str, Any]] = None,
name: Optional[str] = None,
tags: Optional[Iterable[str]] = None,
empirical_policy: Optional[FlowRunPolicy] = None,
infrastructure_pid: Optional[str] = None,
- job_variables: Optional[dict] = None,
+ job_variables: Optional[dict[str, Any]] = None,
) -> httpx.Response:
"""
Update a flow run's details.
@@ -3898,7 +3912,7 @@ def update_flow_run(
Returns:
an `httpx.Response` object from the PATCH request
"""
- params = {}
+ params: dict[str, Any] = {}
if flow_version is not None:
params["flow_version"] = flow_version
if parameters is not None:
@@ -3954,7 +3968,7 @@ def read_flow_runs(
sort: Optional[FlowRunSort] = None,
limit: Optional[int] = None,
offset: int = 0,
- ) -> List[FlowRun]:
+ ) -> list[FlowRun]:
"""
Query the Prefect API for flow runs. Only flow runs matching all criteria will
be returned.
@@ -3974,7 +3988,7 @@ def read_flow_runs(
a list of Flow Run model representations
of the flow runs
"""
- body = {
+ body: dict[str, Any] = {
"flows": flow_filter.model_dump(mode="json") if flow_filter else None,
"flow_runs": (
flow_run_filter.model_dump(mode="json", exclude_unset=True)
@@ -3999,14 +4013,14 @@ def read_flow_runs(
}
response = self._client.post("/flow_runs/filter", json=body)
- return pydantic.TypeAdapter(List[FlowRun]).validate_python(response.json())
+ return pydantic.TypeAdapter(list[FlowRun]).validate_python(response.json())
def set_flow_run_state(
self,
flow_run_id: UUID,
- state: "prefect.states.State",
+ state: "prefect.states.State[T]",
force: bool = False,
- ) -> OrchestrationResult:
+ ) -> OrchestrationResult[T]:
"""
Set the state of a flow run.
@@ -4036,16 +4050,19 @@ def set_flow_run_state(
else:
raise
- return OrchestrationResult.model_validate(response.json())
+ result: OrchestrationResult[T] = OrchestrationResult.model_validate(
+ response.json()
+ )
+ return result
- def set_flow_run_name(self, flow_run_id: UUID, name: str):
+ def set_flow_run_name(self, flow_run_id: UUID, name: str) -> httpx.Response:
flow_run_data = FlowRunUpdate(name=name)
return self._client.patch(
f"/flow_runs/{flow_run_id}",
json=flow_run_data.model_dump(mode="json", exclude_unset=True),
)
- def set_task_run_name(self, task_run_id: UUID, name: str):
+ def set_task_run_name(self, task_run_id: UUID, name: str) -> httpx.Response:
task_run_data = TaskRunUpdate(name=name)
return self._client.patch(
f"/task_runs/{task_run_id}",
@@ -4062,9 +4079,9 @@ def create_task_run(
extra_tags: Optional[Iterable[str]] = None,
state: Optional[prefect.states.State[R]] = None,
task_inputs: Optional[
- Dict[
+ dict[
str,
- List[
+ list[
Union[
TaskRunResult,
Parameter,
@@ -4098,6 +4115,12 @@ def create_task_run(
if state is None:
state = prefect.states.Pending()
+ retry_delay = task.retry_delay_seconds
+ if isinstance(retry_delay, list):
+ retry_delay = [int(rd) for rd in retry_delay]
+ elif isinstance(retry_delay, float):
+ retry_delay = int(retry_delay)
+
task_run_data = TaskRunCreate(
id=id,
name=name,
@@ -4108,7 +4131,7 @@ def create_task_run(
task_version=task.version,
empirical_policy=TaskRunPolicy(
retries=task.retries,
- retry_delay=task.retry_delay_seconds,
+ retry_delay=retry_delay,
retry_jitter_factor=task.retry_jitter_factor,
),
state=state.to_state_create(),
@@ -4142,14 +4165,14 @@ def read_task_run(self, task_run_id: UUID) -> TaskRun:
def read_task_runs(
self,
*,
- flow_filter: FlowFilter = None,
- flow_run_filter: FlowRunFilter = None,
- task_run_filter: TaskRunFilter = None,
- deployment_filter: DeploymentFilter = None,
- sort: TaskRunSort = None,
+ flow_filter: Optional[FlowFilter] = None,
+ flow_run_filter: Optional[FlowRunFilter] = None,
+ task_run_filter: Optional[TaskRunFilter] = None,
+ deployment_filter: Optional[DeploymentFilter] = None,
+ sort: Optional[TaskRunSort] = None,
limit: Optional[int] = None,
offset: int = 0,
- ) -> List[TaskRun]:
+ ) -> list[TaskRun]:
"""
Query the Prefect API for task runs. Only task runs matching all criteria will
be returned.
@@ -4167,7 +4190,7 @@ def read_task_runs(
a list of Task Run model representations
of the task runs
"""
- body = {
+ body: dict[str, Any] = {
"flows": flow_filter.model_dump(mode="json") if flow_filter else None,
"flow_runs": (
flow_run_filter.model_dump(mode="json", exclude_unset=True)
@@ -4185,14 +4208,14 @@ def read_task_runs(
"offset": offset,
}
response = self._client.post("/task_runs/filter", json=body)
- return pydantic.TypeAdapter(List[TaskRun]).validate_python(response.json())
+ return pydantic.TypeAdapter(list[TaskRun]).validate_python(response.json())
def set_task_run_state(
self,
task_run_id: UUID,
- state: prefect.states.State,
+ state: prefect.states.State[Any],
force: bool = False,
- ) -> OrchestrationResult:
+ ) -> OrchestrationResult[Any]:
"""
Set the state of a task run.
@@ -4211,9 +4234,12 @@ def set_task_run_state(
f"/task_runs/{task_run_id}/set_state",
json=dict(state=state_create.model_dump(mode="json"), force=force),
)
- return OrchestrationResult.model_validate(response.json())
+ result: OrchestrationResult[Any] = OrchestrationResult.model_validate(
+ response.json()
+ )
+ return result
- def read_task_run_states(self, task_run_id: UUID) -> List[prefect.states.State]:
+ def read_task_run_states(self, task_run_id: UUID) -> list[prefect.states.State]:
"""
Query for the states of a task run
@@ -4226,7 +4252,7 @@ def read_task_run_states(self, task_run_id: UUID) -> List[prefect.states.State]:
response = self._client.get(
"/task_run_states/", params=dict(task_run_id=str(task_run_id))
)
- return pydantic.TypeAdapter(List[prefect.states.State]).validate_python(
+ return pydantic.TypeAdapter(list[prefect.states.State]).validate_python(
response.json()
)
@@ -4300,7 +4326,7 @@ def create_artifact(
return Artifact.model_validate(response.json())
def release_concurrency_slots(
- self, names: List[str], slots: int, occupancy_seconds: float
+ self, names: list[str], slots: int, occupancy_seconds: float
) -> httpx.Response:
"""
Release concurrency slots for the specified limits.
@@ -4324,7 +4350,7 @@ def release_concurrency_slots(
)
def decrement_v1_concurrency_slots(
- self, names: List[str], occupancy_seconds: float, task_run_id: UUID
+ self, names: list[str], occupancy_seconds: float, task_run_id: UUID
) -> httpx.Response:
"""
Release the specified concurrency limits.
diff --git a/src/prefect/client/schemas/__init__.py b/src/prefect/client/schemas/__init__.py
index c5335d4906b0..2a35e6a1f3c0 100644
--- a/src/prefect/client/schemas/__init__.py
+++ b/src/prefect/client/schemas/__init__.py
@@ -25,3 +25,27 @@
StateAcceptDetails,
StateRejectDetails,
)
+
+__all__ = (
+ "BlockDocument",
+ "BlockSchema",
+ "BlockType",
+ "BlockTypeUpdate",
+ "DEFAULT_BLOCK_SCHEMA_VERSION",
+ "FlowRun",
+ "FlowRunPolicy",
+ "OrchestrationResult",
+ "SetStateStatus",
+ "State",
+ "StateAbortDetails",
+ "StateAcceptDetails",
+ "StateCreate",
+ "StateDetails",
+ "StateRejectDetails",
+ "StateType",
+ "TaskRun",
+ "TaskRunInput",
+ "TaskRunPolicy",
+ "TaskRunResult",
+ "Workspace",
+)
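
Not part of the diff: with __all__ in place, the re-exports from prefect.client.schemas are explicit, so star-imports and unused-import linting see exactly these names.

from prefect.client.schemas import FlowRun, OrchestrationResult, State

print(FlowRun.__name__, State.__name__, OrchestrationResult.__name__)
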
diff --git a/src/prefect/client/schemas/actions.py b/src/prefect/client/schemas/actions.py
index 9e0dd4bd3052..6f17c7cd8cc8 100644
--- a/src/prefect/client/schemas/actions.py
+++ b/src/prefect/client/schemas/actions.py
@@ -1,5 +1,5 @@
from copy import deepcopy
-from typing import TYPE_CHECKING, Any, Dict, List, Optional, TypeVar, Union
+from typing import TYPE_CHECKING, Any, Optional, TypeVar, Union
from uuid import UUID, uuid4
import jsonschema
@@ -51,7 +51,7 @@ class StateCreate(ActionBaseModel):
name: Optional[str] = Field(default=None)
message: Optional[str] = Field(default=None, examples=["Run started"])
state_details: StateDetails = Field(default_factory=StateDetails)
- data: Union["BaseResult[R]", "ResultRecordMetadata", Any] = Field(
+ data: Union["BaseResult[Any]", "ResultRecordMetadata", Any] = Field(
default=None,
)
@@ -62,18 +62,19 @@ class FlowCreate(ActionBaseModel):
name: str = Field(
default=..., description="The name of the flow", examples=["my-flow"]
)
- tags: List[str] = Field(
+ tags: list[str] = Field(
default_factory=list,
description="A list of flow tags",
examples=[["tag-1", "tag-2"]],
)
- labels: KeyValueLabelsField
+
+ labels: KeyValueLabelsField = Field(default_factory=dict)
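
Not part of the diff: giving labels an explicit default keeps the model constructible without passing labels. A sketch with a stand-in alias (KeyValueLabelsField is assumed to be a dict-like annotated type):

from pydantic import BaseModel, Field

KeyValueLabels = dict[str, str]  # stand-in for the real annotated alias


class Example(BaseModel):
    name: str
    labels: KeyValueLabels = Field(default_factory=dict)


assert Example(name="my-flow").labels == {}
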
class FlowUpdate(ActionBaseModel):
"""Data used by the Prefect REST API to update a flow."""
- tags: List[str] = Field(
+ tags: list[str] = Field(
default_factory=list,
description="A list of flow tags",
examples=[["tag-1", "tag-2"]],
@@ -94,7 +95,7 @@ class DeploymentScheduleCreate(ActionBaseModel):
@field_validator("max_scheduled_runs")
@classmethod
- def validate_max_scheduled_runs(cls, v):
+ def validate_max_scheduled_runs(cls, v: Optional[int]) -> Optional[int]:
return validate_schedule_max_scheduled_runs(
v, PREFECT_DEPLOYMENT_SCHEDULE_MAX_SCHEDULED_RUNS.value()
)
@@ -115,7 +116,7 @@ class DeploymentScheduleUpdate(ActionBaseModel):
@field_validator("max_scheduled_runs")
@classmethod
- def validate_max_scheduled_runs(cls, v):
+ def validate_max_scheduled_runs(cls, v: Optional[int]) -> Optional[int]:
return validate_schedule_max_scheduled_runs(
v, PREFECT_DEPLOYMENT_SCHEDULE_MAX_SCHEDULED_RUNS.value()
)
@@ -126,18 +127,20 @@ class DeploymentCreate(ActionBaseModel):
@model_validator(mode="before")
@classmethod
- def remove_old_fields(cls, values):
+ def remove_old_fields(cls, values: dict[str, Any]) -> dict[str, Any]:
return remove_old_deployment_fields(values)
@field_validator("description", "tags", mode="before")
@classmethod
- def convert_to_strings(cls, values):
+ def convert_to_strings(
+ cls, values: Optional[Union[str, list[str]]]
+ ) -> Union[str, list[str]]:
return convert_to_strings(values)
name: str = Field(..., description="The name of the deployment.")
flow_id: UUID = Field(..., description="The ID of the flow to deploy.")
- paused: Optional[bool] = Field(None)
- schedules: List[DeploymentScheduleCreate] = Field(
+ paused: Optional[bool] = Field(default=None)
+ schedules: list[DeploymentScheduleCreate] = Field(
default_factory=list,
description="A list of schedules for the deployment.",
)
@@ -155,33 +158,33 @@ def convert_to_strings(cls, values):
"Whether or not the deployment should enforce the parameter schema."
),
)
- parameter_openapi_schema: Optional[Dict[str, Any]] = Field(default_factory=dict)
- parameters: Dict[str, Any] = Field(
+ parameter_openapi_schema: Optional[dict[str, Any]] = Field(default_factory=dict)
+ parameters: dict[str, Any] = Field(
default_factory=dict,
description="Parameters for flow runs scheduled by the deployment.",
)
- tags: List[str] = Field(default_factory=list)
- labels: KeyValueLabelsField
- pull_steps: Optional[List[dict]] = Field(None)
+ tags: list[str] = Field(default_factory=list)
+ labels: KeyValueLabelsField = Field(default_factory=dict)
+ pull_steps: Optional[list[dict[str, Any]]] = Field(default=None)
- work_queue_name: Optional[str] = Field(None)
+ work_queue_name: Optional[str] = Field(default=None)
work_pool_name: Optional[str] = Field(
default=None,
description="The name of the deployment's work pool.",
examples=["my-work-pool"],
)
- storage_document_id: Optional[UUID] = Field(None)
- infrastructure_document_id: Optional[UUID] = Field(None)
- description: Optional[str] = Field(None)
- path: Optional[str] = Field(None)
- version: Optional[str] = Field(None)
- entrypoint: Optional[str] = Field(None)
- job_variables: Dict[str, Any] = Field(
+ storage_document_id: Optional[UUID] = Field(default=None)
+ infrastructure_document_id: Optional[UUID] = Field(default=None)
+ description: Optional[str] = Field(default=None)
+ path: Optional[str] = Field(default=None)
+ version: Optional[str] = Field(default=None)
+ entrypoint: Optional[str] = Field(default=None)
+ job_variables: dict[str, Any] = Field(
default_factory=dict,
description="Overrides to apply to flow run infrastructure at runtime.",
)
- def check_valid_configuration(self, base_job_template: dict):
+ def check_valid_configuration(self, base_job_template: dict[str, Any]) -> None:
"""Check that the combination of base_job_template defaults
and job_variables conforms to the specified schema.
"""
@@ -206,19 +209,19 @@ class DeploymentUpdate(ActionBaseModel):
@model_validator(mode="before")
@classmethod
- def remove_old_fields(cls, values):
+ def remove_old_fields(cls, values: dict[str, Any]) -> dict[str, Any]:
return remove_old_deployment_fields(values)
- version: Optional[str] = Field(None)
- description: Optional[str] = Field(None)
- parameters: Optional[Dict[str, Any]] = Field(
+ version: Optional[str] = Field(default=None)
+ description: Optional[str] = Field(default=None)
+ parameters: Optional[dict[str, Any]] = Field(
default=None,
description="Parameters for flow runs scheduled by the deployment.",
)
paused: Optional[bool] = Field(
default=None, description="Whether or not the deployment is paused."
)
- schedules: Optional[List[DeploymentScheduleCreate]] = Field(
+ schedules: Optional[list[DeploymentScheduleCreate]] = Field(
default=None,
description="A list of schedules for the deployment.",
)
@@ -230,21 +233,21 @@ def remove_old_fields(cls, values):
default=None,
description="The concurrency options for the deployment.",
)
- tags: List[str] = Field(default_factory=list)
- work_queue_name: Optional[str] = Field(None)
+ tags: list[str] = Field(default_factory=list)
+ work_queue_name: Optional[str] = Field(default=None)
work_pool_name: Optional[str] = Field(
default=None,
description="The name of the deployment's work pool.",
examples=["my-work-pool"],
)
- path: Optional[str] = Field(None)
- job_variables: Optional[Dict[str, Any]] = Field(
+ path: Optional[str] = Field(default=None)
+ job_variables: Optional[dict[str, Any]] = Field(
default_factory=dict,
description="Overrides to apply to flow run infrastructure at runtime.",
)
- entrypoint: Optional[str] = Field(None)
- storage_document_id: Optional[UUID] = Field(None)
- infrastructure_document_id: Optional[UUID] = Field(None)
+ entrypoint: Optional[str] = Field(default=None)
+ storage_document_id: Optional[UUID] = Field(default=None)
+ infrastructure_document_id: Optional[UUID] = Field(default=None)
enforce_parameter_schema: Optional[bool] = Field(
default=None,
description=(
@@ -252,7 +255,7 @@ def remove_old_fields(cls, values):
),
)
- def check_valid_configuration(self, base_job_template: dict):
+ def check_valid_configuration(self, base_job_template: dict[str, Any]) -> None:
"""Check that the combination of base_job_template defaults
and job_variables conforms to the specified schema.
"""
@@ -276,15 +279,15 @@ def check_valid_configuration(self, base_job_template: dict):
class FlowRunUpdate(ActionBaseModel):
"""Data used by the Prefect REST API to update a flow run."""
- name: Optional[str] = Field(None)
- flow_version: Optional[str] = Field(None)
- parameters: Optional[Dict[str, Any]] = Field(default_factory=dict)
+ name: Optional[str] = Field(default=None)
+ flow_version: Optional[str] = Field(default=None)
+ parameters: Optional[dict[str, Any]] = Field(default_factory=dict)
empirical_policy: objects.FlowRunPolicy = Field(
default_factory=objects.FlowRunPolicy
)
- tags: List[str] = Field(default_factory=list)
- infrastructure_pid: Optional[str] = Field(None)
- job_variables: Optional[Dict[str, Any]] = Field(None)
+ tags: list[str] = Field(default_factory=list)
+ infrastructure_pid: Optional[str] = Field(default=None)
+ job_variables: Optional[dict[str, Any]] = Field(default=None)
class TaskRunCreate(ActionBaseModel):
@@ -300,7 +303,7 @@ class TaskRunCreate(ActionBaseModel):
default=None,
description="The name of the task run",
)
- flow_run_id: Optional[UUID] = Field(None)
+ flow_run_id: Optional[UUID] = Field(default=None)
task_key: str = Field(
default=..., description="A unique identifier for the task being run."
)
@@ -311,17 +314,17 @@ class TaskRunCreate(ActionBaseModel):
" within the same flow run."
),
)
- cache_key: Optional[str] = Field(None)
- cache_expiration: Optional[objects.DateTime] = Field(None)
- task_version: Optional[str] = Field(None)
+ cache_key: Optional[str] = Field(default=None)
+ cache_expiration: Optional[objects.DateTime] = Field(default=None)
+ task_version: Optional[str] = Field(default=None)
empirical_policy: objects.TaskRunPolicy = Field(
default_factory=objects.TaskRunPolicy,
)
- tags: List[str] = Field(default_factory=list)
- labels: KeyValueLabelsField
- task_inputs: Dict[
+ tags: list[str] = Field(default_factory=list)
+ labels: KeyValueLabelsField = Field(default_factory=dict)
+ task_inputs: dict[
str,
- List[
+ list[
Union[
objects.TaskRunResult,
objects.Parameter,
@@ -334,7 +337,7 @@ class TaskRunCreate(ActionBaseModel):
class TaskRunUpdate(ActionBaseModel):
"""Data used by the Prefect REST API to update a task run"""
- name: Optional[str] = Field(None)
+ name: Optional[str] = Field(default=None)
class FlowRunCreate(ActionBaseModel):
@@ -347,22 +350,23 @@ class FlowRunCreate(ActionBaseModel):
name: Optional[str] = Field(default=None, description="The name of the flow run.")
flow_id: UUID = Field(default=..., description="The id of the flow being run.")
- deployment_id: Optional[UUID] = Field(None)
- flow_version: Optional[str] = Field(None)
- parameters: Dict[str, Any] = Field(
+ deployment_id: Optional[UUID] = Field(default=None)
+ flow_version: Optional[str] = Field(default=None)
+ parameters: dict[str, Any] = Field(
default_factory=dict, description="The parameters for the flow run."
)
- context: Dict[str, Any] = Field(
+ context: dict[str, Any] = Field(
default_factory=dict, description="The context for the flow run."
)
- parent_task_run_id: Optional[UUID] = Field(None)
- infrastructure_document_id: Optional[UUID] = Field(None)
+ parent_task_run_id: Optional[UUID] = Field(default=None)
+ infrastructure_document_id: Optional[UUID] = Field(default=None)
empirical_policy: objects.FlowRunPolicy = Field(
default_factory=objects.FlowRunPolicy
)
- tags: List[str] = Field(default_factory=list)
- labels: KeyValueLabelsField
- idempotency_key: Optional[str] = Field(None)
+ tags: list[str] = Field(default_factory=list)
+ idempotency_key: Optional[str] = Field(default=None)
+
+ labels: KeyValueLabelsField = Field(default_factory=dict)
class DeploymentFlowRunCreate(ActionBaseModel):
@@ -374,32 +378,32 @@ class DeploymentFlowRunCreate(ActionBaseModel):
)
name: Optional[str] = Field(default=None, description="The name of the flow run.")
- parameters: Dict[str, Any] = Field(
+ parameters: dict[str, Any] = Field(
default_factory=dict, description="The parameters for the flow run."
)
enforce_parameter_schema: Optional[bool] = Field(
default=None,
description="Whether or not to enforce the parameter schema on this run.",
)
- context: Dict[str, Any] = Field(
+ context: dict[str, Any] = Field(
default_factory=dict, description="The context for the flow run."
)
- infrastructure_document_id: Optional[UUID] = Field(None)
+ infrastructure_document_id: Optional[UUID] = Field(default=None)
empirical_policy: objects.FlowRunPolicy = Field(
default_factory=objects.FlowRunPolicy
)
- tags: List[str] = Field(default_factory=list)
- idempotency_key: Optional[str] = Field(None)
- parent_task_run_id: Optional[UUID] = Field(None)
- work_queue_name: Optional[str] = Field(None)
- job_variables: Optional[dict] = Field(None)
+ tags: list[str] = Field(default_factory=list)
+ idempotency_key: Optional[str] = Field(default=None)
+ parent_task_run_id: Optional[UUID] = Field(default=None)
+ work_queue_name: Optional[str] = Field(default=None)
+ job_variables: Optional[dict[str, Any]] = Field(default=None)
class SavedSearchCreate(ActionBaseModel):
"""Data used by the Prefect REST API to create a saved search."""
name: str = Field(default=..., description="The name of the saved search.")
- filters: List[objects.SavedSearchFilter] = Field(
+ filters: list[objects.SavedSearchFilter] = Field(
default_factory=list, description="The filter set for the saved search."
)
@@ -436,12 +440,12 @@ class ConcurrencyLimitV2Create(ActionBaseModel):
class ConcurrencyLimitV2Update(ActionBaseModel):
"""Data used by the Prefect REST API to update a v2 concurrency limit."""
- active: Optional[bool] = Field(None)
- name: Optional[Name] = Field(None)
- limit: Optional[NonNegativeInteger] = Field(None)
- active_slots: Optional[NonNegativeInteger] = Field(None)
- denied_slots: Optional[NonNegativeInteger] = Field(None)
- slot_decay_per_second: Optional[NonNegativeFloat] = Field(None)
+ active: Optional[bool] = Field(default=None)
+ name: Optional[Name] = Field(default=None)
+ limit: Optional[NonNegativeInteger] = Field(default=None)
+ active_slots: Optional[NonNegativeInteger] = Field(default=None)
+ denied_slots: Optional[NonNegativeInteger] = Field(default=None)
+ slot_decay_per_second: Optional[NonNegativeFloat] = Field(default=None)
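
Not part of the diff: Field(default=None) and the positional Field(None) set the same default in Pydantic v2, since `default` is Field's first parameter; the keyword form just makes the intent unambiguous. Sketch:

from typing import Optional

from pydantic import BaseModel, Field


class Update(BaseModel):
    active: Optional[bool] = Field(default=None)
    limit: Optional[int] = Field(default=None, ge=0)


assert Update().active is None and Update().limit is None
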
class BlockTypeCreate(ActionBaseModel):
@@ -471,24 +475,24 @@ class BlockTypeCreate(ActionBaseModel):
class BlockTypeUpdate(ActionBaseModel):
"""Data used by the Prefect REST API to update a block type."""
- logo_url: Optional[objects.HttpUrl] = Field(None)
- documentation_url: Optional[objects.HttpUrl] = Field(None)
- description: Optional[str] = Field(None)
- code_example: Optional[str] = Field(None)
+ logo_url: Optional[objects.HttpUrl] = Field(default=None)
+ documentation_url: Optional[objects.HttpUrl] = Field(default=None)
+ description: Optional[str] = Field(default=None)
+ code_example: Optional[str] = Field(default=None)
@classmethod
- def updatable_fields(cls) -> set:
+ def updatable_fields(cls) -> set[str]:
return get_class_fields_only(cls)
class BlockSchemaCreate(ActionBaseModel):
"""Data used by the Prefect REST API to create a block schema."""
- fields: Dict[str, Any] = Field(
+ fields: dict[str, Any] = Field(
default_factory=dict, description="The block schema's field schema"
)
- block_type_id: Optional[UUID] = Field(None)
- capabilities: List[str] = Field(
+ block_type_id: Optional[UUID] = Field(default=None)
+ capabilities: list[str] = Field(
default_factory=list,
description="A list of Block capabilities",
)
@@ -504,7 +508,7 @@ class BlockDocumentCreate(ActionBaseModel):
name: Optional[Name] = Field(
default=None, description="The name of the block document"
)
- data: Dict[str, Any] = Field(
+ data: dict[str, Any] = Field(
default_factory=dict, description="The block document's data"
)
block_schema_id: UUID = Field(
@@ -524,7 +528,9 @@ class BlockDocumentCreate(ActionBaseModel):
_validate_name_format = field_validator("name")(validate_block_document_name)
@model_validator(mode="before")
- def validate_name_is_present_if_not_anonymous(cls, values):
+ def validate_name_is_present_if_not_anonymous(
+ cls, values: dict[str, Any]
+ ) -> dict[str, Any]:
return validate_name_present_on_nonanonymous_blocks(values)
@@ -534,7 +540,7 @@ class BlockDocumentUpdate(ActionBaseModel):
block_schema_id: Optional[UUID] = Field(
default=None, description="A block schema ID"
)
- data: Dict[str, Any] = Field(
+ data: dict[str, Any] = Field(
default_factory=dict, description="The block document's data"
)
merge_existing_data: bool = Field(
@@ -565,11 +571,11 @@ class LogCreate(ActionBaseModel):
level: int = Field(default=..., description="The log level.")
message: str = Field(default=..., description="The log message.")
timestamp: DateTime = Field(default=..., description="The log timestamp.")
- flow_run_id: Optional[UUID] = Field(None)
- task_run_id: Optional[UUID] = Field(None)
- worker_id: Optional[UUID] = Field(None)
+ flow_run_id: Optional[UUID] = Field(default=None)
+ task_run_id: Optional[UUID] = Field(default=None)
+ worker_id: Optional[UUID] = Field(default=None)
- def model_dump(self, *args, **kwargs):
+ def model_dump(self, *args: Any, **kwargs: Any) -> dict[str, Any]:
"""
The worker_id field is only included in logs sent to Prefect Cloud.
If it's unset, we should not include it in the log payload.
@@ -586,11 +592,11 @@ class WorkPoolCreate(ActionBaseModel):
name: NonEmptyishName = Field(
description="The name of the work pool.",
)
- description: Optional[str] = Field(None)
+ description: Optional[str] = Field(default=None)
type: str = Field(
description="The work pool type.", default="prefect-agent"
) # TODO: change default
- base_job_template: Dict[str, Any] = Field(
+ base_job_template: dict[str, Any] = Field(
default_factory=dict,
description="The base job template for the work pool.",
)
@@ -606,17 +612,17 @@ class WorkPoolCreate(ActionBaseModel):
class WorkPoolUpdate(ActionBaseModel):
"""Data used by the Prefect REST API to update a work pool."""
- description: Optional[str] = Field(None)
- is_paused: Optional[bool] = Field(None)
- base_job_template: Optional[Dict[str, Any]] = Field(None)
- concurrency_limit: Optional[int] = Field(None)
+ description: Optional[str] = Field(default=None)
+ is_paused: Optional[bool] = Field(default=None)
+ base_job_template: Optional[dict[str, Any]] = Field(default=None)
+ concurrency_limit: Optional[int] = Field(default=None)
class WorkQueueCreate(ActionBaseModel):
"""Data used by the Prefect REST API to create a work queue."""
name: str = Field(default=..., description="The name of the work queue.")
- description: Optional[str] = Field(None)
+ description: Optional[str] = Field(default=None)
is_paused: bool = Field(
default=False,
description="Whether the work queue is paused.",
@@ -644,16 +650,16 @@ class WorkQueueCreate(ActionBaseModel):
class WorkQueueUpdate(ActionBaseModel):
"""Data used by the Prefect REST API to update a work queue."""
- name: Optional[str] = Field(None)
- description: Optional[str] = Field(None)
+ name: Optional[str] = Field(default=None)
+ description: Optional[str] = Field(default=None)
is_paused: bool = Field(
default=False, description="Whether or not the work queue is paused."
)
- concurrency_limit: Optional[NonNegativeInteger] = Field(None)
+ concurrency_limit: Optional[NonNegativeInteger] = Field(default=None)
priority: Optional[PositiveInteger] = Field(
None, description="The queue's priority."
)
- last_polled: Optional[DateTime] = Field(None)
+ last_polled: Optional[DateTime] = Field(default=None)
# DEPRECATED
@@ -670,10 +676,10 @@ class FlowRunNotificationPolicyCreate(ActionBaseModel):
is_active: bool = Field(
default=True, description="Whether the policy is currently active"
)
- state_names: List[str] = Field(
+ state_names: list[str] = Field(
default=..., description="The flow run states that trigger notifications"
)
- tags: List[str] = Field(
+ tags: list[str] = Field(
default=...,
description="The flow run tags that trigger notifications (set [] to disable)",
)
@@ -695,7 +701,7 @@ class FlowRunNotificationPolicyCreate(ActionBaseModel):
@field_validator("message_template")
@classmethod
- def validate_message_template_variables(cls, v):
+ def validate_message_template_variables(cls, v: Optional[str]) -> Optional[str]:
return validate_message_template_variables(v)
@@ -703,8 +709,8 @@ class FlowRunNotificationPolicyUpdate(ActionBaseModel):
"""Data used by the Prefect REST API to update a flow run notification policy."""
is_active: Optional[bool] = Field(default=None)
- state_names: Optional[List[str]] = Field(default=None)
- tags: Optional[List[str]] = Field(default=None)
+ state_names: Optional[list[str]] = Field(default=None)
+ tags: Optional[list[str]] = Field(default=None)
block_document_id: Optional[UUID] = Field(default=None)
message_template: Optional[str] = Field(default=None)
@@ -715,8 +721,8 @@ class ArtifactCreate(ActionBaseModel):
key: Optional[str] = Field(default=None)
type: Optional[str] = Field(default=None)
description: Optional[str] = Field(default=None)
- data: Optional[Union[Dict[str, Any], Any]] = Field(default=None)
- metadata_: Optional[Dict[str, str]] = Field(default=None)
+ data: Optional[Union[dict[str, Any], Any]] = Field(default=None)
+ metadata_: Optional[dict[str, str]] = Field(default=None)
flow_run_id: Optional[UUID] = Field(default=None)
task_run_id: Optional[UUID] = Field(default=None)
@@ -726,9 +732,9 @@ class ArtifactCreate(ActionBaseModel):
class ArtifactUpdate(ActionBaseModel):
"""Data used by the Prefect REST API to update an artifact."""
- data: Optional[Union[Dict[str, Any], Any]] = Field(None)
- description: Optional[str] = Field(None)
- metadata_: Optional[Dict[str, str]] = Field(None)
+ data: Optional[Union[dict[str, Any], Any]] = Field(default=None)
+ description: Optional[str] = Field(default=None)
+ metadata_: Optional[dict[str, str]] = Field(default=None)
class VariableCreate(ActionBaseModel):
@@ -745,7 +751,7 @@ class VariableCreate(ActionBaseModel):
description="The value of the variable",
examples=["my-value"],
)
- tags: Optional[List[str]] = Field(default=None)
+ tags: Optional[list[str]] = Field(default=None)
# validators
_validate_name_format = field_validator("name")(validate_variable_name)
@@ -765,7 +771,7 @@ class VariableUpdate(ActionBaseModel):
description="The value of the variable",
examples=["my-value"],
)
- tags: Optional[List[str]] = Field(default=None)
+ tags: Optional[list[str]] = Field(default=None)
# validators
_validate_name_format = field_validator("name")(validate_variable_name)
@@ -801,8 +807,8 @@ class GlobalConcurrencyLimitCreate(ActionBaseModel):
class GlobalConcurrencyLimitUpdate(ActionBaseModel):
"""Data used by the Prefect REST API to update a global concurrency limit."""
- name: Optional[Name] = Field(None)
- limit: Optional[NonNegativeInteger] = Field(None)
- active: Optional[bool] = Field(None)
- active_slots: Optional[NonNegativeInteger] = Field(None)
- slot_decay_per_second: Optional[NonNegativeFloat] = Field(None)
+ name: Optional[Name] = Field(default=None)
+ limit: Optional[NonNegativeInteger] = Field(default=None)
+ active: Optional[bool] = Field(default=None)
+ active_slots: Optional[NonNegativeInteger] = Field(default=None)
+ slot_decay_per_second: Optional[NonNegativeFloat] = Field(default=None)
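The actions.py changes above are a mechanical sweep: bare `Field(None)` becomes the keyword form `Field(default=None)`, and `typing.List`/`typing.Dict` become the builtin `list`/`dict` generics. The positional default is accepted at runtime by Pydantic v2, but the keyword form is what static checkers and the pydantic plugins reliably recognize as supplying a default. A minimal sketch with a hypothetical model, not taken from this codebase:

    from typing import Optional
    from uuid import UUID

    from pydantic import BaseModel, Field

    class LimitUpdateSketch(BaseModel):
        # keyword `default=` is unambiguous to type checkers, unlike `Field(None)`
        name: Optional[str] = Field(default=None)
        limit: Optional[int] = Field(default=None)
        # builtin generics replace typing.Dict / typing.List
        metadata_: Optional[dict[str, str]] = Field(default=None)
        active_slots: list[UUID] = Field(default_factory=list)

    print(LimitUpdateSketch().model_dump())
    # {'name': None, 'limit': None, 'metadata_': None, 'active_slots': []}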
diff --git a/src/prefect/client/schemas/objects.py b/src/prefect/client/schemas/objects.py
index ccd802b3dda4..087cd5b78ee3 100644
--- a/src/prefect/client/schemas/objects.py
+++ b/src/prefect/client/schemas/objects.py
@@ -1,15 +1,16 @@
import datetime
import warnings
+from collections.abc import Callable, Mapping
from functools import partial
from typing import (
TYPE_CHECKING,
Annotated,
Any,
- Dict,
+ ClassVar,
Generic,
- List,
Optional,
Union,
+ cast,
overload,
)
from uuid import UUID, uuid4
@@ -23,13 +24,12 @@
HttpUrl,
IPvAnyNetwork,
SerializationInfo,
+ SerializerFunctionWrapHandler,
Tag,
field_validator,
model_serializer,
model_validator,
)
-from pydantic.functional_validators import ModelWrapValidatorHandler
-from pydantic_extra_types.pendulum_dt import DateTime
from typing_extensions import Literal, Self, TypeVar
from prefect._internal.compatibility import deprecated
@@ -64,8 +64,13 @@
from prefect.utilities.pydantic import handle_secret_render
if TYPE_CHECKING:
+ from prefect.client.schemas.actions import StateCreate
from prefect.results import BaseResult, ResultRecordMetadata
+ DateTime = pendulum.DateTime
+else:
+ from pydantic_extra_types.pendulum_dt import DateTime
+
R = TypeVar("R", default=Any)
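The `TYPE_CHECKING` split for `DateTime` is worth calling out: checkers analyze the branch where `DateTime` is plain `pendulum.DateTime`, while at runtime the validating `DateTime` from `pydantic_extra_types` is imported, which knows how to parse strings into pendulum datetimes. A minimal sketch of the same aliasing trick:

    from typing import TYPE_CHECKING

    import pendulum
    from pydantic import BaseModel

    if TYPE_CHECKING:
        # checkers see the plain pendulum type...
        DateTime = pendulum.DateTime
    else:
        # ...while at runtime pydantic gets a type it can validate
        from pydantic_extra_types.pendulum_dt import DateTime

    class Event(BaseModel):
        at: DateTime

    print(Event(at="2024-12-08T19:34:04Z").at.year)  # 2024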
@@ -180,7 +185,7 @@ class StateDetails(PrefectBaseModel):
pause_timeout: Optional[DateTime] = None
pause_reschedule: bool = False
pause_key: Optional[str] = None
- run_input_keyset: Optional[Dict[str, str]] = None
+ run_input_keyset: Optional[dict[str, str]] = None
refresh_cache: Optional[bool] = None
retriable: Optional[bool] = None
transition_id: Optional[UUID] = None
@@ -215,11 +220,21 @@ class State(ObjectBaseModel, Generic[R]):
] = Field(default=None)
@overload
- def result(self: "State[R]", raise_on_failure: bool = True) -> R:
+ def result(
+ self: "State[R]",
+ raise_on_failure: Literal[True] = ...,
+ fetch: bool = ...,
+ retry_result_failure: bool = ...,
+ ) -> R:
...
@overload
- def result(self: "State[R]", raise_on_failure: bool = False) -> Union[R, Exception]:
+ def result(
+ self: "State[R]",
+ raise_on_failure: Literal[False] = False,
+ fetch: bool = ...,
+ retry_result_failure: bool = ...,
+ ) -> Union[R, Exception]:
...
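Widening the overloads to `Literal[True]`/`Literal[False]` (plus the keyword-only knobs) lets the checker pick the return type from the call site: `raise_on_failure=True` yields `R`, while `False` yields `Union[R, Exception]`. The shape of the pattern, on a hypothetical container:

    from typing import Literal, Union, overload

    class Box:
        def __init__(self, value: Union[int, Exception]) -> None:
            self._value = value

        @overload
        def get(self, raise_on_failure: Literal[True] = ...) -> int: ...
        @overload
        def get(self, raise_on_failure: Literal[False] = False) -> Union[int, Exception]: ...

        def get(self, raise_on_failure: bool = True) -> Union[int, Exception]:
            if raise_on_failure and isinstance(self._value, Exception):
                raise self._value
            return self._value

    value = Box(7).get()       # checker infers int
    maybe = Box(7).get(False)  # checker infers int | Exception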
@deprecated.deprecated_parameter(
@@ -311,7 +326,7 @@ def result(
retry_result_failure=retry_result_failure,
)
- def to_state_create(self):
+ def to_state_create(self) -> "StateCreate":
"""
Convert this state to a `StateCreate` type which can be used to set the state of
a run in the API.
@@ -327,7 +342,7 @@ def to_state_create(self):
)
if isinstance(self.data, BaseResult):
- data = self.data
+ data = cast(BaseResult[R], self.data)
elif isinstance(self.data, ResultRecord) and should_persist_result():
data = self.data.metadata
else:
@@ -348,14 +363,14 @@ def default_name_from_type(self) -> Self:
# validation check and an error will be raised after this function is called
name = self.name
if name is None and self.type:
- self.name = " ".join([v.capitalize() for v in self.type.value.split("_")])
+ self.name = " ".join([v.capitalize() for v in self.type.split("_")])
return self
@model_validator(mode="after")
def default_scheduled_start_time(self) -> Self:
if self.type == StateType.SCHEDULED:
if not self.state_details.scheduled_time:
- self.state_details.scheduled_time = DateTime.now("utc")
+ self.state_details.scheduled_time = pendulum.DateTime.now("utc")
return self
@model_validator(mode="after")
@@ -395,17 +410,19 @@ def is_paused(self) -> bool:
return self.type == StateType.PAUSED
def model_copy(
- self, *, update: Optional[Dict[str, Any]] = None, deep: bool = False
- ):
+ self, *, update: Optional[Mapping[str, Any]] = None, deep: bool = False
+ ) -> Self:
"""
Copying API models should return an object that could be inserted into the
database again. The 'timestamp' is reset using the default factory.
"""
- update = update or {}
- update.setdefault("timestamp", self.model_fields["timestamp"].get_default())
+ update = {
+ "timestamp": self.model_fields["timestamp"].get_default(),
+ **(update or {}),
+ }
return super().model_copy(update=update, deep=deep)
- def fresh_copy(self, **kwargs) -> Self:
+ def fresh_copy(self, **kwargs: Any) -> Self:
"""
Return a fresh copy of the state with a new ID.
"""
@@ -443,12 +460,14 @@ def __str__(self) -> str:
`MyCompletedState("my message", type=COMPLETED)`
"""
- display = []
+ display: list[str] = []
if self.message:
display.append(repr(self.message))
- if self.type.value.lower() != self.name.lower():
+ if TYPE_CHECKING:
+ assert self.name is not None
+ if self.type.lower() != self.name.lower():
display.append(f"type={self.type.value}")
return f"{self.name}({', '.join(display)})"
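The `if TYPE_CHECKING: assert self.name is not None` above is a zero-cost narrowing idiom: the assert never executes, but the checker treats it as proof that a validator has already filled in `name`. The same idiom in isolation:

    from typing import TYPE_CHECKING, Optional

    def describe(name: Optional[str]) -> str:
        # an invariant established elsewhere guarantees `name` here; the
        # assert narrows Optional[str] -> str with no runtime cost
        if TYPE_CHECKING:
            assert name is not None
        return name.lower()

    print(describe("Completed"))  # completed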
@@ -487,7 +506,7 @@ class FlowRunPolicy(PrefectBaseModel):
retry_delay: Optional[int] = Field(
default=None, description="The delay time between retries, in seconds."
)
- pause_keys: Optional[set] = Field(
+ pause_keys: Optional[set[str]] = Field(
default_factory=set, description="Tracks pauses this run has observed."
)
resuming: Optional[bool] = Field(
@@ -499,7 +518,7 @@ class FlowRunPolicy(PrefectBaseModel):
@model_validator(mode="before")
@classmethod
- def populate_deprecated_fields(cls, values: Any):
+ def populate_deprecated_fields(cls, values: Any) -> Any:
if isinstance(values, dict):
return set_run_policy_deprecated_fields(values)
return values
@@ -536,7 +555,7 @@ class FlowRun(ObjectBaseModel):
description="The version of the flow executed in this flow run.",
examples=["1.0"],
)
- parameters: Dict[str, Any] = Field(
+ parameters: dict[str, Any] = Field(
default_factory=dict, description="Parameters for the flow run."
)
idempotency_key: Optional[str] = Field(
@@ -546,7 +565,7 @@ class FlowRun(ObjectBaseModel):
" run is not created multiple times."
),
)
- context: Dict[str, Any] = Field(
+ context: dict[str, Any] = Field(
default_factory=dict,
description="Additional context for the flow run.",
examples=[{"my_var": "my_val"}],
@@ -554,7 +573,7 @@ class FlowRun(ObjectBaseModel):
empirical_policy: FlowRunPolicy = Field(
default_factory=FlowRunPolicy,
)
- tags: List[str] = Field(
+ tags: list[str] = Field(
default_factory=list,
description="A list of tags on the flow run",
examples=[["tag-1", "tag-2"]],
@@ -632,7 +651,7 @@ class FlowRun(ObjectBaseModel):
description="The state of the flow run.",
examples=["State(type=StateType.COMPLETED)"],
)
- job_variables: Optional[dict] = Field(
+ job_variables: Optional[dict[str, Any]] = Field(
default=None,
description="Job variables for the flow run.",
)
@@ -663,7 +682,7 @@ def __eq__(self, other: Any) -> bool:
@field_validator("name", mode="before")
@classmethod
- def set_default_name(cls, name):
+ def set_default_name(cls, name: Optional[str]) -> str:
return get_or_create_run_name(name)
@@ -687,7 +706,7 @@ class TaskRunPolicy(PrefectBaseModel):
deprecated=True,
)
retries: Optional[int] = Field(default=None, description="The number of retries.")
- retry_delay: Union[None, int, List[int]] = Field(
+ retry_delay: Union[None, int, list[int]] = Field(
default=None,
description="A delay time or list of delay times between retries, in seconds.",
)
@@ -710,18 +729,20 @@ def populate_deprecated_fields(self):
self.retries = self.max_retries
if not self.retry_delay and self.retry_delay_seconds != 0:
- self.retry_delay = self.retry_delay_seconds
+ self.retry_delay = int(self.retry_delay_seconds)
return self
@field_validator("retry_delay")
@classmethod
- def validate_configured_retry_delays(cls, v):
+ def validate_configured_retry_delays(
+ cls, v: Optional[list[float]]
+ ) -> Optional[list[float]]:
return list_length_50_or_less(v)
@field_validator("retry_jitter_factor")
@classmethod
- def validate_jitter_factor(cls, v):
+ def validate_jitter_factor(cls, v: Optional[float]) -> Optional[float]:
return validate_not_negative(v)
@@ -731,9 +752,11 @@ class TaskRunInput(PrefectBaseModel):
could include, constants, parameters, or other task runs.
"""
- model_config = ConfigDict(frozen=True)
+ model_config: ClassVar[ConfigDict] = ConfigDict(frozen=True)
- input_type: str
+ if not TYPE_CHECKING:
+ # subclasses provide the concrete type for this field
+ input_type: str
class TaskRunResult(TaskRunInput):
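Declaring `input_type` only when `not TYPE_CHECKING` keeps the field visible to Pydantic (the annotation still executes at class-creation time) while hiding it from checkers, so subclasses can narrow it to a `Literal` without an incompatible-override error. A sketch of that mechanism with hypothetical names:

    from typing import TYPE_CHECKING, Literal

    from pydantic import BaseModel

    class RunInputSketch(BaseModel):
        if not TYPE_CHECKING:
            # runtime-only declaration; subclasses may narrow the type
            input_type: str

    class TaskRunResultSketch(RunInputSketch):
        input_type: Literal["task_run"] = "task_run"

    print(TaskRunResultSketch().input_type)  # task_run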
@@ -791,7 +814,7 @@ class TaskRun(ObjectBaseModel):
empirical_policy: TaskRunPolicy = Field(
default_factory=TaskRunPolicy,
)
- tags: List[str] = Field(
+ tags: list[str] = Field(
default_factory=list,
description="A list of tags for the task run.",
examples=[["tag-1", "tag-2"]],
@@ -800,7 +823,7 @@ class TaskRun(ObjectBaseModel):
state_id: Optional[UUID] = Field(
default=None, description="The id of the current task run state."
)
- task_inputs: Dict[str, List[Union[TaskRunResult, Parameter, Constant]]] = Field(
+ task_inputs: dict[str, list[Union[TaskRunResult, Parameter, Constant]]] = Field(
default_factory=dict,
description=(
"Tracks the source of inputs to a task run. Used for internal bookkeeping. "
@@ -865,7 +888,7 @@ class TaskRun(ObjectBaseModel):
@field_validator("name", mode="before")
@classmethod
- def set_default_name(cls, name):
+ def set_default_name(cls, name: Optional[str]) -> Name:
return get_or_create_run_name(name)
@@ -883,7 +906,7 @@ class Workspace(PrefectBaseModel):
workspace_name: str = Field(..., description="The workspace name.")
workspace_description: str = Field(..., description="Description of the workspace.")
workspace_handle: str = Field(..., description="The workspace's unique handle.")
- model_config = ConfigDict(extra="ignore")
+ model_config: ClassVar[ConfigDict] = ConfigDict(extra="ignore")
@property
def handle(self) -> str:
@@ -912,7 +935,7 @@ def ui_url(self) -> str:
f"/workspace/{self.workspace_id}"
)
- def __hash__(self):
+ def __hash__(self) -> int:
return hash(self.handle)
@@ -935,7 +958,7 @@ class IPAllowlist(PrefectBaseModel):
Expected payload for an IP allowlist from the Prefect Cloud API.
"""
- entries: List[IPAllowlistEntry]
+ entries: list[IPAllowlistEntry]
class IPAllowlistMyAccessResponse(PrefectBaseModel):
@@ -973,14 +996,14 @@ class BlockSchema(ObjectBaseModel):
"""A representation of a block schema."""
checksum: str = Field(default=..., description="The block schema's unique checksum")
- fields: Dict[str, Any] = Field(
+ fields: dict[str, Any] = Field(
default_factory=dict, description="The block schema's field schema"
)
block_type_id: Optional[UUID] = Field(default=..., description="A block type ID")
block_type: Optional[BlockType] = Field(
default=None, description="The associated block type"
)
- capabilities: List[str] = Field(
+ capabilities: list[str] = Field(
default_factory=list,
description="A list of Block capabilities",
)
@@ -999,7 +1022,7 @@ class BlockDocument(ObjectBaseModel):
"The block document's name. Not required for anonymous block documents."
),
)
- data: Dict[str, Any] = Field(
+ data: dict[str, Any] = Field(
default_factory=dict, description="The block document's data"
)
block_schema_id: UUID = Field(default=..., description="A block schema ID")
@@ -1011,7 +1034,7 @@ class BlockDocument(ObjectBaseModel):
block_type: Optional[BlockType] = Field(
default=None, description="The associated block type"
)
- block_document_references: Dict[str, Dict[str, Any]] = Field(
+ block_document_references: dict[str, dict[str, Any]] = Field(
default_factory=dict, description="Record of the block document's references"
)
is_anonymous: bool = Field(
@@ -1026,13 +1049,15 @@ class BlockDocument(ObjectBaseModel):
@model_validator(mode="before")
@classmethod
- def validate_name_is_present_if_not_anonymous(cls, values):
+ def validate_name_is_present_if_not_anonymous(
+ cls, values: dict[str, Any]
+ ) -> dict[str, Any]:
return validate_name_present_on_nonanonymous_blocks(values)
@model_serializer(mode="wrap")
def serialize_data(
- self, handler: ModelWrapValidatorHandler, info: SerializationInfo
- ):
+ self, handler: SerializerFunctionWrapHandler, info: SerializationInfo
+ ) -> Any:
self.data = visit_collection(
self.data,
visit_fn=partial(handle_secret_render, context=info.context or {}),
@@ -1047,7 +1072,7 @@ class Flow(ObjectBaseModel):
name: Name = Field(
default=..., description="The name of the flow", examples=["my-flow"]
)
- tags: List[str] = Field(
+ tags: list[str] = Field(
default_factory=list,
description="A list of flow tags",
examples=[["tag-1", "tag-2"]],
@@ -1091,22 +1116,22 @@ class Deployment(ObjectBaseModel):
concurrency_limit: Optional[int] = Field(
default=None, description="The concurrency limit for the deployment."
)
- schedules: List[DeploymentSchedule] = Field(
+ schedules: list[DeploymentSchedule] = Field(
default_factory=list, description="A list of schedules for the deployment."
)
- job_variables: Dict[str, Any] = Field(
+ job_variables: dict[str, Any] = Field(
default_factory=dict,
description="Overrides to apply to flow run infrastructure at runtime.",
)
- parameters: Dict[str, Any] = Field(
+ parameters: dict[str, Any] = Field(
default_factory=dict,
description="Parameters for flow runs scheduled by the deployment.",
)
- pull_steps: Optional[List[dict]] = Field(
+ pull_steps: Optional[list[dict[str, Any]]] = Field(
default=None,
description="Pull steps for cloning and running this deployment.",
)
- tags: List[str] = Field(
+ tags: list[str] = Field(
default_factory=list,
description="A list of tags for the deployment",
examples=[["tag-1", "tag-2"]],
@@ -1123,7 +1148,7 @@ class Deployment(ObjectBaseModel):
default=None,
description="The last time the deployment was polled for status updates.",
)
- parameter_openapi_schema: Optional[Dict[str, Any]] = Field(
+ parameter_openapi_schema: Optional[dict[str, Any]] = Field(
default=None,
description="The parameter schema of the flow, including defaults.",
)
@@ -1177,7 +1202,7 @@ class ConcurrencyLimit(ObjectBaseModel):
default=..., description="A tag the concurrency limit is applied to."
)
concurrency_limit: int = Field(default=..., description="The concurrency limit.")
- active_slots: List[UUID] = Field(
+ active_slots: list[UUID] = Field(
default_factory=list,
description="A list of active run ids using a concurrency slot",
)
@@ -1224,7 +1249,7 @@ class BlockDocumentReference(ObjectBaseModel):
@model_validator(mode="before")
@classmethod
- def validate_parent_and_ref_are_different(cls, values):
+ def validate_parent_and_ref_are_different(cls, values: Any) -> Any:
if isinstance(values, dict):
return validate_parent_and_ref_diff(values)
return values
@@ -1234,7 +1259,7 @@ class Configuration(ObjectBaseModel):
"""An ORM representation of account info."""
key: str = Field(default=..., description="Account info key")
- value: Dict[str, Any] = Field(default=..., description="Account info")
+ value: dict[str, Any] = Field(default=..., description="Account info")
class SavedSearchFilter(PrefectBaseModel):
@@ -1258,7 +1283,7 @@ class SavedSearch(ObjectBaseModel):
"""An ORM representation of saved search data. Represents a set of filter criteria."""
name: str = Field(default=..., description="The name of the saved search.")
- filters: List[SavedSearchFilter] = Field(
+ filters: list[SavedSearchFilter] = Field(
default_factory=list, description="The filter set for the saved search."
)
@@ -1281,11 +1306,11 @@ class Log(ObjectBaseModel):
class QueueFilter(PrefectBaseModel):
"""Filter criteria definition for a work queue."""
- tags: Optional[List[str]] = Field(
+ tags: Optional[list[str]] = Field(
default=None,
description="Only include flow runs with these tags in the work queue.",
)
- deployment_ids: Optional[List[UUID]] = Field(
+ deployment_ids: Optional[list[UUID]] = Field(
default=None,
description="Only include flow runs from these deployments in the work queue.",
)
@@ -1345,7 +1370,7 @@ class WorkQueueHealthPolicy(PrefectBaseModel):
)
def evaluate_health_status(
- self, late_runs_count: int, last_polled: Optional[DateTime] = None
+ self, late_runs_count: int, last_polled: Optional[pendulum.DateTime] = None
) -> bool:
"""
Given empirical information about the state of the work queue, evaluate its health status.
@@ -1397,10 +1422,10 @@ class FlowRunNotificationPolicy(ObjectBaseModel):
is_active: bool = Field(
default=True, description="Whether the policy is currently active"
)
- state_names: List[str] = Field(
+ state_names: list[str] = Field(
default=..., description="The flow run states that trigger notifications"
)
- tags: List[str] = Field(
+ tags: list[str] = Field(
default=...,
description="The flow run tags that trigger notifications (set [] to disable)",
)
@@ -1422,7 +1447,7 @@ class FlowRunNotificationPolicy(ObjectBaseModel):
@field_validator("message_template")
@classmethod
- def validate_message_template_variables(cls, v):
+ def validate_message_template_variables(cls, v: Optional[str]) -> Optional[str]:
return validate_message_template_variables(v)
@@ -1454,7 +1479,7 @@ class WorkPool(ObjectBaseModel):
default=None, description="A description of the work pool."
)
type: str = Field(description="The work pool type.")
- base_job_template: Dict[str, Any] = Field(
+ base_job_template: dict[str, Any] = Field(
default_factory=dict, description="The work pool's base job template."
)
is_paused: bool = Field(
@@ -1469,10 +1494,12 @@ class WorkPool(ObjectBaseModel):
)
# this required field has a default of None so that the custom validator
- # below will be called and produce a more helpful error message
- default_queue_id: UUID = Field(
- None, description="The id of the pool's default queue."
- )
+ # below will be called and produce a more helpful error message. Because
+ # the field metadata is attached via an annotation, the default is hidden
+ # from type checkers.
+ default_queue_id: Annotated[
+ UUID, Field(default=None, description="The id of the pool's default queue.")
+ ]
@property
def is_push_pool(self) -> bool:
@@ -1484,7 +1511,7 @@ def is_managed_pool(self) -> bool:
@field_validator("default_queue_id")
@classmethod
- def helpful_error_for_missing_default_queue_id(cls, v):
+ def helpful_error_for_missing_default_queue_id(cls, v: Optional[UUID]) -> UUID:
return validate_default_queue_id_not_none(v)
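Moving the `Field` metadata into `Annotated[...]` keeps the public type of `default_queue_id` as a plain `UUID` while still giving it a hidden `None` default, so a missing value can reach the validator and produce the friendlier error. A sketch of the mechanism; it adds `validate_default=True` so the check also fires when the field is omitted entirely, which the real model presumably arranges elsewhere:

    from typing import Annotated, Optional
    from uuid import UUID, uuid4

    from pydantic import BaseModel, Field, field_validator

    class PoolSketch(BaseModel):
        # checkers see a required UUID; the None default lives in the
        # Annotated metadata and routes missing values to the validator
        default_queue_id: Annotated[
            UUID, Field(default=None, validate_default=True)
        ]

        @field_validator("default_queue_id")
        @classmethod
        def _helpful_error(cls, v: Optional[UUID]) -> UUID:
            if v is None:
                raise ValueError("default_queue_id is required")
            return v

    print(PoolSketch(default_queue_id=uuid4()).default_queue_id)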
@@ -1495,8 +1522,8 @@ class Worker(ObjectBaseModel):
work_pool_id: UUID = Field(
description="The work pool with which the queue is associated."
)
- last_heartbeat_time: datetime.datetime = Field(
- None, description="The last time the worker process sent a heartbeat."
+ last_heartbeat_time: Optional[datetime.datetime] = Field(
+ default=None, description="The last time the worker process sent a heartbeat."
)
heartbeat_interval_seconds: Optional[int] = Field(
default=None,
@@ -1529,14 +1556,14 @@ class Artifact(ObjectBaseModel):
default=None, description="A markdown-enabled description of the artifact."
)
# data will eventually be typed as `Optional[Union[Result, Any]]`
- data: Optional[Union[Dict[str, Any], Any]] = Field(
+ data: Optional[Union[dict[str, Any], Any]] = Field(
default=None,
description=(
"Data associated with the artifact, e.g. a result.; structure depends on"
" the artifact type."
),
)
- metadata_: Optional[Dict[str, str]] = Field(
+ metadata_: Optional[dict[str, str]] = Field(
default=None,
description=(
"User-defined artifact metadata. Content must be string key and value"
@@ -1552,7 +1579,9 @@ class Artifact(ObjectBaseModel):
@field_validator("metadata_")
@classmethod
- def validate_metadata_length(cls, v):
+ def validate_metadata_length(
+ cls, v: Optional[dict[str, str]]
+ ) -> Optional[dict[str, str]]:
return validate_max_metadata_length(v)
@@ -1571,14 +1600,14 @@ class ArtifactCollection(ObjectBaseModel):
description: Optional[str] = Field(
default=None, description="A markdown-enabled description of the artifact."
)
- data: Optional[Union[Dict[str, Any], Any]] = Field(
+ data: Optional[Union[dict[str, Any], Any]] = Field(
default=None,
description=(
"Data associated with the artifact, e.g. a result.; structure depends on"
" the artifact type."
),
)
- metadata_: Optional[Dict[str, str]] = Field(
+ metadata_: Optional[dict[str, str]] = Field(
default=None,
description=(
"User-defined artifact metadata. Content must be string key and value"
@@ -1605,7 +1634,7 @@ class Variable(ObjectBaseModel):
description="The value of the variable",
examples=["my_value"],
)
- tags: List[str] = Field(
+ tags: list[str] = Field(
default_factory=list,
description="A list of variable tags",
examples=[["tag-1", "tag-2"]],
@@ -1630,7 +1659,7 @@ def decoded_value(self) -> Any:
@field_validator("key", check_fields=False)
@classmethod
- def validate_name_characters(cls, v):
+ def validate_name_characters(cls, v: str) -> str:
raise_on_name_alphanumeric_dashes_only(v)
return v
@@ -1675,7 +1704,7 @@ class CsrfToken(ObjectBaseModel):
)
-__getattr__ = getattr_migration(__name__)
+__getattr__: Callable[[str], Any] = getattr_migration(__name__)
class Integration(PrefectBaseModel):
@@ -1693,7 +1722,7 @@ class WorkerMetadata(PrefectBaseModel):
should support flexible metadata.
"""
- integrations: List[Integration] = Field(
+ integrations: list[Integration] = Field(
default=..., description="Prefect integrations installed in the worker."
)
- model_config = ConfigDict(extra="allow")
+ model_config: ClassVar[ConfigDict] = ConfigDict(extra="allow")
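A change that recurs throughout objects.py: `model_config` gains an explicit `ClassVar[ConfigDict]` annotation. Pydantic already treats the attribute as configuration, but the annotation tells checkers it is class-level machinery rather than an instance field:

    from typing import ClassVar

    from pydantic import BaseModel, ConfigDict

    class Loose(BaseModel):
        # ClassVar keeps checkers from treating this as a model field
        model_config: ClassVar[ConfigDict] = ConfigDict(extra="allow")
        name: str

    print(Loose(name="x", anything="goes").model_dump())  # extras kept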
diff --git a/src/prefect/client/schemas/responses.py b/src/prefect/client/schemas/responses.py
index 29102b65f022..cb27a6f55392 100644
--- a/src/prefect/client/schemas/responses.py
+++ b/src/prefect/client/schemas/responses.py
@@ -1,5 +1,5 @@
import datetime
-from typing import Any, Dict, List, Optional, TypeVar, Union
+from typing import Any, ClassVar, Generic, Optional, TypeVar, Union
from uuid import UUID
from pydantic import ConfigDict, Field
@@ -13,7 +13,7 @@
from prefect.utilities.collections import AutoEnum
from prefect.utilities.names import generate_slug
-R = TypeVar("R")
+T = TypeVar("T")
class SetStateStatus(AutoEnum):
@@ -120,7 +120,7 @@ class HistoryResponse(PrefectBaseModel):
interval_end: DateTime = Field(
default=..., description="The end date of the interval."
)
- states: List[HistoryResponseState] = Field(
+ states: list[HistoryResponseState] = Field(
default=..., description="A list of state histories during the interval."
)
@@ -130,18 +130,18 @@ class HistoryResponse(PrefectBaseModel):
]
-class OrchestrationResult(PrefectBaseModel):
+class OrchestrationResult(PrefectBaseModel, Generic[T]):
"""
A container for the output of state orchestration.
"""
- state: Optional[objects.State]
+ state: Optional[objects.State[T]]
status: SetStateStatus
details: StateResponseDetails
class WorkerFlowRunResponse(PrefectBaseModel):
- model_config = ConfigDict(arbitrary_types_allowed=True)
+ model_config: ClassVar[ConfigDict] = ConfigDict(arbitrary_types_allowed=True)
work_pool_id: UUID
work_queue_id: UUID
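Making `OrchestrationResult` generic threads the state's result type through API responses, so a parametrized response exposes a typed payload instead of `Any`. The shape of the pattern, on hypothetical stand-in models:

    from typing import Generic, Optional, TypeVar

    from pydantic import BaseModel

    T = TypeVar("T")

    class StateSketch(BaseModel, Generic[T]):
        data: Optional[T] = None

    class ResultSketch(BaseModel, Generic[T]):
        state: Optional[StateSketch[T]] = None

    result = ResultSketch[int](state=StateSketch[int](data=3))
    if result.state is not None:
        total = (result.state.data or 0) + 1  # checker sees Optional[int]
        print(total)  # 4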
@@ -179,7 +179,7 @@ class FlowRunResponse(ObjectBaseModel):
description="The version of the flow executed in this flow run.",
examples=["1.0"],
)
- parameters: Dict[str, Any] = Field(
+ parameters: dict[str, Any] = Field(
default_factory=dict, description="Parameters for the flow run."
)
idempotency_key: Optional[str] = Field(
@@ -189,7 +189,7 @@ class FlowRunResponse(ObjectBaseModel):
" run is not created multiple times."
),
)
- context: Dict[str, Any] = Field(
+ context: dict[str, Any] = Field(
default_factory=dict,
description="Additional context for the flow run.",
examples=[{"my_var": "my_val"}],
@@ -197,7 +197,7 @@ class FlowRunResponse(ObjectBaseModel):
empirical_policy: objects.FlowRunPolicy = Field(
default_factory=objects.FlowRunPolicy,
)
- tags: List[str] = Field(
+ tags: list[str] = Field(
default_factory=list,
description="A list of tags on the flow run",
examples=[["tag-1", "tag-2"]],
@@ -275,7 +275,7 @@ class FlowRunResponse(ObjectBaseModel):
description="The state of the flow run.",
examples=["objects.State(type=objects.StateType.COMPLETED)"],
)
- job_variables: Optional[dict] = Field(
+ job_variables: Optional[dict[str, Any]] = Field(
default=None, description="Job variables for the flow run."
)
@@ -335,22 +335,22 @@ class DeploymentResponse(ObjectBaseModel):
default=None,
description="The concurrency options for the deployment.",
)
- schedules: List[objects.DeploymentSchedule] = Field(
+ schedules: list[objects.DeploymentSchedule] = Field(
default_factory=list, description="A list of schedules for the deployment."
)
- job_variables: Dict[str, Any] = Field(
+ job_variables: dict[str, Any] = Field(
default_factory=dict,
description="Overrides to apply to flow run infrastructure at runtime.",
)
- parameters: Dict[str, Any] = Field(
+ parameters: dict[str, Any] = Field(
default_factory=dict,
description="Parameters for flow runs scheduled by the deployment.",
)
- pull_steps: Optional[List[dict]] = Field(
+ pull_steps: Optional[list[dict[str, Any]]] = Field(
default=None,
description="Pull steps for cloning and running this deployment.",
)
- tags: List[str] = Field(
+ tags: list[str] = Field(
default_factory=list,
description="A list of tags for the deployment",
examples=[["tag-1", "tag-2"]],
@@ -367,7 +367,7 @@ class DeploymentResponse(ObjectBaseModel):
default=None,
description="The last time the deployment was polled for status updates.",
)
- parameter_openapi_schema: Optional[Dict[str, Any]] = Field(
+ parameter_openapi_schema: Optional[dict[str, Any]] = Field(
default=None,
description="The parameter schema of the flow, including defaults.",
)
@@ -400,7 +400,7 @@ class DeploymentResponse(ObjectBaseModel):
default=None,
description="Optional information about the updater of this deployment.",
)
- work_queue_id: UUID = Field(
+ work_queue_id: Optional[UUID] = Field(
default=None,
description=(
"The id of the work pool queue to which this deployment is assigned."
@@ -423,7 +423,7 @@ class DeploymentResponse(ObjectBaseModel):
class MinimalConcurrencyLimitResponse(PrefectBaseModel):
- model_config = ConfigDict(extra="ignore")
+ model_config: ClassVar[ConfigDict] = ConfigDict(extra="ignore")
id: UUID
name: str
diff --git a/src/prefect/client/schemas/schedules.py b/src/prefect/client/schemas/schedules.py
index 1a2b97a74f8f..4b9cf1b3cf5b 100644
--- a/src/prefect/client/schemas/schedules.py
+++ b/src/prefect/client/schemas/schedules.py
@@ -3,13 +3,13 @@
"""
import datetime
-from typing import Annotated, Any, Optional, Union
+from typing import TYPE_CHECKING, Annotated, Any, ClassVar, Optional, Union
import dateutil
import dateutil.rrule
+import dateutil.tz
import pendulum
from pydantic import AfterValidator, ConfigDict, Field, field_validator, model_validator
-from pydantic_extra_types.pendulum_dt import DateTime
from typing_extensions import TypeAlias, TypeGuard
from prefect._internal.schemas.bases import PrefectBaseModel
@@ -20,6 +20,14 @@
validate_rrule_string,
)
+if TYPE_CHECKING:
+ # type checkers have difficulty accepting that
+ # pydantic_extra_types.pendulum_dt and pendulum.DateTime can be used
+ # together.
+ DateTime = pendulum.DateTime
+else:
+ from pydantic_extra_types.pendulum_dt import DateTime
+
MAX_ITERATIONS = 1000
# approx. 1 years worth of RDATEs + buffer
MAX_RRULE_LENGTH = 6500
@@ -54,7 +62,7 @@ class IntervalSchedule(PrefectBaseModel):
timezone (str, optional): a valid timezone string
"""
- model_config = ConfigDict(extra="forbid", exclude_none=True)
+ model_config: ClassVar[ConfigDict] = ConfigDict(extra="forbid")
interval: datetime.timedelta = Field(gt=datetime.timedelta(0))
anchor_date: Annotated[DateTime, AfterValidator(default_anchor_date)] = Field(
@@ -68,6 +76,19 @@ def validate_timezone(self):
self.timezone = default_timezone(self.timezone, self.model_dump())
return self
+ if TYPE_CHECKING:
+ # The model accepts str or datetime values for `anchor_date`
+ def __init__(
+ self,
+ /,
+ interval: datetime.timedelta,
+ anchor_date: Optional[
+ Union[pendulum.DateTime, datetime.datetime, str]
+ ] = None,
+ timezone: Optional[str] = None,
+ ) -> None:
+ ...
+
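The checker-only `__init__` above documents the looser input types the model actually accepts (Pydantic coerces ISO strings and stdlib datetimes into the annotated type). Because the block is guarded by `TYPE_CHECKING`, the stub body never shadows Pydantic's real constructor. The same trick on a small model:

    import datetime
    from typing import TYPE_CHECKING, Union

    from pydantic import BaseModel

    class Window(BaseModel):
        start: datetime.datetime

        if TYPE_CHECKING:
            # advertises accepted inputs to checkers; never executed
            def __init__(
                self, /, start: Union[datetime.datetime, str]
            ) -> None: ...

    print(Window(start="2020-01-01T00:00:00").start.year)  # 2020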
class CronSchedule(PrefectBaseModel):
"""
@@ -94,7 +115,7 @@ class CronSchedule(PrefectBaseModel):
"""
- model_config = ConfigDict(extra="forbid")
+ model_config: ClassVar[ConfigDict] = ConfigDict(extra="forbid")
cron: str = Field(default=..., examples=["0 0 * * *"])
timezone: Optional[str] = Field(default=None, examples=["America/New_York"])
@@ -107,18 +128,36 @@ class CronSchedule(PrefectBaseModel):
@field_validator("timezone")
@classmethod
- def valid_timezone(cls, v):
+ def valid_timezone(cls, v: Optional[str]) -> str:
return default_timezone(v)
@field_validator("cron")
@classmethod
- def valid_cron_string(cls, v):
+ def valid_cron_string(cls, v: str) -> str:
return validate_cron_string(v)
DEFAULT_ANCHOR_DATE = pendulum.date(2020, 1, 1)
+def _rrule_dt(
+ rrule: dateutil.rrule.rrule, name: str = "_dtstart"
+) -> Optional[datetime.datetime]:
+ return getattr(rrule, name, None)
+
+
+def _rrule(
+ rruleset: dateutil.rrule.rruleset, name: str = "_rrule"
+) -> list[dateutil.rrule.rrule]:
+ return getattr(rruleset, name, [])
+
+
+def _rdates(
+ rrule: dateutil.rrule.rruleset, name: str = "_rdate"
+) -> list[datetime.datetime]:
+ return getattr(rrule, name, [])
+
+
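dateutil keeps rrule state in underscored attributes (`_dtstart`, `_rrule`, `_rdate`, and so on) with no typed public accessors, so every direct access was a type error. The helpers above funnel all of it through one `getattr` escape hatch and hand back precise types. For example:

    import datetime
    from typing import Optional

    import dateutil.rrule

    def rrule_dtstart(rule: dateutil.rrule.rrule) -> Optional[datetime.datetime]:
        # the single untyped access; callers get a typed Optional back
        return getattr(rule, "_dtstart", None)

    rule = dateutil.rrule.rrule(
        dateutil.rrule.DAILY, dtstart=datetime.datetime(2020, 1, 1)
    )
    print(rrule_dtstart(rule))  # 2020-01-01 00:00:00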
class RRuleSchedule(PrefectBaseModel):
"""
RRule schedule, based on the iCalendar standard
@@ -139,7 +178,7 @@ class RRuleSchedule(PrefectBaseModel):
timezone (str, optional): a valid timezone string
"""
- model_config = ConfigDict(extra="forbid")
+ model_config: ClassVar[ConfigDict] = ConfigDict(extra="forbid")
rrule: str
timezone: Optional[str] = Field(
@@ -148,58 +187,60 @@ class RRuleSchedule(PrefectBaseModel):
@field_validator("rrule")
@classmethod
- def validate_rrule_str(cls, v):
+ def validate_rrule_str(cls, v: str) -> str:
return validate_rrule_string(v)
@classmethod
- def from_rrule(cls, rrule: dateutil.rrule.rrule):
+ def from_rrule(
+ cls, rrule: Union[dateutil.rrule.rrule, dateutil.rrule.rruleset]
+ ) -> "RRuleSchedule":
if isinstance(rrule, dateutil.rrule.rrule):
- if rrule._dtstart.tzinfo is not None:
- timezone = rrule._dtstart.tzinfo.name
+ dtstart = _rrule_dt(rrule)
+ if dtstart and dtstart.tzinfo is not None:
+ timezone = dtstart.tzinfo.tzname(dtstart)
else:
timezone = "UTC"
return RRuleSchedule(rrule=str(rrule), timezone=timezone)
- elif isinstance(rrule, dateutil.rrule.rruleset):
- dtstarts = [rr._dtstart for rr in rrule._rrule if rr._dtstart is not None]
- unique_dstarts = set(pendulum.instance(d).in_tz("UTC") for d in dtstarts)
- unique_timezones = set(d.tzinfo for d in dtstarts if d.tzinfo is not None)
-
- if len(unique_timezones) > 1:
- raise ValueError(
- f"rruleset has too many dtstart timezones: {unique_timezones}"
- )
-
- if len(unique_dstarts) > 1:
- raise ValueError(f"rruleset has too many dtstarts: {unique_dstarts}")
-
- if unique_dstarts and unique_timezones:
- timezone = dtstarts[0].tzinfo.name
- else:
- timezone = "UTC"
-
- rruleset_string = ""
- if rrule._rrule:
- rruleset_string += "\n".join(str(r) for r in rrule._rrule)
- if rrule._exrule:
- rruleset_string += "\n" if rruleset_string else ""
- rruleset_string += "\n".join(str(r) for r in rrule._exrule).replace(
- "RRULE", "EXRULE"
- )
- if rrule._rdate:
- rruleset_string += "\n" if rruleset_string else ""
- rruleset_string += "RDATE:" + ",".join(
- rd.strftime("%Y%m%dT%H%M%SZ") for rd in rrule._rdate
- )
- if rrule._exdate:
- rruleset_string += "\n" if rruleset_string else ""
- rruleset_string += "EXDATE:" + ",".join(
- exd.strftime("%Y%m%dT%H%M%SZ") for exd in rrule._exdate
- )
- return RRuleSchedule(rrule=rruleset_string, timezone=timezone)
+ rrules = _rrule(rrule)
+ dtstarts = [dts for rr in rrules if (dts := _rrule_dt(rr)) is not None]
+ unique_dstarts = set(pendulum.instance(d).in_tz("UTC") for d in dtstarts)
+ unique_timezones = set(d.tzinfo for d in dtstarts if d.tzinfo is not None)
+
+ if len(unique_timezones) > 1:
+ raise ValueError(
+ f"rruleset has too many dtstart timezones: {unique_timezones}"
+ )
+
+ if len(unique_dstarts) > 1:
+ raise ValueError(f"rruleset has too many dtstarts: {unique_dstarts}")
+
+ if unique_dstarts and unique_timezones:
+ [unique_tz] = unique_timezones
+ timezone = unique_tz.tzname(dtstarts[0])
else:
- raise ValueError(f"Invalid RRule object: {rrule}")
-
- def to_rrule(self) -> dateutil.rrule.rrule:
+ timezone = "UTC"
+
+ rruleset_string = ""
+ if rrules:
+ rruleset_string += "\n".join(str(r) for r in rrules)
+ if exrule := _rrule(rrule, "_exrule"):
+ rruleset_string += "\n" if rruleset_string else ""
+ rruleset_string += "\n".join(str(r) for r in exrule).replace(
+ "RRULE", "EXRULE"
+ )
+ if rdates := _rdates(rrule):
+ rruleset_string += "\n" if rruleset_string else ""
+ rruleset_string += "RDATE:" + ",".join(
+ rd.strftime("%Y%m%dT%H%M%SZ") for rd in rdates
+ )
+ if exdates := _rdates(rrule, "_exdate"):
+ rruleset_string += "\n" if rruleset_string else ""
+ rruleset_string += "EXDATE:" + ",".join(
+ exd.strftime("%Y%m%dT%H%M%SZ") for exd in exdates
+ )
+ return RRuleSchedule(rrule=rruleset_string, timezone=timezone)
+
+ def to_rrule(self) -> Union[dateutil.rrule.rrule, dateutil.rrule.rruleset]:
"""
Since rrule doesn't properly serialize/deserialize timezones, we localize dates
here
@@ -211,51 +252,53 @@ def to_rrule(self) -> dateutil.rrule.rrule:
)
timezone = dateutil.tz.gettz(self.timezone)
if isinstance(rrule, dateutil.rrule.rrule):
- kwargs = dict(dtstart=rrule._dtstart.replace(tzinfo=timezone))
- if rrule._until:
+ dtstart = _rrule_dt(rrule)
+ assert dtstart is not None
+ kwargs: dict[str, Any] = dict(dtstart=dtstart.replace(tzinfo=timezone))
+ if until := _rrule_dt(rrule, "_until"):
kwargs.update(
- until=rrule._until.replace(tzinfo=timezone),
+ until=until.replace(tzinfo=timezone),
)
return rrule.replace(**kwargs)
- elif isinstance(rrule, dateutil.rrule.rruleset):
- # update rrules
- localized_rrules = []
- for rr in rrule._rrule:
- kwargs = dict(dtstart=rr._dtstart.replace(tzinfo=timezone))
- if rr._until:
- kwargs.update(
- until=rr._until.replace(tzinfo=timezone),
- )
- localized_rrules.append(rr.replace(**kwargs))
- rrule._rrule = localized_rrules
-
- # update exrules
- localized_exrules = []
- for exr in rrule._exrule:
- kwargs = dict(dtstart=exr._dtstart.replace(tzinfo=timezone))
- if exr._until:
- kwargs.update(
- until=exr._until.replace(tzinfo=timezone),
- )
- localized_exrules.append(exr.replace(**kwargs))
- rrule._exrule = localized_exrules
-
- # update rdates
- localized_rdates = []
- for rd in rrule._rdate:
- localized_rdates.append(rd.replace(tzinfo=timezone))
- rrule._rdate = localized_rdates
-
- # update exdates
- localized_exdates = []
- for exd in rrule._exdate:
- localized_exdates.append(exd.replace(tzinfo=timezone))
- rrule._exdate = localized_exdates
-
- return rrule
+
+ # update rrules
+ localized_rrules: list[dateutil.rrule.rrule] = []
+ for rr in _rrule(rrule):
+ dtstart = _rrule_dt(rr)
+ assert dtstart is not None
+ kwargs: dict[str, Any] = dict(dtstart=dtstart.replace(tzinfo=timezone))
+ if until := _rrule_dt(rr, "_until"):
+ kwargs.update(until=until.replace(tzinfo=timezone))
+ localized_rrules.append(rr.replace(**kwargs))
+ setattr(rrule, "_rrule", localized_rrules)
+
+ # update exrules
+ localized_exrules: list[dateutil.rrule.rruleset] = []
+ for exr in _rrule(rrule, "_exrule"):
+ dtstart = _rrule_dt(exr)
+ assert dtstart is not None
+ kwargs = dict(dtstart=dtstart.replace(tzinfo=timezone))
+ if until := _rrule_dt(exr, "_until"):
+ kwargs.update(until=until.replace(tzinfo=timezone))
+ localized_exrules.append(exr.replace(**kwargs))
+ setattr(rrule, "_exrule", localized_exrules)
+
+ # update rdates
+ localized_rdates: list[datetime.datetime] = []
+ for rd in _rdates(rrule):
+ localized_rdates.append(rd.replace(tzinfo=timezone))
+ setattr(rrule, "_rdate", localized_rdates)
+
+ # update exdates
+ localized_exdates: list[datetime.datetime] = []
+ for exd in _rdates(rrule, "_exdate"):
+ localized_exdates.append(exd.replace(tzinfo=timezone))
+ setattr(rrule, "_exdate", localized_exdates)
+
+ return rrule
@field_validator("timezone")
- def valid_timezone(cls, v):
+ def valid_timezone(cls, v: Optional[str]) -> str:
"""
Validate that the provided timezone is a valid IANA timezone.
@@ -277,7 +320,7 @@ def valid_timezone(cls, v):
class NoSchedule(PrefectBaseModel):
- model_config = ConfigDict(extra="forbid")
+ model_config: ClassVar[ConfigDict] = ConfigDict(extra="forbid")
SCHEDULE_TYPES: TypeAlias = Union[
@@ -326,7 +369,7 @@ def construct_schedule(
if isinstance(interval, (int, float)):
interval = datetime.timedelta(seconds=interval)
if not anchor_date:
- anchor_date = DateTime.now()
+ anchor_date = pendulum.DateTime.now()
schedule = IntervalSchedule(
interval=interval, anchor_date=anchor_date, timezone=timezone
)
diff --git a/src/prefect/client/subscriptions.py b/src/prefect/client/subscriptions.py
index d13873e14b05..8e04b3735e8a 100644
--- a/src/prefect/client/subscriptions.py
+++ b/src/prefect/client/subscriptions.py
@@ -1,5 +1,7 @@
import asyncio
-from typing import Any, Dict, Generic, Iterable, Optional, Type, TypeVar
+from collections.abc import Iterable
+from logging import Logger
+from typing import Any, Generic, Optional, TypeVar
import orjson
import websockets
@@ -11,7 +13,7 @@
from prefect.logging import get_logger
from prefect.settings import PREFECT_API_KEY
-logger = get_logger(__name__)
+logger: Logger = get_logger(__name__)
S = TypeVar("S", bound=IDBaseModel)
@@ -19,7 +21,7 @@
class Subscription(Generic[S]):
def __init__(
self,
- model: Type[S],
+ model: type[S],
path: str,
keys: Iterable[str],
client_id: Optional[str] = None,
@@ -28,9 +30,9 @@ def __init__(
self.model = model
self.client_id = client_id
base_url = base_url.replace("http", "ws", 1) if base_url else None
- self.subscription_url = f"{base_url}{path}"
+ self.subscription_url: str = f"{base_url}{path}"
- self.keys = list(keys)
+ self.keys: list[str] = list(keys)
self._connect = websockets.connect(
self.subscription_url,
@@ -78,10 +80,10 @@ async def _ensure_connected(self):
).decode()
)
- auth: Dict[str, Any] = orjson.loads(await websocket.recv())
+ auth: dict[str, Any] = orjson.loads(await websocket.recv())
assert auth["type"] == "auth_success", auth.get("message")
- message = {"type": "subscribe", "keys": self.keys}
+ message: dict[str, Any] = {"type": "subscribe", "keys": self.keys}
if self.client_id:
message.update({"client_id": self.client_id})
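`Subscription` stays generic over the model class it deserializes; typing the constructor parameter as `type[S]` is what lets the checker connect the class passed in to the instances the subscription yields. Roughly, with hypothetical names:

    from typing import Any, Generic, TypeVar

    from pydantic import BaseModel

    S = TypeVar("S", bound=BaseModel)

    class SubscriptionSketch(Generic[S]):
        def __init__(self, model: type[S]) -> None:
            self.model = model

        def decode(self, payload: dict[str, Any]) -> S:
            # type[S] ties the constructor argument to the return type here
            return self.model.model_validate(payload)

    class FlowRunEventSketch(BaseModel):
        id: str

    print(SubscriptionSketch(FlowRunEventSketch).decode({"id": "abc"}))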
diff --git a/src/prefect/client/utilities.py b/src/prefect/client/utilities.py
index 81ff31199e6e..86e7be152f65 100644
--- a/src/prefect/client/utilities.py
+++ b/src/prefect/client/utilities.py
@@ -5,32 +5,31 @@
# This module must not import from `prefect.client` when it is imported to avoid
# circular imports for decorators such as `inject_client` which are widely used.
+from collections.abc import Awaitable, Coroutine
from functools import wraps
-from typing import (
- TYPE_CHECKING,
- Any,
- Awaitable,
- Callable,
- Coroutine,
- Optional,
- Tuple,
- TypeVar,
- Union,
- cast,
-)
-
-from typing_extensions import Concatenate, ParamSpec
+from typing import TYPE_CHECKING, Any, Callable, Optional, Union
+
+from typing_extensions import Concatenate, ParamSpec, TypeIs, TypeVar
if TYPE_CHECKING:
from prefect.client.orchestration import PrefectClient, SyncPrefectClient
P = ParamSpec("P")
-R = TypeVar("R")
+R = TypeVar("R", infer_variance=True)
+
+
+def _current_async_client(
+ client: Union["PrefectClient", "SyncPrefectClient"],
+) -> TypeIs["PrefectClient"]:
+ from prefect._internal.concurrency.event_loop import get_running_loop
+
+ # Only a PrefectClient will have a _loop attribute that is the current loop
+ return getattr(client, "_loop", None) == get_running_loop()
def get_or_create_client(
client: Optional["PrefectClient"] = None,
-) -> Tuple[Union["PrefectClient", "SyncPrefectClient"], bool]:
+) -> tuple["PrefectClient", bool]:
"""
Returns provided client, infers a client from context if available, or creates a new client.
@@ -42,29 +41,22 @@ def get_or_create_client(
"""
if client is not None:
return client, True
- from prefect._internal.concurrency.event_loop import get_running_loop
+
from prefect.context import AsyncClientContext, FlowRunContext, TaskRunContext
async_client_context = AsyncClientContext.get()
flow_run_context = FlowRunContext.get()
task_run_context = TaskRunContext.get()
- if async_client_context and async_client_context.client._loop == get_running_loop(): # type: ignore[reportPrivateUsage]
- return async_client_context.client, True
- elif (
- flow_run_context
- and getattr(flow_run_context.client, "_loop", None) == get_running_loop()
- ):
- return flow_run_context.client, True
- elif (
- task_run_context
- and getattr(task_run_context.client, "_loop", None) == get_running_loop()
- ):
- return task_run_context.client, True
- else:
- from prefect.client.orchestration import get_client as get_httpx_client
+ for context in (async_client_context, flow_run_context, task_run_context):
+ if context is None:
+ continue
+ if _current_async_client(context_client := context.client):
+ return context_client, True
+
+ from prefect.client.orchestration import get_client as get_httpx_client
- return get_httpx_client(), False
+ return get_httpx_client(), False
def client_injector(
@@ -73,7 +65,7 @@ def client_injector(
@wraps(func)
async def wrapper(*args: P.args, **kwargs: P.kwargs) -> R:
client, _ = get_or_create_client()
- return await func(cast("PrefectClient", client), *args, **kwargs)
+ return await func(client, *args, **kwargs)
return wrapper
@@ -91,18 +83,18 @@ def inject_client(
@wraps(fn)
async def with_injected_client(*args: P.args, **kwargs: P.kwargs) -> R:
- client, inferred = get_or_create_client(
- cast(Optional["PrefectClient"], kwargs.pop("client", None))
- )
- _client = cast("PrefectClient", client)
+ given = kwargs.pop("client", None)
+ if TYPE_CHECKING:
+ assert given is None or isinstance(given, PrefectClient)
+ client, inferred = get_or_create_client(given)
if not inferred:
- context = _client
+ context = client
else:
from prefect.utilities.asyncutils import asyncnullcontext
- context = asyncnullcontext()
+ context = asyncnullcontext(client)
async with context as new_client:
- kwargs.setdefault("client", new_client or _client)
+ kwargs |= {"client": new_client}
return await fn(*args, **kwargs)
return with_injected_client
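`inject_client` keeps its contract after the refactor: reuse a client inferred from context (wrapped in `asyncnullcontext`, which now carries the value through), otherwise open and own a fresh one for the duration of the call. A simplified sketch of that decorator shape, with hypothetical client types:

    import asyncio
    from contextlib import asynccontextmanager
    from functools import wraps
    from typing import Any, AsyncIterator, Awaitable, Callable, Optional

    class ClientSketch:
        async def ping(self) -> str:
            return "pong"

    @asynccontextmanager
    async def open_client(
        existing: Optional[ClientSketch],
    ) -> AsyncIterator[ClientSketch]:
        # reuse an inferred client, otherwise own a fresh one for this call
        yield existing or ClientSketch()

    def inject_client_sketch(
        fn: Callable[..., Awaitable[Any]],
    ) -> Callable[..., Awaitable[Any]]:
        @wraps(fn)
        async def wrapper(
            *args: Any, client: Optional[ClientSketch] = None, **kwargs: Any
        ) -> Any:
            async with open_client(client) as c:
                return await fn(*args, client=c, **kwargs)
        return wrapper

    @inject_client_sketch
    async def hello(*, client: ClientSketch) -> str:
        return await client.ping()

    print(asyncio.run(hello()))  # pong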
diff --git a/src/prefect/main.py b/src/prefect/main.py
index 4fea3999e2ad..0d56990c829d 100644
--- a/src/prefect/main.py
+++ b/src/prefect/main.py
@@ -1,4 +1,6 @@
# Import user-facing API
+from typing import Any
+
from prefect.deployments import deploy
from prefect.states import State
from prefect.logging import get_run_logger
@@ -25,28 +27,17 @@
# Perform any forward-ref updates needed for Pydantic models
import prefect.client.schemas
-prefect.context.FlowRunContext.model_rebuild(
- _types_namespace={
- "Flow": Flow,
- "BaseResult": BaseResult,
- "ResultRecordMetadata": ResultRecordMetadata,
- }
-)
-prefect.context.TaskRunContext.model_rebuild(
- _types_namespace={"Task": Task, "BaseResult": BaseResult}
-)
-prefect.client.schemas.State.model_rebuild(
- _types_namespace={
- "BaseResult": BaseResult,
- "ResultRecordMetadata": ResultRecordMetadata,
- }
-)
-prefect.client.schemas.StateCreate.model_rebuild(
- _types_namespace={
- "BaseResult": BaseResult,
- "ResultRecordMetadata": ResultRecordMetadata,
- }
+_types: dict[str, Any] = dict(
+ Task=Task,
+ Flow=Flow,
+ BaseResult=BaseResult,
+ ResultRecordMetadata=ResultRecordMetadata,
)
+prefect.context.FlowRunContext.model_rebuild(_types_namespace=_types)
+prefect.context.TaskRunContext.model_rebuild(_types_namespace=_types)
+prefect.client.schemas.State.model_rebuild(_types_namespace=_types)
+prefect.client.schemas.StateCreate.model_rebuild(_types_namespace=_types)
+prefect.client.schemas.OrchestrationResult.model_rebuild(_types_namespace=_types)
Transaction.model_rebuild()
# Configure logging
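Consolidating the forward-reference namespaces into a single `_types` dict works because `model_rebuild` only looks up the names a given model actually references; extra entries are ignored. It also makes rebuilding the newly generic `OrchestrationResult` a one-liner. The mechanism in isolation, on hypothetical models:

    from typing import Any

    from pydantic import BaseModel

    class Node(BaseModel):
        payload: "Payload"  # forward reference, resolved later

    class Payload(BaseModel):
        value: int

    _types: dict[str, Any] = dict(Payload=Payload, Unused=int)  # extras ignored
    Node.model_rebuild(_types_namespace=_types)

    print(Node(payload={"value": 1}).payload.value)  # 1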
From cac38a155981373f5d1ce9fef20fa9dd4bcbbd5c Mon Sep 17 00:00:00 2001
From: Martijn Pieters
Date: Sun, 8 Dec 2024 19:34:04 +0000
Subject: [PATCH 36/92] [typing] Use correct type narrowing construct (#16269)
---
src/prefect/client/utilities.py | 5 +++--
1 file changed, 3 insertions(+), 2 deletions(-)
diff --git a/src/prefect/client/utilities.py b/src/prefect/client/utilities.py
index 86e7be152f65..4622a7d6fe32 100644
--- a/src/prefect/client/utilities.py
+++ b/src/prefect/client/utilities.py
@@ -9,7 +9,7 @@
from functools import wraps
from typing import TYPE_CHECKING, Any, Callable, Optional, Union
-from typing_extensions import Concatenate, ParamSpec, TypeIs, TypeVar
+from typing_extensions import Concatenate, ParamSpec, TypeGuard, TypeVar
if TYPE_CHECKING:
from prefect.client.orchestration import PrefectClient, SyncPrefectClient
@@ -20,7 +20,8 @@
def _current_async_client(
client: Union["PrefectClient", "SyncPrefectClient"],
-) -> TypeIs["PrefectClient"]:
+) -> TypeGuard["PrefectClient"]:
+ """Determine if the client is a PrefectClient instance attached to the current loop"""
from prefect._internal.concurrency.event_loop import get_running_loop
# Only a PrefectClient will have a _loop attribute that is the current loop
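The distinction this patch fixes: `TypeIs` narrows in both branches, so a `False` return would tell the checker the client is definitely a `SyncPrefectClient`. That is wrong here, because the predicate also returns `False` for a `PrefectClient` bound to a different event loop. `TypeGuard` narrows only the positive branch. A self-contained illustration with stand-in classes:

    from typing import Union

    from typing_extensions import TypeGuard

    CURRENT_LOOP = 1

    class AsyncClientSketch:
        def __init__(self, loop_id: int) -> None:
            self.loop_id = loop_id

    class SyncClientSketch:
        pass

    def is_current_async(
        client: Union[AsyncClientSketch, SyncClientSketch],
    ) -> TypeGuard[AsyncClientSketch]:
        # False may just mean "async client on another loop", so only the
        # True branch may narrow: TypeGuard, not TypeIs
        return getattr(client, "loop_id", None) == CURRENT_LOOP

    def use(client: Union[AsyncClientSketch, SyncClientSketch]) -> None:
        if is_current_async(client):
            print("async on current loop", client.loop_id)
        else:
            print("sync, or async bound to another loop")

    use(AsyncClientSketch(2))  # hits the else branch despite being async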
From 09f71a280fd91dfc485c0f8cf779f422833e7121 Mon Sep 17 00:00:00 2001
From: Devin Villarosa <102188207+devinvillarosa@users.noreply.github.com>
Date: Sun, 8 Dec 2024 16:44:04 -0800
Subject: [PATCH 37/92] [UI v2] feat: Reverts layout component approach
(#16267)
---
.../concurrency/concurrency-page.tsx | 5 ++--
.../global-concurrency-limits-header.tsx | 5 ++--
ui-v2/src/components/ui/block.tsx | 23 -------------------
ui-v2/src/components/ui/flex.tsx | 20 ----------------
4 files changed, 4 insertions(+), 49 deletions(-)
delete mode 100644 ui-v2/src/components/ui/block.tsx
delete mode 100644 ui-v2/src/components/ui/flex.tsx
diff --git a/ui-v2/src/components/concurrency/concurrency-page.tsx b/ui-v2/src/components/concurrency/concurrency-page.tsx
index 62b1826925f5..85a45c4c915a 100644
--- a/ui-v2/src/components/concurrency/concurrency-page.tsx
+++ b/ui-v2/src/components/concurrency/concurrency-page.tsx
@@ -1,6 +1,5 @@
import type { JSX } from "react";
-import { Flex } from "@/components/ui/flex";
import { Typography } from "@/components/ui/typography";
import { ConcurrencyTabs } from "./concurrency-tabs";
@@ -9,12 +8,12 @@ import { TaskRunConcurrencyView } from "./task-run-concurrenct-view";
export const ConcurrencyPage = (): JSX.Element => {
return (
-
+
Concurrency
}
taskRunView={}
/>
-
+
);
};
diff --git a/ui-v2/src/components/concurrency/global-concurrency-view/global-concurrency-limits-header.tsx b/ui-v2/src/components/concurrency/global-concurrency-view/global-concurrency-limits-header.tsx
index e0b809dd3db3..9f10f79f4062 100644
--- a/ui-v2/src/components/concurrency/global-concurrency-view/global-concurrency-limits-header.tsx
+++ b/ui-v2/src/components/concurrency/global-concurrency-view/global-concurrency-limits-header.tsx
@@ -1,5 +1,4 @@
import { Button } from "@/components/ui/button";
-import { Flex } from "@/components/ui/flex";
import { Icon } from "@/components/ui/icons";
import { Typography } from "@/components/ui/typography";
@@ -9,11 +8,11 @@ type Props = {
export const GlobalConcurrencyLimitsHeader = ({ onAdd }: Props) => {
return (
-
+
Global Concurrency Limits
-
+
);
};
diff --git a/ui-v2/src/components/ui/block.tsx b/ui-v2/src/components/ui/block.tsx
deleted file mode 100644
index adbde80942af..000000000000
--- a/ui-v2/src/components/ui/block.tsx
+++ /dev/null
@@ -1,23 +0,0 @@
-import { cn } from "@/lib/utils";
-import { createElement, forwardRef } from "react";
-import { UtilityProps, spacingUtiltiesClasses } from "./utils/spacing-utils";
-
-type Props = Omit<
- UtilityProps,
- "alignItems" | "alignSelf" | "display" | "flexDirection" | "gap"
-> & {
- className?: string;
- children: React.ReactNode;
-};
-
-export const Block = forwardRef(
- ({ className, ...props }, ref) => {
- return createElement("div", {
- className: cn("block", spacingUtiltiesClasses(props), className),
- ref,
- ...props,
- });
- },
-);
-
-Block.displayName = "Block";
diff --git a/ui-v2/src/components/ui/flex.tsx b/ui-v2/src/components/ui/flex.tsx
deleted file mode 100644
index 1109db0e6455..000000000000
--- a/ui-v2/src/components/ui/flex.tsx
+++ /dev/null
@@ -1,20 +0,0 @@
-import { cn } from "@/lib/utils";
-import { createElement, forwardRef } from "react";
-import { UtilityProps, spacingUtiltiesClasses } from "./utils/spacing-utils";
-
-type Props = Omit & {
- className?: string;
- children: React.ReactNode;
-};
-
-export const Flex = forwardRef(
- ({ className, ...props }, ref) => {
- return createElement("div", {
- className: cn("flex", spacingUtiltiesClasses(props), className),
- ref,
- ...props,
- });
- },
-);
-
-Flex.displayName = "Flex";
From 0f121f8db8379e3adfd4efcb33cba6952bc78cdb Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Sun, 8 Dec 2024 19:59:07 -0500
Subject: [PATCH 38/92] Bump @prefecthq/prefect-design from 2.14.14 to 2.14.15
in /ui (#16272)
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
ui/package-lock.json | 14 +++++++-------
ui/package.json | 2 +-
2 files changed, 8 insertions(+), 8 deletions(-)
diff --git a/ui/package-lock.json b/ui/package-lock.json
index 70a46ec2bf49..92fde4c14d64 100644
--- a/ui/package-lock.json
+++ b/ui/package-lock.json
@@ -8,7 +8,7 @@
"name": "@prefecthq/ui",
"version": "2.8.0",
"dependencies": {
- "@prefecthq/prefect-design": "2.14.14",
+ "@prefecthq/prefect-design": "2.14.15",
"@prefecthq/prefect-ui-library": "3.11.21",
"@prefecthq/vue-charts": "2.0.5",
"@prefecthq/vue-compositions": "1.11.5",
@@ -1096,9 +1096,9 @@
}
},
"node_modules/@prefecthq/prefect-design": {
- "version": "2.14.14",
- "resolved": "https://registry.npmjs.org/@prefecthq/prefect-design/-/prefect-design-2.14.14.tgz",
- "integrity": "sha512-yg56mzYBH40yes74YPhE5kX5JAdH/AAj0CIdRLVgm7GKN+V0IMN684NLlDiSqYJlhR4UYKDyb4rusS+O/ZTOEw==",
+ "version": "2.14.15",
+ "resolved": "https://registry.npmjs.org/@prefecthq/prefect-design/-/prefect-design-2.14.15.tgz",
+ "integrity": "sha512-baAVVvn8oNAz2w5gIeppMTxT2cHayRgqG4VT7jYxXBr8oxhIo4nr7/5XM/eHjPmcxN7jSGpdzaiTNXcVKTuZgQ==",
"dependencies": {
"@fontsource-variable/inconsolata": "^5.0.18",
"@fontsource-variable/inter": "^5.0.18",
@@ -8032,9 +8032,9 @@
}
},
"@prefecthq/prefect-design": {
- "version": "2.14.14",
- "resolved": "https://registry.npmjs.org/@prefecthq/prefect-design/-/prefect-design-2.14.14.tgz",
- "integrity": "sha512-yg56mzYBH40yes74YPhE5kX5JAdH/AAj0CIdRLVgm7GKN+V0IMN684NLlDiSqYJlhR4UYKDyb4rusS+O/ZTOEw==",
+ "version": "2.14.15",
+ "resolved": "https://registry.npmjs.org/@prefecthq/prefect-design/-/prefect-design-2.14.15.tgz",
+ "integrity": "sha512-baAVVvn8oNAz2w5gIeppMTxT2cHayRgqG4VT7jYxXBr8oxhIo4nr7/5XM/eHjPmcxN7jSGpdzaiTNXcVKTuZgQ==",
"requires": {
"@fontsource-variable/inconsolata": "^5.0.18",
"@fontsource-variable/inter": "^5.0.18",
diff --git a/ui/package.json b/ui/package.json
index 0f1d6af30c15..2098cfc148c4 100644
--- a/ui/package.json
+++ b/ui/package.json
@@ -10,7 +10,7 @@
"validate:types": "vue-tsc --noEmit"
},
"dependencies": {
- "@prefecthq/prefect-design": "2.14.14",
+ "@prefecthq/prefect-design": "2.14.15",
"@prefecthq/prefect-ui-library": "3.11.21",
"@prefecthq/vue-charts": "2.0.5",
"@prefecthq/vue-compositions": "1.11.5",
From c8b0683e7d80bfbfbc2d10acd78b41172b724e1b Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Sun, 8 Dec 2024 20:00:25 -0500
Subject: [PATCH 39/92] Bump vite from 5.4.11 to 6.0.3 in /ui (#16270)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
ui/package-lock.json | 855 ++++++++++++++++++++++++-------------------
ui/package.json | 2 +-
2 files changed, 480 insertions(+), 377 deletions(-)
diff --git a/ui/package-lock.json b/ui/package-lock.json
index 92fde4c14d64..c00d5d14f229 100644
--- a/ui/package-lock.json
+++ b/ui/package-lock.json
@@ -28,7 +28,7 @@
"eslint": "^8.57.1",
"ts-node": "10.9.2",
"typescript": "^5.7.2",
- "vite": "5.4.11",
+ "vite": "6.0.3",
"vue-tsc": "^2.1.10"
}
},
@@ -118,9 +118,9 @@
}
},
"node_modules/@esbuild/aix-ppc64": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.21.5.tgz",
- "integrity": "sha512-1SDgH6ZSPTlggy1yI6+Dbkiz8xzpHJEVAlF/AM1tHPLsf5STom9rwtjE4hKAF20FfXXNTFqEYXyJNWh1GiZedQ==",
+ "version": "0.24.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.24.0.tgz",
+ "integrity": "sha512-WtKdFM7ls47zkKHFVzMz8opM7LkcsIp9amDUBIAWirg70RM71WRSjdILPsY5Uv1D42ZpUfaPILDlfactHgsRkw==",
"cpu": [
"ppc64"
],
@@ -130,13 +130,13 @@
"aix"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@esbuild/android-arm": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.21.5.tgz",
- "integrity": "sha512-vCPvzSjpPHEi1siZdlvAlsPxXl7WbOVUBBAowWug4rJHb68Ox8KualB+1ocNvT5fjv6wpkX6o/iEpbDrf68zcg==",
+ "version": "0.24.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.24.0.tgz",
+ "integrity": "sha512-arAtTPo76fJ/ICkXWetLCc9EwEHKaeya4vMrReVlEIUCAUncH7M4bhMQ+M9Vf+FFOZJdTNMXNBrWwW+OXWpSew==",
"cpu": [
"arm"
],
@@ -146,13 +146,13 @@
"android"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@esbuild/android-arm64": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.21.5.tgz",
- "integrity": "sha512-c0uX9VAUBQ7dTDCjq+wdyGLowMdtR/GoC2U5IYk/7D1H1JYC0qseD7+11iMP2mRLN9RcCMRcjC4YMclCzGwS/A==",
+ "version": "0.24.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.24.0.tgz",
+ "integrity": "sha512-Vsm497xFM7tTIPYK9bNTYJyF/lsP590Qc1WxJdlB6ljCbdZKU9SY8i7+Iin4kyhV/KV5J2rOKsBQbB77Ab7L/w==",
"cpu": [
"arm64"
],
@@ -162,13 +162,13 @@
"android"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@esbuild/android-x64": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.21.5.tgz",
- "integrity": "sha512-D7aPRUUNHRBwHxzxRvp856rjUHRFW1SdQATKXH2hqA0kAZb1hKmi02OpYRacl0TxIGz/ZmXWlbZgjwWYaCakTA==",
+ "version": "0.24.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.24.0.tgz",
+ "integrity": "sha512-t8GrvnFkiIY7pa7mMgJd7p8p8qqYIz1NYiAoKc75Zyv73L3DZW++oYMSHPRarcotTKuSs6m3hTOa5CKHaS02TQ==",
"cpu": [
"x64"
],
@@ -178,13 +178,13 @@
"android"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@esbuild/darwin-arm64": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.21.5.tgz",
- "integrity": "sha512-DwqXqZyuk5AiWWf3UfLiRDJ5EDd49zg6O9wclZ7kUMv2WRFr4HKjXp/5t8JZ11QbQfUS6/cRCKGwYhtNAY88kQ==",
+ "version": "0.24.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.24.0.tgz",
+ "integrity": "sha512-CKyDpRbK1hXwv79soeTJNHb5EiG6ct3efd/FTPdzOWdbZZfGhpbcqIpiD0+vwmpu0wTIL97ZRPZu8vUt46nBSw==",
"cpu": [
"arm64"
],
@@ -194,13 +194,13 @@
"darwin"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@esbuild/darwin-x64": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.21.5.tgz",
- "integrity": "sha512-se/JjF8NlmKVG4kNIuyWMV/22ZaerB+qaSi5MdrXtd6R08kvs2qCN4C09miupktDitvh8jRFflwGFBQcxZRjbw==",
+ "version": "0.24.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.24.0.tgz",
+ "integrity": "sha512-rgtz6flkVkh58od4PwTRqxbKH9cOjaXCMZgWD905JOzjFKW+7EiUObfd/Kav+A6Gyud6WZk9w+xu6QLytdi2OA==",
"cpu": [
"x64"
],
@@ -210,13 +210,13 @@
"darwin"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@esbuild/freebsd-arm64": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.21.5.tgz",
- "integrity": "sha512-5JcRxxRDUJLX8JXp/wcBCy3pENnCgBR9bN6JsY4OmhfUtIHe3ZW0mawA7+RDAcMLrMIZaf03NlQiX9DGyB8h4g==",
+ "version": "0.24.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.24.0.tgz",
+ "integrity": "sha512-6Mtdq5nHggwfDNLAHkPlyLBpE5L6hwsuXZX8XNmHno9JuL2+bg2BX5tRkwjyfn6sKbxZTq68suOjgWqCicvPXA==",
"cpu": [
"arm64"
],
@@ -226,13 +226,13 @@
"freebsd"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@esbuild/freebsd-x64": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.21.5.tgz",
- "integrity": "sha512-J95kNBj1zkbMXtHVH29bBriQygMXqoVQOQYA+ISs0/2l3T9/kj42ow2mpqerRBxDJnmkUDCaQT/dfNXWX/ZZCQ==",
+ "version": "0.24.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.24.0.tgz",
+ "integrity": "sha512-D3H+xh3/zphoX8ck4S2RxKR6gHlHDXXzOf6f/9dbFt/NRBDIE33+cVa49Kil4WUjxMGW0ZIYBYtaGCa2+OsQwQ==",
"cpu": [
"x64"
],
@@ -242,13 +242,13 @@
"freebsd"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@esbuild/linux-arm": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.21.5.tgz",
- "integrity": "sha512-bPb5AHZtbeNGjCKVZ9UGqGwo8EUu4cLq68E95A53KlxAPRmUyYv2D6F0uUI65XisGOL1hBP5mTronbgo+0bFcA==",
+ "version": "0.24.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.24.0.tgz",
+ "integrity": "sha512-gJKIi2IjRo5G6Glxb8d3DzYXlxdEj2NlkixPsqePSZMhLudqPhtZ4BUrpIuTjJYXxvF9njql+vRjB2oaC9XpBw==",
"cpu": [
"arm"
],
@@ -258,13 +258,13 @@
"linux"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@esbuild/linux-arm64": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.21.5.tgz",
- "integrity": "sha512-ibKvmyYzKsBeX8d8I7MH/TMfWDXBF3db4qM6sy+7re0YXya+K1cem3on9XgdT2EQGMu4hQyZhan7TeQ8XkGp4Q==",
+ "version": "0.24.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.24.0.tgz",
+ "integrity": "sha512-TDijPXTOeE3eaMkRYpcy3LarIg13dS9wWHRdwYRnzlwlA370rNdZqbcp0WTyyV/k2zSxfko52+C7jU5F9Tfj1g==",
"cpu": [
"arm64"
],
@@ -274,13 +274,13 @@
"linux"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@esbuild/linux-ia32": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.21.5.tgz",
- "integrity": "sha512-YvjXDqLRqPDl2dvRODYmmhz4rPeVKYvppfGYKSNGdyZkA01046pLWyRKKI3ax8fbJoK5QbxblURkwK/MWY18Tg==",
+ "version": "0.24.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.24.0.tgz",
+ "integrity": "sha512-K40ip1LAcA0byL05TbCQ4yJ4swvnbzHscRmUilrmP9Am7//0UjPreh4lpYzvThT2Quw66MhjG//20mrufm40mA==",
"cpu": [
"ia32"
],
@@ -290,13 +290,13 @@
"linux"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@esbuild/linux-loong64": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.21.5.tgz",
- "integrity": "sha512-uHf1BmMG8qEvzdrzAqg2SIG/02+4/DHB6a9Kbya0XDvwDEKCoC8ZRWI5JJvNdUjtciBGFQ5PuBlpEOXQj+JQSg==",
+ "version": "0.24.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.24.0.tgz",
+ "integrity": "sha512-0mswrYP/9ai+CU0BzBfPMZ8RVm3RGAN/lmOMgW4aFUSOQBjA31UP8Mr6DDhWSuMwj7jaWOT0p0WoZ6jeHhrD7g==",
"cpu": [
"loong64"
],
@@ -306,13 +306,13 @@
"linux"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@esbuild/linux-mips64el": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.21.5.tgz",
- "integrity": "sha512-IajOmO+KJK23bj52dFSNCMsz1QP1DqM6cwLUv3W1QwyxkyIWecfafnI555fvSGqEKwjMXVLokcV5ygHW5b3Jbg==",
+ "version": "0.24.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.24.0.tgz",
+ "integrity": "sha512-hIKvXm0/3w/5+RDtCJeXqMZGkI2s4oMUGj3/jM0QzhgIASWrGO5/RlzAzm5nNh/awHE0A19h/CvHQe6FaBNrRA==",
"cpu": [
"mips64el"
],
@@ -322,13 +322,13 @@
"linux"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@esbuild/linux-ppc64": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.21.5.tgz",
- "integrity": "sha512-1hHV/Z4OEfMwpLO8rp7CvlhBDnjsC3CttJXIhBi+5Aj5r+MBvy4egg7wCbe//hSsT+RvDAG7s81tAvpL2XAE4w==",
+ "version": "0.24.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.24.0.tgz",
+ "integrity": "sha512-HcZh5BNq0aC52UoocJxaKORfFODWXZxtBaaZNuN3PUX3MoDsChsZqopzi5UupRhPHSEHotoiptqikjN/B77mYQ==",
"cpu": [
"ppc64"
],
@@ -338,13 +338,13 @@
"linux"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@esbuild/linux-riscv64": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.21.5.tgz",
- "integrity": "sha512-2HdXDMd9GMgTGrPWnJzP2ALSokE/0O5HhTUvWIbD3YdjME8JwvSCnNGBnTThKGEB91OZhzrJ4qIIxk/SBmyDDA==",
+ "version": "0.24.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.24.0.tgz",
+ "integrity": "sha512-bEh7dMn/h3QxeR2KTy1DUszQjUrIHPZKyO6aN1X4BCnhfYhuQqedHaa5MxSQA/06j3GpiIlFGSsy1c7Gf9padw==",
"cpu": [
"riscv64"
],
@@ -354,13 +354,13 @@
"linux"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@esbuild/linux-s390x": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.21.5.tgz",
- "integrity": "sha512-zus5sxzqBJD3eXxwvjN1yQkRepANgxE9lgOW2qLnmr8ikMTphkjgXu1HR01K4FJg8h1kEEDAqDcZQtbrRnB41A==",
+ "version": "0.24.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.24.0.tgz",
+ "integrity": "sha512-ZcQ6+qRkw1UcZGPyrCiHHkmBaj9SiCD8Oqd556HldP+QlpUIe2Wgn3ehQGVoPOvZvtHm8HPx+bH20c9pvbkX3g==",
"cpu": [
"s390x"
],
@@ -370,13 +370,13 @@
"linux"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@esbuild/linux-x64": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.21.5.tgz",
- "integrity": "sha512-1rYdTpyv03iycF1+BhzrzQJCdOuAOtaqHTWJZCWvijKD2N5Xu0TtVC8/+1faWqcP9iBCWOmjmhoH94dH82BxPQ==",
+ "version": "0.24.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.24.0.tgz",
+ "integrity": "sha512-vbutsFqQ+foy3wSSbmjBXXIJ6PL3scghJoM8zCL142cGaZKAdCZHyf+Bpu/MmX9zT9Q0zFBVKb36Ma5Fzfa8xA==",
"cpu": [
"x64"
],
@@ -386,13 +386,13 @@
"linux"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@esbuild/netbsd-x64": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.21.5.tgz",
- "integrity": "sha512-Woi2MXzXjMULccIwMnLciyZH4nCIMpWQAs049KEeMvOcNADVxo0UBIQPfSmxB3CWKedngg7sWZdLvLczpe0tLg==",
+ "version": "0.24.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.24.0.tgz",
+ "integrity": "sha512-hjQ0R/ulkO8fCYFsG0FZoH+pWgTTDreqpqY7UnQntnaKv95uP5iW3+dChxnx7C3trQQU40S+OgWhUVwCjVFLvg==",
"cpu": [
"x64"
],
@@ -402,13 +402,29 @@
"netbsd"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/openbsd-arm64": {
+ "version": "0.24.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.24.0.tgz",
+ "integrity": "sha512-MD9uzzkPQbYehwcN583yx3Tu5M8EIoTD+tUgKF982WYL9Pf5rKy9ltgD0eUgs8pvKnmizxjXZyLt0z6DC3rRXg==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "optional": true,
+ "os": [
+ "openbsd"
+ ],
+ "engines": {
+ "node": ">=18"
}
},
"node_modules/@esbuild/openbsd-x64": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.21.5.tgz",
- "integrity": "sha512-HLNNw99xsvx12lFBUwoT8EVCsSvRNDVxNpjZ7bPn947b8gJPzeHWyNVhFsaerc0n3TsbOINvRP2byTZ5LKezow==",
+ "version": "0.24.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.24.0.tgz",
+ "integrity": "sha512-4ir0aY1NGUhIC1hdoCzr1+5b43mw99uNwVzhIq1OY3QcEwPDO3B7WNXBzaKY5Nsf1+N11i1eOfFcq+D/gOS15Q==",
"cpu": [
"x64"
],
@@ -418,13 +434,13 @@
"openbsd"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@esbuild/sunos-x64": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.21.5.tgz",
- "integrity": "sha512-6+gjmFpfy0BHU5Tpptkuh8+uw3mnrvgs+dSPQXQOv3ekbordwnzTVEb4qnIvQcYXq6gzkyTnoZ9dZG+D4garKg==",
+ "version": "0.24.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.24.0.tgz",
+ "integrity": "sha512-jVzdzsbM5xrotH+W5f1s+JtUy1UWgjU0Cf4wMvffTB8m6wP5/kx0KiaLHlbJO+dMgtxKV8RQ/JvtlFcdZ1zCPA==",
"cpu": [
"x64"
],
@@ -434,13 +450,13 @@
"sunos"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@esbuild/win32-arm64": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.21.5.tgz",
- "integrity": "sha512-Z0gOTd75VvXqyq7nsl93zwahcTROgqvuAcYDUr+vOv8uHhNSKROyU961kgtCD1e95IqPKSQKH7tBTslnS3tA8A==",
+ "version": "0.24.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.24.0.tgz",
+ "integrity": "sha512-iKc8GAslzRpBytO2/aN3d2yb2z8XTVfNV0PjGlCxKo5SgWmNXx82I/Q3aG1tFfS+A2igVCY97TJ8tnYwpUWLCA==",
"cpu": [
"arm64"
],
@@ -450,13 +466,13 @@
"win32"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@esbuild/win32-ia32": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.21.5.tgz",
- "integrity": "sha512-SWXFF1CL2RVNMaVs+BBClwtfZSvDgtL//G/smwAc5oVK/UPu2Gu9tIaRgFmYFFKrmg3SyAjSrElf0TiJ1v8fYA==",
+ "version": "0.24.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.24.0.tgz",
+ "integrity": "sha512-vQW36KZolfIudCcTnaTpmLQ24Ha1RjygBo39/aLkM2kmjkWmZGEJ5Gn9l5/7tzXA42QGIoWbICfg6KLLkIw6yw==",
"cpu": [
"ia32"
],
@@ -466,13 +482,13 @@
"win32"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@esbuild/win32-x64": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.21.5.tgz",
- "integrity": "sha512-tQd/1efJuzPC6rCFwEvLtci/xNFcTZknmXs98FYDfGE4wP9ClFV98nyKrzJKVPMhdDnjzLhdUyMX4PsQAPjwIw==",
+ "version": "0.24.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.24.0.tgz",
+ "integrity": "sha512-7IAFPrjSQIJrGsK6flwg7NFmwBoSTyF3rl7If0hNUFQU4ilTsEPL6GuMuU9BfIWVVGuRnuIidkSMC+c0Otu8IA==",
"cpu": [
"x64"
],
@@ -482,7 +498,7 @@
"win32"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@eslint-community/eslint-utils": {
@@ -1210,9 +1226,9 @@
}
},
"node_modules/@rollup/rollup-android-arm-eabi": {
- "version": "4.22.4",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.22.4.tgz",
- "integrity": "sha512-Fxamp4aEZnfPOcGA8KSNEohV8hX7zVHOemC8jVBoBUHu5zpJK/Eu3uJwt6BMgy9fkvzxDaurgj96F/NiLukF2w==",
+ "version": "4.28.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.28.1.tgz",
+ "integrity": "sha512-2aZp8AES04KI2dy3Ss6/MDjXbwBzj+i0GqKtWXgw2/Ma6E4jJvujryO6gJAghIRVz7Vwr9Gtl/8na3nDUKpraQ==",
"cpu": [
"arm"
],
@@ -1223,9 +1239,9 @@
]
},
"node_modules/@rollup/rollup-android-arm64": {
- "version": "4.22.4",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.22.4.tgz",
- "integrity": "sha512-VXoK5UMrgECLYaMuGuVTOx5kcuap1Jm8g/M83RnCHBKOqvPPmROFJGQaZhGccnsFtfXQ3XYa4/jMCJvZnbJBdA==",
+ "version": "4.28.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.28.1.tgz",
+ "integrity": "sha512-EbkK285O+1YMrg57xVA+Dp0tDBRB93/BZKph9XhMjezf6F4TpYjaUSuPt5J0fZXlSag0LmZAsTmdGGqPp4pQFA==",
"cpu": [
"arm64"
],
@@ -1236,9 +1252,9 @@
]
},
"node_modules/@rollup/rollup-darwin-arm64": {
- "version": "4.22.4",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.22.4.tgz",
- "integrity": "sha512-xMM9ORBqu81jyMKCDP+SZDhnX2QEVQzTcC6G18KlTQEzWK8r/oNZtKuZaCcHhnsa6fEeOBionoyl5JsAbE/36Q==",
+ "version": "4.28.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.28.1.tgz",
+ "integrity": "sha512-prduvrMKU6NzMq6nxzQw445zXgaDBbMQvmKSJaxpaZ5R1QDM8w+eGxo6Y/jhT/cLoCvnZI42oEqf9KQNYz1fqQ==",
"cpu": [
"arm64"
],
@@ -1249,9 +1265,9 @@
]
},
"node_modules/@rollup/rollup-darwin-x64": {
- "version": "4.22.4",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.22.4.tgz",
- "integrity": "sha512-aJJyYKQwbHuhTUrjWjxEvGnNNBCnmpHDvrb8JFDbeSH3m2XdHcxDd3jthAzvmoI8w/kSjd2y0udT+4okADsZIw==",
+ "version": "4.28.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.28.1.tgz",
+ "integrity": "sha512-WsvbOunsUk0wccO/TV4o7IKgloJ942hVFK1CLatwv6TJspcCZb9umQkPdvB7FihmdxgaKR5JyxDjWpCOp4uZlQ==",
"cpu": [
"x64"
],
@@ -1261,10 +1277,36 @@
"darwin"
]
},
+ "node_modules/@rollup/rollup-freebsd-arm64": {
+ "version": "4.28.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.28.1.tgz",
+ "integrity": "sha512-HTDPdY1caUcU4qK23FeeGxCdJF64cKkqajU0iBnTVxS8F7H/7BewvYoG+va1KPSL63kQ1PGNyiwKOfReavzvNA==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "optional": true,
+ "os": [
+ "freebsd"
+ ]
+ },
+ "node_modules/@rollup/rollup-freebsd-x64": {
+ "version": "4.28.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.28.1.tgz",
+ "integrity": "sha512-m/uYasxkUevcFTeRSM9TeLyPe2QDuqtjkeoTpP9SW0XxUWfcYrGDMkO/m2tTw+4NMAF9P2fU3Mw4ahNvo7QmsQ==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "optional": true,
+ "os": [
+ "freebsd"
+ ]
+ },
"node_modules/@rollup/rollup-linux-arm-gnueabihf": {
- "version": "4.22.4",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.22.4.tgz",
- "integrity": "sha512-j63YtCIRAzbO+gC2L9dWXRh5BFetsv0j0va0Wi9epXDgU/XUi5dJKo4USTttVyK7fGw2nPWK0PbAvyliz50SCQ==",
+ "version": "4.28.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.28.1.tgz",
+ "integrity": "sha512-QAg11ZIt6mcmzpNE6JZBpKfJaKkqTm1A9+y9O+frdZJEuhQxiugM05gnCWiANHj4RmbgeVJpTdmKRmH/a+0QbA==",
"cpu": [
"arm"
],
@@ -1275,9 +1317,9 @@
]
},
"node_modules/@rollup/rollup-linux-arm-musleabihf": {
- "version": "4.22.4",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.22.4.tgz",
- "integrity": "sha512-dJnWUgwWBX1YBRsuKKMOlXCzh2Wu1mlHzv20TpqEsfdZLb3WoJW2kIEsGwLkroYf24IrPAvOT/ZQ2OYMV6vlrg==",
+ "version": "4.28.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.28.1.tgz",
+ "integrity": "sha512-dRP9PEBfolq1dmMcFqbEPSd9VlRuVWEGSmbxVEfiq2cs2jlZAl0YNxFzAQS2OrQmsLBLAATDMb3Z6MFv5vOcXg==",
"cpu": [
"arm"
],
@@ -1288,9 +1330,9 @@
]
},
"node_modules/@rollup/rollup-linux-arm64-gnu": {
- "version": "4.22.4",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.22.4.tgz",
- "integrity": "sha512-AdPRoNi3NKVLolCN/Sp4F4N1d98c4SBnHMKoLuiG6RXgoZ4sllseuGioszumnPGmPM2O7qaAX/IJdeDU8f26Aw==",
+ "version": "4.28.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.28.1.tgz",
+ "integrity": "sha512-uGr8khxO+CKT4XU8ZUH1TTEUtlktK6Kgtv0+6bIFSeiSlnGJHG1tSFSjm41uQ9sAO/5ULx9mWOz70jYLyv1QkA==",
"cpu": [
"arm64"
],
@@ -1301,9 +1343,9 @@
]
},
"node_modules/@rollup/rollup-linux-arm64-musl": {
- "version": "4.22.4",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.22.4.tgz",
- "integrity": "sha512-Gl0AxBtDg8uoAn5CCqQDMqAx22Wx22pjDOjBdmG0VIWX3qUBHzYmOKh8KXHL4UpogfJ14G4wk16EQogF+v8hmA==",
+ "version": "4.28.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.28.1.tgz",
+ "integrity": "sha512-QF54q8MYGAqMLrX2t7tNpi01nvq5RI59UBNx+3+37zoKX5KViPo/gk2QLhsuqok05sSCRluj0D00LzCwBikb0A==",
"cpu": [
"arm64"
],
@@ -1313,10 +1355,23 @@
"linux"
]
},
+ "node_modules/@rollup/rollup-linux-loongarch64-gnu": {
+ "version": "4.28.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loongarch64-gnu/-/rollup-linux-loongarch64-gnu-4.28.1.tgz",
+ "integrity": "sha512-vPul4uodvWvLhRco2w0GcyZcdyBfpfDRgNKU+p35AWEbJ/HPs1tOUrkSueVbBS0RQHAf/A+nNtDpvw95PeVKOA==",
+ "cpu": [
+ "loong64"
+ ],
+ "dev": true,
+ "optional": true,
+ "os": [
+ "linux"
+ ]
+ },
"node_modules/@rollup/rollup-linux-powerpc64le-gnu": {
- "version": "4.22.4",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.22.4.tgz",
- "integrity": "sha512-3aVCK9xfWW1oGQpTsYJJPF6bfpWfhbRnhdlyhak2ZiyFLDaayz0EP5j9V1RVLAAxlmWKTDfS9wyRyY3hvhPoOg==",
+ "version": "4.28.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.28.1.tgz",
+ "integrity": "sha512-pTnTdBuC2+pt1Rmm2SV7JWRqzhYpEILML4PKODqLz+C7Ou2apEV52h19CR7es+u04KlqplggmN9sqZlekg3R1A==",
"cpu": [
"ppc64"
],
@@ -1327,9 +1382,9 @@
]
},
"node_modules/@rollup/rollup-linux-riscv64-gnu": {
- "version": "4.22.4",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.22.4.tgz",
- "integrity": "sha512-ePYIir6VYnhgv2C5Xe9u+ico4t8sZWXschR6fMgoPUK31yQu7hTEJb7bCqivHECwIClJfKgE7zYsh1qTP3WHUA==",
+ "version": "4.28.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.28.1.tgz",
+ "integrity": "sha512-vWXy1Nfg7TPBSuAncfInmAI/WZDd5vOklyLJDdIRKABcZWojNDY0NJwruY2AcnCLnRJKSaBgf/GiJfauu8cQZA==",
"cpu": [
"riscv64"
],
@@ -1340,9 +1395,9 @@
]
},
"node_modules/@rollup/rollup-linux-s390x-gnu": {
- "version": "4.22.4",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.22.4.tgz",
- "integrity": "sha512-GqFJ9wLlbB9daxhVlrTe61vJtEY99/xB3C8e4ULVsVfflcpmR6c8UZXjtkMA6FhNONhj2eA5Tk9uAVw5orEs4Q==",
+ "version": "4.28.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.28.1.tgz",
+ "integrity": "sha512-/yqC2Y53oZjb0yz8PVuGOQQNOTwxcizudunl/tFs1aLvObTclTwZ0JhXF2XcPT/zuaymemCDSuuUPXJJyqeDOg==",
"cpu": [
"s390x"
],
@@ -1353,9 +1408,9 @@
]
},
"node_modules/@rollup/rollup-linux-x64-gnu": {
- "version": "4.22.4",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.22.4.tgz",
- "integrity": "sha512-87v0ol2sH9GE3cLQLNEy0K/R0pz1nvg76o8M5nhMR0+Q+BBGLnb35P0fVz4CQxHYXaAOhE8HhlkaZfsdUOlHwg==",
+ "version": "4.28.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.28.1.tgz",
+ "integrity": "sha512-fzgeABz7rrAlKYB0y2kSEiURrI0691CSL0+KXwKwhxvj92VULEDQLpBYLHpF49MSiPG4sq5CK3qHMnb9tlCjBw==",
"cpu": [
"x64"
],
@@ -1366,9 +1421,9 @@
]
},
"node_modules/@rollup/rollup-linux-x64-musl": {
- "version": "4.22.4",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.22.4.tgz",
- "integrity": "sha512-UV6FZMUgePDZrFjrNGIWzDo/vABebuXBhJEqrHxrGiU6HikPy0Z3LfdtciIttEUQfuDdCn8fqh7wiFJjCNwO+g==",
+ "version": "4.28.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.28.1.tgz",
+ "integrity": "sha512-xQTDVzSGiMlSshpJCtudbWyRfLaNiVPXt1WgdWTwWz9n0U12cI2ZVtWe/Jgwyv/6wjL7b66uu61Vg0POWVfz4g==",
"cpu": [
"x64"
],
@@ -1379,9 +1434,9 @@
]
},
"node_modules/@rollup/rollup-win32-arm64-msvc": {
- "version": "4.22.4",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.22.4.tgz",
- "integrity": "sha512-BjI+NVVEGAXjGWYHz/vv0pBqfGoUH0IGZ0cICTn7kB9PyjrATSkX+8WkguNjWoj2qSr1im/+tTGRaY+4/PdcQw==",
+ "version": "4.28.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.28.1.tgz",
+ "integrity": "sha512-wSXmDRVupJstFP7elGMgv+2HqXelQhuNf+IS4V+nUpNVi/GUiBgDmfwD0UGN3pcAnWsgKG3I52wMOBnk1VHr/A==",
"cpu": [
"arm64"
],
@@ -1392,9 +1447,9 @@
]
},
"node_modules/@rollup/rollup-win32-ia32-msvc": {
- "version": "4.22.4",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.22.4.tgz",
- "integrity": "sha512-SiWG/1TuUdPvYmzmYnmd3IEifzR61Tragkbx9D3+R8mzQqDBz8v+BvZNDlkiTtI9T15KYZhP0ehn3Dld4n9J5g==",
+ "version": "4.28.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.28.1.tgz",
+ "integrity": "sha512-ZkyTJ/9vkgrE/Rk9vhMXhf8l9D+eAhbAVbsGsXKy2ohmJaWg0LPQLnIxRdRp/bKyr8tXuPlXhIoGlEB5XpJnGA==",
"cpu": [
"ia32"
],
@@ -1405,9 +1460,9 @@
]
},
"node_modules/@rollup/rollup-win32-x64-msvc": {
- "version": "4.22.4",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.22.4.tgz",
- "integrity": "sha512-j8pPKp53/lq9lMXN57S8cFz0MynJk8OWNuUnXct/9KCpKU7DgU3bYMJhwWmcqC0UU29p8Lr0/7KEVcaM6bf47Q==",
+ "version": "4.28.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.28.1.tgz",
+ "integrity": "sha512-ZvK2jBafvttJjoIdKm/Q/Bh7IJ1Ose9IBOwpOXcOvW3ikGTQGmKDgxTC6oCAzW6PynbkKP8+um1du81XJHZ0JA==",
"cpu": [
"x64"
],
@@ -1518,9 +1573,9 @@
"integrity": "sha512-qp3m9PPz4gULB9MhjGID7wpo3gJ4bTGXm7ltNDsmOvsPduTeHp8wSW9YckBj3mljeOh4F0m2z/0JKAALRKbmLQ=="
},
"node_modules/@types/estree": {
- "version": "1.0.5",
- "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.5.tgz",
- "integrity": "sha512-/kYRxGDLWzHOB7q+wtSUQlFrtcdUccpfy+X+9iMBpHK8QLLhx2wIPYuS5DYtR9Wa/YlZAbIovy7qVdB1Aq6Lyw==",
+ "version": "1.0.6",
+ "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.6.tgz",
+ "integrity": "sha512-AYnb1nQyY49te+VRAVgmzfcgjYS91mY5P0TKUDCLEM+gNnA+3T6rWITXRLYCpahpqSQbN5cE+gHpnPyXjHWxcw==",
"dev": true
},
"node_modules/@types/json-schema": {
@@ -3369,41 +3424,42 @@
}
},
"node_modules/esbuild": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.21.5.tgz",
- "integrity": "sha512-mg3OPMV4hXywwpoDxu3Qda5xCKQi+vCTZq8S9J/EpkhB2HzKXq4SNFZE3+NK93JYxc8VMSep+lOUSC/RVKaBqw==",
+ "version": "0.24.0",
+ "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.24.0.tgz",
+ "integrity": "sha512-FuLPevChGDshgSicjisSooU0cemp/sGXR841D5LHMB7mTVOmsEHcAxaH3irL53+8YDIeVNQEySh4DaYU/iuPqQ==",
"dev": true,
"hasInstallScript": true,
"bin": {
"esbuild": "bin/esbuild"
},
"engines": {
- "node": ">=12"
+ "node": ">=18"
},
"optionalDependencies": {
- "@esbuild/aix-ppc64": "0.21.5",
- "@esbuild/android-arm": "0.21.5",
- "@esbuild/android-arm64": "0.21.5",
- "@esbuild/android-x64": "0.21.5",
- "@esbuild/darwin-arm64": "0.21.5",
- "@esbuild/darwin-x64": "0.21.5",
- "@esbuild/freebsd-arm64": "0.21.5",
- "@esbuild/freebsd-x64": "0.21.5",
- "@esbuild/linux-arm": "0.21.5",
- "@esbuild/linux-arm64": "0.21.5",
- "@esbuild/linux-ia32": "0.21.5",
- "@esbuild/linux-loong64": "0.21.5",
- "@esbuild/linux-mips64el": "0.21.5",
- "@esbuild/linux-ppc64": "0.21.5",
- "@esbuild/linux-riscv64": "0.21.5",
- "@esbuild/linux-s390x": "0.21.5",
- "@esbuild/linux-x64": "0.21.5",
- "@esbuild/netbsd-x64": "0.21.5",
- "@esbuild/openbsd-x64": "0.21.5",
- "@esbuild/sunos-x64": "0.21.5",
- "@esbuild/win32-arm64": "0.21.5",
- "@esbuild/win32-ia32": "0.21.5",
- "@esbuild/win32-x64": "0.21.5"
+ "@esbuild/aix-ppc64": "0.24.0",
+ "@esbuild/android-arm": "0.24.0",
+ "@esbuild/android-arm64": "0.24.0",
+ "@esbuild/android-x64": "0.24.0",
+ "@esbuild/darwin-arm64": "0.24.0",
+ "@esbuild/darwin-x64": "0.24.0",
+ "@esbuild/freebsd-arm64": "0.24.0",
+ "@esbuild/freebsd-x64": "0.24.0",
+ "@esbuild/linux-arm": "0.24.0",
+ "@esbuild/linux-arm64": "0.24.0",
+ "@esbuild/linux-ia32": "0.24.0",
+ "@esbuild/linux-loong64": "0.24.0",
+ "@esbuild/linux-mips64el": "0.24.0",
+ "@esbuild/linux-ppc64": "0.24.0",
+ "@esbuild/linux-riscv64": "0.24.0",
+ "@esbuild/linux-s390x": "0.24.0",
+ "@esbuild/linux-x64": "0.24.0",
+ "@esbuild/netbsd-x64": "0.24.0",
+ "@esbuild/openbsd-arm64": "0.24.0",
+ "@esbuild/openbsd-x64": "0.24.0",
+ "@esbuild/sunos-x64": "0.24.0",
+ "@esbuild/win32-arm64": "0.24.0",
+ "@esbuild/win32-ia32": "0.24.0",
+ "@esbuild/win32-x64": "0.24.0"
}
},
"node_modules/escalade": {
@@ -6096,12 +6152,12 @@
"integrity": "sha512-IXgzBWvWQwE6PrDI05OvmXUIruQTcoMDzRsOd5CDvHCVLcLHMTSYvOK5Cm46kWqlV3yAbuSpBZdJ5oP5OUoStg=="
},
"node_modules/rollup": {
- "version": "4.22.4",
- "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.22.4.tgz",
- "integrity": "sha512-vD8HJ5raRcWOyymsR6Z3o6+RzfEPCnVLMFJ6vRslO1jt4LO6dUo5Qnpg7y4RkZFM2DMe3WUirkI5c16onjrc6A==",
+ "version": "4.28.1",
+ "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.28.1.tgz",
+ "integrity": "sha512-61fXYl/qNVinKmGSTHAZ6Yy8I3YIJC/r2m9feHo6SwVAVcLT5MPwOUFe7EuURA/4m0NR8lXG4BBXuo/IZEsjMg==",
"dev": true,
"dependencies": {
- "@types/estree": "1.0.5"
+ "@types/estree": "1.0.6"
},
"bin": {
"rollup": "dist/bin/rollup"
@@ -6111,22 +6167,25 @@
"npm": ">=8.0.0"
},
"optionalDependencies": {
- "@rollup/rollup-android-arm-eabi": "4.22.4",
- "@rollup/rollup-android-arm64": "4.22.4",
- "@rollup/rollup-darwin-arm64": "4.22.4",
- "@rollup/rollup-darwin-x64": "4.22.4",
- "@rollup/rollup-linux-arm-gnueabihf": "4.22.4",
- "@rollup/rollup-linux-arm-musleabihf": "4.22.4",
- "@rollup/rollup-linux-arm64-gnu": "4.22.4",
- "@rollup/rollup-linux-arm64-musl": "4.22.4",
- "@rollup/rollup-linux-powerpc64le-gnu": "4.22.4",
- "@rollup/rollup-linux-riscv64-gnu": "4.22.4",
- "@rollup/rollup-linux-s390x-gnu": "4.22.4",
- "@rollup/rollup-linux-x64-gnu": "4.22.4",
- "@rollup/rollup-linux-x64-musl": "4.22.4",
- "@rollup/rollup-win32-arm64-msvc": "4.22.4",
- "@rollup/rollup-win32-ia32-msvc": "4.22.4",
- "@rollup/rollup-win32-x64-msvc": "4.22.4",
+ "@rollup/rollup-android-arm-eabi": "4.28.1",
+ "@rollup/rollup-android-arm64": "4.28.1",
+ "@rollup/rollup-darwin-arm64": "4.28.1",
+ "@rollup/rollup-darwin-x64": "4.28.1",
+ "@rollup/rollup-freebsd-arm64": "4.28.1",
+ "@rollup/rollup-freebsd-x64": "4.28.1",
+ "@rollup/rollup-linux-arm-gnueabihf": "4.28.1",
+ "@rollup/rollup-linux-arm-musleabihf": "4.28.1",
+ "@rollup/rollup-linux-arm64-gnu": "4.28.1",
+ "@rollup/rollup-linux-arm64-musl": "4.28.1",
+ "@rollup/rollup-linux-loongarch64-gnu": "4.28.1",
+ "@rollup/rollup-linux-powerpc64le-gnu": "4.28.1",
+ "@rollup/rollup-linux-riscv64-gnu": "4.28.1",
+ "@rollup/rollup-linux-s390x-gnu": "4.28.1",
+ "@rollup/rollup-linux-x64-gnu": "4.28.1",
+ "@rollup/rollup-linux-x64-musl": "4.28.1",
+ "@rollup/rollup-win32-arm64-msvc": "4.28.1",
+ "@rollup/rollup-win32-ia32-msvc": "4.28.1",
+ "@rollup/rollup-win32-x64-msvc": "4.28.1",
"fsevents": "~2.3.2"
}
},
@@ -6938,20 +6997,20 @@
}
},
"node_modules/vite": {
- "version": "5.4.11",
- "resolved": "https://registry.npmjs.org/vite/-/vite-5.4.11.tgz",
- "integrity": "sha512-c7jFQRklXua0mTzneGW9QVyxFjUgwcihC4bXEtujIo2ouWCe1Ajt/amn2PCxYnhYfd5k09JX3SB7OYWFKYqj8Q==",
+ "version": "6.0.3",
+ "resolved": "https://registry.npmjs.org/vite/-/vite-6.0.3.tgz",
+ "integrity": "sha512-Cmuo5P0ENTN6HxLSo6IHsjCLn/81Vgrp81oaiFFMRa8gGDj5xEjIcEpf2ZymZtZR8oU0P2JX5WuUp/rlXcHkAw==",
"dev": true,
"dependencies": {
- "esbuild": "^0.21.3",
- "postcss": "^8.4.43",
- "rollup": "^4.20.0"
+ "esbuild": "^0.24.0",
+ "postcss": "^8.4.49",
+ "rollup": "^4.23.0"
},
"bin": {
"vite": "bin/vite.js"
},
"engines": {
- "node": "^18.0.0 || >=20.0.0"
+ "node": "^18.0.0 || ^20.0.0 || >=22.0.0"
},
"funding": {
"url": "https://github.com/vitejs/vite?sponsor=1"
@@ -6960,19 +7019,25 @@
"fsevents": "~2.3.3"
},
"peerDependencies": {
- "@types/node": "^18.0.0 || >=20.0.0",
+ "@types/node": "^18.0.0 || ^20.0.0 || >=22.0.0",
+ "jiti": ">=1.21.0",
"less": "*",
"lightningcss": "^1.21.0",
"sass": "*",
"sass-embedded": "*",
"stylus": "*",
"sugarss": "*",
- "terser": "^5.4.0"
+ "terser": "^5.16.0",
+ "tsx": "^4.8.1",
+ "yaml": "^2.4.2"
},
"peerDependenciesMeta": {
"@types/node": {
"optional": true
},
+ "jiti": {
+ "optional": true
+ },
"less": {
"optional": true
},
@@ -6993,6 +7058,12 @@
},
"terser": {
"optional": true
+ },
+ "tsx": {
+ "optional": true
+ },
+ "yaml": {
+ "optional": true
}
}
},
@@ -7408,163 +7479,170 @@
}
},
"@esbuild/aix-ppc64": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.21.5.tgz",
- "integrity": "sha512-1SDgH6ZSPTlggy1yI6+Dbkiz8xzpHJEVAlF/AM1tHPLsf5STom9rwtjE4hKAF20FfXXNTFqEYXyJNWh1GiZedQ==",
+ "version": "0.24.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.24.0.tgz",
+ "integrity": "sha512-WtKdFM7ls47zkKHFVzMz8opM7LkcsIp9amDUBIAWirg70RM71WRSjdILPsY5Uv1D42ZpUfaPILDlfactHgsRkw==",
"dev": true,
"optional": true
},
"@esbuild/android-arm": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.21.5.tgz",
- "integrity": "sha512-vCPvzSjpPHEi1siZdlvAlsPxXl7WbOVUBBAowWug4rJHb68Ox8KualB+1ocNvT5fjv6wpkX6o/iEpbDrf68zcg==",
+ "version": "0.24.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.24.0.tgz",
+ "integrity": "sha512-arAtTPo76fJ/ICkXWetLCc9EwEHKaeya4vMrReVlEIUCAUncH7M4bhMQ+M9Vf+FFOZJdTNMXNBrWwW+OXWpSew==",
"dev": true,
"optional": true
},
"@esbuild/android-arm64": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.21.5.tgz",
- "integrity": "sha512-c0uX9VAUBQ7dTDCjq+wdyGLowMdtR/GoC2U5IYk/7D1H1JYC0qseD7+11iMP2mRLN9RcCMRcjC4YMclCzGwS/A==",
+ "version": "0.24.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.24.0.tgz",
+ "integrity": "sha512-Vsm497xFM7tTIPYK9bNTYJyF/lsP590Qc1WxJdlB6ljCbdZKU9SY8i7+Iin4kyhV/KV5J2rOKsBQbB77Ab7L/w==",
"dev": true,
"optional": true
},
"@esbuild/android-x64": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.21.5.tgz",
- "integrity": "sha512-D7aPRUUNHRBwHxzxRvp856rjUHRFW1SdQATKXH2hqA0kAZb1hKmi02OpYRacl0TxIGz/ZmXWlbZgjwWYaCakTA==",
+ "version": "0.24.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.24.0.tgz",
+ "integrity": "sha512-t8GrvnFkiIY7pa7mMgJd7p8p8qqYIz1NYiAoKc75Zyv73L3DZW++oYMSHPRarcotTKuSs6m3hTOa5CKHaS02TQ==",
"dev": true,
"optional": true
},
"@esbuild/darwin-arm64": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.21.5.tgz",
- "integrity": "sha512-DwqXqZyuk5AiWWf3UfLiRDJ5EDd49zg6O9wclZ7kUMv2WRFr4HKjXp/5t8JZ11QbQfUS6/cRCKGwYhtNAY88kQ==",
+ "version": "0.24.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.24.0.tgz",
+ "integrity": "sha512-CKyDpRbK1hXwv79soeTJNHb5EiG6ct3efd/FTPdzOWdbZZfGhpbcqIpiD0+vwmpu0wTIL97ZRPZu8vUt46nBSw==",
"dev": true,
"optional": true
},
"@esbuild/darwin-x64": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.21.5.tgz",
- "integrity": "sha512-se/JjF8NlmKVG4kNIuyWMV/22ZaerB+qaSi5MdrXtd6R08kvs2qCN4C09miupktDitvh8jRFflwGFBQcxZRjbw==",
+ "version": "0.24.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.24.0.tgz",
+ "integrity": "sha512-rgtz6flkVkh58od4PwTRqxbKH9cOjaXCMZgWD905JOzjFKW+7EiUObfd/Kav+A6Gyud6WZk9w+xu6QLytdi2OA==",
"dev": true,
"optional": true
},
"@esbuild/freebsd-arm64": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.21.5.tgz",
- "integrity": "sha512-5JcRxxRDUJLX8JXp/wcBCy3pENnCgBR9bN6JsY4OmhfUtIHe3ZW0mawA7+RDAcMLrMIZaf03NlQiX9DGyB8h4g==",
+ "version": "0.24.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.24.0.tgz",
+ "integrity": "sha512-6Mtdq5nHggwfDNLAHkPlyLBpE5L6hwsuXZX8XNmHno9JuL2+bg2BX5tRkwjyfn6sKbxZTq68suOjgWqCicvPXA==",
"dev": true,
"optional": true
},
"@esbuild/freebsd-x64": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.21.5.tgz",
- "integrity": "sha512-J95kNBj1zkbMXtHVH29bBriQygMXqoVQOQYA+ISs0/2l3T9/kj42ow2mpqerRBxDJnmkUDCaQT/dfNXWX/ZZCQ==",
+ "version": "0.24.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.24.0.tgz",
+ "integrity": "sha512-D3H+xh3/zphoX8ck4S2RxKR6gHlHDXXzOf6f/9dbFt/NRBDIE33+cVa49Kil4WUjxMGW0ZIYBYtaGCa2+OsQwQ==",
"dev": true,
"optional": true
},
"@esbuild/linux-arm": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.21.5.tgz",
- "integrity": "sha512-bPb5AHZtbeNGjCKVZ9UGqGwo8EUu4cLq68E95A53KlxAPRmUyYv2D6F0uUI65XisGOL1hBP5mTronbgo+0bFcA==",
+ "version": "0.24.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.24.0.tgz",
+ "integrity": "sha512-gJKIi2IjRo5G6Glxb8d3DzYXlxdEj2NlkixPsqePSZMhLudqPhtZ4BUrpIuTjJYXxvF9njql+vRjB2oaC9XpBw==",
"dev": true,
"optional": true
},
"@esbuild/linux-arm64": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.21.5.tgz",
- "integrity": "sha512-ibKvmyYzKsBeX8d8I7MH/TMfWDXBF3db4qM6sy+7re0YXya+K1cem3on9XgdT2EQGMu4hQyZhan7TeQ8XkGp4Q==",
+ "version": "0.24.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.24.0.tgz",
+ "integrity": "sha512-TDijPXTOeE3eaMkRYpcy3LarIg13dS9wWHRdwYRnzlwlA370rNdZqbcp0WTyyV/k2zSxfko52+C7jU5F9Tfj1g==",
"dev": true,
"optional": true
},
"@esbuild/linux-ia32": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.21.5.tgz",
- "integrity": "sha512-YvjXDqLRqPDl2dvRODYmmhz4rPeVKYvppfGYKSNGdyZkA01046pLWyRKKI3ax8fbJoK5QbxblURkwK/MWY18Tg==",
+ "version": "0.24.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.24.0.tgz",
+ "integrity": "sha512-K40ip1LAcA0byL05TbCQ4yJ4swvnbzHscRmUilrmP9Am7//0UjPreh4lpYzvThT2Quw66MhjG//20mrufm40mA==",
"dev": true,
"optional": true
},
"@esbuild/linux-loong64": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.21.5.tgz",
- "integrity": "sha512-uHf1BmMG8qEvzdrzAqg2SIG/02+4/DHB6a9Kbya0XDvwDEKCoC8ZRWI5JJvNdUjtciBGFQ5PuBlpEOXQj+JQSg==",
+ "version": "0.24.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.24.0.tgz",
+ "integrity": "sha512-0mswrYP/9ai+CU0BzBfPMZ8RVm3RGAN/lmOMgW4aFUSOQBjA31UP8Mr6DDhWSuMwj7jaWOT0p0WoZ6jeHhrD7g==",
"dev": true,
"optional": true
},
"@esbuild/linux-mips64el": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.21.5.tgz",
- "integrity": "sha512-IajOmO+KJK23bj52dFSNCMsz1QP1DqM6cwLUv3W1QwyxkyIWecfafnI555fvSGqEKwjMXVLokcV5ygHW5b3Jbg==",
+ "version": "0.24.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.24.0.tgz",
+ "integrity": "sha512-hIKvXm0/3w/5+RDtCJeXqMZGkI2s4oMUGj3/jM0QzhgIASWrGO5/RlzAzm5nNh/awHE0A19h/CvHQe6FaBNrRA==",
"dev": true,
"optional": true
},
"@esbuild/linux-ppc64": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.21.5.tgz",
- "integrity": "sha512-1hHV/Z4OEfMwpLO8rp7CvlhBDnjsC3CttJXIhBi+5Aj5r+MBvy4egg7wCbe//hSsT+RvDAG7s81tAvpL2XAE4w==",
+ "version": "0.24.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.24.0.tgz",
+ "integrity": "sha512-HcZh5BNq0aC52UoocJxaKORfFODWXZxtBaaZNuN3PUX3MoDsChsZqopzi5UupRhPHSEHotoiptqikjN/B77mYQ==",
"dev": true,
"optional": true
},
"@esbuild/linux-riscv64": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.21.5.tgz",
- "integrity": "sha512-2HdXDMd9GMgTGrPWnJzP2ALSokE/0O5HhTUvWIbD3YdjME8JwvSCnNGBnTThKGEB91OZhzrJ4qIIxk/SBmyDDA==",
+ "version": "0.24.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.24.0.tgz",
+ "integrity": "sha512-bEh7dMn/h3QxeR2KTy1DUszQjUrIHPZKyO6aN1X4BCnhfYhuQqedHaa5MxSQA/06j3GpiIlFGSsy1c7Gf9padw==",
"dev": true,
"optional": true
},
"@esbuild/linux-s390x": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.21.5.tgz",
- "integrity": "sha512-zus5sxzqBJD3eXxwvjN1yQkRepANgxE9lgOW2qLnmr8ikMTphkjgXu1HR01K4FJg8h1kEEDAqDcZQtbrRnB41A==",
+ "version": "0.24.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.24.0.tgz",
+ "integrity": "sha512-ZcQ6+qRkw1UcZGPyrCiHHkmBaj9SiCD8Oqd556HldP+QlpUIe2Wgn3ehQGVoPOvZvtHm8HPx+bH20c9pvbkX3g==",
"dev": true,
"optional": true
},
"@esbuild/linux-x64": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.21.5.tgz",
- "integrity": "sha512-1rYdTpyv03iycF1+BhzrzQJCdOuAOtaqHTWJZCWvijKD2N5Xu0TtVC8/+1faWqcP9iBCWOmjmhoH94dH82BxPQ==",
+ "version": "0.24.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.24.0.tgz",
+ "integrity": "sha512-vbutsFqQ+foy3wSSbmjBXXIJ6PL3scghJoM8zCL142cGaZKAdCZHyf+Bpu/MmX9zT9Q0zFBVKb36Ma5Fzfa8xA==",
"dev": true,
"optional": true
},
"@esbuild/netbsd-x64": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.21.5.tgz",
- "integrity": "sha512-Woi2MXzXjMULccIwMnLciyZH4nCIMpWQAs049KEeMvOcNADVxo0UBIQPfSmxB3CWKedngg7sWZdLvLczpe0tLg==",
+ "version": "0.24.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.24.0.tgz",
+ "integrity": "sha512-hjQ0R/ulkO8fCYFsG0FZoH+pWgTTDreqpqY7UnQntnaKv95uP5iW3+dChxnx7C3trQQU40S+OgWhUVwCjVFLvg==",
+ "dev": true,
+ "optional": true
+ },
+ "@esbuild/openbsd-arm64": {
+ "version": "0.24.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.24.0.tgz",
+ "integrity": "sha512-MD9uzzkPQbYehwcN583yx3Tu5M8EIoTD+tUgKF982WYL9Pf5rKy9ltgD0eUgs8pvKnmizxjXZyLt0z6DC3rRXg==",
"dev": true,
"optional": true
},
"@esbuild/openbsd-x64": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.21.5.tgz",
- "integrity": "sha512-HLNNw99xsvx12lFBUwoT8EVCsSvRNDVxNpjZ7bPn947b8gJPzeHWyNVhFsaerc0n3TsbOINvRP2byTZ5LKezow==",
+ "version": "0.24.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.24.0.tgz",
+ "integrity": "sha512-4ir0aY1NGUhIC1hdoCzr1+5b43mw99uNwVzhIq1OY3QcEwPDO3B7WNXBzaKY5Nsf1+N11i1eOfFcq+D/gOS15Q==",
"dev": true,
"optional": true
},
"@esbuild/sunos-x64": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.21.5.tgz",
- "integrity": "sha512-6+gjmFpfy0BHU5Tpptkuh8+uw3mnrvgs+dSPQXQOv3ekbordwnzTVEb4qnIvQcYXq6gzkyTnoZ9dZG+D4garKg==",
+ "version": "0.24.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.24.0.tgz",
+ "integrity": "sha512-jVzdzsbM5xrotH+W5f1s+JtUy1UWgjU0Cf4wMvffTB8m6wP5/kx0KiaLHlbJO+dMgtxKV8RQ/JvtlFcdZ1zCPA==",
"dev": true,
"optional": true
},
"@esbuild/win32-arm64": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.21.5.tgz",
- "integrity": "sha512-Z0gOTd75VvXqyq7nsl93zwahcTROgqvuAcYDUr+vOv8uHhNSKROyU961kgtCD1e95IqPKSQKH7tBTslnS3tA8A==",
+ "version": "0.24.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.24.0.tgz",
+ "integrity": "sha512-iKc8GAslzRpBytO2/aN3d2yb2z8XTVfNV0PjGlCxKo5SgWmNXx82I/Q3aG1tFfS+A2igVCY97TJ8tnYwpUWLCA==",
"dev": true,
"optional": true
},
"@esbuild/win32-ia32": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.21.5.tgz",
- "integrity": "sha512-SWXFF1CL2RVNMaVs+BBClwtfZSvDgtL//G/smwAc5oVK/UPu2Gu9tIaRgFmYFFKrmg3SyAjSrElf0TiJ1v8fYA==",
+ "version": "0.24.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.24.0.tgz",
+ "integrity": "sha512-vQW36KZolfIudCcTnaTpmLQ24Ha1RjygBo39/aLkM2kmjkWmZGEJ5Gn9l5/7tzXA42QGIoWbICfg6KLLkIw6yw==",
"dev": true,
"optional": true
},
"@esbuild/win32-x64": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.21.5.tgz",
- "integrity": "sha512-tQd/1efJuzPC6rCFwEvLtci/xNFcTZknmXs98FYDfGE4wP9ClFV98nyKrzJKVPMhdDnjzLhdUyMX4PsQAPjwIw==",
+ "version": "0.24.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.24.0.tgz",
+ "integrity": "sha512-7IAFPrjSQIJrGsK6flwg7NFmwBoSTyF3rl7If0hNUFQU4ilTsEPL6GuMuU9BfIWVVGuRnuIidkSMC+c0Otu8IA==",
"dev": true,
"optional": true
},
@@ -8109,114 +8187,135 @@
}
},
"@rollup/rollup-android-arm-eabi": {
- "version": "4.22.4",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.22.4.tgz",
- "integrity": "sha512-Fxamp4aEZnfPOcGA8KSNEohV8hX7zVHOemC8jVBoBUHu5zpJK/Eu3uJwt6BMgy9fkvzxDaurgj96F/NiLukF2w==",
+ "version": "4.28.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.28.1.tgz",
+ "integrity": "sha512-2aZp8AES04KI2dy3Ss6/MDjXbwBzj+i0GqKtWXgw2/Ma6E4jJvujryO6gJAghIRVz7Vwr9Gtl/8na3nDUKpraQ==",
"dev": true,
"optional": true
},
"@rollup/rollup-android-arm64": {
- "version": "4.22.4",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.22.4.tgz",
- "integrity": "sha512-VXoK5UMrgECLYaMuGuVTOx5kcuap1Jm8g/M83RnCHBKOqvPPmROFJGQaZhGccnsFtfXQ3XYa4/jMCJvZnbJBdA==",
+ "version": "4.28.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.28.1.tgz",
+ "integrity": "sha512-EbkK285O+1YMrg57xVA+Dp0tDBRB93/BZKph9XhMjezf6F4TpYjaUSuPt5J0fZXlSag0LmZAsTmdGGqPp4pQFA==",
"dev": true,
"optional": true
},
"@rollup/rollup-darwin-arm64": {
- "version": "4.22.4",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.22.4.tgz",
- "integrity": "sha512-xMM9ORBqu81jyMKCDP+SZDhnX2QEVQzTcC6G18KlTQEzWK8r/oNZtKuZaCcHhnsa6fEeOBionoyl5JsAbE/36Q==",
+ "version": "4.28.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.28.1.tgz",
+ "integrity": "sha512-prduvrMKU6NzMq6nxzQw445zXgaDBbMQvmKSJaxpaZ5R1QDM8w+eGxo6Y/jhT/cLoCvnZI42oEqf9KQNYz1fqQ==",
"dev": true,
"optional": true
},
"@rollup/rollup-darwin-x64": {
- "version": "4.22.4",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.22.4.tgz",
- "integrity": "sha512-aJJyYKQwbHuhTUrjWjxEvGnNNBCnmpHDvrb8JFDbeSH3m2XdHcxDd3jthAzvmoI8w/kSjd2y0udT+4okADsZIw==",
+ "version": "4.28.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.28.1.tgz",
+ "integrity": "sha512-WsvbOunsUk0wccO/TV4o7IKgloJ942hVFK1CLatwv6TJspcCZb9umQkPdvB7FihmdxgaKR5JyxDjWpCOp4uZlQ==",
+ "dev": true,
+ "optional": true
+ },
+ "@rollup/rollup-freebsd-arm64": {
+ "version": "4.28.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.28.1.tgz",
+ "integrity": "sha512-HTDPdY1caUcU4qK23FeeGxCdJF64cKkqajU0iBnTVxS8F7H/7BewvYoG+va1KPSL63kQ1PGNyiwKOfReavzvNA==",
+ "dev": true,
+ "optional": true
+ },
+ "@rollup/rollup-freebsd-x64": {
+ "version": "4.28.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.28.1.tgz",
+ "integrity": "sha512-m/uYasxkUevcFTeRSM9TeLyPe2QDuqtjkeoTpP9SW0XxUWfcYrGDMkO/m2tTw+4NMAF9P2fU3Mw4ahNvo7QmsQ==",
"dev": true,
"optional": true
},
"@rollup/rollup-linux-arm-gnueabihf": {
- "version": "4.22.4",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.22.4.tgz",
- "integrity": "sha512-j63YtCIRAzbO+gC2L9dWXRh5BFetsv0j0va0Wi9epXDgU/XUi5dJKo4USTttVyK7fGw2nPWK0PbAvyliz50SCQ==",
+ "version": "4.28.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.28.1.tgz",
+ "integrity": "sha512-QAg11ZIt6mcmzpNE6JZBpKfJaKkqTm1A9+y9O+frdZJEuhQxiugM05gnCWiANHj4RmbgeVJpTdmKRmH/a+0QbA==",
"dev": true,
"optional": true
},
"@rollup/rollup-linux-arm-musleabihf": {
- "version": "4.22.4",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.22.4.tgz",
- "integrity": "sha512-dJnWUgwWBX1YBRsuKKMOlXCzh2Wu1mlHzv20TpqEsfdZLb3WoJW2kIEsGwLkroYf24IrPAvOT/ZQ2OYMV6vlrg==",
+ "version": "4.28.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.28.1.tgz",
+ "integrity": "sha512-dRP9PEBfolq1dmMcFqbEPSd9VlRuVWEGSmbxVEfiq2cs2jlZAl0YNxFzAQS2OrQmsLBLAATDMb3Z6MFv5vOcXg==",
"dev": true,
"optional": true
},
"@rollup/rollup-linux-arm64-gnu": {
- "version": "4.22.4",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.22.4.tgz",
- "integrity": "sha512-AdPRoNi3NKVLolCN/Sp4F4N1d98c4SBnHMKoLuiG6RXgoZ4sllseuGioszumnPGmPM2O7qaAX/IJdeDU8f26Aw==",
+ "version": "4.28.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.28.1.tgz",
+ "integrity": "sha512-uGr8khxO+CKT4XU8ZUH1TTEUtlktK6Kgtv0+6bIFSeiSlnGJHG1tSFSjm41uQ9sAO/5ULx9mWOz70jYLyv1QkA==",
"dev": true,
"optional": true
},
"@rollup/rollup-linux-arm64-musl": {
- "version": "4.22.4",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.22.4.tgz",
- "integrity": "sha512-Gl0AxBtDg8uoAn5CCqQDMqAx22Wx22pjDOjBdmG0VIWX3qUBHzYmOKh8KXHL4UpogfJ14G4wk16EQogF+v8hmA==",
+ "version": "4.28.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.28.1.tgz",
+ "integrity": "sha512-QF54q8MYGAqMLrX2t7tNpi01nvq5RI59UBNx+3+37zoKX5KViPo/gk2QLhsuqok05sSCRluj0D00LzCwBikb0A==",
+ "dev": true,
+ "optional": true
+ },
+ "@rollup/rollup-linux-loongarch64-gnu": {
+ "version": "4.28.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loongarch64-gnu/-/rollup-linux-loongarch64-gnu-4.28.1.tgz",
+ "integrity": "sha512-vPul4uodvWvLhRco2w0GcyZcdyBfpfDRgNKU+p35AWEbJ/HPs1tOUrkSueVbBS0RQHAf/A+nNtDpvw95PeVKOA==",
"dev": true,
"optional": true
},
"@rollup/rollup-linux-powerpc64le-gnu": {
- "version": "4.22.4",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.22.4.tgz",
- "integrity": "sha512-3aVCK9xfWW1oGQpTsYJJPF6bfpWfhbRnhdlyhak2ZiyFLDaayz0EP5j9V1RVLAAxlmWKTDfS9wyRyY3hvhPoOg==",
+ "version": "4.28.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.28.1.tgz",
+ "integrity": "sha512-pTnTdBuC2+pt1Rmm2SV7JWRqzhYpEILML4PKODqLz+C7Ou2apEV52h19CR7es+u04KlqplggmN9sqZlekg3R1A==",
"dev": true,
"optional": true
},
"@rollup/rollup-linux-riscv64-gnu": {
- "version": "4.22.4",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.22.4.tgz",
- "integrity": "sha512-ePYIir6VYnhgv2C5Xe9u+ico4t8sZWXschR6fMgoPUK31yQu7hTEJb7bCqivHECwIClJfKgE7zYsh1qTP3WHUA==",
+ "version": "4.28.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.28.1.tgz",
+ "integrity": "sha512-vWXy1Nfg7TPBSuAncfInmAI/WZDd5vOklyLJDdIRKABcZWojNDY0NJwruY2AcnCLnRJKSaBgf/GiJfauu8cQZA==",
"dev": true,
"optional": true
},
"@rollup/rollup-linux-s390x-gnu": {
- "version": "4.22.4",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.22.4.tgz",
- "integrity": "sha512-GqFJ9wLlbB9daxhVlrTe61vJtEY99/xB3C8e4ULVsVfflcpmR6c8UZXjtkMA6FhNONhj2eA5Tk9uAVw5orEs4Q==",
+ "version": "4.28.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.28.1.tgz",
+ "integrity": "sha512-/yqC2Y53oZjb0yz8PVuGOQQNOTwxcizudunl/tFs1aLvObTclTwZ0JhXF2XcPT/zuaymemCDSuuUPXJJyqeDOg==",
"dev": true,
"optional": true
},
"@rollup/rollup-linux-x64-gnu": {
- "version": "4.22.4",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.22.4.tgz",
- "integrity": "sha512-87v0ol2sH9GE3cLQLNEy0K/R0pz1nvg76o8M5nhMR0+Q+BBGLnb35P0fVz4CQxHYXaAOhE8HhlkaZfsdUOlHwg==",
+ "version": "4.28.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.28.1.tgz",
+ "integrity": "sha512-fzgeABz7rrAlKYB0y2kSEiURrI0691CSL0+KXwKwhxvj92VULEDQLpBYLHpF49MSiPG4sq5CK3qHMnb9tlCjBw==",
"dev": true,
"optional": true
},
"@rollup/rollup-linux-x64-musl": {
- "version": "4.22.4",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.22.4.tgz",
- "integrity": "sha512-UV6FZMUgePDZrFjrNGIWzDo/vABebuXBhJEqrHxrGiU6HikPy0Z3LfdtciIttEUQfuDdCn8fqh7wiFJjCNwO+g==",
+ "version": "4.28.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.28.1.tgz",
+ "integrity": "sha512-xQTDVzSGiMlSshpJCtudbWyRfLaNiVPXt1WgdWTwWz9n0U12cI2ZVtWe/Jgwyv/6wjL7b66uu61Vg0POWVfz4g==",
"dev": true,
"optional": true
},
"@rollup/rollup-win32-arm64-msvc": {
- "version": "4.22.4",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.22.4.tgz",
- "integrity": "sha512-BjI+NVVEGAXjGWYHz/vv0pBqfGoUH0IGZ0cICTn7kB9PyjrATSkX+8WkguNjWoj2qSr1im/+tTGRaY+4/PdcQw==",
+ "version": "4.28.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.28.1.tgz",
+ "integrity": "sha512-wSXmDRVupJstFP7elGMgv+2HqXelQhuNf+IS4V+nUpNVi/GUiBgDmfwD0UGN3pcAnWsgKG3I52wMOBnk1VHr/A==",
"dev": true,
"optional": true
},
"@rollup/rollup-win32-ia32-msvc": {
- "version": "4.22.4",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.22.4.tgz",
- "integrity": "sha512-SiWG/1TuUdPvYmzmYnmd3IEifzR61Tragkbx9D3+R8mzQqDBz8v+BvZNDlkiTtI9T15KYZhP0ehn3Dld4n9J5g==",
+ "version": "4.28.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.28.1.tgz",
+ "integrity": "sha512-ZkyTJ/9vkgrE/Rk9vhMXhf8l9D+eAhbAVbsGsXKy2ohmJaWg0LPQLnIxRdRp/bKyr8tXuPlXhIoGlEB5XpJnGA==",
"dev": true,
"optional": true
},
"@rollup/rollup-win32-x64-msvc": {
- "version": "4.22.4",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.22.4.tgz",
- "integrity": "sha512-j8pPKp53/lq9lMXN57S8cFz0MynJk8OWNuUnXct/9KCpKU7DgU3bYMJhwWmcqC0UU29p8Lr0/7KEVcaM6bf47Q==",
+ "version": "4.28.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.28.1.tgz",
+ "integrity": "sha512-ZvK2jBafvttJjoIdKm/Q/Bh7IJ1Ose9IBOwpOXcOvW3ikGTQGmKDgxTC6oCAzW6PynbkKP8+um1du81XJHZ0JA==",
"dev": true,
"optional": true
},
@@ -8302,9 +8401,9 @@
"integrity": "sha512-qp3m9PPz4gULB9MhjGID7wpo3gJ4bTGXm7ltNDsmOvsPduTeHp8wSW9YckBj3mljeOh4F0m2z/0JKAALRKbmLQ=="
},
"@types/estree": {
- "version": "1.0.5",
- "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.5.tgz",
- "integrity": "sha512-/kYRxGDLWzHOB7q+wtSUQlFrtcdUccpfy+X+9iMBpHK8QLLhx2wIPYuS5DYtR9Wa/YlZAbIovy7qVdB1Aq6Lyw==",
+ "version": "1.0.6",
+ "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.6.tgz",
+ "integrity": "sha512-AYnb1nQyY49te+VRAVgmzfcgjYS91mY5P0TKUDCLEM+gNnA+3T6rWITXRLYCpahpqSQbN5cE+gHpnPyXjHWxcw==",
"dev": true
},
"@types/json-schema": {
@@ -9614,34 +9713,35 @@
}
},
"esbuild": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.21.5.tgz",
- "integrity": "sha512-mg3OPMV4hXywwpoDxu3Qda5xCKQi+vCTZq8S9J/EpkhB2HzKXq4SNFZE3+NK93JYxc8VMSep+lOUSC/RVKaBqw==",
- "dev": true,
- "requires": {
- "@esbuild/aix-ppc64": "0.21.5",
- "@esbuild/android-arm": "0.21.5",
- "@esbuild/android-arm64": "0.21.5",
- "@esbuild/android-x64": "0.21.5",
- "@esbuild/darwin-arm64": "0.21.5",
- "@esbuild/darwin-x64": "0.21.5",
- "@esbuild/freebsd-arm64": "0.21.5",
- "@esbuild/freebsd-x64": "0.21.5",
- "@esbuild/linux-arm": "0.21.5",
- "@esbuild/linux-arm64": "0.21.5",
- "@esbuild/linux-ia32": "0.21.5",
- "@esbuild/linux-loong64": "0.21.5",
- "@esbuild/linux-mips64el": "0.21.5",
- "@esbuild/linux-ppc64": "0.21.5",
- "@esbuild/linux-riscv64": "0.21.5",
- "@esbuild/linux-s390x": "0.21.5",
- "@esbuild/linux-x64": "0.21.5",
- "@esbuild/netbsd-x64": "0.21.5",
- "@esbuild/openbsd-x64": "0.21.5",
- "@esbuild/sunos-x64": "0.21.5",
- "@esbuild/win32-arm64": "0.21.5",
- "@esbuild/win32-ia32": "0.21.5",
- "@esbuild/win32-x64": "0.21.5"
+ "version": "0.24.0",
+ "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.24.0.tgz",
+ "integrity": "sha512-FuLPevChGDshgSicjisSooU0cemp/sGXR841D5LHMB7mTVOmsEHcAxaH3irL53+8YDIeVNQEySh4DaYU/iuPqQ==",
+ "dev": true,
+ "requires": {
+ "@esbuild/aix-ppc64": "0.24.0",
+ "@esbuild/android-arm": "0.24.0",
+ "@esbuild/android-arm64": "0.24.0",
+ "@esbuild/android-x64": "0.24.0",
+ "@esbuild/darwin-arm64": "0.24.0",
+ "@esbuild/darwin-x64": "0.24.0",
+ "@esbuild/freebsd-arm64": "0.24.0",
+ "@esbuild/freebsd-x64": "0.24.0",
+ "@esbuild/linux-arm": "0.24.0",
+ "@esbuild/linux-arm64": "0.24.0",
+ "@esbuild/linux-ia32": "0.24.0",
+ "@esbuild/linux-loong64": "0.24.0",
+ "@esbuild/linux-mips64el": "0.24.0",
+ "@esbuild/linux-ppc64": "0.24.0",
+ "@esbuild/linux-riscv64": "0.24.0",
+ "@esbuild/linux-s390x": "0.24.0",
+ "@esbuild/linux-x64": "0.24.0",
+ "@esbuild/netbsd-x64": "0.24.0",
+ "@esbuild/openbsd-arm64": "0.24.0",
+ "@esbuild/openbsd-x64": "0.24.0",
+ "@esbuild/sunos-x64": "0.24.0",
+ "@esbuild/win32-arm64": "0.24.0",
+ "@esbuild/win32-ia32": "0.24.0",
+ "@esbuild/win32-x64": "0.24.0"
}
},
"escalade": {
@@ -11544,28 +11644,31 @@
"integrity": "sha512-IXgzBWvWQwE6PrDI05OvmXUIruQTcoMDzRsOd5CDvHCVLcLHMTSYvOK5Cm46kWqlV3yAbuSpBZdJ5oP5OUoStg=="
},
"rollup": {
- "version": "4.22.4",
- "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.22.4.tgz",
- "integrity": "sha512-vD8HJ5raRcWOyymsR6Z3o6+RzfEPCnVLMFJ6vRslO1jt4LO6dUo5Qnpg7y4RkZFM2DMe3WUirkI5c16onjrc6A==",
- "dev": true,
- "requires": {
- "@rollup/rollup-android-arm-eabi": "4.22.4",
- "@rollup/rollup-android-arm64": "4.22.4",
- "@rollup/rollup-darwin-arm64": "4.22.4",
- "@rollup/rollup-darwin-x64": "4.22.4",
- "@rollup/rollup-linux-arm-gnueabihf": "4.22.4",
- "@rollup/rollup-linux-arm-musleabihf": "4.22.4",
- "@rollup/rollup-linux-arm64-gnu": "4.22.4",
- "@rollup/rollup-linux-arm64-musl": "4.22.4",
- "@rollup/rollup-linux-powerpc64le-gnu": "4.22.4",
- "@rollup/rollup-linux-riscv64-gnu": "4.22.4",
- "@rollup/rollup-linux-s390x-gnu": "4.22.4",
- "@rollup/rollup-linux-x64-gnu": "4.22.4",
- "@rollup/rollup-linux-x64-musl": "4.22.4",
- "@rollup/rollup-win32-arm64-msvc": "4.22.4",
- "@rollup/rollup-win32-ia32-msvc": "4.22.4",
- "@rollup/rollup-win32-x64-msvc": "4.22.4",
- "@types/estree": "1.0.5",
+ "version": "4.28.1",
+ "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.28.1.tgz",
+ "integrity": "sha512-61fXYl/qNVinKmGSTHAZ6Yy8I3YIJC/r2m9feHo6SwVAVcLT5MPwOUFe7EuURA/4m0NR8lXG4BBXuo/IZEsjMg==",
+ "dev": true,
+ "requires": {
+ "@rollup/rollup-android-arm-eabi": "4.28.1",
+ "@rollup/rollup-android-arm64": "4.28.1",
+ "@rollup/rollup-darwin-arm64": "4.28.1",
+ "@rollup/rollup-darwin-x64": "4.28.1",
+ "@rollup/rollup-freebsd-arm64": "4.28.1",
+ "@rollup/rollup-freebsd-x64": "4.28.1",
+ "@rollup/rollup-linux-arm-gnueabihf": "4.28.1",
+ "@rollup/rollup-linux-arm-musleabihf": "4.28.1",
+ "@rollup/rollup-linux-arm64-gnu": "4.28.1",
+ "@rollup/rollup-linux-arm64-musl": "4.28.1",
+ "@rollup/rollup-linux-loongarch64-gnu": "4.28.1",
+ "@rollup/rollup-linux-powerpc64le-gnu": "4.28.1",
+ "@rollup/rollup-linux-riscv64-gnu": "4.28.1",
+ "@rollup/rollup-linux-s390x-gnu": "4.28.1",
+ "@rollup/rollup-linux-x64-gnu": "4.28.1",
+ "@rollup/rollup-linux-x64-musl": "4.28.1",
+ "@rollup/rollup-win32-arm64-msvc": "4.28.1",
+ "@rollup/rollup-win32-ia32-msvc": "4.28.1",
+ "@rollup/rollup-win32-x64-msvc": "4.28.1",
+ "@types/estree": "1.0.6",
"fsevents": "~2.3.2"
}
},
@@ -12144,15 +12247,15 @@
}
},
"vite": {
- "version": "5.4.11",
- "resolved": "https://registry.npmjs.org/vite/-/vite-5.4.11.tgz",
- "integrity": "sha512-c7jFQRklXua0mTzneGW9QVyxFjUgwcihC4bXEtujIo2ouWCe1Ajt/amn2PCxYnhYfd5k09JX3SB7OYWFKYqj8Q==",
+ "version": "6.0.3",
+ "resolved": "https://registry.npmjs.org/vite/-/vite-6.0.3.tgz",
+ "integrity": "sha512-Cmuo5P0ENTN6HxLSo6IHsjCLn/81Vgrp81oaiFFMRa8gGDj5xEjIcEpf2ZymZtZR8oU0P2JX5WuUp/rlXcHkAw==",
"dev": true,
"requires": {
- "esbuild": "^0.21.3",
+ "esbuild": "^0.24.0",
"fsevents": "~2.3.3",
- "postcss": "^8.4.43",
- "rollup": "^4.20.0"
+ "postcss": "^8.4.49",
+ "rollup": "^4.23.0"
}
},
"vscode-uri": {
diff --git a/ui/package.json b/ui/package.json
index 2098cfc148c4..326b264bdcdd 100644
--- a/ui/package.json
+++ b/ui/package.json
@@ -30,7 +30,7 @@
"eslint": "^8.57.1",
"ts-node": "10.9.2",
"typescript": "^5.7.2",
- "vite": "5.4.11",
+ "vite": "6.0.3",
"vue-tsc": "^2.1.10"
}
}
From 8fd92fe5e6d49d6300b38f73ada6b662ecadc8a6 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Sun, 8 Dec 2024 20:00:43 -0500
Subject: [PATCH 40/92] Bump tailwindcss from 3.4.15 to 3.4.16 in /ui (#16271)
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
ui/package-lock.json | 53 ++++++++++++++++----------------------------
ui/package.json | 2 +-
2 files changed, 20 insertions(+), 35 deletions(-)
diff --git a/ui/package-lock.json b/ui/package-lock.json
index c00d5d14f229..a058ce3fa58f 100644
--- a/ui/package-lock.json
+++ b/ui/package-lock.json
@@ -16,7 +16,7 @@
"axios": "1.7.4",
"lodash.debounce": "4.0.8",
"lodash.merge": "^4.6.2",
- "tailwindcss": "3.4.15",
+ "tailwindcss": "3.4.16",
"vue": "3.5.13",
"vue-router": "4.5.0"
},
@@ -4886,11 +4886,14 @@
}
},
"node_modules/lilconfig": {
- "version": "2.1.0",
- "resolved": "https://registry.npmjs.org/lilconfig/-/lilconfig-2.1.0.tgz",
- "integrity": "sha512-utWOt/GHzuUxnLKxB6dk81RoOeoNeHgbrXiuGk4yyF5qlRz+iIVWu56E2fqGHFrXz0QNUhLB/8nKqvRH66JKGQ==",
+ "version": "3.1.3",
+ "resolved": "https://registry.npmjs.org/lilconfig/-/lilconfig-3.1.3.tgz",
+ "integrity": "sha512-/vlFKAoH5Cgt3Ie+JLhRbwOsCQePABiU3tJ1egGvyQ+33R/vcwM2Zl2QR/LzjsBeItPt3oSVXapn+m4nQDvpzw==",
"engines": {
- "node": ">=10"
+ "node": ">=14"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/antonk52"
}
},
"node_modules/lines-and-columns": {
@@ -5899,17 +5902,6 @@
}
}
},
- "node_modules/postcss-load-config/node_modules/lilconfig": {
- "version": "3.1.2",
- "resolved": "https://registry.npmjs.org/lilconfig/-/lilconfig-3.1.2.tgz",
- "integrity": "sha512-eop+wDAvpItUys0FWkHIKeC9ybYrTGbU41U5K7+bttZZeohvnY7M9dZ5kB21GNWiFT2q1OoPTvncPCgSOVO5ow==",
- "engines": {
- "node": ">=14"
- },
- "funding": {
- "url": "https://github.com/sponsors/antonk52"
- }
- },
"node_modules/postcss-nested": {
"version": "6.2.0",
"resolved": "https://registry.npmjs.org/postcss-nested/-/postcss-nested-6.2.0.tgz",
@@ -6610,9 +6602,9 @@
}
},
"node_modules/tailwindcss": {
- "version": "3.4.15",
- "resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-3.4.15.tgz",
- "integrity": "sha512-r4MeXnfBmSOuKUWmXe6h2CcyfzJCEk4F0pptO5jlnYSIViUkVmsawj80N5h2lO3gwcmSb4n3PuN+e+GC1Guylw==",
+ "version": "3.4.16",
+ "resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-3.4.16.tgz",
+ "integrity": "sha512-TI4Cyx7gDiZ6r44ewaJmt0o6BrMCT5aK5e0rmJ/G9Xq3w7CX/5VXl/zIPEJZFUK5VEqwByyhqNPycPlvcK4ZNw==",
"dependencies": {
"@alloc/quick-lru": "^5.2.0",
"arg": "^5.0.2",
@@ -6623,7 +6615,7 @@
"glob-parent": "^6.0.2",
"is-glob": "^4.0.3",
"jiti": "^1.21.6",
- "lilconfig": "^2.1.0",
+ "lilconfig": "^3.1.3",
"micromatch": "^4.0.8",
"normalize-path": "^3.0.0",
"object-hash": "^3.0.0",
@@ -10763,9 +10755,9 @@
}
},
"lilconfig": {
- "version": "2.1.0",
- "resolved": "https://registry.npmjs.org/lilconfig/-/lilconfig-2.1.0.tgz",
- "integrity": "sha512-utWOt/GHzuUxnLKxB6dk81RoOeoNeHgbrXiuGk4yyF5qlRz+iIVWu56E2fqGHFrXz0QNUhLB/8nKqvRH66JKGQ=="
+ "version": "3.1.3",
+ "resolved": "https://registry.npmjs.org/lilconfig/-/lilconfig-3.1.3.tgz",
+ "integrity": "sha512-/vlFKAoH5Cgt3Ie+JLhRbwOsCQePABiU3tJ1egGvyQ+33R/vcwM2Zl2QR/LzjsBeItPt3oSVXapn+m4nQDvpzw=="
},
"lines-and-columns": {
"version": "1.2.4",
@@ -11480,13 +11472,6 @@
"requires": {
"lilconfig": "^3.0.0",
"yaml": "^2.3.4"
- },
- "dependencies": {
- "lilconfig": {
- "version": "3.1.2",
- "resolved": "https://registry.npmjs.org/lilconfig/-/lilconfig-3.1.2.tgz",
- "integrity": "sha512-eop+wDAvpItUys0FWkHIKeC9ybYrTGbU41U5K7+bttZZeohvnY7M9dZ5kB21GNWiFT2q1OoPTvncPCgSOVO5ow=="
- }
}
},
"postcss-nested": {
@@ -11959,9 +11944,9 @@
}
},
"tailwindcss": {
- "version": "3.4.15",
- "resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-3.4.15.tgz",
- "integrity": "sha512-r4MeXnfBmSOuKUWmXe6h2CcyfzJCEk4F0pptO5jlnYSIViUkVmsawj80N5h2lO3gwcmSb4n3PuN+e+GC1Guylw==",
+ "version": "3.4.16",
+ "resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-3.4.16.tgz",
+ "integrity": "sha512-TI4Cyx7gDiZ6r44ewaJmt0o6BrMCT5aK5e0rmJ/G9Xq3w7CX/5VXl/zIPEJZFUK5VEqwByyhqNPycPlvcK4ZNw==",
"requires": {
"@alloc/quick-lru": "^5.2.0",
"arg": "^5.0.2",
@@ -11972,7 +11957,7 @@
"glob-parent": "^6.0.2",
"is-glob": "^4.0.3",
"jiti": "^1.21.6",
- "lilconfig": "^2.1.0",
+ "lilconfig": "^3.1.3",
"micromatch": "^4.0.8",
"normalize-path": "^3.0.0",
"object-hash": "^3.0.0",
diff --git a/ui/package.json b/ui/package.json
index 326b264bdcdd..9f2437f8a05e 100644
--- a/ui/package.json
+++ b/ui/package.json
@@ -18,7 +18,7 @@
"axios": "1.7.4",
"lodash.debounce": "4.0.8",
"lodash.merge": "^4.6.2",
- "tailwindcss": "3.4.15",
+ "tailwindcss": "3.4.16",
"vue": "3.5.13",
"vue-router": "4.5.0"
},
From 8b740d4cd5dc3bd0c6e5b4378873af5d3430ca77 Mon Sep 17 00:00:00 2001
From: Jean Luciano
Date: Mon, 9 Dec 2024 09:58:48 -0600
Subject: [PATCH 41/92] OTEL flow run context propagation with Labels (#16122)
Co-authored-by: Chris Pickett
---
docs/mint.json | 1 +
.../flow-runs/update-flow-run-labels.mdx | 3 +
docs/v3/api-ref/rest-api/server/schema.json | 60 ++++++++++++++
src/prefect/client/orchestration.py | 25 ++++++
src/prefect/client/schemas/actions.py | 9 +-
src/prefect/client/schemas/objects.py | 2 +-
src/prefect/flow_engine.py | 67 ++++++++++++---
src/prefect/server/api/flow_runs.py | 15 ++++
src/prefect/server/models/flow_runs.py | 43 ++++++++++
src/prefect/telemetry/run_telemetry.py | 42 ++++++++--
tests/server/models/test_flow_runs.py | 74 +++++++++++++++++
tests/telemetry/test_instrumentation.py | 2 +-
tests/test_flow_engine.py | 83 ++++++++++++++++++-
ui-v2/src/api/prefect.ts | 56 +++++++++++++
14 files changed, 456 insertions(+), 26 deletions(-)
create mode 100644 docs/v3/api-ref/rest-api/server/flow-runs/update-flow-run-labels.mdx
diff --git a/docs/mint.json b/docs/mint.json
index c65644f34a09..456c9e75b15c 100644
--- a/docs/mint.json
+++ b/docs/mint.json
@@ -445,6 +445,7 @@
"v3/api-ref/rest-api/server/flow-runs/delete-flow-run-input",
"v3/api-ref/rest-api/server/flow-runs/paginate-flow-runs",
"v3/api-ref/rest-api/server/flow-runs/download-logs",
+ "v3/api-ref/rest-api/server/flow-runs/update-flow-run-labels",
"v3/api-ref/rest-api/server/flow-runs/read-flow-run-history",
"v3/api-ref/rest-api/server/flow-runs/count-task-runs-by-flow-run"
]
diff --git a/docs/v3/api-ref/rest-api/server/flow-runs/update-flow-run-labels.mdx b/docs/v3/api-ref/rest-api/server/flow-runs/update-flow-run-labels.mdx
new file mode 100644
index 000000000000..482191e872c6
--- /dev/null
+++ b/docs/v3/api-ref/rest-api/server/flow-runs/update-flow-run-labels.mdx
@@ -0,0 +1,3 @@
+---
+openapi: patch /api/flow_runs/{id}/labels
+---
\ No newline at end of file
diff --git a/docs/v3/api-ref/rest-api/server/schema.json b/docs/v3/api-ref/rest-api/server/schema.json
index 694f7648d060..62986c65c961 100644
--- a/docs/v3/api-ref/rest-api/server/schema.json
+++ b/docs/v3/api-ref/rest-api/server/schema.json
@@ -1521,6 +1521,66 @@
}
}
},
+ "/api/flow_runs/{id}/labels": {
+ "patch": {
+ "tags": [
+ "Flow Runs"
+ ],
+ "summary": "Update Flow Run Labels",
+ "description": "Update the labels of a flow run.",
+ "operationId": "update_flow_run_labels_flow_runs__id__labels_patch",
+ "parameters": [
+ {
+ "name": "id",
+ "in": "path",
+ "required": true,
+ "schema": {
+ "type": "string",
+ "format": "uuid",
+ "description": "The flow run id",
+ "title": "Id"
+ },
+ "description": "The flow run id"
+ },
+ {
+ "name": "x-prefect-api-version",
+ "in": "header",
+ "required": false,
+ "schema": {
+ "type": "string",
+ "title": "X-Prefect-Api-Version"
+ }
+ }
+ ],
+ "requestBody": {
+ "required": true,
+ "content": {
+ "application/json": {
+ "schema": {
+ "type": "object",
+ "description": "The labels to update",
+ "title": "Labels"
+ }
+ }
+ }
+ },
+ "responses": {
+ "204": {
+ "description": "Successful Response"
+ },
+ "422": {
+ "description": "Validation Error",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/HTTPValidationError"
+ }
+ }
+ }
+ }
+ }
+ }
+ },
"/api/task_runs/": {
"post": {
"tags": [
diff --git a/src/prefect/client/orchestration.py b/src/prefect/client/orchestration.py
index f244ccde5708..bfcc6232192a 100644
--- a/src/prefect/client/orchestration.py
+++ b/src/prefect/client/orchestration.py
@@ -127,6 +127,7 @@
PREFECT_TESTING_UNIT_TEST_MODE,
get_current_settings,
)
+from prefect.types import KeyValueLabelsField
if TYPE_CHECKING:
from prefect.flows import Flow as FlowObject
@@ -3461,6 +3462,18 @@ async def raise_for_api_version_mismatch(self) -> None:
f"Major versions must match."
)
+ async def update_flow_run_labels(
+ self, flow_run_id: UUID, labels: KeyValueLabelsField
+ ) -> None:
+ """
+ Updates the labels of a flow run.
+ """
+
+ response = await self._client.patch(
+ f"/flow_runs/{flow_run_id}/labels", json=labels
+ )
+ response.raise_for_status()
+
async def __aenter__(self) -> Self:
"""
Start the client.
@@ -4372,3 +4385,15 @@ def decrement_v1_concurrency_slots(
"task_run_id": str(task_run_id),
},
)
+
+ def update_flow_run_labels(
+ self, flow_run_id: UUID, labels: KeyValueLabelsField
+ ) -> None:
+ """
+ Updates the labels of a flow run.
+ """
+ response = self._client.patch(
+ f"/flow_runs/{flow_run_id}/labels",
+ json=labels,
+ )
+ response.raise_for_status()
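
For orientation, a minimal sketch of calling the new labels method on the async client — the placeholder UUID and label values below are illustrative, and a reachable Prefect API is assumed:

```python
import asyncio
from uuid import UUID

from prefect.client.orchestration import get_client

async def tag_run(flow_run_id: UUID) -> None:
    # Patches (merges) these labels into the run's existing labels.
    async with get_client() as client:
        await client.update_flow_run_labels(
            flow_run_id=flow_run_id,
            labels={"team": "data-eng", "env": "staging"},
        )

# Substitute a real flow run id for the placeholder below.
asyncio.run(tag_run(UUID("00000000-0000-0000-0000-000000000000")))
```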
diff --git a/src/prefect/client/schemas/actions.py b/src/prefect/client/schemas/actions.py
index 6f17c7cd8cc8..659c5153d46a 100644
--- a/src/prefect/client/schemas/actions.py
+++ b/src/prefect/client/schemas/actions.py
@@ -393,10 +393,11 @@ class DeploymentFlowRunCreate(ActionBaseModel):
default_factory=objects.FlowRunPolicy
)
tags: list[str] = Field(default_factory=list)
- idempotency_key: Optional[str] = Field(default=None)
- parent_task_run_id: Optional[UUID] = Field(default=None)
- work_queue_name: Optional[str] = Field(default=None)
- job_variables: Optional[dict[str, Any]] = Field(default=None)
+ idempotency_key: Optional[str] = Field(None)
+ parent_task_run_id: Optional[UUID] = Field(None)
+ work_queue_name: Optional[str] = Field(None)
+ job_variables: Optional[dict] = Field(None)
+ labels: KeyValueLabelsField = Field(default_factory=dict)
class SavedSearchCreate(ActionBaseModel):
diff --git a/src/prefect/client/schemas/objects.py b/src/prefect/client/schemas/objects.py
index 087cd5b78ee3..ef5eab667ad1 100644
--- a/src/prefect/client/schemas/objects.py
+++ b/src/prefect/client/schemas/objects.py
@@ -578,7 +578,7 @@ class FlowRun(ObjectBaseModel):
description="A list of tags on the flow run",
examples=[["tag-1", "tag-2"]],
)
- labels: KeyValueLabelsField
+ labels: KeyValueLabelsField = Field(default_factory=dict)
parent_task_run_id: Optional[UUID] = Field(
default=None,
description=(
diff --git a/src/prefect/flow_engine.py b/src/prefect/flow_engine.py
index fb5ec172ab4e..43bf55e21c41 100644
--- a/src/prefect/flow_engine.py
+++ b/src/prefect/flow_engine.py
@@ -2,7 +2,7 @@
import logging
import os
import time
-from contextlib import ExitStack, asynccontextmanager, contextmanager
+from contextlib import ExitStack, asynccontextmanager, contextmanager, nullcontext
from dataclasses import dataclass, field
from typing import (
Any,
@@ -23,7 +23,7 @@
from uuid import UUID
from anyio import CancelScope
-from opentelemetry import trace
+from opentelemetry import propagate, trace
from opentelemetry.trace import Tracer, get_tracer
from typing_extensions import ParamSpec
@@ -72,6 +72,8 @@
exception_to_failed_state,
return_value_to_state,
)
+from prefect.telemetry.run_telemetry import OTELSetter
+from prefect.types import KeyValueLabels
from prefect.utilities.annotations import NotSet
from prefect.utilities.asyncutils import run_coro_as_sync
from prefect.utilities.callables import (
@@ -94,6 +96,8 @@
P = ParamSpec("P")
R = TypeVar("R")
+LABELS_TRACEPARENT_KEY = "__OTEL_TRACEPARENT"
+TRACEPARENT_KEY = "traceparent"
class FlowRunTimeoutError(TimeoutError):
@@ -178,6 +182,37 @@ def cancel_all_tasks(self):
if hasattr(self.flow.task_runner, "cancel_all"):
self.flow.task_runner.cancel_all() # type: ignore
+ def _update_otel_labels(
+ self, span: trace.Span, client: Union[SyncPrefectClient, PrefectClient]
+ ):
+ parent_flow_run_ctx = FlowRunContext.get()
+ if parent_flow_run_ctx and parent_flow_run_ctx.flow_run:
+ if traceparent := parent_flow_run_ctx.flow_run.labels.get(
+ LABELS_TRACEPARENT_KEY
+ ):
+ carrier: KeyValueLabels = {TRACEPARENT_KEY: traceparent}
+ propagate.get_global_textmap().inject(
+ carrier={TRACEPARENT_KEY: traceparent},
+ setter=OTELSetter(),
+ )
+ else:
+ carrier: KeyValueLabels = {}
+ propagate.get_global_textmap().inject(
+ carrier,
+ context=trace.set_span_in_context(span),
+ setter=OTELSetter(),
+ )
+ if carrier.get(TRACEPARENT_KEY):
+ if self.flow_run:
+ client.update_flow_run_labels(
+ flow_run_id=self.flow_run.id,
+ labels={LABELS_TRACEPARENT_KEY: carrier[TRACEPARENT_KEY]},
+ )
+ else:
+ self.logger.info(
+ f"Tried to set traceparent {carrier[TRACEPARENT_KEY]} for flow run, but None was found"
+ )
+
@dataclass
class FlowRunEngine(BaseFlowRunEngine[P, R]):
@@ -283,7 +318,7 @@ def set_state(self, state: State, force: bool = False) -> State:
if self._span:
self._span.add_event(
- state.name,
+ state.name or state.type,
{
"prefect.state.message": state.message or "",
"prefect.state.type": state.type,
@@ -402,7 +437,7 @@ def handle_crash(self, exc: BaseException) -> None:
self.set_state(state, force=True)
self._raised = exc
- self._end_span_on_error(exc, state.message)
+ self._end_span_on_error(exc, state.message if state else "")
def load_subflow_run(
self,
@@ -647,7 +682,7 @@ def initialize_run(self):
empirical_policy=self.flow_run.empirical_policy,
)
- self._span = self._tracer.start_span(
+ span = self._tracer.start_span(
name=self.flow_run.name,
attributes={
**self.flow_run.labels,
@@ -657,6 +692,9 @@ def initialize_run(self):
"prefect.flow.name": self.flow.name,
},
)
+ self._update_otel_labels(span, self.client)
+
+ self._span = span
try:
yield self
@@ -698,12 +736,13 @@ def initialize_run(self):
@contextmanager
def start(self) -> Generator[None, None, None]:
- with self.initialize_run(), trace.use_span(self._span):
- self.begin_run()
+ with self.initialize_run():
+ with trace.use_span(self._span) if self._span else nullcontext():
+ self.begin_run()
- if self.state.is_running():
- self.call_hooks()
- yield
+ if self.state.is_running():
+ self.call_hooks()
+ yield
@contextmanager
def run_context(self):
@@ -856,7 +895,7 @@ async def set_state(self, state: State, force: bool = False) -> State:
if self._span:
self._span.add_event(
- state.name,
+ state.name or state.type,
{
"prefect.state.message": state.message or "",
"prefect.state.type": state.type,
@@ -1217,7 +1256,7 @@ async def initialize_run(self):
empirical_policy=self.flow_run.empirical_policy,
)
- self._span = self._tracer.start_span(
+ span = self._tracer.start_span(
name=self.flow_run.name,
attributes={
**self.flow_run.labels,
@@ -1227,6 +1266,8 @@ async def initialize_run(self):
"prefect.flow.name": self.flow.name,
},
)
+ self._update_otel_labels(span, self.client)
+ self._span = span
try:
yield self
@@ -1269,7 +1310,7 @@ async def initialize_run(self):
@asynccontextmanager
async def start(self) -> AsyncGenerator[None, None]:
async with self.initialize_run():
- with trace.use_span(self._span):
+ with trace.use_span(self._span) if self._span else nullcontext():
await self.begin_run()
if self.state.is_running():
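
The engine changes above follow one rule: reuse the parent run's stored `__OTEL_TRACEPARENT` label when present, otherwise inject the new span's context and persist it; the `nullcontext` fallback keeps `start()` usable when no span was created. A sketch of that guard pattern in isolation (illustrative, assuming the OpenTelemetry API is installed):

```python
from contextlib import nullcontext

from opentelemetry import trace

span = None  # e.g. tracing is disabled, so no span was started
# Enter the span's context only when a span exists; otherwise no-op.
with trace.use_span(span) if span else nullcontext():
    pass  # begin_run() and hooks proceed either way
```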
diff --git a/src/prefect/server/api/flow_runs.py b/src/prefect/server/api/flow_runs.py
index 7473a815ba29..338193b13f3e 100644
--- a/src/prefect/server/api/flow_runs.py
+++ b/src/prefect/server/api/flow_runs.py
@@ -838,3 +838,18 @@ async def generate():
"Content-Disposition": f"attachment; filename={flow_run.name}-logs.csv"
},
)
+
+
+@router.patch("/{id}/labels", status_code=status.HTTP_204_NO_CONTENT)
+async def update_flow_run_labels(
+ flow_run_id: UUID = Path(..., description="The flow run id", alias="id"),
+ labels: Dict[str, Any] = Body(..., description="The labels to update"),
+ db: PrefectDBInterface = Depends(provide_database_interface),
+):
+ """
+ Update the labels of a flow run.
+ """
+ async with db.session_context(begin_transaction=True) as session:
+ await models.flow_runs.update_flow_run_labels(
+ session=session, flow_run_id=flow_run_id, labels=labels
+ )
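
The new route can also be exercised directly over HTTP. A hedged sketch with httpx, assuming the default local API address and a placeholder run id:

```python
import httpx

flow_run_id = "00000000-0000-0000-0000-000000000000"  # placeholder
response = httpx.patch(
    f"http://127.0.0.1:4200/api/flow_runs/{flow_run_id}/labels",
    json={"team": "data-eng"},
)
response.raise_for_status()  # expect 204 No Content on success
```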
diff --git a/src/prefect/server/models/flow_runs.py b/src/prefect/server/models/flow_runs.py
index fbb6f522f61b..5db2ff750956 100644
--- a/src/prefect/server/models/flow_runs.py
+++ b/src/prefect/server/models/flow_runs.py
@@ -29,6 +29,7 @@
import prefect.server.models as models
import prefect.server.schemas as schemas
+from prefect.logging.loggers import get_logger
from prefect.server.database import orm_models
from prefect.server.database.dependencies import db_injector
from prefect.server.database.interface import PrefectDBInterface
@@ -46,6 +47,10 @@
PREFECT_API_MAX_FLOW_RUN_GRAPH_ARTIFACTS,
PREFECT_API_MAX_FLOW_RUN_GRAPH_NODES,
)
+from prefect.types import KeyValueLabels
+
+logger = get_logger("flow_runs")
+
T = TypeVar("T", bound=tuple)
@@ -633,3 +638,41 @@ async def with_system_labels_for_flow_run(
)
return parent_labels | default_labels | user_supplied_labels
+
+
+async def update_flow_run_labels(
+ session: AsyncSession,
+ flow_run_id: UUID,
+ labels: KeyValueLabels,
+) -> bool:
+ """
+ Update flow run labels by patching existing labels with new values.
+ Args:
+ session: A database session
+ flow_run_id: the flow run id to update
+ labels: the new labels to patch into existing labels
+ Returns:
+ bool: whether the update was successful
+ """
+ # First read the existing flow run to get current labels
+ flow_run: Optional[orm_models.FlowRun] = await read_flow_run(session, flow_run_id)
+ if not flow_run:
+ raise ObjectNotFoundError(f"Flow run with id {flow_run_id} not found")
+
+ # Merge existing labels with new labels
+ current_labels = flow_run.labels or {}
+ updated_labels = {**current_labels, **labels}
+
+ # Update the flow run with merged labels
+ result = await session.execute(
+ sa.update(orm_models.FlowRun)
+ .where(orm_models.FlowRun.id == flow_run_id)
+ .values(labels=updated_labels)
+ )
+ success = result.rowcount > 0
+ if success:
+ # Commit immediately so the merged labels are durable for readers
+ await session.commit()
+ return success
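
The merge here is a plain dict union in which incoming keys win. A quick illustration, with values mirroring the test added later in this patch:

```python
current_labels = {"env": "test", "version": "1.0"}
new_labels = {"version": "2.0", "new_key": "new_value"}
# Right-hand side wins on conflicting keys ("version" here).
assert {**current_labels, **new_labels} == {
    "env": "test",
    "version": "2.0",
    "new_key": "new_value",
}
```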
diff --git a/src/prefect/telemetry/run_telemetry.py b/src/prefect/telemetry/run_telemetry.py
index bc2c36fc68ab..08de1a2ebd0b 100644
--- a/src/prefect/telemetry/run_telemetry.py
+++ b/src/prefect/telemetry/run_telemetry.py
@@ -2,6 +2,7 @@
from dataclasses import dataclass, field
from typing import TYPE_CHECKING, Any, Dict, Optional
+from opentelemetry.propagators.textmap import Setter
from opentelemetry.trace import (
Status,
StatusCode,
@@ -11,13 +12,27 @@
import prefect
from prefect.client.schemas import TaskRun
from prefect.client.schemas.objects import State
+from prefect.types import KeyValueLabels
if TYPE_CHECKING:
- from opentelemetry.sdk.trace import Tracer
+ from opentelemetry.trace import Tracer
+
+
+class OTELSetter(Setter[KeyValueLabels]):
+ """
+ A setter for OpenTelemetry that supports Prefect's custom labels.
+ """
+
+ def set(self, carrier: KeyValueLabels, key: str, value: str) -> None:
+ carrier[key] = value
@dataclass
class RunTelemetry:
+ """
+ A class for managing the telemetry of runs.
+ """
+
_tracer: "Tracer" = field(
default_factory=lambda: get_tracer("prefect", prefect.__version__)
)
@@ -29,6 +44,9 @@ def start_span(
parameters: Optional[Dict[str, Any]] = None,
labels: Optional[Dict[str, Any]] = None,
):
+ """
+ Start a span for a task run.
+ """
if parameters is None:
parameters = {}
if labels is None:
@@ -48,26 +66,38 @@ def start_span(
},
)
- def end_span_on_success(self, terminal_message: str):
+ def end_span_on_success(self, terminal_message: str) -> None:
+ """
+ End a span for a task run on success.
+ """
if self._span:
self._span.set_status(Status(StatusCode.OK), terminal_message)
self._span.end(time.time_ns())
self._span = None
- def end_span_on_failure(self, terminal_message: str):
+ def end_span_on_failure(self, terminal_message: str) -> None:
+ """
+ End a span for a task run on failure.
+ """
if self._span:
self._span.set_status(Status(StatusCode.ERROR, terminal_message))
self._span.end(time.time_ns())
self._span = None
- def record_exception(self, exc: Exception):
+ def record_exception(self, exc: Exception) -> None:
+ """
+ Record an exception on a span.
+ """
if self._span:
self._span.record_exception(exc)
- def update_state(self, new_state: State):
+ def update_state(self, new_state: State) -> None:
+ """
+ Update a span with the state of a task run.
+ """
if self._span:
self._span.add_event(
- new_state.name,
+ new_state.name or new_state.type,
{
"prefect.state.message": new_state.message or "",
"prefect.state.type": new_state.type,
diff --git a/tests/server/models/test_flow_runs.py b/tests/server/models/test_flow_runs.py
index 7b68055cbd23..1023d329baa9 100644
--- a/tests/server/models/test_flow_runs.py
+++ b/tests/server/models/test_flow_runs.py
@@ -3,10 +3,13 @@
import pendulum
import pytest
import sqlalchemy as sa
+from sqlalchemy.ext.asyncio import AsyncSession
from prefect.server import models, schemas
+from prefect.server.database import orm_models
from prefect.server.exceptions import ObjectNotFoundError
from prefect.server.schemas.core import TaskRunResult
+from prefect.types import KeyValueLabels
class TestCreateFlowRun:
@@ -255,6 +258,77 @@ async def test_update_flow_run_returns_false_if_flow_run_does_not_exist(
)
)
+ async def test_update_flow_run_labels(
+ self, flow: orm_models.Flow, session: AsyncSession
+ ):
+ """Test that flow run labels can be updated by patching existing labels"""
+
+ # Create a flow run with initial labels
+ initial_labels: KeyValueLabels = {"env": "test", "version": "1.0"}
+ flow_run = await models.flow_runs.create_flow_run(
+ session=session,
+ flow_run=schemas.core.FlowRun(flow_id=flow.id, labels=initial_labels),
+ )
+
+ # Update with new labels
+ new_labels: KeyValueLabels = {"version": "2.0", "new_key": "new_value"}
+ update_success = await models.flow_runs.update_flow_run_labels(
+ session=session, flow_run_id=flow_run.id, labels=new_labels
+ )
+ assert update_success is True
+
+ # Read the flow run back and verify labels were merged correctly
+ updated_flow_run = await models.flow_runs.read_flow_run(
+ session=session, flow_run_id=flow_run.id
+ )
+ assert updated_flow_run
+ assert updated_flow_run.labels == {
+ "prefect.flow.id": str(flow.id),
+ "env": "test", # Kept from initial labels
+ "version": "2.0", # Updated from new labels
+ "new_key": "new_value", # Added from new labels
+ }
+
+ async def test_update_flow_run_labels_raises_if_flow_run_does_not_exist(
+ self, session: AsyncSession, caplog: pytest.LogCaptureFixture
+ ):
+ """Test that updating labels for a non-existent flow run raises"""
+ with pytest.raises(ObjectNotFoundError) as exc:
+ await models.flow_runs.update_flow_run_labels(
+ session=session, flow_run_id=uuid4(), labels={"test": "label"}
+ )
+ assert "Flow run with id" in str(exc.value)
+
+ async def test_update_flow_run_labels_with_empty_initial_labels(
+ self, flow: orm_models.Flow, session: AsyncSession
+ ):
+ """Test that labels can be added to a flow run with no existing labels"""
+
+ # Create a flow run with no labels
+ flow_run = await models.flow_runs.create_flow_run(
+ session=session,
+ flow_run=schemas.core.FlowRun(
+ flow_id=flow.id,
+ ),
+ )
+
+ # Update with new labels
+ new_labels: KeyValueLabels = {"env": "test", "version": "1.0"}
+ update_success = await models.flow_runs.update_flow_run_labels(
+ session=session, flow_run_id=flow_run.id, labels=new_labels
+ )
+ assert update_success is True
+
+ # Read the flow run back and verify labels were added
+ updated_flow_run = await models.flow_runs.read_flow_run(
+ session=session, flow_run_id=flow_run.id
+ )
+ assert updated_flow_run
+ assert updated_flow_run.labels == {
+ "prefect.flow.id": str(flow.id),
+ **new_labels,
+ }
+
class TestReadFlowRun:
async def test_read_flow_run(self, flow, session):
diff --git a/tests/telemetry/test_instrumentation.py b/tests/telemetry/test_instrumentation.py
index 1ea74ff55872..6fae35a96895 100644
--- a/tests/telemetry/test_instrumentation.py
+++ b/tests/telemetry/test_instrumentation.py
@@ -211,7 +211,7 @@ def sync_task(x: int, y: int):
instrumentation.assert_has_attributes(
span, {"prefect.run.id": str(task_run_id), "prefect.run.type": "task"}
)
- assert spans[0].name == task_fn.__name__
+ assert spans[0].name == task_fn.name
async def test_span_attributes(self, engine_type, instrumentation):
@task
diff --git a/tests/test_flow_engine.py b/tests/test_flow_engine.py
index d1bc1a9bbdfd..9807f8c219cd 100644
--- a/tests/test_flow_engine.py
+++ b/tests/test_flow_engine.py
@@ -641,7 +641,6 @@ async def parent_flow():
assert await state.result() == "hello"
assert flow_run_count == 2
assert child_run_count == 2, "Child flow should be reset and run again"
-
# Ensure that the tracking task run for the subflow is reset and tracked
task_runs = sync_prefect_client.read_task_runs(
flow_run_filter=FlowRunFilter(
@@ -1955,6 +1954,7 @@ def instrumented_flow():
assert len(spans) == 1
span = spans[0]
assert span is not None
+ instrumentation.assert_span_instrumented_for(span, prefect)
instrumentation.assert_has_attributes(
span,
@@ -2094,3 +2094,84 @@ def a_slow_flow():
"exception.escaped": "False",
},
)
+
+ async def test_flow_run_propagates_otel_traceparent_to_subflow(
+ self, instrumentation: InstrumentationTester
+ ):
+ """Test that OTEL traceparent gets propagated from parent flow to child flow"""
+
+ @flow
+ def child_flow():
+ return "hello from child"
+
+ @flow
+ def parent_flow():
+ flow_run_ctx = FlowRunContext.get()
+ assert flow_run_ctx
+ assert flow_run_ctx.flow_run
+ flow_run = flow_run_ctx.flow_run
+ mock_traceparent = "00-0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-01"
+ flow_run.labels["__OTEL_TRACEPARENT"] = mock_traceparent
+
+ return child_flow()
+
+ parent_flow()
+
+ spans = instrumentation.get_finished_spans()
+
+ parent_span = next(
+ span
+ for span in spans
+ if span.attributes
+ and span.attributes.get("prefect.flow.name") == "parent-flow"
+ )
+ child_span = next(
+ span
+ for span in spans
+ if span.attributes
+ and span.attributes.get("prefect.flow.name") == "child-flow"
+ )
+
+ assert parent_span is not None
+ assert child_span is not None
+ assert child_span.context and parent_span.context
+ assert child_span.context.trace_id == parent_span.context.trace_id
+
+ async def test_flow_run_creates_and_stores_otel_traceparent(
+ self, instrumentation: InstrumentationTester, sync_prefect_client
+ ):
+ """Test that when no parent traceparent exists, the flow run stores its own span's traceparent"""
+
+ @flow
+ def child_flow():
+ return "hello from child"
+
+ @flow
+ def parent_flow():
+ return child_flow()
+
+ parent_flow()
+
+ spans = instrumentation.get_finished_spans()
+
+ next(
+ span
+ for span in spans
+ if span.attributes
+ and span.attributes.get("prefect.flow.name") == "parent-flow"
+ )
+ child_span = next(
+ span
+ for span in spans
+ if span.attributes
+ and span.attributes.get("prefect.flow.name") == "child-flow"
+ )
+
+ child_flow_run_id = child_span.attributes.get("prefect.run.id")
+ assert child_flow_run_id
+ child_flow_run = sync_prefect_client.read_flow_run(UUID(child_flow_run_id))
+
+ assert "__OTEL_TRACEPARENT" in child_flow_run.labels
+ assert child_flow_run.labels["__OTEL_TRACEPARENT"].startswith("00-")
+ trace_id_hex = child_flow_run.labels["__OTEL_TRACEPARENT"].split("-")[1]
+ assert int(trace_id_hex, 16) == child_span.context.trace_id
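
The final assertions unpack the W3C traceparent format: four hyphen-separated fields. Decoding the mock value used in the test above:

```python
# version - trace_id (32 hex chars) - span_id (16 hex chars) - flags
traceparent = "00-0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-01"
version, trace_id_hex, span_id_hex, flags = traceparent.split("-")
assert version == "00" and len(trace_id_hex) == 32 and len(span_id_hex) == 16
print(int(trace_id_hex, 16))  # integer form, comparable to span.context.trace_id
```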
diff --git a/ui-v2/src/api/prefect.ts b/ui-v2/src/api/prefect.ts
index 4879b49f1811..bcbff9e8b841 100644
--- a/ui-v2/src/api/prefect.ts
+++ b/ui-v2/src/api/prefect.ts
@@ -482,6 +482,26 @@ export interface paths {
patch?: never;
trace?: never;
};
+ "/flow_runs/{id}/labels": {
+ parameters: {
+ query?: never;
+ header?: never;
+ path?: never;
+ cookie?: never;
+ };
+ get?: never;
+ put?: never;
+ post?: never;
+ delete?: never;
+ options?: never;
+ head?: never;
+ /**
+ * Update Flow Run Labels
+ * @description Update the labels of a flow run.
+ */
+ patch: operations["update_flow_run_labels_flow_runs__id__labels_patch"];
+ trace?: never;
+ };
"/task_runs/": {
parameters: {
query?: never;
@@ -10460,6 +10480,42 @@ export interface operations {
};
};
};
+ update_flow_run_labels_flow_runs__id__labels_patch: {
+ parameters: {
+ query?: never;
+ header?: {
+ "x-prefect-api-version"?: string;
+ };
+ path: {
+ /** @description The flow run id */
+ id: string;
+ };
+ cookie?: never;
+ };
+ requestBody: {
+ content: {
+ "application/json": Record;
+ };
+ };
+ responses: {
+ /** @description Successful Response */
+ 204: {
+ headers: {
+ [name: string]: unknown;
+ };
+ content?: never;
+ };
+ /** @description Validation Error */
+ 422: {
+ headers: {
+ [name: string]: unknown;
+ };
+ content: {
+ "application/json": components["schemas"]["HTTPValidationError"];
+ };
+ };
+ };
+ };
create_task_run_task_runs__post: {
parameters: {
query?: never;
From 059f0c2c2f7e841f02dfb4af5e4acafb59f3f4cc Mon Sep 17 00:00:00 2001
From: Alexander Streed
Date: Mon, 9 Dec 2024 10:07:19 -0600
Subject: [PATCH 42/92] Link Docker build `NODE_VERSION` to `.nvmrc` (#16282)
---
.github/workflows/docker-images.yaml | 6 ++++++
.github/workflows/python-tests.yaml | 6 ++++++
.nvmrc | 2 +-
Dockerfile | 2 +-
4 files changed, 14 insertions(+), 2 deletions(-)
diff --git a/.github/workflows/docker-images.yaml b/.github/workflows/docker-images.yaml
index 44ec28968e88..2630f5155626 100644
--- a/.github/workflows/docker-images.yaml
+++ b/.github/workflows/docker-images.yaml
@@ -101,6 +101,11 @@ jobs:
flavor: |
latest=false
+ - name: Get node version
+ id: get_node_version
+ run: |
+ echo "NODE_VERSION=$(cat .nvmrc)" >> $GITHUB_OUTPUT
+
- name: Build and push image
uses: docker/build-push-action@v6
with:
@@ -108,6 +113,7 @@ jobs:
platforms: linux/amd64,linux/arm64
build-args: |
PYTHON_VERSION=${{ matrix.python-version }}
+ NODE_VERSION=${{ steps.get_node_version.outputs.NODE_VERSION }}
${{ ( endsWith(matrix.flavor, 'conda') && 'BASE_IMAGE=prefect-conda' ) || '' }}
${{ ( endsWith(matrix.flavor, 'kubernetes') && 'PREFECT_EXTRAS=[kubernetes]' ) || '' }}
tags: ${{ join(steps.metadata-dev.outputs.tags) }},${{ join(steps.metadata-prod.outputs.tags) }}
diff --git a/.github/workflows/python-tests.yaml b/.github/workflows/python-tests.yaml
index 26d344a44641..ea0230832677 100644
--- a/.github/workflows/python-tests.yaml
+++ b/.github/workflows/python-tests.yaml
@@ -266,6 +266,11 @@ jobs:
tmp="sha-$SHORT_SHA-python${{ matrix.python-version }}"
echo "image_tag=${tmp}" >> $GITHUB_OUTPUT
+ - name: Get node version
+ id: get_node_version
+ run: |
+ echo "NODE_VERSION=$(cat .nvmrc)" >> $GITHUB_OUTPUT
+
- name: Login to DockerHub
uses: docker/login-action@v3
if: github.event.pull_request.head.repo.full_name == github.repository
@@ -283,6 +288,7 @@ jobs:
build-args: |
PYTHON_VERSION=${{ matrix.python-version }}
PREFECT_EXTRAS=[dev]
+ NODE_VERSION=${{ steps.get_node_version.outputs.NODE_VERSION }}
tags: prefecthq/prefect-dev:${{ steps.get_image_tag.outputs.image_tag }}
outputs: type=docker,dest=/tmp/image.tar
diff --git a/.nvmrc b/.nvmrc
index 6aab9b43fa34..02c8b485edb5 100644
--- a/.nvmrc
+++ b/.nvmrc
@@ -1 +1 @@
-v18.18.0
+18.18.0
diff --git a/Dockerfile b/Dockerfile
index 75abd96e188b..9534db5507a4 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -9,7 +9,7 @@ ARG BASE_IMAGE=python:${PYTHON_VERSION}-slim
# The version used to build the Python distributable.
ARG BUILD_PYTHON_VERSION=3.9
# The version used to build the UI distributable.
-ARG NODE_VERSION=16.15
+ARG NODE_VERSION=18.18.0
# Any extra Python requirements to install
ARG EXTRA_PIP_PACKAGES=""
From ee816ad2d20e1059fd04973e466f3ca1d95427f2 Mon Sep 17 00:00:00 2001
From: Nicholas Brown
Date: Mon, 9 Dec 2024 11:23:01 -0500
Subject: [PATCH 43/92] Chore: Bump prefect-ui-library to latest (#16278)
---
ui/package-lock.json | 34 +++++++++++++++++-----------------
ui/package.json | 2 +-
2 files changed, 18 insertions(+), 18 deletions(-)
diff --git a/ui/package-lock.json b/ui/package-lock.json
index a058ce3fa58f..5e1f5a8956e1 100644
--- a/ui/package-lock.json
+++ b/ui/package-lock.json
@@ -9,7 +9,7 @@
"version": "2.8.0",
"dependencies": {
"@prefecthq/prefect-design": "2.14.15",
- "@prefecthq/prefect-ui-library": "3.11.21",
+ "@prefecthq/prefect-ui-library": "3.11.24",
"@prefecthq/vue-charts": "2.0.5",
"@prefecthq/vue-compositions": "1.11.5",
"@types/lodash.debounce": "4.0.9",
@@ -1149,13 +1149,13 @@
}
},
"node_modules/@prefecthq/prefect-ui-library": {
- "version": "3.11.21",
- "resolved": "https://registry.npmjs.org/@prefecthq/prefect-ui-library/-/prefect-ui-library-3.11.21.tgz",
- "integrity": "sha512-wBQmSAXUhGIJCQX7kTeMBxkLCVSJMhfClSDqAm/IKPNAN8Wr7r2A9uRemJspdVgk2KLolGOhAagAlHi1pZtzlg==",
+ "version": "3.11.24",
+ "resolved": "https://registry.npmjs.org/@prefecthq/prefect-ui-library/-/prefect-ui-library-3.11.24.tgz",
+ "integrity": "sha512-ZJyTKldD6h1MoDqxDQe8GldiS1TLnMa3Bi0lPQthehZUPGqBtwZm+5uXe+Gh0oiyw8CrgrQnv+agffKFWSLqbQ==",
"dependencies": {
"@prefecthq/graphs": "2.4.1",
"axios": "1.7.4",
- "cronstrue": "^2.51.0",
+ "cronstrue": "^2.52.0",
"d3": "7.9.0",
"date-fns": "4.1.0",
"date-fns-tz": "3.2.0",
@@ -1163,11 +1163,11 @@
"lodash.merge": "4.6.2"
},
"peerDependencies": {
- "@prefecthq/prefect-design": "^2.11.5",
+ "@prefecthq/prefect-design": "^2.14.15",
"@prefecthq/vue-charts": "^2.0.3",
"@prefecthq/vue-compositions": "^1.11.4",
"vee-validate": "^4.7.0",
- "vue": "^3.4.26",
+ "vue": "^3.5.0",
"vue-router": "^4.3.0"
}
},
@@ -2681,9 +2681,9 @@
"devOptional": true
},
"node_modules/cronstrue": {
- "version": "2.51.0",
- "resolved": "https://registry.npmjs.org/cronstrue/-/cronstrue-2.51.0.tgz",
- "integrity": "sha512-7EG9VaZZ5SRbZ7m25dmP6xaS0qe9ay6wywMskFOU/lMDKa+3gZr2oeT5OUfXwRP/Bcj8wxdYJ65AHU70CI3tsw==",
+ "version": "2.52.0",
+ "resolved": "https://registry.npmjs.org/cronstrue/-/cronstrue-2.52.0.tgz",
+ "integrity": "sha512-NKgHbWkSZXJUcaBHSsyzC8eegD6bBd4O0oCI6XMIJ+y4Bq3v4w7sY3wfWoKPuVlq9pQHRB6od0lmKpIqi8TlKA==",
"bin": {
"cronstrue": "bin/cli.js"
}
@@ -8132,13 +8132,13 @@
}
},
"@prefecthq/prefect-ui-library": {
- "version": "3.11.21",
- "resolved": "https://registry.npmjs.org/@prefecthq/prefect-ui-library/-/prefect-ui-library-3.11.21.tgz",
- "integrity": "sha512-wBQmSAXUhGIJCQX7kTeMBxkLCVSJMhfClSDqAm/IKPNAN8Wr7r2A9uRemJspdVgk2KLolGOhAagAlHi1pZtzlg==",
+ "version": "3.11.24",
+ "resolved": "https://registry.npmjs.org/@prefecthq/prefect-ui-library/-/prefect-ui-library-3.11.24.tgz",
+ "integrity": "sha512-ZJyTKldD6h1MoDqxDQe8GldiS1TLnMa3Bi0lPQthehZUPGqBtwZm+5uXe+Gh0oiyw8CrgrQnv+agffKFWSLqbQ==",
"requires": {
"@prefecthq/graphs": "2.4.1",
"axios": "1.7.4",
- "cronstrue": "^2.51.0",
+ "cronstrue": "^2.52.0",
"d3": "7.9.0",
"date-fns": "4.1.0",
"date-fns-tz": "3.2.0",
@@ -9159,9 +9159,9 @@
"devOptional": true
},
"cronstrue": {
- "version": "2.51.0",
- "resolved": "https://registry.npmjs.org/cronstrue/-/cronstrue-2.51.0.tgz",
- "integrity": "sha512-7EG9VaZZ5SRbZ7m25dmP6xaS0qe9ay6wywMskFOU/lMDKa+3gZr2oeT5OUfXwRP/Bcj8wxdYJ65AHU70CI3tsw=="
+ "version": "2.52.0",
+ "resolved": "https://registry.npmjs.org/cronstrue/-/cronstrue-2.52.0.tgz",
+ "integrity": "sha512-NKgHbWkSZXJUcaBHSsyzC8eegD6bBd4O0oCI6XMIJ+y4Bq3v4w7sY3wfWoKPuVlq9pQHRB6od0lmKpIqi8TlKA=="
},
"cross-spawn": {
"version": "7.0.6",
diff --git a/ui/package.json b/ui/package.json
index 9f2437f8a05e..f68e5c7646a8 100644
--- a/ui/package.json
+++ b/ui/package.json
@@ -11,7 +11,7 @@
},
"dependencies": {
"@prefecthq/prefect-design": "2.14.15",
- "@prefecthq/prefect-ui-library": "3.11.21",
+ "@prefecthq/prefect-ui-library": "3.11.24",
"@prefecthq/vue-charts": "2.0.5",
"@prefecthq/vue-compositions": "1.11.5",
"@types/lodash.debounce": "4.0.9",
From 8401e209ebcbead3ae110d2b29ebf9410b7f661d Mon Sep 17 00:00:00 2001
From: nate nowack
Date: Mon, 9 Dec 2024 10:33:02 -0600
Subject: [PATCH 44/92] fix typing and route in flow run logs download api ref
(#16281)
---
.../rest-api/server/flow-runs/download-logs.mdx | 2 +-
src/prefect/server/api/flow_runs.py | 14 ++++++++------
2 files changed, 9 insertions(+), 7 deletions(-)
diff --git a/docs/v3/api-ref/rest-api/server/flow-runs/download-logs.mdx b/docs/v3/api-ref/rest-api/server/flow-runs/download-logs.mdx
index 025ea4c68981..f2a9f88c417b 100644
--- a/docs/v3/api-ref/rest-api/server/flow-runs/download-logs.mdx
+++ b/docs/v3/api-ref/rest-api/server/flow-runs/download-logs.mdx
@@ -1,3 +1,3 @@
---
-openapi: get /api/flow_runs/{id}/logs
+openapi: get /api/flow_runs/{id}/logs/download
---
\ No newline at end of file
diff --git a/src/prefect/server/api/flow_runs.py b/src/prefect/server/api/flow_runs.py
index 338193b13f3e..864469640a14 100644
--- a/src/prefect/server/api/flow_runs.py
+++ b/src/prefect/server/api/flow_runs.py
@@ -56,12 +56,12 @@
async def create_flow_run(
flow_run: schemas.actions.FlowRunCreate,
db: PrefectDBInterface = Depends(provide_database_interface),
- response: Response = None,
+ response: Response = None, # type: ignore
created_by: Optional[schemas.core.CreatedBy] = Depends(dependencies.get_created_by),
orchestration_parameters: Dict[str, Any] = Depends(
orchestration_dependencies.provide_flow_orchestration_parameters
),
- api_version=Depends(dependencies.provide_request_api_version),
+ api_version: str = Depends(dependencies.provide_request_api_version),
) -> schemas.responses.FlowRunResponse:
"""
Create a flow run. If a flow run with the same flow_id and
@@ -70,20 +70,22 @@ async def create_flow_run(
If no state is provided, the flow run will be created in a PENDING state.
"""
# hydrate the input model into a full flow run / state model
- flow_run = schemas.core.FlowRun(**flow_run.model_dump(), created_by=created_by)
+ flow_run_object = schemas.core.FlowRun(
+ **flow_run.model_dump(), created_by=created_by
+ )
# pass the request version to the orchestration engine to support compatibility code
orchestration_parameters.update({"api-version": api_version})
- if not flow_run.state:
- flow_run.state = schemas.states.Pending()
+ if not flow_run_object.state:
+ flow_run_object.state = schemas.states.Pending()
now = pendulum.now("UTC")
async with db.session_context(begin_transaction=True) as session:
model = await models.flow_runs.create_flow_run(
session=session,
- flow_run=flow_run,
+ flow_run=flow_run_object,
orchestration_parameters=orchestration_parameters,
)
if model.created >= now:
From 340e296d29e01bc986c3146e32526f21bbe9c4fe Mon Sep 17 00:00:00 2001
From: nate nowack
Date: Mon, 9 Dec 2024 10:33:11 -0600
Subject: [PATCH 45/92] avoiding timing issue in flaky test (#16275)
---
tests/test_flows.py | 5 +++--
1 file changed, 3 insertions(+), 2 deletions(-)
diff --git a/tests/test_flows.py b/tests/test_flows.py
index b68de0d96708..40793fe38662 100644
--- a/tests/test_flows.py
+++ b/tests/test_flows.py
@@ -1534,9 +1534,10 @@ def timeout_noticing_task():
@flow(timeout_seconds=0.1)
def my_subflow():
- time.sleep(0.5)
+ start = time.monotonic()
+ while time.monotonic() - start < 0.5:
+ pass
timeout_noticing_task()
- time.sleep(10)
nonlocal completed
completed = True
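
The fix swaps a blocking `time.sleep` for a monotonic busy-wait so the thread stays active while the 0.1-second flow timeout elapses. As a generic helper (an illustration, not part of the patch):

```python
import time

def busy_wait(seconds: float) -> None:
    # Spin on the monotonic clock instead of sleeping, keeping the
    # thread on-CPU while the timeout window passes.
    start = time.monotonic()
    while time.monotonic() - start < seconds:
        pass
```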
From f8a290efb4f5d52b81efb51d0d35c3dac495ace3 Mon Sep 17 00:00:00 2001
From: daniel-prefect
Date: Mon, 9 Dec 2024 11:15:56 -0800
Subject: [PATCH 46/92] Add a new tutorial which shows how to debug a failing
pipeline (#16225)
Co-authored-by: Jeff Hale
---
docs/mint.json | 3 +-
docs/v3/tutorials/debug.mdx | 116 +++++++++++++++++++++++++++++++++
docs/v3/tutorials/platform.mdx | 2 +
3 files changed, 120 insertions(+), 1 deletion(-)
create mode 100644 docs/v3/tutorials/debug.mdx
diff --git a/docs/mint.json b/docs/mint.json
index 456c9e75b15c..7adb99bc4338 100644
--- a/docs/mint.json
+++ b/docs/mint.json
@@ -73,7 +73,8 @@
{
"group": "For platform engineers",
"pages": [
- "v3/tutorials/platform"
+ "v3/tutorials/platform",
+ "v3/tutorials/debug"
]
}
],
diff --git a/docs/v3/tutorials/debug.mdx b/docs/v3/tutorials/debug.mdx
new file mode 100644
index 000000000000..dabac31e700e
--- /dev/null
+++ b/docs/v3/tutorials/debug.mdx
@@ -0,0 +1,116 @@
+---
+title: Debug a data pipeline
+description: Learn how to troubleshoot flow runs that fail.
+---
+
+In the [Set up a platform for data pipelines](/v3/tutorials/platform) tutorial, you used Prefect Cloud to set up a platform for data pipelines.
+In this tutorial, you'll learn what to do when those data pipelines fail.
+
+
+This tutorial starts where the [previous tutorial](/v3/tutorials/platform) leaves off, so complete that one first.
+
+
+## Find failures
+
+You can use the Prefect Cloud dashboard to find failures.
+
+1. Sign in to Prefect Cloud.
+1. Use the workspace switcher to open the `staging` workspace that you created in the last tutorial.
+1. Go to **Home** and look for red bars in the **Flow Runs** section; these indicate failed flow runs.
+1. Hover over a red bar to see more details about the flow run: name, deployment, duration, timestamp, and tags.
+
+
+You can filter by a specific tag (e.g. `team-a`) if you're only interested in a specific set of flows.
+
+
+## Debug a failure
+
+A single flow might experience failures on several runs.
+When this happens, it can be helpful to inspect the first failure in the series.
+
+1. In the **Flow Runs** section on the **Home** page, expand the `data-pipeline` flow.
+1. You will see a list of failing `data-pipeline` flow runs in reverse chronological order.
+1. Use the pagination controls to navigate to the last failure in the list; this is the first failure that occurred.
+1. Click the name of the flow run to go to its detail page.
+1. From the flow run detail page, scroll down to the **Logs** section in the right panel.
+1. Look for an error message similar to the following:
+
+```
+File "/opt/prefect/demos/simulate_failures.py", line 12, in process_data
+ raise Exception(f"Run failed")
+```
+
+It looks like there's an error in the `simulate_failures.py` file.
+Now that you've found the failure, the next step is to fix the underlying code.
+
+## Update the code
+
+Open the `simulate_failures.py` file and look at line 12.
+
+```python simulate_failures.py {12}
+from prefect import flow, task
+import argparse
+import asyncio
+from prefect.client.orchestration import get_client
+
+
+@task
+def process_data(run: int, fail_at_run: int | None = None) -> bool:
+ """Simulate data processing with failures"""
+
+ # Simulate persistent failures
+ if fail_at_run and run > fail_at_run:
+ raise Exception(f"Run failed")
+
+ return True
+
+# ...
+```
+
+The `if` statement is the problem.
+If you specify the `--fail-at-run` flag, once the flow runs more than `fail_at_run` times, the flow fails with an exception.
+Remove the `if` statement to fix this failure.
+We added this statement to give you something to fix. :)
+
+```python simulate_failures.py
+from prefect import flow, task
+import argparse
+import asyncio
+from prefect.client.orchestration import get_client
+
+@task
+def process_data(run: int, fail_at_run: int | None = None) -> bool:
+ """Simulate data processing with failures"""
+
+ return True
+
+# ...
+```
+
+Now, all flow runs succeed in spite of the `--fail-at-run` flag.
+Deploy the fix to the staging workspace to confirm this new behavior.
+
+```bash
+prefect cloud workspace --set "/staging"
+python simulate_failures.py --fail-at-run 3
+```
+
+After the script finishes, open the **Home** page in Prefect Cloud to verify that the flow run is no longer failing.
+
+You can now switch workspaces to update the code used in the production workspace as well.
+
+```bash
+prefect cloud workspace --set "/production"
+python simulate_failures.py
+```
+
+
+## Next steps
+
+In this tutorial, you successfully used Prefect Cloud to fix a failing data pipeline.
+
+To take this to the next level, learn how to [set up an alert](/v3/automate/events/automations-triggers) so that you get notified about failures automatically.
+
+
+Need help? [Book a meeting](https://calendly.com/prefect-experts/prefect-product-advocates?utm_campaign=prefect_docs_cloud&utm_content=prefect_docs&utm_medium=docs&utm_source=docs) with a Prefect Product Advocate to get your questions answered.
+
diff --git a/docs/v3/tutorials/platform.mdx b/docs/v3/tutorials/platform.mdx
index 07f5543642d7..bcd5ceadaac2 100644
--- a/docs/v3/tutorials/platform.mdx
+++ b/docs/v3/tutorials/platform.mdx
@@ -188,6 +188,8 @@ If this doesn't perfectly match your use case, here are some variations you can
- You can [write flows from scratch](/v3/develop/write-flows).
- You can [automate deployments with GitHub Actions](/v3/deploy/infrastructure-concepts/deploy-ci-cd).
+Next, learn how to [debug a flow run](/v3/tutorials/debug) when things go wrong.
+
Need help? [Book a meeting](https://calendly.com/prefect-experts/prefect-product-advocates?utm_campaign=prefect_docs_cloud&utm_content=prefect_docs&utm_medium=docs&utm_source=docs) with a Prefect Product Advocate to get your questions answered.
From 3ae5acece3d30b6a5a9ab56a46f08e0d4e0c3265 Mon Sep 17 00:00:00 2001
From: Emil Christensen
Date: Mon, 9 Dec 2024 14:44:48 -0500
Subject: [PATCH 47/92] Adds note to rate limits doc linking to client settings
(#16286)
---
docs/v3/manage/cloud/rate-limits.mdx | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/docs/v3/manage/cloud/rate-limits.mdx b/docs/v3/manage/cloud/rate-limits.mdx
index 24d8c70091cf..ca4eaa8112df 100644
--- a/docs/v3/manage/cloud/rate-limits.mdx
+++ b/docs/v3/manage/cloud/rate-limits.mdx
@@ -20,7 +20,7 @@ The `flow_runs`, `task_runs`, and `flows` endpoints and their subroutes are limi
- 400 requests per minute for Free accounts
- 2,000 requests per minute for Pro accounts
-These endpoints return a `429` response with an appropriate `Retry-After` header if this limit is triggered.
+These endpoints return a `429` response with an appropriate `Retry-After` header if this limit is triggered. See [ClientSettings](/v3/develop/settings-ref#clientsettings) for more information on how retries are handled client-side and how to modify the default behavior.
The `logs` endpoint is limited to:
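
For a rough sense of what honoring `Retry-After` means client-side — a hypothetical helper, not Prefect's built-in retry logic:

```python
import time

import httpx

def get_with_retry(url: str, max_attempts: int = 5) -> httpx.Response:
    response = httpx.get(url)
    for _ in range(max_attempts - 1):
        if response.status_code != 429:
            break
        # Back off for the server-suggested interval before retrying.
        time.sleep(float(response.headers.get("Retry-After", "1")))
        response = httpx.get(url)
    return response
```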
From 73e8c496dd328b5c4ea34e4d6c7201dc0ac3c583 Mon Sep 17 00:00:00 2001
From: Devin Villarosa <102188207+devinvillarosa@users.noreply.github.com>
Date: Mon, 9 Dec 2024 13:13:34 -0800
Subject: [PATCH 48/92] [UI v2]: Adds active column and actions for global
concurrency limit table (#16268)
---
.../data-table/actions-cell.tsx | 56 +++++++++++++++
.../data-table/active-cell.tsx | 48 +++++++++++++
.../data-table/data-table.tsx | 32 ++++++++-
.../create-or-edit-limit-dialog/index.tsx | 21 ++++--
.../use-create-or-edit-limit-form.ts | 0
.../dialog/delete-limit-dialog.tsx | 69 +++++++++++++++++++
.../global-concurrency-view/dialog/index.tsx | 50 ++++++++++++++
...bal-concurrency-limit-empty-state.test.tsx | 0
.../global-concurrency-limit-empty-state.tsx | 0
.../empty-state/index.ts | 1 +
.../global-concurrency-view/index.tsx | 54 +++++++++++----
ui-v2/src/components/ui/switch.tsx | 2 +-
12 files changed, 311 insertions(+), 22 deletions(-)
create mode 100644 ui-v2/src/components/concurrency/global-concurrency-view/data-table/actions-cell.tsx
create mode 100644 ui-v2/src/components/concurrency/global-concurrency-view/data-table/active-cell.tsx
rename ui-v2/src/components/concurrency/global-concurrency-view/{ => dialog}/create-or-edit-limit-dialog/index.tsx (86%)
rename ui-v2/src/components/concurrency/global-concurrency-view/{ => dialog}/create-or-edit-limit-dialog/use-create-or-edit-limit-form.ts (100%)
create mode 100644 ui-v2/src/components/concurrency/global-concurrency-view/dialog/delete-limit-dialog.tsx
create mode 100644 ui-v2/src/components/concurrency/global-concurrency-view/dialog/index.tsx
rename ui-v2/src/components/concurrency/global-concurrency-view/{ => empty-state}/global-concurrency-limit-empty-state.test.tsx (100%)
rename ui-v2/src/components/concurrency/global-concurrency-view/{ => empty-state}/global-concurrency-limit-empty-state.tsx (100%)
create mode 100644 ui-v2/src/components/concurrency/global-concurrency-view/empty-state/index.ts
diff --git a/ui-v2/src/components/concurrency/global-concurrency-view/data-table/actions-cell.tsx b/ui-v2/src/components/concurrency/global-concurrency-view/data-table/actions-cell.tsx
new file mode 100644
index 000000000000..6d3d51cac207
--- /dev/null
+++ b/ui-v2/src/components/concurrency/global-concurrency-view/data-table/actions-cell.tsx
@@ -0,0 +1,56 @@
+import { Button } from "@/components/ui/button";
+import {
+ DropdownMenu,
+ DropdownMenuContent,
+ DropdownMenuItem,
+ DropdownMenuLabel,
+ DropdownMenuTrigger,
+} from "@/components/ui/dropdown-menu";
+import { Icon } from "@/components/ui/icons";
+import { type GlobalConcurrencyLimit } from "@/hooks/global-concurrency-limits";
+import { useToast } from "@/hooks/use-toast";
+import { CellContext } from "@tanstack/react-table";
+
+type Props = CellContext<GlobalConcurrencyLimit, unknown> & {
+ onEditRow: (row: GlobalConcurrencyLimit) => void;
+ onDeleteRow: (row: GlobalConcurrencyLimit) => void;
+};
+
+export const ActionsCell = ({ onEditRow, onDeleteRow, ...props }: Props) => {
+ const { toast } = useToast();
+
+ const handleCopyId = (id: string | undefined) => {
+ if (!id) {
+ throw new Error("'id' field expected in GlobalConcurrencyLimit");
+ }
+ void navigator.clipboard.writeText(id);
+ toast({ title: "ID copied" });
+ };
+
+ const row = props.row.original;
+
+	return (
+		<DropdownMenu>
+			<DropdownMenuTrigger asChild>
+				<Button variant="outline" size="icon" aria-label="open actions menu">
+					<Icon id="MoreVertical" />
+				</Button>
+			</DropdownMenuTrigger>
+			<DropdownMenuContent>
+				<DropdownMenuLabel>Actions</DropdownMenuLabel>
+				<DropdownMenuItem onClick={() => handleCopyId(row.id)}>
+					Copy ID
+				</DropdownMenuItem>
+				<DropdownMenuItem onClick={() => onDeleteRow(row)}>
+					Delete
+				</DropdownMenuItem>
+				<DropdownMenuItem onClick={() => onEditRow(row)}>
+					Edit
+				</DropdownMenuItem>
+			</DropdownMenuContent>
+		</DropdownMenu>
+	);
+};
diff --git a/ui-v2/src/components/concurrency/global-concurrency-view/data-table/active-cell.tsx b/ui-v2/src/components/concurrency/global-concurrency-view/data-table/active-cell.tsx
new file mode 100644
index 000000000000..17d3ad943165
--- /dev/null
+++ b/ui-v2/src/components/concurrency/global-concurrency-view/data-table/active-cell.tsx
@@ -0,0 +1,48 @@
+import { useToast } from "@/hooks/use-toast";
+import type { CellContext } from "@tanstack/react-table";
+
+import { Switch } from "@/components/ui/switch";
+import {
+ type GlobalConcurrencyLimit,
+ useUpdateGlobalConcurrencyLimit,
+} from "@/hooks/global-concurrency-limits";
+
+export const ActiveCell = (
+ props: CellContext<GlobalConcurrencyLimit, boolean>,
+) => {
+ const { toast } = useToast();
+ const { updateGlobalConcurrencyLimit } = useUpdateGlobalConcurrencyLimit();
+
+ const handleCheckedChange = (checked: boolean, id: string | undefined) => {
+ if (!id) {
+ throw new Error("Expecting 'id' of global concurrent limit");
+ }
+
+ updateGlobalConcurrencyLimit(
+ {
+ id_or_name: id,
+ active: checked,
+ },
+ {
+ onSuccess: () => {
+ toast({ description: "Concurrency limit updated" });
+ },
+ onError: (error) => {
+ const message =
+ error.message || "Unknown error while updating active field.";
+ console.error(message);
+ },
+ },
+ );
+ };
+
+ const rowActive = props.getValue();
+ const rowId = props.row.original.id;
+
+	return (
+		<Switch
+			checked={rowActive}
+			onCheckedChange={(checked) => handleCheckedChange(checked, rowId)}
+		/>
+	);
+};
diff --git a/ui-v2/src/components/concurrency/global-concurrency-view/data-table/data-table.tsx b/ui-v2/src/components/concurrency/global-concurrency-view/data-table/data-table.tsx
index 4645d6c5e4bf..a3b893c61bcd 100644
--- a/ui-v2/src/components/concurrency/global-concurrency-view/data-table/data-table.tsx
+++ b/ui-v2/src/components/concurrency/global-concurrency-view/data-table/data-table.tsx
@@ -6,8 +6,18 @@ import {
useReactTable,
} from "@tanstack/react-table";
+import { ActionsCell } from "./actions-cell";
+import { ActiveCell } from "./active-cell";
+
const columnHelper = createColumnHelper<GlobalConcurrencyLimit>();
-const columns = [
+
+const createColumns = ({
+ onEditRow,
+ onDeleteRow,
+}: {
+ onEditRow: (row: GlobalConcurrencyLimit) => void;
+ onDeleteRow: (row: GlobalConcurrencyLimit) => void;
+}) => [
columnHelper.accessor("name", {
header: "Name",
}),
@@ -20,16 +30,32 @@ const columns = [
columnHelper.accessor("slot_decay_per_second", {
header: "Slots Decay Per Second",
}),
+ columnHelper.accessor("active", {
+ header: "Active",
+ cell: ActiveCell,
+ }),
+ columnHelper.display({
+ id: "actions",
+ cell: (props) => (
+		<ActionsCell {...props} onEditRow={onEditRow} onDeleteRow={onDeleteRow} />
+ ),
+ }),
];
type Props = {
data: Array<GlobalConcurrencyLimit>;
+ onEditRow: (row: GlobalConcurrencyLimit) => void;
+ onDeleteRow: (row: GlobalConcurrencyLimit) => void;
};
-export const GlobalConcurrencyDataTable = ({ data }: Props) => {
+export const GlobalConcurrencyDataTable = ({
+ data,
+ onEditRow,
+ onDeleteRow,
+}: Props) => {
const table = useReactTable({
data,
- columns,
+ columns: createColumns({ onEditRow, onDeleteRow }),
getCoreRowModel: getCoreRowModel(),
});
diff --git a/ui-v2/src/components/concurrency/global-concurrency-view/create-or-edit-limit-dialog/index.tsx b/ui-v2/src/components/concurrency/global-concurrency-view/dialog/create-or-edit-limit-dialog/index.tsx
similarity index 86%
rename from ui-v2/src/components/concurrency/global-concurrency-view/create-or-edit-limit-dialog/index.tsx
rename to ui-v2/src/components/concurrency/global-concurrency-view/dialog/create-or-edit-limit-dialog/index.tsx
index 5e737dda25d3..fad2fb64385c 100644
--- a/ui-v2/src/components/concurrency/global-concurrency-view/create-or-edit-limit-dialog/index.tsx
+++ b/ui-v2/src/components/concurrency/global-concurrency-view/dialog/create-or-edit-limit-dialog/index.tsx
@@ -22,17 +22,15 @@ import { type GlobalConcurrencyLimit } from "@/hooks/global-concurrency-limits";
import { useCreateOrEditLimitForm } from "./use-create-or-edit-limit-form";
type Props = {
- limitToUpdate: undefined | GlobalConcurrencyLimit;
+ limitToUpdate?: GlobalConcurrencyLimit;
onOpenChange: (open: boolean) => void;
onSubmit: () => void;
- open: boolean;
};
export const CreateOrEditLimitDialog = ({
limitToUpdate,
onOpenChange,
onSubmit,
- open,
}: Props) => {
const { form, isLoading, saveOrUpdate } = useCreateOrEditLimitForm({
limitToUpdate,
@@ -44,7 +42,7 @@ export const CreateOrEditLimitDialog = ({
: "Add Concurrency Limit";
return (
-