From dfb265f2969cbe5ba0dd01900c76f5b05c786ab9 Mon Sep 17 00:00:00 2001 From: Tat Dat Duong Date: Tue, 8 Oct 2024 16:21:38 +0200 Subject: [PATCH 01/19] feat(debug): send tasks info --- libs/langgraph/langgraph/pregel/debug.py | 61 +++++++++++++++++++++++- 1 file changed, 59 insertions(+), 2 deletions(-) diff --git a/libs/langgraph/langgraph/pregel/debug.py b/libs/langgraph/langgraph/pregel/debug.py index 7e8f9546f..985cffe75 100644 --- a/libs/langgraph/langgraph/pregel/debug.py +++ b/libs/langgraph/langgraph/pregel/debug.py @@ -15,14 +15,25 @@ ) from uuid import UUID +from langchain_core.runnables import RunnableLambda, RunnableSequence from langchain_core.runnables.config import RunnableConfig +from langchain_core.runnables.utils import get_function_nonlocals from langchain_core.utils.input import get_bolded_text, get_colored_text from langgraph.channels.base import BaseChannel from langgraph.checkpoint.base import Checkpoint, CheckpointMetadata, PendingWrite -from langgraph.constants import ERROR, INTERRUPT, TAG_HIDDEN +from langgraph.constants import ( + CONF, + CONFIG_KEY_CHECKPOINT_NS, + ERROR, + INTERRUPT, + NS_END, + NS_SEP, + TAG_HIDDEN, +) from langgraph.pregel.io import read_channels from langgraph.types import PregelExecutableTask, PregelTask, StateSnapshot +from langgraph.utils.runnable import RunnableCallable, RunnableSeq class TaskPayload(TypedDict): @@ -45,6 +56,7 @@ class CheckpointTask(TypedDict): name: str error: Optional[str] interrupts: list[dict] + state: Optional[RunnableConfig] class CheckpointPayload(TypedDict): @@ -140,6 +152,49 @@ def map_debug_checkpoint( parent_config: Optional[RunnableConfig], ) -> Iterator[DebugOutputCheckpoint]: """Produce "checkpoint" events for stream_mode=debug.""" + + parent_ns = config[CONF].get(CONFIG_KEY_CHECKPOINT_NS, "") + task_states: dict[str, RunnableConfig] = {} + + for task in tasks: + subgraph = None + candidates = [task.proc] + for c in candidates: + # Cannot do isinstance(c, Pregel) due to circular 
imports + if "Pregel" in [t.__name__ for t in type(c).__mro__]: + subgraph = c + + if isinstance(c, RunnableSequence) or isinstance(c, RunnableSeq): + candidates.extend(c.steps) + elif isinstance(c, RunnableLambda): + candidates.extend(c.deps) + elif isinstance(c, RunnableCallable): + if c.func is not None: + candidates.extend( + nl.__self__ if hasattr(nl, "__self__") else nl + for nl in get_function_nonlocals(c.func) + ) + if c.afunc is not None: + candidates.extend( + nl.__self__ if hasattr(nl, "__self__") else nl + for nl in get_function_nonlocals(c.afunc) + ) + if not subgraph: + continue + + # assemble checkpoint_ns for this task + task_ns = f"{task.name}{NS_END}{task.id}" + if parent_ns: + task_ns = f"{parent_ns}{NS_SEP}{task_ns}" + + # set config as signal that subgraph checkpoints exist + task_states[task.id] = { + CONF: { + "thread_id": config[CONF]["thread_id"], + CONFIG_KEY_CHECKPOINT_NS: task_ns, + } + } + yield { "type": "checkpoint", "timestamp": checkpoint["ts"], @@ -155,14 +210,16 @@ def map_debug_checkpoint( "id": t.id, "name": t.name, "error": t.error, + "state": task_states.get(t.id) if task_states else None, } if t.error else { "id": t.id, "name": t.name, "interrupts": tuple(asdict(i) for i in t.interrupts), + "state": task_states.get(t.id) if task_states else None, } - for t in tasks_w_writes(tasks, pending_writes, None) + for t in tasks_w_writes(tasks, pending_writes, task_states) ], }, } From c1081af6bc74306175af324963cbbcfeba40f881 Mon Sep 17 00:00:00 2001 From: Tat Dat Duong Date: Tue, 8 Oct 2024 16:41:48 +0200 Subject: [PATCH 02/19] Fix lint --- libs/langgraph/langgraph/pregel/debug.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/libs/langgraph/langgraph/pregel/debug.py b/libs/langgraph/langgraph/pregel/debug.py index 985cffe75..cf90c431b 100644 --- a/libs/langgraph/langgraph/pregel/debug.py +++ b/libs/langgraph/langgraph/pregel/debug.py @@ -154,7 +154,7 @@ def map_debug_checkpoint( """Produce "checkpoint" events for 
stream_mode=debug.""" parent_ns = config[CONF].get(CONFIG_KEY_CHECKPOINT_NS, "") - task_states: dict[str, RunnableConfig] = {} + task_states: dict[str, Union[RunnableConfig, StateSnapshot]] = {} for task in tasks: subgraph = None From 50b1a1e230a753fa8a0e26cca4968646f966a730 Mon Sep 17 00:00:00 2001 From: Tat Dat Duong Date: Tue, 8 Oct 2024 17:01:40 +0200 Subject: [PATCH 03/19] Cleanup, move instanceof checks to an util --- libs/langgraph/langgraph/pregel/__init__.py | 33 +++---------------- libs/langgraph/langgraph/pregel/debug.py | 28 ++--------------- libs/langgraph/langgraph/pregel/utils.py | 35 +++++++++++++++++++++ 3 files changed, 41 insertions(+), 55 deletions(-) diff --git a/libs/langgraph/langgraph/pregel/__init__.py b/libs/langgraph/langgraph/pregel/__init__.py index 3d7414494..268c0241b 100644 --- a/libs/langgraph/langgraph/pregel/__init__.py +++ b/libs/langgraph/langgraph/pregel/__init__.py @@ -26,7 +26,6 @@ from langchain_core.globals import get_debug from langchain_core.runnables import ( Runnable, - RunnableLambda, RunnableSequence, ) from langchain_core.runnables.base import Input, Output @@ -37,7 +36,6 @@ ) from langchain_core.runnables.utils import ( ConfigurableFieldSpec, - get_function_nonlocals, get_unique_config_specs, ) from langchain_core.tracers._streaming import _StreamingCallbackHandler @@ -86,7 +84,7 @@ from langgraph.pregel.read import PregelNode from langgraph.pregel.retry import RetryPolicy from langgraph.pregel.runner import PregelRunner -from langgraph.pregel.utils import get_new_channel_versions +from langgraph.pregel.utils import find_subgraph_pregel, get_new_channel_versions from langgraph.pregel.validate import validate_graph, validate_keys from langgraph.pregel.write import ChannelWrite, ChannelWriteEntry from langgraph.store.base import BaseStore @@ -100,7 +98,6 @@ ) from langgraph.utils.pydantic import create_model from langgraph.utils.queue import AsyncQueue, SyncQueue # type: ignore[attr-defined] -from 
langgraph.utils.runnable import RunnableCallable WriteValue = Union[Callable[[Input], Output], Any] @@ -391,32 +388,10 @@ def get_subgraphs( if namespace is not None: if not namespace.startswith(name): continue + # find the subgraph, if any - graph: Optional[Pregel] = None - candidates = [node.bound] - for candidate in candidates: - if ( - isinstance(candidate, Pregel) - # subgraphs that disabled checkpointing are not considered - and candidate.checkpointer is not False - ): - graph = candidate - break - elif isinstance(candidate, RunnableSequence): - candidates.extend(candidate.steps) - elif isinstance(candidate, RunnableLambda): - candidates.extend(candidate.deps) - elif isinstance(candidate, RunnableCallable): - if candidate.func is not None: - candidates.extend( - nl.__self__ if hasattr(nl, "__self__") else nl - for nl in get_function_nonlocals(candidate.func) - ) - if candidate.afunc is not None: - candidates.extend( - nl.__self__ if hasattr(nl, "__self__") else nl - for nl in get_function_nonlocals(candidate.afunc) - ) + graph: Optional[Pregel] = find_subgraph_pregel(node.bound) + # if found, yield recursively if graph: if name == namespace: diff --git a/libs/langgraph/langgraph/pregel/debug.py b/libs/langgraph/langgraph/pregel/debug.py index cf90c431b..0516c0fd1 100644 --- a/libs/langgraph/langgraph/pregel/debug.py +++ b/libs/langgraph/langgraph/pregel/debug.py @@ -15,9 +15,7 @@ ) from uuid import UUID -from langchain_core.runnables import RunnableLambda, RunnableSequence from langchain_core.runnables.config import RunnableConfig -from langchain_core.runnables.utils import get_function_nonlocals from langchain_core.utils.input import get_bolded_text, get_colored_text from langgraph.channels.base import BaseChannel @@ -32,8 +30,8 @@ TAG_HIDDEN, ) from langgraph.pregel.io import read_channels +from langgraph.pregel.utils import find_subgraph_pregel from langgraph.types import PregelExecutableTask, PregelTask, StateSnapshot -from langgraph.utils.runnable import 
RunnableCallable, RunnableSeq class TaskPayload(TypedDict): @@ -157,29 +155,7 @@ def map_debug_checkpoint( task_states: dict[str, Union[RunnableConfig, StateSnapshot]] = {} for task in tasks: - subgraph = None - candidates = [task.proc] - for c in candidates: - # Cannot do isinstance(c, Pregel) due to circular imports - if "Pregel" in [t.__name__ for t in type(c).__mro__]: - subgraph = c - - if isinstance(c, RunnableSequence) or isinstance(c, RunnableSeq): - candidates.extend(c.steps) - elif isinstance(c, RunnableLambda): - candidates.extend(c.deps) - elif isinstance(c, RunnableCallable): - if c.func is not None: - candidates.extend( - nl.__self__ if hasattr(nl, "__self__") else nl - for nl in get_function_nonlocals(c.func) - ) - if c.afunc is not None: - candidates.extend( - nl.__self__ if hasattr(nl, "__self__") else nl - for nl in get_function_nonlocals(c.afunc) - ) - if not subgraph: + if not find_subgraph_pregel(task.proc): continue # assemble checkpoint_ns for this task diff --git a/libs/langgraph/langgraph/pregel/utils.py b/libs/langgraph/langgraph/pregel/utils.py index 3a29e5ed1..2f9b9d411 100644 --- a/libs/langgraph/langgraph/pregel/utils.py +++ b/libs/langgraph/langgraph/pregel/utils.py @@ -1,4 +1,13 @@ +from typing import TYPE_CHECKING, Optional + +from langchain_core.runnables import RunnableLambda, RunnableSequence +from langchain_core.runnables.utils import get_function_nonlocals + from langgraph.checkpoint.base import ChannelVersions +from langgraph.utils.runnable import Runnable, RunnableCallable, RunnableSeq + +if TYPE_CHECKING: + from langgraph.pregel import Pregel def get_new_channel_versions( @@ -17,3 +26,29 @@ def get_new_channel_versions( new_versions = current_versions return new_versions + + +def find_subgraph_pregel(candidate: Runnable) -> Optional[Pregel]: + candidates: list[Runnable] = [candidate] + + for c in candidates: + # Cannot do isinstance(c, Pregel) due to circular imports + if "Pregel" in [t.__name__ for t in type(c).__mro__]: + 
return c + elif isinstance(c, RunnableSequence) or isinstance(c, RunnableSeq): + candidates.extend(c.steps) + elif isinstance(c, RunnableLambda): + candidates.extend(c.deps) + elif isinstance(c, RunnableCallable): + if c.func is not None: + candidates.extend( + nl.__self__ if hasattr(nl, "__self__") else nl + for nl in get_function_nonlocals(c.func) + ) + if c.afunc is not None: + candidates.extend( + nl.__self__ if hasattr(nl, "__self__") else nl + for nl in get_function_nonlocals(c.afunc) + ) + + return None From 7e9cf02922b80a832e02609d4d4ff9d62a9f9820 Mon Sep 17 00:00:00 2001 From: Tat Dat Duong Date: Tue, 8 Oct 2024 17:08:08 +0200 Subject: [PATCH 04/19] Use casting instead --- libs/langgraph/langgraph/pregel/__init__.py | 2 +- libs/langgraph/langgraph/pregel/utils.py | 7 ++----- 2 files changed, 3 insertions(+), 6 deletions(-) diff --git a/libs/langgraph/langgraph/pregel/__init__.py b/libs/langgraph/langgraph/pregel/__init__.py index 268c0241b..98d82712b 100644 --- a/libs/langgraph/langgraph/pregel/__init__.py +++ b/libs/langgraph/langgraph/pregel/__init__.py @@ -390,7 +390,7 @@ def get_subgraphs( continue # find the subgraph, if any - graph: Optional[Pregel] = find_subgraph_pregel(node.bound) + graph = cast(Optional[Pregel], find_subgraph_pregel(node.bound)) # if found, yield recursively if graph: diff --git a/libs/langgraph/langgraph/pregel/utils.py b/libs/langgraph/langgraph/pregel/utils.py index 2f9b9d411..3f5fe54d1 100644 --- a/libs/langgraph/langgraph/pregel/utils.py +++ b/libs/langgraph/langgraph/pregel/utils.py @@ -1,4 +1,4 @@ -from typing import TYPE_CHECKING, Optional +from typing import Optional from langchain_core.runnables import RunnableLambda, RunnableSequence from langchain_core.runnables.utils import get_function_nonlocals @@ -6,9 +6,6 @@ from langgraph.checkpoint.base import ChannelVersions from langgraph.utils.runnable import Runnable, RunnableCallable, RunnableSeq -if TYPE_CHECKING: - from langgraph.pregel import Pregel - def 
get_new_channel_versions( previous_versions: ChannelVersions, current_versions: ChannelVersions @@ -28,7 +25,7 @@ def get_new_channel_versions( return new_versions -def find_subgraph_pregel(candidate: Runnable) -> Optional[Pregel]: +def find_subgraph_pregel(candidate: Runnable) -> Optional[Runnable]: candidates: list[Runnable] = [candidate] for c in candidates: From 4f61dd1aa65d4c38acfa17975181b7aac49d9bcb Mon Sep 17 00:00:00 2001 From: Tat Dat Duong Date: Tue, 8 Oct 2024 19:18:52 +0200 Subject: [PATCH 05/19] Fix tests --- libs/langgraph/tests/test_pregel.py | 15 +++--- libs/langgraph/tests/test_pregel_async.py | 59 ++++++++++++++++++++--- 2 files changed, 61 insertions(+), 13 deletions(-) diff --git a/libs/langgraph/tests/test_pregel.py b/libs/langgraph/tests/test_pregel.py index 196fe4223..89088bd1d 100644 --- a/libs/langgraph/tests/test_pregel.py +++ b/libs/langgraph/tests/test_pregel.py @@ -6996,11 +6996,14 @@ class State(TypedDict): # test stream_mode=debug tool_two = tool_two_graph.compile(checkpointer=checkpointer) thread10 = {"configurable": {"thread_id": "10"}} - assert [ + + res = [ *tool_two.stream( {"my_key": "value", "market": "DE"}, thread10, stream_mode="debug" ) - ] == [ + ] + + assert res == [ { "type": "checkpoint", "timestamp": AnyStr(), @@ -7026,7 +7029,7 @@ class State(TypedDict): }, "parent_config": None, "next": ["__start__"], - "tasks": [{"id": AnyStr(), "name": "__start__", "interrupts": ()}], + "tasks": [{"id": AnyStr(), "name": "__start__", "interrupts": (), "state": None}], }, }, { @@ -7067,7 +7070,7 @@ class State(TypedDict): }, }, "next": ["prepare"], - "tasks": [{"id": AnyStr(), "name": "prepare", "interrupts": ()}], + "tasks": [{"id": AnyStr(), "name": "prepare", "interrupts": (), "state": None}], }, }, { @@ -7131,7 +7134,7 @@ class State(TypedDict): }, }, "next": ["tool_two_slow"], - "tasks": [{"id": AnyStr(), "name": "tool_two_slow", "interrupts": ()}], + "tasks": [{"id": AnyStr(), "name": "tool_two_slow", "interrupts": (), 
"state": None}], }, }, { @@ -7195,7 +7198,7 @@ class State(TypedDict): }, }, "next": ["finish"], - "tasks": [{"id": AnyStr(), "name": "finish", "interrupts": ()}], + "tasks": [{"id": AnyStr(), "name": "finish", "interrupts": (), "state": None}], }, }, { diff --git a/libs/langgraph/tests/test_pregel_async.py b/libs/langgraph/tests/test_pregel_async.py index 0290bf069..2c938b543 100644 --- a/libs/langgraph/tests/test_pregel_async.py +++ b/libs/langgraph/tests/test_pregel_async.py @@ -5655,7 +5655,14 @@ class State(TypedDict): }, "parent_config": None, "next": ["__start__"], - "tasks": [{"id": AnyStr(), "name": "__start__", "interrupts": ()}], + "tasks": [ + { + "id": AnyStr(), + "name": "__start__", + "interrupts": (), + "state": None, + } + ], }, }, { @@ -5696,7 +5703,14 @@ class State(TypedDict): }, }, "next": ["prepare"], - "tasks": [{"id": AnyStr(), "name": "prepare", "interrupts": ()}], + "tasks": [ + { + "id": AnyStr(), + "name": "prepare", + "interrupts": (), + "state": None, + } + ], }, }, { @@ -5761,7 +5775,12 @@ class State(TypedDict): }, "next": ["tool_two_slow"], "tasks": [ - {"id": AnyStr(), "name": "tool_two_slow", "interrupts": ()} + { + "id": AnyStr(), + "name": "tool_two_slow", + "interrupts": (), + "state": None, + } ], }, }, @@ -5826,7 +5845,14 @@ class State(TypedDict): }, }, "next": ["finish"], - "tasks": [{"id": AnyStr(), "name": "finish", "interrupts": ()}], + "tasks": [ + { + "id": AnyStr(), + "name": "finish", + "interrupts": (), + "state": None, + } + ], }, }, { @@ -5937,7 +5963,14 @@ class State(TypedDict): }, "parent_config": None, "next": ["__start__"], - "tasks": [{"id": AnyStr(), "name": "__start__", "interrupts": ()}], + "tasks": [ + { + "id": AnyStr(), + "name": "__start__", + "interrupts": (), + "state": None, + } + ], }, }, { @@ -5978,7 +6011,14 @@ class State(TypedDict): }, }, "next": ["prepare"], - "tasks": [{"id": AnyStr(), "name": "prepare", "interrupts": ()}], + "tasks": [ + { + "id": AnyStr(), + "name": "prepare", + 
"interrupts": (), + "state": None, + } + ], }, }, { @@ -6043,7 +6083,12 @@ class State(TypedDict): }, "next": ["tool_two_slow"], "tasks": [ - {"id": AnyStr(), "name": "tool_two_slow", "interrupts": ()} + { + "id": AnyStr(), + "name": "tool_two_slow", + "interrupts": (), + "state": None, + } ], }, }, From e3bee7d8436837cb3eefeb56d6dae1015fa53ec6 Mon Sep 17 00:00:00 2001 From: Tat Dat Duong Date: Tue, 8 Oct 2024 19:52:05 +0200 Subject: [PATCH 06/19] Add tests --- libs/langgraph/langgraph/pregel/debug.py | 4 +- libs/langgraph/tests/test_pregel.py | 102 +++++++++++++++++++++-- 2 files changed, 98 insertions(+), 8 deletions(-) diff --git a/libs/langgraph/langgraph/pregel/debug.py b/libs/langgraph/langgraph/pregel/debug.py index 0516c0fd1..53d49f7e1 100644 --- a/libs/langgraph/langgraph/pregel/debug.py +++ b/libs/langgraph/langgraph/pregel/debug.py @@ -186,14 +186,14 @@ def map_debug_checkpoint( "id": t.id, "name": t.name, "error": t.error, - "state": task_states.get(t.id) if task_states else None, + "state": t.state, } if t.error else { "id": t.id, "name": t.name, "interrupts": tuple(asdict(i) for i in t.interrupts), - "state": task_states.get(t.id) if task_states else None, + "state": t.state, } for t in tasks_w_writes(tasks, pending_writes, task_states) ], diff --git a/libs/langgraph/tests/test_pregel.py b/libs/langgraph/tests/test_pregel.py index 89088bd1d..7357a0f43 100644 --- a/libs/langgraph/tests/test_pregel.py +++ b/libs/langgraph/tests/test_pregel.py @@ -6996,13 +6996,13 @@ class State(TypedDict): # test stream_mode=debug tool_two = tool_two_graph.compile(checkpointer=checkpointer) thread10 = {"configurable": {"thread_id": "10"}} - + res = [ *tool_two.stream( {"my_key": "value", "market": "DE"}, thread10, stream_mode="debug" ) ] - + assert res == [ { "type": "checkpoint", @@ -7029,7 +7029,14 @@ class State(TypedDict): }, "parent_config": None, "next": ["__start__"], - "tasks": [{"id": AnyStr(), "name": "__start__", "interrupts": (), "state": None}], + "tasks": 
[ + { + "id": AnyStr(), + "name": "__start__", + "interrupts": (), + "state": None, + } + ], }, }, { @@ -7070,7 +7077,9 @@ class State(TypedDict): }, }, "next": ["prepare"], - "tasks": [{"id": AnyStr(), "name": "prepare", "interrupts": (), "state": None}], + "tasks": [ + {"id": AnyStr(), "name": "prepare", "interrupts": (), "state": None} + ], }, }, { @@ -7134,7 +7143,14 @@ class State(TypedDict): }, }, "next": ["tool_two_slow"], - "tasks": [{"id": AnyStr(), "name": "tool_two_slow", "interrupts": (), "state": None}], + "tasks": [ + { + "id": AnyStr(), + "name": "tool_two_slow", + "interrupts": (), + "state": None, + } + ], }, }, { @@ -7198,7 +7214,9 @@ class State(TypedDict): }, }, "next": ["finish"], - "tasks": [{"id": AnyStr(), "name": "finish", "interrupts": (), "state": None}], + "tasks": [ + {"id": AnyStr(), "name": "finish", "interrupts": (), "state": None} + ], }, }, { @@ -11564,3 +11582,75 @@ def baz(state: State): graph = graph.compile() assert graph.invoke({"foo": "hello"}) == {"foo": "hello", "bar": "hello!"} + + +def test_debug_subgraphs(): + class State(TypedDict): + messages: Annotated[list[str], operator.add] + + def node(name): + def _node(state: State): + return {"messages": [f"entered {name} node"]} + + return _node + + grand_parent = StateGraph(State) + parent = StateGraph(State) + child = StateGraph(State) + + child.add_node("c_one", node("c_one")) + child.add_node("c_two", node("c_two")) + child.add_edge(START, "c_one") + child.add_edge("c_one", "c_two") + child.add_edge("c_two", END) + + parent.add_node("p_one", node("p_one")) + parent.add_node("p_two", child.compile()) + parent.add_edge(START, "p_one") + parent.add_edge("p_one", "p_two") + parent.add_edge("p_two", END) + + grand_parent.add_node("gp_one", node("gp_one")) + grand_parent.add_node("gp_two", parent.compile()) + grand_parent.add_edge(START, "gp_one") + grand_parent.add_edge("gp_one", "gp_two") + grand_parent.add_edge("gp_two", END) + + graph = 
grand_parent.compile(checkpointer=MemorySaver()) + + config = {"configurable": {"thread_id": "1"}} + events = [ + *graph.stream( + {"messages": []}, + config=config, + stream_mode="debug", + ) + ] + + checkpoint_events = list( + reversed([e["payload"] for e in events if e["type"] == "checkpoint"]) + ) + checkpoint_history = list(graph.get_state_history(config)) + + assert len(checkpoint_events) == len(checkpoint_history) + + def normalize_config(config: dict | None) -> dict | None: + if config is None: + return None + return config["configurable"] + + for stream, history in zip(checkpoint_events, checkpoint_history): + assert stream["values"] == history.values + assert stream["next"] == list(history.next) + assert normalize_config(stream["config"]) == normalize_config(history.config) + assert normalize_config(stream["parent_config"]) == normalize_config( + history.parent_config + ) + + assert len(stream["tasks"]) == len(history.tasks) + for stream_task, history_task in zip(stream["tasks"], history.tasks): + assert stream_task["id"] == history_task.id + assert stream_task["name"] == history_task.name + assert stream_task["interrupts"] == history_task.interrupts + assert stream_task.get("error") == history_task.error + assert stream_task.get("state") == history_task.state From e5649027532432466670e8a73909fd02f08f728d Mon Sep 17 00:00:00 2001 From: Tat Dat Duong Date: Tue, 8 Oct 2024 19:57:39 +0200 Subject: [PATCH 07/19] Add async tests --- libs/langgraph/tests/test_pregel_async.py | 73 +++++++++++++++++++++++ 1 file changed, 73 insertions(+) diff --git a/libs/langgraph/tests/test_pregel_async.py b/libs/langgraph/tests/test_pregel_async.py index 2c938b543..bb8d4ff25 100644 --- a/libs/langgraph/tests/test_pregel_async.py +++ b/libs/langgraph/tests/test_pregel_async.py @@ -9818,3 +9818,76 @@ async def node(input: State, config: RunnableConfig, store: BaseStore): assert ( len((await the_store.asearch(("foo", "bar")))) == 1 ) # still overwriting the same one + + +async 
def test_debug_subgraphs(): + class State(TypedDict): + messages: Annotated[list[str], operator.add] + + def node(name): + async def _node(state: State): + return {"messages": [f"entered {name} node"]} + + return _node + + grand_parent = StateGraph(State) + parent = StateGraph(State) + child = StateGraph(State) + + child.add_node("c_one", node("c_one")) + child.add_node("c_two", node("c_two")) + child.add_edge(START, "c_one") + child.add_edge("c_one", "c_two") + child.add_edge("c_two", END) + + parent.add_node("p_one", node("p_one")) + parent.add_node("p_two", child.compile()) + parent.add_edge(START, "p_one") + parent.add_edge("p_one", "p_two") + parent.add_edge("p_two", END) + + grand_parent.add_node("gp_one", node("gp_one")) + grand_parent.add_node("gp_two", parent.compile()) + grand_parent.add_edge(START, "gp_one") + grand_parent.add_edge("gp_one", "gp_two") + grand_parent.add_edge("gp_two", END) + + graph = grand_parent.compile(checkpointer=MemorySaver()) + + config = {"configurable": {"thread_id": "1"}} + events = [ + c + async for c in graph.astream( + {"messages": []}, + config=config, + stream_mode="debug", + ) + ] + + checkpoint_events = list( + reversed([e["payload"] for e in events if e["type"] == "checkpoint"]) + ) + checkpoint_history = [c async for c in graph.aget_state_history(config)] + + assert len(checkpoint_events) == len(checkpoint_history) + + def normalize_config(config: dict | None) -> dict | None: + if config is None: + return None + return config["configurable"] + + for stream, history in zip(checkpoint_events, checkpoint_history): + assert stream["values"] == history.values + assert stream["next"] == list(history.next) + assert normalize_config(stream["config"]) == normalize_config(history.config) + assert normalize_config(stream["parent_config"]) == normalize_config( + history.parent_config + ) + + assert len(stream["tasks"]) == len(history.tasks) + for stream_task, history_task in zip(stream["tasks"], history.tasks): + assert 
stream_task["id"] == history_task.id + assert stream_task["name"] == history_task.name + assert stream_task["interrupts"] == history_task.interrupts + assert stream_task.get("error") == history_task.error + assert stream_task.get("state") == history_task.state From f9e900f39a7c3bc8df1868c126639597ceb9b29a Mon Sep 17 00:00:00 2001 From: Tat Dat Duong Date: Tue, 8 Oct 2024 19:58:42 +0200 Subject: [PATCH 08/19] Fix 3.9 --- libs/langgraph/tests/test_pregel_async.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/libs/langgraph/tests/test_pregel_async.py b/libs/langgraph/tests/test_pregel_async.py index bb8d4ff25..63a2f75f7 100644 --- a/libs/langgraph/tests/test_pregel_async.py +++ b/libs/langgraph/tests/test_pregel_async.py @@ -9871,7 +9871,7 @@ async def _node(state: State): assert len(checkpoint_events) == len(checkpoint_history) - def normalize_config(config: dict | None) -> dict | None: + def normalize_config(config: Optional[dict, None]) -> Optional[dict, None]: if config is None: return None return config["configurable"] From b51f5d6345207ba83476f0ac1d10faf260378b9e Mon Sep 17 00:00:00 2001 From: Tat Dat Duong Date: Tue, 8 Oct 2024 20:09:51 +0200 Subject: [PATCH 09/19] Fix optional types --- libs/langgraph/tests/test_pregel.py | 2 +- libs/langgraph/tests/test_pregel_async.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/libs/langgraph/tests/test_pregel.py b/libs/langgraph/tests/test_pregel.py index 7357a0f43..0e77b65c9 100644 --- a/libs/langgraph/tests/test_pregel.py +++ b/libs/langgraph/tests/test_pregel.py @@ -11634,7 +11634,7 @@ def _node(state: State): assert len(checkpoint_events) == len(checkpoint_history) - def normalize_config(config: dict | None) -> dict | None: + def normalize_config(config: Optional[dict]) -> Optional[dict]: if config is None: return None return config["configurable"] diff --git a/libs/langgraph/tests/test_pregel_async.py b/libs/langgraph/tests/test_pregel_async.py index 63a2f75f7..92b1ebf9a 
100644 --- a/libs/langgraph/tests/test_pregel_async.py +++ b/libs/langgraph/tests/test_pregel_async.py @@ -9871,7 +9871,7 @@ async def _node(state: State): assert len(checkpoint_events) == len(checkpoint_history) - def normalize_config(config: Optional[dict, None]) -> Optional[dict, None]: + def normalize_config(config: Optional[dict]) -> Optional[dict]: if config is None: return None return config["configurable"] From 45a12c938b3f10ef9a971c1dacccc9a1c6375035 Mon Sep 17 00:00:00 2001 From: Tat Dat Duong Date: Tue, 8 Oct 2024 20:49:16 +0200 Subject: [PATCH 10/19] Add more nested tests for nested subgraphs --- libs/langgraph/tests/test_pregel.py | 108 +++++++++++++++++++-- libs/langgraph/tests/test_pregel_async.py | 110 ++++++++++++++++++++-- 2 files changed, 202 insertions(+), 16 deletions(-) diff --git a/libs/langgraph/tests/test_pregel.py b/libs/langgraph/tests/test_pregel.py index 0e77b65c9..e75df2245 100644 --- a/libs/langgraph/tests/test_pregel.py +++ b/libs/langgraph/tests/test_pregel.py @@ -11594,7 +11594,6 @@ def _node(state: State): return _node - grand_parent = StateGraph(State) parent = StateGraph(State) child = StateGraph(State) @@ -11610,13 +11609,7 @@ def _node(state: State): parent.add_edge("p_one", "p_two") parent.add_edge("p_two", END) - grand_parent.add_node("gp_one", node("gp_one")) - grand_parent.add_node("gp_two", parent.compile()) - grand_parent.add_edge(START, "gp_one") - grand_parent.add_edge("gp_one", "gp_two") - grand_parent.add_edge("gp_two", END) - - graph = grand_parent.compile(checkpointer=MemorySaver()) + graph = parent.compile(checkpointer=MemorySaver()) config = {"configurable": {"thread_id": "1"}} events = [ @@ -11654,3 +11647,102 @@ def normalize_config(config: Optional[dict]) -> Optional[dict]: assert stream_task["interrupts"] == history_task.interrupts assert stream_task.get("error") == history_task.error assert stream_task.get("state") == history_task.state + + +def test_debug_nested_subgraphs(): + from collections import 
defaultdict + + class State(TypedDict): + messages: Annotated[list[str], operator.add] + + def node(name): + def _node(state: State): + return {"messages": [f"entered {name} node"]} + + return _node + + grand_parent = StateGraph(State) + parent = StateGraph(State) + child = StateGraph(State) + + child.add_node("c_one", node("c_one")) + child.add_node("c_two", node("c_two")) + child.add_edge(START, "c_one") + child.add_edge("c_one", "c_two") + child.add_edge("c_two", END) + + parent.add_node("p_one", node("p_one")) + parent.add_node("p_two", child.compile()) + parent.add_edge(START, "p_one") + parent.add_edge("p_one", "p_two") + parent.add_edge("p_two", END) + + grand_parent.add_node("gp_one", node("gp_one")) + grand_parent.add_node("gp_two", parent.compile()) + grand_parent.add_edge(START, "gp_one") + grand_parent.add_edge("gp_one", "gp_two") + grand_parent.add_edge("gp_two", END) + + graph = grand_parent.compile(checkpointer=MemorySaver()) + + config = {"configurable": {"thread_id": "1"}} + events = [ + *graph.stream( + {"messages": []}, + config=config, + stream_mode="debug", + subgraphs=True, + ) + ] + + stream_ns: dict[tuple, dict] = defaultdict(list) + for ns, e in events: + if e["type"] == "checkpoint": + stream_ns[ns].append(e["payload"]) + + assert list(stream_ns.keys()) == [ + (), + (AnyStr("gp_two:"),), + (AnyStr("gp_two:"), AnyStr("p_two:")), + ] + + history_ns = { + ns: list( + graph.get_state_history( + {"configurable": {"thread_id": "1", "checkpoint_ns": "|".join(ns)}} + ) + )[::-1] + for ns in stream_ns.keys() + } + + def normalize_config(config: Optional[dict]) -> Optional[dict]: + if config is None: + return None + + clean_config = {} + clean_config["thread_id"] = config["configurable"]["thread_id"] + clean_config["checkpoint_id"] = config["configurable"]["checkpoint_id"] + clean_config["checkpoint_ns"] = config["configurable"]["checkpoint_ns"] + + return clean_config + + for checkpoint_events, checkpoint_history in zip( + stream_ns.values(), 
history_ns.values() + ): + for stream, history in zip(checkpoint_events, checkpoint_history): + assert stream["values"] == history.values + assert stream["next"] == list(history.next) + assert normalize_config(stream["config"]) == normalize_config( + history.config + ) + assert normalize_config(stream["parent_config"]) == normalize_config( + history.parent_config + ) + + assert len(stream["tasks"]) == len(history.tasks) + for stream_task, history_task in zip(stream["tasks"], history.tasks): + assert stream_task["id"] == history_task.id + assert stream_task["name"] == history_task.name + assert stream_task["interrupts"] == history_task.interrupts + assert stream_task.get("error") == history_task.error + assert stream_task.get("state") == history_task.state diff --git a/libs/langgraph/tests/test_pregel_async.py b/libs/langgraph/tests/test_pregel_async.py index 92b1ebf9a..b2d1d6538 100644 --- a/libs/langgraph/tests/test_pregel_async.py +++ b/libs/langgraph/tests/test_pregel_async.py @@ -9830,7 +9830,6 @@ async def _node(state: State): return _node - grand_parent = StateGraph(State) parent = StateGraph(State) child = StateGraph(State) @@ -9846,13 +9845,7 @@ async def _node(state: State): parent.add_edge("p_one", "p_two") parent.add_edge("p_two", END) - grand_parent.add_node("gp_one", node("gp_one")) - grand_parent.add_node("gp_two", parent.compile()) - grand_parent.add_edge(START, "gp_one") - grand_parent.add_edge("gp_one", "gp_two") - grand_parent.add_edge("gp_two", END) - - graph = grand_parent.compile(checkpointer=MemorySaver()) + graph = parent.compile(checkpointer=MemorySaver()) config = {"configurable": {"thread_id": "1"}} events = [ @@ -9891,3 +9884,104 @@ def normalize_config(config: Optional[dict]) -> Optional[dict]: assert stream_task["interrupts"] == history_task.interrupts assert stream_task.get("error") == history_task.error assert stream_task.get("state") == history_task.state + + +async def test_debug_nested_subgraphs(): + from collections import 
defaultdict + + class State(TypedDict): + messages: Annotated[list[str], operator.add] + + def node(name): + async def _node(state: State): + return {"messages": [f"entered {name} node"]} + + return _node + + grand_parent = StateGraph(State) + parent = StateGraph(State) + child = StateGraph(State) + + child.add_node("c_one", node("c_one")) + child.add_node("c_two", node("c_two")) + child.add_edge(START, "c_one") + child.add_edge("c_one", "c_two") + child.add_edge("c_two", END) + + parent.add_node("p_one", node("p_one")) + parent.add_node("p_two", child.compile()) + parent.add_edge(START, "p_one") + parent.add_edge("p_one", "p_two") + parent.add_edge("p_two", END) + + grand_parent.add_node("gp_one", node("gp_one")) + grand_parent.add_node("gp_two", parent.compile()) + grand_parent.add_edge(START, "gp_one") + grand_parent.add_edge("gp_one", "gp_two") + grand_parent.add_edge("gp_two", END) + + graph = grand_parent.compile(checkpointer=MemorySaver()) + + config = {"configurable": {"thread_id": "1"}} + events = [ + c + async for c in graph.astream( + {"messages": []}, + config=config, + stream_mode="debug", + subgraphs=True, + ) + ] + + stream_ns: dict[tuple, dict] = defaultdict(list) + for ns, e in events: + if e["type"] == "checkpoint": + stream_ns[ns].append(e["payload"]) + + assert list(stream_ns.keys()) == [ + (), + (AnyStr("gp_two:"),), + (AnyStr("gp_two:"), AnyStr("p_two:")), + ] + + history_ns = { + ns: [ + c + async for c in graph.aget_state_history( + {"configurable": {"thread_id": "1", "checkpoint_ns": "|".join(ns)}} + ) + ][::-1] + for ns in stream_ns.keys() + } + + def normalize_config(config: Optional[dict]) -> Optional[dict]: + if config is None: + return None + + clean_config = {} + clean_config["thread_id"] = config["configurable"]["thread_id"] + clean_config["checkpoint_id"] = config["configurable"]["checkpoint_id"] + clean_config["checkpoint_ns"] = config["configurable"]["checkpoint_ns"] + + return clean_config + + for checkpoint_events, 
checkpoint_history in zip( + stream_ns.values(), history_ns.values() + ): + for stream, history in zip(checkpoint_events, checkpoint_history): + assert stream["values"] == history.values + assert stream["next"] == list(history.next) + assert normalize_config(stream["config"]) == normalize_config( + history.config + ) + assert normalize_config(stream["parent_config"]) == normalize_config( + history.parent_config + ) + + assert len(stream["tasks"]) == len(history.tasks) + for stream_task, history_task in zip(stream["tasks"], history.tasks): + assert stream_task["id"] == history_task.id + assert stream_task["name"] == history_task.name + assert stream_task["interrupts"] == history_task.interrupts + assert stream_task.get("error") == history_task.error + assert stream_task.get("state") == history_task.state From 7a282f82dc938378696211f4402da24e855f94f4 Mon Sep 17 00:00:00 2001 From: Tat Dat Duong Date: Tue, 8 Oct 2024 20:55:49 +0200 Subject: [PATCH 11/19] Tests? --- libs/langgraph/tests/test_pregel_async.py | 22 +++++++++++++--------- 1 file changed, 13 insertions(+), 9 deletions(-) diff --git a/libs/langgraph/tests/test_pregel_async.py b/libs/langgraph/tests/test_pregel_async.py index b2d1d6538..3be52643b 100644 --- a/libs/langgraph/tests/test_pregel_async.py +++ b/libs/langgraph/tests/test_pregel_async.py @@ -9944,15 +9944,19 @@ async def _node(state: State): (AnyStr("gp_two:"), AnyStr("p_two:")), ] - history_ns = { - ns: [ - c - async for c in graph.aget_state_history( - {"configurable": {"thread_id": "1", "checkpoint_ns": "|".join(ns)}} - ) - ][::-1] - for ns in stream_ns.keys() - } + history_ns = {} + for ns in stream_ns.keys(): + + async def get_history(): + history = [ + c + async for c in graph.aget_state_history( + {"configurable": {"thread_id": "1", "checkpoint_ns": "|".join(ns)}} + ) + ] + return history[::-1] + + history_ns[ns] = await get_history() def normalize_config(config: Optional[dict]) -> Optional[dict]: if config is None: From 
db61d294a675c0c5559af1e8832cf05fee241809 Mon Sep 17 00:00:00 2001 From: Tat Dat Duong Date: Wed, 9 Oct 2024 01:00:06 +0200 Subject: [PATCH 12/19] Code review --- libs/langgraph/langgraph/pregel/utils.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/libs/langgraph/langgraph/pregel/utils.py b/libs/langgraph/langgraph/pregel/utils.py index 3f5fe54d1..bb98cf63b 100644 --- a/libs/langgraph/langgraph/pregel/utils.py +++ b/libs/langgraph/langgraph/pregel/utils.py @@ -26,11 +26,16 @@ def get_new_channel_versions( def find_subgraph_pregel(candidate: Runnable) -> Optional[Runnable]: + from langgraph.pregel import Pregel + candidates: list[Runnable] = [candidate] for c in candidates: - # Cannot do isinstance(c, Pregel) due to circular imports - if "Pregel" in [t.__name__ for t in type(c).__mro__]: + if ( + isinstance(c, Pregel) + # subgraphs that disabled checkpointing are not considered + and candidate.checkpointer is not False + ): return c elif isinstance(c, RunnableSequence) or isinstance(c, RunnableSeq): candidates.extend(c.steps) From 0628c6402f135aa3525d77b48d1c774d986d59a6 Mon Sep 17 00:00:00 2001 From: Tat Dat Duong Date: Wed, 9 Oct 2024 01:00:36 +0200 Subject: [PATCH 13/19] Fix typo --- libs/langgraph/langgraph/pregel/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/libs/langgraph/langgraph/pregel/utils.py b/libs/langgraph/langgraph/pregel/utils.py index bb98cf63b..2b09f8f75 100644 --- a/libs/langgraph/langgraph/pregel/utils.py +++ b/libs/langgraph/langgraph/pregel/utils.py @@ -34,7 +34,7 @@ def find_subgraph_pregel(candidate: Runnable) -> Optional[Runnable]: if ( isinstance(c, Pregel) # subgraphs that disabled checkpointing are not considered - and candidate.checkpointer is not False + and c.checkpointer is not False ): return c elif isinstance(c, RunnableSequence) or isinstance(c, RunnableSeq): From 598bb5a641d0f76c23e498657dfd675f45de41f4 Mon Sep 17 00:00:00 2001 From: Andrew Nguonly Date: Tue, 8 Oct 2024 
12:04:26 -0700 Subject: [PATCH 14/19] sdk-py: Update return type annotation for `Thread.update_state()` methods. (#2050) ### Summary The response body of the endpoint `POST /threads/{thread_id}/state` looks like this: ``` { "checkpoint": { "thread_id": "e2496803-ecd5-4e0c-a779-3226296181c2", "checkpoint_ns": "", "checkpoint_id": "1ef4a9b8-e6fb-67b1-8001-abd5184439d1", "checkpoint_map": {} } } ``` --- libs/sdk-py/langgraph_sdk/client.py | 37 ++++++++++++++++++++++++----- libs/sdk-py/langgraph_sdk/schema.py | 7 ++++++ 2 files changed, 38 insertions(+), 6 deletions(-) diff --git a/libs/sdk-py/langgraph_sdk/client.py b/libs/sdk-py/langgraph_sdk/client.py index 8d7fccdc6..409fcd545 100644 --- a/libs/sdk-py/langgraph_sdk/client.py +++ b/libs/sdk-py/langgraph_sdk/client.py @@ -53,6 +53,7 @@ Thread, ThreadState, ThreadStatus, + ThreadUpdateStateResponse, ) logger = logging.getLogger(__name__) @@ -1060,7 +1061,7 @@ async def update_state( as_node: Optional[str] = None, checkpoint: Optional[Checkpoint] = None, checkpoint_id: Optional[str] = None, # deprecated - ) -> None: + ) -> ThreadUpdateStateResponse: """Update the state of a thread. Args: @@ -1070,15 +1071,27 @@ async def update_state( checkpoint: The checkpoint to update the state of. Returns: - None + ThreadUpdateStateResponse: Response after updating a thread's state. 
Example Usage: - await client.threads.update_state( + response = await client.threads.update_state( thread_id="my_thread_id", values={"messages":[{"role": "user", "content": "hello!"}]}, as_node="my_node", ) + print(response) + + ---------------------------------------------------------------------------------------------------------------------------------------------------------------------- + + { + 'checkpoint': { + 'thread_id': 'e2496803-ecd5-4e0c-a779-3226296181c2', + 'checkpoint_ns': '', + 'checkpoint_id': '1ef4a9b8-e6fb-67b1-8001-abd5184439d1', + 'checkpoint_map': {} + } + } """ # noqa: E501 payload: Dict[str, Any] = { @@ -3109,7 +3122,7 @@ def update_state( as_node: Optional[str] = None, checkpoint: Optional[Checkpoint] = None, checkpoint_id: Optional[str] = None, # deprecated - ) -> None: + ) -> ThreadUpdateStateResponse: """Update the state of a thread. Args: @@ -3119,15 +3132,27 @@ def update_state( checkpoint: The checkpoint to update the state of. Returns: - None + ThreadUpdateStateResponse: Response after updating a thread's state. Example Usage: - await client.threads.update_state( + response = client.threads.update_state( thread_id="my_thread_id", values={"messages":[{"role": "user", "content": "hello!"}]}, as_node="my_node", ) + print(response) + + ---------------------------------------------------------------------------------------------------------------------------------------------------------------------- + + { + 'checkpoint': { + 'thread_id': 'e2496803-ecd5-4e0c-a779-3226296181c2', + 'checkpoint_ns': '', + 'checkpoint_id': '1ef4a9b8-e6fb-67b1-8001-abd5184439d1', + 'checkpoint_map': {} + } + } """ # noqa: E501 payload: Dict[str, Any] = { diff --git a/libs/sdk-py/langgraph_sdk/schema.py b/libs/sdk-py/langgraph_sdk/schema.py index c6101bdce..bc7bf558e 100644 --- a/libs/sdk-py/langgraph_sdk/schema.py +++ b/libs/sdk-py/langgraph_sdk/schema.py @@ -207,6 +207,13 @@ class ThreadState(TypedDict): """Tasks to execute in this step. 
If already attempted, may contain an error.""" +class ThreadUpdateStateResponse(TypedDict): + """Represents the response from updating a thread's state.""" + + checkpoint: Checkpoint + """Checkpoint of the latest state.""" + + class Run(TypedDict): """Represents a single execution run.""" From 6d4a426059ff6126c47bc9f3a0a7f5a9b2505b2f Mon Sep 17 00:00:00 2001 From: Andrew Nguonly Date: Tue, 8 Oct 2024 12:44:37 -0700 Subject: [PATCH 15/19] sdk-py: Add `custom` stream mode to `StreamMode` type (#2051) ### Summary The LangGraph API supports `custom` stream mode type. Reference: https://github.com/langchain-ai/langgraph/blob/main/docs/docs/cloud/reference/api/openapi.json#L2403 --- libs/sdk-py/langgraph_sdk/schema.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/libs/sdk-py/langgraph_sdk/schema.py b/libs/sdk-py/langgraph_sdk/schema.py index bc7bf558e..19014a76c 100644 --- a/libs/sdk-py/langgraph_sdk/schema.py +++ b/libs/sdk-py/langgraph_sdk/schema.py @@ -25,7 +25,7 @@ - "interrupted": The thread's execution was interrupted. """ -StreamMode = Literal["values", "messages", "updates", "events", "debug"] +StreamMode = Literal["values", "messages", "updates", "events", "debug", "custom"] """ Defines the mode of streaming: - "values": Stream only the values. @@ -33,6 +33,7 @@ - "updates": Stream updates to the state. - "events": Stream events occurring during execution. - "debug": Stream detailed debug information. +- "custom": Stream custom events. """ DisconnectMode = Literal["cancel", "continue"] From 9a752e15630fe87ccbe98a774395eb529a271e58 Mon Sep 17 00:00:00 2001 From: Andrew Nguonly Date: Tue, 8 Oct 2024 13:29:11 -0700 Subject: [PATCH 16/19] sdk-py: Add `"*"` literal to `interrupt_before` and `interrupt_after` types (#2053) ### Summary The LangGraph API supports the literal string `"*"` for `interrupt_before` and `interrupt_after`. 
Reference: https://github.com/langchain-ai/langgraph-api/blob/main/api/openapi.json#L2425 --- libs/sdk-py/langgraph_sdk/client.py | 89 +++++++++++++++-------------- 1 file changed, 45 insertions(+), 44 deletions(-) diff --git a/libs/sdk-py/langgraph_sdk/client.py b/libs/sdk-py/langgraph_sdk/client.py index 409fcd545..e332a35d9 100644 --- a/libs/sdk-py/langgraph_sdk/client.py +++ b/libs/sdk-py/langgraph_sdk/client.py @@ -31,6 +31,7 @@ import langgraph_sdk from langgraph_sdk.schema import ( + All, Assistant, AssistantVersion, Checkpoint, @@ -1174,8 +1175,8 @@ def stream( config: Optional[Config] = None, checkpoint: Optional[Checkpoint] = None, checkpoint_id: Optional[str] = None, - interrupt_before: Optional[list[str]] = None, - interrupt_after: Optional[list[str]] = None, + interrupt_before: Optional[Union[All, list[str]]] = None, + interrupt_after: Optional[Union[All, list[str]]] = None, feedback_keys: Optional[list[str]] = None, on_disconnect: Optional[DisconnectMode] = None, webhook: Optional[str] = None, @@ -1194,8 +1195,8 @@ def stream( stream_subgraphs: bool = False, metadata: Optional[dict] = None, config: Optional[Config] = None, - interrupt_before: Optional[list[str]] = None, - interrupt_after: Optional[list[str]] = None, + interrupt_before: Optional[Union[All, list[str]]] = None, + interrupt_after: Optional[Union[All, list[str]]] = None, feedback_keys: Optional[list[str]] = None, on_disconnect: Optional[DisconnectMode] = None, on_completion: Optional[OnCompletionBehavior] = None, @@ -1215,8 +1216,8 @@ def stream( config: Optional[Config] = None, checkpoint: Optional[Checkpoint] = None, checkpoint_id: Optional[str] = None, - interrupt_before: Optional[list[str]] = None, - interrupt_after: Optional[list[str]] = None, + interrupt_before: Optional[Union[All, list[str]]] = None, + interrupt_after: Optional[Union[All, list[str]]] = None, feedback_keys: Optional[list[str]] = None, on_disconnect: Optional[DisconnectMode] = None, on_completion: 
Optional[OnCompletionBehavior] = None, @@ -1316,8 +1317,8 @@ async def create( stream_subgraphs: bool = False, metadata: Optional[dict] = None, config: Optional[Config] = None, - interrupt_before: Optional[list[str]] = None, - interrupt_after: Optional[list[str]] = None, + interrupt_before: Optional[Union[All, list[str]]] = None, + interrupt_after: Optional[Union[All, list[str]]] = None, webhook: Optional[str] = None, on_completion: Optional[OnCompletionBehavior] = None, after_seconds: Optional[int] = None, @@ -1336,8 +1337,8 @@ async def create( config: Optional[Config] = None, checkpoint: Optional[Checkpoint] = None, checkpoint_id: Optional[str] = None, - interrupt_before: Optional[list[str]] = None, - interrupt_after: Optional[list[str]] = None, + interrupt_before: Optional[Union[All, list[str]]] = None, + interrupt_after: Optional[Union[All, list[str]]] = None, webhook: Optional[str] = None, multitask_strategy: Optional[MultitaskStrategy] = None, after_seconds: Optional[int] = None, @@ -1355,8 +1356,8 @@ async def create( config: Optional[Config] = None, checkpoint: Optional[Checkpoint] = None, checkpoint_id: Optional[str] = None, - interrupt_before: Optional[list[str]] = None, - interrupt_after: Optional[list[str]] = None, + interrupt_before: Optional[Union[All, list[str]]] = None, + interrupt_after: Optional[Union[All, list[str]]] = None, webhook: Optional[str] = None, multitask_strategy: Optional[MultitaskStrategy] = None, on_completion: Optional[OnCompletionBehavior] = None, @@ -1494,8 +1495,8 @@ async def wait( config: Optional[Config] = None, checkpoint: Optional[Checkpoint] = None, checkpoint_id: Optional[str] = None, - interrupt_before: Optional[list[str]] = None, - interrupt_after: Optional[list[str]] = None, + interrupt_before: Optional[Union[All, list[str]]] = None, + interrupt_after: Optional[Union[All, list[str]]] = None, webhook: Optional[str] = None, on_disconnect: Optional[DisconnectMode] = None, multitask_strategy: Optional[MultitaskStrategy] = 
None, @@ -1511,8 +1512,8 @@ async def wait( input: Optional[dict] = None, metadata: Optional[dict] = None, config: Optional[Config] = None, - interrupt_before: Optional[list[str]] = None, - interrupt_after: Optional[list[str]] = None, + interrupt_before: Optional[Union[All, list[str]]] = None, + interrupt_after: Optional[Union[All, list[str]]] = None, webhook: Optional[str] = None, on_disconnect: Optional[DisconnectMode] = None, on_completion: Optional[OnCompletionBehavior] = None, @@ -1529,8 +1530,8 @@ async def wait( config: Optional[Config] = None, checkpoint: Optional[Checkpoint] = None, checkpoint_id: Optional[str] = None, - interrupt_before: Optional[list[str]] = None, - interrupt_after: Optional[list[str]] = None, + interrupt_before: Optional[Union[All, list[str]]] = None, + interrupt_after: Optional[Union[All, list[str]]] = None, webhook: Optional[str] = None, on_disconnect: Optional[DisconnectMode] = None, on_completion: Optional[OnCompletionBehavior] = None, @@ -1793,8 +1794,8 @@ async def create_for_thread( input: Optional[dict] = None, metadata: Optional[dict] = None, config: Optional[Config] = None, - interrupt_before: Optional[list[str]] = None, - interrupt_after: Optional[list[str]] = None, + interrupt_before: Optional[Union[All, list[str]]] = None, + interrupt_after: Optional[Union[All, list[str]]] = None, webhook: Optional[str] = None, multitask_strategy: Optional[str] = None, ) -> Run: @@ -1858,8 +1859,8 @@ async def create( input: Optional[dict] = None, metadata: Optional[dict] = None, config: Optional[Config] = None, - interrupt_before: Optional[list[str]] = None, - interrupt_after: Optional[list[str]] = None, + interrupt_before: Optional[Union[All, list[str]]] = None, + interrupt_after: Optional[Union[All, list[str]]] = None, webhook: Optional[str] = None, multitask_strategy: Optional[str] = None, ) -> Run: @@ -3237,8 +3238,8 @@ def stream( config: Optional[Config] = None, checkpoint: Optional[Checkpoint] = None, checkpoint_id: Optional[str] = 
None, - interrupt_before: Optional[list[str]] = None, - interrupt_after: Optional[list[str]] = None, + interrupt_before: Optional[Union[All, list[str]]] = None, + interrupt_after: Optional[Union[All, list[str]]] = None, feedback_keys: Optional[list[str]] = None, on_disconnect: Optional[DisconnectMode] = None, webhook: Optional[str] = None, @@ -3257,8 +3258,8 @@ def stream( stream_subgraphs: bool = False, metadata: Optional[dict] = None, config: Optional[Config] = None, - interrupt_before: Optional[list[str]] = None, - interrupt_after: Optional[list[str]] = None, + interrupt_before: Optional[Union[All, list[str]]] = None, + interrupt_after: Optional[Union[All, list[str]]] = None, feedback_keys: Optional[list[str]] = None, on_disconnect: Optional[DisconnectMode] = None, on_completion: Optional[OnCompletionBehavior] = None, @@ -3278,8 +3279,8 @@ def stream( config: Optional[Config] = None, checkpoint: Optional[Checkpoint] = None, checkpoint_id: Optional[str] = None, - interrupt_before: Optional[list[str]] = None, - interrupt_after: Optional[list[str]] = None, + interrupt_before: Optional[Union[All, list[str]]] = None, + interrupt_after: Optional[Union[All, list[str]]] = None, feedback_keys: Optional[list[str]] = None, on_disconnect: Optional[DisconnectMode] = None, on_completion: Optional[OnCompletionBehavior] = None, @@ -3379,8 +3380,8 @@ def create( stream_subgraphs: bool = False, metadata: Optional[dict] = None, config: Optional[Config] = None, - interrupt_before: Optional[list[str]] = None, - interrupt_after: Optional[list[str]] = None, + interrupt_before: Optional[Union[All, list[str]]] = None, + interrupt_after: Optional[Union[All, list[str]]] = None, webhook: Optional[str] = None, on_completion: Optional[OnCompletionBehavior] = None, after_seconds: Optional[int] = None, @@ -3399,8 +3400,8 @@ def create( config: Optional[Config] = None, checkpoint: Optional[Checkpoint] = None, checkpoint_id: Optional[str] = None, - interrupt_before: Optional[list[str]] = None, - 
interrupt_after: Optional[list[str]] = None, + interrupt_before: Optional[Union[All, list[str]]] = None, + interrupt_after: Optional[Union[All, list[str]]] = None, webhook: Optional[str] = None, multitask_strategy: Optional[MultitaskStrategy] = None, after_seconds: Optional[int] = None, @@ -3418,8 +3419,8 @@ def create( config: Optional[Config] = None, checkpoint: Optional[Checkpoint] = None, checkpoint_id: Optional[str] = None, - interrupt_before: Optional[list[str]] = None, - interrupt_after: Optional[list[str]] = None, + interrupt_before: Optional[Union[All, list[str]]] = None, + interrupt_after: Optional[Union[All, list[str]]] = None, webhook: Optional[str] = None, multitask_strategy: Optional[MultitaskStrategy] = None, on_completion: Optional[OnCompletionBehavior] = None, @@ -3557,8 +3558,8 @@ def wait( config: Optional[Config] = None, checkpoint: Optional[Checkpoint] = None, checkpoint_id: Optional[str] = None, - interrupt_before: Optional[list[str]] = None, - interrupt_after: Optional[list[str]] = None, + interrupt_before: Optional[Union[All, list[str]]] = None, + interrupt_after: Optional[Union[All, list[str]]] = None, webhook: Optional[str] = None, on_disconnect: Optional[DisconnectMode] = None, multitask_strategy: Optional[MultitaskStrategy] = None, @@ -3574,8 +3575,8 @@ def wait( input: Optional[dict] = None, metadata: Optional[dict] = None, config: Optional[Config] = None, - interrupt_before: Optional[list[str]] = None, - interrupt_after: Optional[list[str]] = None, + interrupt_before: Optional[Union[All, list[str]]] = None, + interrupt_after: Optional[Union[All, list[str]]] = None, webhook: Optional[str] = None, on_disconnect: Optional[DisconnectMode] = None, on_completion: Optional[OnCompletionBehavior] = None, @@ -3592,8 +3593,8 @@ def wait( config: Optional[Config] = None, checkpoint: Optional[Checkpoint] = None, checkpoint_id: Optional[str] = None, - interrupt_before: Optional[list[str]] = None, - interrupt_after: Optional[list[str]] = None, + 
interrupt_before: Optional[Union[All, list[str]]] = None, + interrupt_after: Optional[Union[All, list[str]]] = None, webhook: Optional[str] = None, on_disconnect: Optional[DisconnectMode] = None, on_completion: Optional[OnCompletionBehavior] = None, @@ -3846,8 +3847,8 @@ def create_for_thread( input: Optional[dict] = None, metadata: Optional[dict] = None, config: Optional[Config] = None, - interrupt_before: Optional[list[str]] = None, - interrupt_after: Optional[list[str]] = None, + interrupt_before: Optional[Union[All, list[str]]] = None, + interrupt_after: Optional[Union[All, list[str]]] = None, webhook: Optional[str] = None, multitask_strategy: Optional[str] = None, ) -> Run: @@ -3911,8 +3912,8 @@ def create( input: Optional[dict] = None, metadata: Optional[dict] = None, config: Optional[Config] = None, - interrupt_before: Optional[list[str]] = None, - interrupt_after: Optional[list[str]] = None, + interrupt_before: Optional[Union[All, list[str]]] = None, + interrupt_after: Optional[Union[All, list[str]]] = None, webhook: Optional[str] = None, multitask_strategy: Optional[str] = None, ) -> Run: From c4d251b05c4b61eea784a6dd0c2a09369d4c04a2 Mon Sep 17 00:00:00 2001 From: Andrew Nguonly Date: Tue, 8 Oct 2024 13:49:27 -0700 Subject: [PATCH 17/19] sdk-py: Add `Sequence[dict]` type to `values` param type for `update_state()` (#2054) ### Summary The LangGraph API supports a list of `dict` for the `values` field for the `POST /threads//state` endpoint. 
Reference: https://github.com/langchain-ai/langgraph-api/blob/main/api/openapi.json#L3005-L3021 --- libs/sdk-py/langgraph_sdk/client.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/libs/sdk-py/langgraph_sdk/client.py b/libs/sdk-py/langgraph_sdk/client.py index e332a35d9..f89a0f6a5 100644 --- a/libs/sdk-py/langgraph_sdk/client.py +++ b/libs/sdk-py/langgraph_sdk/client.py @@ -1057,7 +1057,7 @@ async def get_state( async def update_state( self, thread_id: str, - values: dict, + values: Optional[Union[dict, Sequence[dict]]], *, as_node: Optional[str] = None, checkpoint: Optional[Checkpoint] = None, @@ -3118,7 +3118,7 @@ def get_state( def update_state( self, thread_id: str, - values: dict, + values: Optional[Union[dict, Sequence[dict]]], *, as_node: Optional[str] = None, checkpoint: Optional[Checkpoint] = None, From d2c359f7c932da93a41fcb209fa80738a559f7c9 Mon Sep 17 00:00:00 2001 From: Andrew Nguonly Date: Tue, 8 Oct 2024 16:44:01 -0700 Subject: [PATCH 18/19] docs: Update LangGraph API docs (#2056) ### Summary Adding endpoints for `/subgraphs` and `/store`. 
--- docs/docs/cloud/reference/api/openapi.json | 749 +++++++++++++++------ 1 file changed, 529 insertions(+), 220 deletions(-) diff --git a/docs/docs/cloud/reference/api/openapi.json b/docs/docs/cloud/reference/api/openapi.json index fb39de0c5..aab93c6cd 100644 --- a/docs/docs/cloud/reference/api/openapi.json +++ b/docs/docs/cloud/reference/api/openapi.json @@ -7,9 +7,7 @@ "paths": { "/assistants": { "post": { - "tags": [ - "assistants/create" - ], + "tags": ["assistants/create"], "summary": "Create Assistant", "description": "Create an assistant.", "operationId": "create_assistant_assistants_post", @@ -49,9 +47,7 @@ }, "/assistants/search": { "post": { - "tags": [ - "assistants/manage" - ], + "tags": ["assistants/manage"], "summary": "Search Assistants", "description": "List assistants.", "operationId": "search_assistants_assistants_search_post", @@ -95,9 +91,7 @@ }, "/assistants/{assistant_id}": { "get": { - "tags": [ - "assistants/manage" - ], + "tags": ["assistants/manage"], "summary": "Get Assistant", "description": "Get an assistant by ID.", "operationId": "get_assistant_assistants__assistant_id__get", @@ -139,9 +133,7 @@ } }, "delete": { - "tags": [ - "assistants/manage" - ], + "tags": ["assistants/manage"], "summary": "Delete Assistant", "description": "Delete an assistant by ID.", "operationId": "delete_assistant_assistants__assistant_id__delete", @@ -181,9 +173,7 @@ } }, "patch": { - "tags": [ - "assistants/manage" - ], + "tags": ["assistants/manage"], "summary": "Patch Assistant", "description": "Update an assistant.", "operationId": "patch_assistant_assistants__assistant_id__patch", @@ -237,9 +227,7 @@ }, "/assistants/{assistant_id}/graph": { "get": { - "tags": [ - "assistants/manage" - ], + "tags": ["assistants/manage"], "summary": "Get Assistant Graph", "description": "Get an assistant by ID.", "operationId": "get_assistant_graph_assistants__assistant_id__graph_get", @@ -300,11 +288,127 @@ } } }, - "/assistants/{assistant_id}/schemas": { + 
"/assistants/{assistant_id}/subgraphs": { + "get": { + "tags": ["assistants/manage"], + "summary": "Get Assistant Subgraphs", + "description": "Get an assistant's subgraphs.", + "operationId": "get_assistant_subgraphs_assistants__assistant_id__subgraphs_get", + "parameters": [ + { + "description": "The ID of the assistant.", + "required": true, + "schema": { + "type": "string", + "format": "uuid", + "title": "Assistant Id" + }, + "name": "assistant_id", + "in": "path" + }, + { + "description": "Recursively retrieve subgraphs of subgraphs.", + "required": false, + "schema": { + "type": "boolean", + "title": "Recurse", + "default": false + }, + "name": "recurse", + "in": "query" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Subgraphs" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/assistants/{assistant_id}/subgraphs/{namespace}": { "get": { - "tags": [ - "assistants/manage" + "tags": ["assistants/manage"], + "summary": "Get Assistant Subgraphs by Namespace", + "description": "Get an assistant's subgraphs filtered by namespace.", + "operationId": "get_assistant_subgraphs_assistants__assistant_id__subgraphs__namespace__get", + "parameters": [ + { + "description": "The ID of the assistant.", + "required": true, + "schema": { + "type": "string", + "format": "uuid", + "title": "Assistant Id" + }, + "name": "assistant_id", + "in": "path" + }, + { + "description": "Namespace of the subgraph to filter by.", + "required": true, + "schema": { + "type": "string", + "title": "Namespace" + }, + "name": "namespace", + "in": "path" + }, + { + "description": "Recursively retrieve subgraphs of subgraphs.", + "required": false, + "schema": { + "type": "boolean", + "title": "Recurse", + "default": false + }, + "name": 
"recurse", + "in": "query" + } ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Subgraphs" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/assistants/{assistant_id}/schemas": { + "get": { + "tags": ["assistants/manage"], "summary": "Get Assistant Schemas", "description": "Get an assistant by ID.", "operationId": "get_assistant_schemas_assistants__assistant_id__schemas_get", @@ -348,9 +452,7 @@ }, "/assistants/{assistant_id}/versions": { "post": { - "tags": [ - "assistants/manage" - ], + "tags": ["assistants/manage"], "summary": "Get Assistant Versions", "description": "Get all versions of an assistant.", "operationId": "get_assistant_versions_assistants__assistant_id__versions_get", @@ -398,9 +500,7 @@ }, "/assistants/{assistant_id}/change_version": { "post": { - "tags": [ - "assistants/manage" - ], + "tags": ["assistants/manage"], "summary": "Change Assistant Version", "description": "Change the version of an assistant.", "operationId": "change_assistant_version__assistant_id__change_version__version_post", @@ -455,9 +555,7 @@ }, "/threads": { "post": { - "tags": [ - "threads/create" - ], + "tags": ["threads/create"], "summary": "Create Thread", "description": "Create a thread.", "operationId": "create_thread_threads_post", @@ -497,9 +595,7 @@ }, "/threads/search": { "post": { - "tags": [ - "threads/manage" - ], + "tags": ["threads/manage"], "summary": "Search Threads", "description": "List threads.", "operationId": "search_threads_threads_search_post", @@ -543,9 +639,7 @@ }, "/threads/{thread_id}/state": { "get": { - "tags": [ - "threads/state" - ], + "tags": ["threads/state"], "summary": "Get Latest Thread State", "description": "Get state for a thread.", "operationId": 
"get_latest_thread_state_threads__thread_id__state_get", @@ -587,9 +681,7 @@ } }, "post": { - "tags": [ - "threads/state" - ], + "tags": ["threads/state"], "summary": "Update Thread State", "description": "Add state to a thread.", "operationId": "update_thread_state_threads__thread_id__state_post", @@ -643,9 +735,7 @@ }, "/threads/{thread_id}/state/checkpoint": { "post": { - "tags": [ - "threads/state" - ], + "tags": ["threads/state"], "summary": "Get Thread State At Checkpoint", "description": "Get state for a thread at a specific checkpoint.", "operationId": "post_thread_state_at_checkpoint_threads__thread_id__state__checkpoint_id__get", @@ -685,9 +775,7 @@ }, "/threads/{thread_id}/history": { "get": { - "tags": [ - "threads/state" - ], + "tags": ["threads/state"], "summary": "Get Thread History", "description": "Get all past states for a thread.", "operationId": "get_thread_history_threads__thread_id__history_get", @@ -752,9 +840,7 @@ } }, "post": { - "tags": [ - "threads/state" - ], + "tags": ["threads/state"], "summary": "Get Thread History Post", "description": "Get all past states for a thread.", "operationId": "get_thread_history_post_threads__thread_id__history_post", @@ -812,9 +898,7 @@ }, "/threads/{thread_id}": { "get": { - "tags": [ - "threads/manage" - ], + "tags": ["threads/manage"], "summary": "Get Thread", "description": "Get a thread by ID.", "operationId": "get_thread_threads__thread_id__get", @@ -856,9 +940,7 @@ } }, "delete": { - "tags": [ - "threads/manage" - ], + "tags": ["threads/manage"], "summary": "Delete Thread", "description": "Delete a thread by ID.", "operationId": "delete_thread_threads__thread_id__delete", @@ -898,9 +980,7 @@ } }, "patch": { - "tags": [ - "threads/manage" - ], + "tags": ["threads/manage"], "summary": "Patch Thread", "description": "Update a thread.", "operationId": "patch_thread_threads__thread_id__patch", @@ -954,9 +1034,7 @@ }, "/threads/{thread_id}/runs": { "get": { - "tags": [ - "runs/manage" - ], + "tags": 
["runs/manage"], "summary": "List Runs", "description": "List runs for a thread.", "operationId": "list_runs_http_threads__thread_id__runs_get", @@ -1021,9 +1099,7 @@ } }, "post": { - "tags": [ - "runs/create" - ], + "tags": ["runs/create"], "summary": "Create Background Run", "description": "Create a run, return immediately.", "operationId": "create_run_threads__thread_id__runs_post", @@ -1077,9 +1153,7 @@ }, "/threads/{thread_id}/runs/crons": { "post": { - "tags": [ - "runs/create" - ], + "tags": ["runs/create"], "summary": "Create Thread Cron", "description": "Create a cron to schedule runs on a thread.", "operationId": "create_thread_cron_threads__thread_id__runs_crons_post", @@ -1133,9 +1207,7 @@ }, "/threads/{thread_id}/runs/stream": { "post": { - "tags": [ - "runs/create" - ], + "tags": ["runs/create"], "summary": "Create Streaming Run", "description": "Create a run, stream the output.", "operationId": "stream_run_threads__thread_id__runs_stream_post", @@ -1187,9 +1259,7 @@ }, "/threads/{thread_id}/runs/wait": { "post": { - "tags": [ - "runs/create" - ], + "tags": ["runs/create"], "summary": "Create Run and Get Output", "description": "Create a run, return the final output.", "operationId": "wait_run_threads__thread_id__runs_wait_post", @@ -1241,9 +1311,7 @@ }, "/threads/{thread_id}/runs/{run_id}": { "get": { - "tags": [ - "runs/manage" - ], + "tags": ["runs/manage"], "summary": "Get Run", "description": "Get a run by ID.", "operationId": "get_run_http_threads__thread_id__runs__run_id__get", @@ -1297,9 +1365,7 @@ } }, "delete": { - "tags": [ - "runs/manage" - ], + "tags": ["runs/manage"], "summary": "Delete Run", "description": "Delete a run by ID.", "operationId": "delete_run_threads__thread_id__runs__run_id__delete", @@ -1353,9 +1419,7 @@ }, "/threads/{thread_id}/runs/{run_id}/join": { "get": { - "tags": [ - "runs/manage" - ], + "tags": ["runs/manage"], "summary": "Join Run", "description": "Wait for a run to finish.", "operationId": 
"join_run_http_threads__thread_id__runs__run_id__join_get", @@ -1409,9 +1473,7 @@ }, "/threads/{thread_id}/runs/{run_id}/stream": { "get": { - "tags": [ - "runs/manage" - ], + "tags": ["runs/manage"], "summary": "Join Run Stream", "description": "Join a run stream. This endpoint streams output in real-time from a run similar to the /threads/__THREAD_ID__/runs/stream endpoint. Only output produced after this endpoint is called will be streamed.", "operationId": "stream_run_http_threads__thread_id__runs__run_id__join_get", @@ -1465,9 +1527,7 @@ }, "/threads/{thread_id}/runs/{run_id}/cancel": { "post": { - "tags": [ - "runs/manage" - ], + "tags": ["runs/manage"], "summary": "Cancel Run", "operationId": "cancel_run_http_threads__thread_id__runs__run_id__cancel_post", "parameters": [ @@ -1530,9 +1590,7 @@ }, "/runs/crons": { "post": { - "tags": [ - "runs/create" - ], + "tags": ["runs/create"], "summary": "Create Cron", "description": "Create a cron to schedule runs on new threads.", "operationId": "create_cron_runs_crons_post", @@ -1572,9 +1630,7 @@ }, "/runs/crons/search": { "post": { - "tags": [ - "crons/search" - ], + "tags": ["crons/search"], "summary": "Search Crons", "description": "Search all active crons", "operationId": "search_crons_runs_crons_post", @@ -1618,9 +1674,7 @@ }, "/runs/stream": { "post": { - "tags": [ - "runs/create" - ], + "tags": ["runs/create"], "summary": "Stream Run in new Thread", "description": "Create a run in a new thread, stream the output.", "operationId": "stream_run_stateless_runs_stream_post", @@ -1658,9 +1712,7 @@ }, "/runs/wait": { "post": { - "tags": [ - "runs/create" - ], + "tags": ["runs/create"], "summary": "Create Run in new Thread and Get Output", "description": "Create a run in a new thread, return the final output.", "operationId": "wait_run_stateless_runs_wait_post", @@ -1698,9 +1750,7 @@ }, "/runs": { "post": { - "tags": [ - "runs/create" - ], + "tags": ["runs/create"], "summary": "Create Background Run in new Thread", 
"description": "Create a run in a new thread, return immediately.", "operationId": "run_stateless_runs_post", @@ -1738,9 +1788,7 @@ }, "/runs/batch": { "post": { - "tags": [ - "runs/create" - ], + "tags": ["runs/create"], "summary": "Create Run Batch", "description": "Create a batch of runs in new threads, return immediately.", "operationId": "run_batch_stateless_runs_post", @@ -1778,9 +1826,7 @@ }, "/runs/crons/{cron_id}": { "delete": { - "tags": [ - "runs/manage" - ], + "tags": ["runs/manage"], "summary": "Delete Cron", "description": "Delete a cron by ID.", "operationId": "delete_cron_runs_crons__cron_id__delete", @@ -1817,6 +1863,170 @@ } } } + }, + + "/store/items": { + "put": { + "summary": "Store or update an item.", + "operationId": "put_item", + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/StorePutRequest" } + } + } + }, + "responses": { + "204": { + "description": "Successful Response" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + }, + "delete": { + "summary": "Delete an item.", + "operationId": "delete_item", + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/StoreDeleteRequest" } + } + } + }, + "responses": { + "204": { + "description": "Successful Response" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + }, + "get": { + "summary": "Retrieve a single item.", + "operationId": "get_item", + "parameters": [ + { + "name": "key", + "in": "query", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "namespace", + "in": "query", + "required": false, + "schema": { + "type": "array", + "items": { + "type": "string" + } + } + } + ], + 
"responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/Item" } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/store/items/search": { + "post": { + "summary": "Search for items within a namespace prefix.", + "operationId": "search_items", + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/StoreSearchRequest" } + } + } + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/SearchItemsResponse" } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/store/namespaces": { + "post": { + "summary": "List namespaces with optional match conditions.", + "operationId": "list_namespaces", + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/StoreListNamespacesRequest" + } + } + } + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ListNamespaceResponse" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } } }, "components": { @@ -1912,9 +2122,7 @@ } }, "type": "object", - "required": [ - "graph_id" - ], + "required": ["graph_id"], "title": "AssistantCreate", "description": "Payload for creating an assistant." 
}, @@ -2088,9 +2296,7 @@ "anyOf": [ { "type": "string", - "enum": [ - "*" - ] + "enum": ["*"] }, { "items": { @@ -2105,9 +2311,7 @@ "anyOf": [ { "type": "string", - "enum": [ - "*" - ] + "enum": ["*"] }, { "items": { @@ -2120,21 +2324,13 @@ }, "multitask_strategy": { "type": "string", - "enum": [ - "reject", - "rollback", - "interrupt", - "enqueue" - ], + "enum": ["reject", "rollback", "interrupt", "enqueue"], "title": "Multitask Strategy", "default": "reject" } }, "type": "object", - "required": [ - "assistant_id", - "schedule" - ], + "required": ["assistant_id", "schedule"], "title": "CronCreate", "description": "Payload for creating a cron." }, @@ -2195,13 +2391,40 @@ } }, "type": "object", - "required": [ - "graph_id", - "state_schema", - "config_schema" - ], + "required": ["graph_id", "state_schema", "config_schema"], "title": "GraphSchema" }, + "GraphSchemaNoId": { + "properties": { + "input_schema": { + "type": "object", + "title": "Input Schema" + }, + "output_schema": { + "type": "object", + "title": "Output Schema" + }, + "state_schema": { + "type": "object", + "title": "State Schema" + }, + "config_schema": { + "type": "object", + "title": "Config Schema" + } + }, + "type": "object", + "required": ["input_schema", "output_schema", "state_schema", "config_schema"], + "title": "GraphSchemaNoId" + }, + "Subgraphs": { + "type": "object", + "additionalProperties": { + "$ref": "#/components/schemas/GraphSchemaNoId" + }, + "title": "Subgraphs", + "description": "Map of graph name to graph schema metadata (`input_schema`, `output_schema`, `state_schema`, `config_schema`)."
+ }, "HTTPValidationError": { "properties": { "detail": { @@ -2244,13 +2467,7 @@ }, "status": { "type": "string", - "enum": [ - "pending", - "error", - "success", - "timeout", - "interrupted" - ], + "enum": ["pending", "error", "success", "timeout", "interrupted"], "title": "Status" }, "metadata": { @@ -2263,12 +2480,7 @@ }, "multitask_strategy": { "type": "string", - "enum": [ - "reject", - "rollback", - "interrupt", - "enqueue" - ], + "enum": ["reject", "rollback", "interrupt", "enqueue"], "title": "Multitask Strategy" } }, @@ -2359,9 +2571,7 @@ "anyOf": [ { "type": "string", - "enum": [ - "*" - ] + "enum": ["*"] }, { "items": { @@ -2376,9 +2586,7 @@ "anyOf": [ { "type": "string", - "enum": [ - "*" - ] + "enum": ["*"] }, { "items": { @@ -2418,16 +2626,11 @@ } ], "title": "Stream Mode", - "default": [ - "values" - ] + "default": ["values"] }, "on_disconnect": { "type": "string", - "enum": [ - "cancel", - "continue" - ], + "enum": ["cancel", "continue"], "title": "On Disconnect", "default": "cancel" }, @@ -2440,12 +2643,7 @@ }, "multitask_strategy": { "type": "string", - "enum": [ - "reject", - "rollback", - "interrupt", - "enqueue" - ], + "enum": ["reject", "rollback", "interrupt", "enqueue"], "title": "Multitask Strategy", "default": "reject" }, @@ -2456,9 +2654,7 @@ } }, "type": "object", - "required": [ - "assistant_id" - ], + "required": ["assistant_id"], "title": "RunCreateStateful", "description": "Payload for creating a run." 
}, @@ -2540,9 +2736,7 @@ "anyOf": [ { "type": "string", - "enum": [ - "*" - ] + "enum": ["*"] }, { "items": { @@ -2557,9 +2751,7 @@ "anyOf": [ { "type": "string", - "enum": [ - "*" - ] + "enum": ["*"] }, { "items": { @@ -2599,9 +2791,7 @@ } ], "title": "Stream Mode", - "default": [ - "values" - ] + "default": ["values"] }, "feedback_keys": { "items": { @@ -2612,19 +2802,13 @@ }, "on_completion": { "type": "string", - "enum": [ - "delete", - "keep" - ], + "enum": ["delete", "keep"], "title": "On Completion", "default": "delete" }, "on_disconnect": { "type": "string", - "enum": [ - "cancel", - "continue" - ], + "enum": ["cancel", "continue"], "title": "On Disconnect", "default": "cancel" }, @@ -2635,9 +2819,7 @@ } }, "type": "object", - "required": [ - "assistant_id" - ], + "required": ["assistant_id"], "title": "RunCreateStateless", "description": "Payload for creating a streaming run." }, @@ -2741,12 +2923,7 @@ }, "status": { "type": "string", - "enum": [ - "idle", - "busy", - "interrupted", - "error" - ], + "enum": ["idle", "busy", "interrupted", "error"], "title": "Status", "description": "Filter by thread status." }, @@ -2793,12 +2970,7 @@ }, "status": { "type": "string", - "enum": [ - "idle", - "busy", - "interrupted", - "error" - ], + "enum": ["idle", "busy", "interrupted", "error"], "title": "Status" }, "values": { @@ -2859,9 +3031,7 @@ "description": "Include subgraph states." } }, - "required": [ - "checkpoint" - ], + "required": ["checkpoint"], "type": "object", "title": "ThreadStateCheckpointRequest", "description": "Payload for getting the state of a thread at a checkpoint." 
@@ -2917,10 +3087,7 @@ "$ref": "#/components/schemas/ThreadState" } }, - "required": [ - "id", - "name" - ] + "required": ["id", "name"] }, "type": "array", "title": "Tasks" @@ -2943,13 +3110,7 @@ } }, "type": "object", - "required": [ - "values", - "next", - "checkpoint", - "metadata", - "created_at" - ], + "required": ["values", "next", "checkpoint", "metadata", "created_at"], "title": "ThreadState" }, "ThreadStateSearch": { @@ -2963,14 +3124,26 @@ "minimum": 1 }, "before": { - "type": "string", "title": "Before", - "description": "Return states before this checkpoint ID." + "description": "Return states before this checkpoint ID.", + "anyOf": [ + { + "type": "string" + }, + { + "type": "object" + } + ] }, "metadata": { "type": "object", "title": "Metadata", "description": "Filter states by metadata key-value pairs." + }, + "checkpoint": { + "type": "object", + "title": "Checkpoint", + "description": "Return states for this subgraph." } }, "type": "object", @@ -3019,6 +3192,146 @@ "title": "ThreadStateUpdateResponse", "description": "Response for adding state to a thread." 
}, + "StorePutRequest": { + "type": "object", + "required": ["namespace", "key", "value"], + "properties": { + "namespace": { + "type": "array", + "items": { "type": "string" } + }, + "key": { "type": "string" }, + "value": { "type": "object" } + } + }, + "StoreDeleteRequest": { + "type": "object", + "required": ["key"], + "properties": { + "namespace": { + "type": "array", + "items": { "type": "string" } + }, + "key": { "type": "string" } + } + }, + "StoreSearchRequest": { + "type": "object", + "properties": { + "namespace_prefix": { + "type": ["array", "null"], + "items": { "type": "string" } + }, + "filter": { + "type": ["object", "null"], + "additionalProperties": true + }, + "limit": { "type": "integer", "default": 10 }, + "offset": { "type": "integer", "default": 0 } + } + }, + "StoreListNamespacesRequest": { + "type": "object", + "properties": { + "prefix": { + "type": "array", + "items": { "type": "string" } + }, + "suffix": { + "type": "array", + "items": { "type": "string" } + }, + "max_depth": { "type": "integer" }, + "limit": { "type": "integer", "default": 100 }, + "offset": { "type": "integer", "default": 0 } + } + }, + "Item": { + "type": "object", + "required": ["namespace", "key", "value", "created_at", "updated_at"], + "properties": { + "namespace": { + "type": "array", + "items": { "type": "string" } + }, + "key": { "type": "string" }, + "value": { "type": "object" }, + "created_at": { + "type": "string", + "format": "date-time" + }, + "updated_at": { + "type": "string", + "format": "date-time" + } + } + }, + "SearchItemsResponse": { + "type": "object", + "required": ["items"], + "properties": { + "items": { + "type": "array", + "items": { "$ref": "#/components/schemas/Item" } + } + } + }, + "ListNamespaceResponse": { + "type": "array", + "items": { + "type": "array", + "items": { "type": "string" } + } + }, + "ErrorResponse": { + "type": "object", + "properties": { + "error": { "type": "string" }, + "message": { "type": "string" } + } + } + }, + 
"responses": { + "GetItemResponse": { + "description": "Successful retrieval of an item.", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/Item" } + } + } + }, + "PutItemResponse": { + "description": "Item successfully stored or updated.", + "content": {} + }, + "DeleteItemResponse": { + "description": "Item successfully deleted.", + "content": {} + }, + "SearchItemsResponse": { + "description": "Successful search operation.", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/SearchItemsResponse" } + } + } + }, + "ListNamespacesResponse": { + "description": "Successful retrieval of namespaces.", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/ListNamespaceResponse" } + } + } + }, + "ErrorResponse": { + "description": "An error occurred.", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/ErrorResponse" } + } + } + }, "ValidationError": { "properties": { "loc": { @@ -3045,11 +3358,7 @@ } }, "type": "object", - "required": [ - "loc", - "msg", - "type" - ], + "required": ["loc", "msg", "type"], "title": "ValidationError" } } From a53a5667304fa5d68d040c333f8af4ad8fdd675c Mon Sep 17 00:00:00 2001 From: Tat Dat Duong Date: Wed, 9 Oct 2024 14:33:09 +0200 Subject: [PATCH 19/19] Bump to 0.2.35 --- libs/langgraph/pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/libs/langgraph/pyproject.toml b/libs/langgraph/pyproject.toml index 4a90b63d3..8c5efd8d9 100644 --- a/libs/langgraph/pyproject.toml +++ b/libs/langgraph/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "langgraph" -version = "0.2.34" +version = "0.2.35" description = "Building stateful, multi-actor applications with LLMs" authors = [] license = "MIT"