Showing 4 changed files with 150 additions and 0 deletions.
One of the four changed files is empty.
@@ -0,0 +1,55 @@
import asyncio
import multiprocessing
import time

import pytest

from llama_deploy import (
    ControlPlaneConfig,
    SimpleMessageQueueConfig,
    WorkflowServiceConfig,
    deploy_core,
    deploy_workflow,
)

from .workflow import HumanInTheLoopWorkflow


def run_async_core():
    # Run the control plane and the simple message queue in this process.
    asyncio.run(deploy_core(ControlPlaneConfig(), SimpleMessageQueueConfig()))


@pytest.fixture(scope="package")
def core():
    # Start the deploy core in a separate process and give it time to boot.
    p = multiprocessing.Process(target=run_async_core)
    p.start()
    time.sleep(5)

    yield

    p.kill()


def run_async_workflow():
    # Register the human-in-the-loop workflow as a service with the control plane.
    asyncio.run(
        deploy_workflow(
            HumanInTheLoopWorkflow(timeout=60),
            WorkflowServiceConfig(
                host="127.0.0.1",
                port=8002,
                service_name="hitl_workflow",
            ),
            ControlPlaneConfig(),
        )
    )


@pytest.fixture(scope="package")
def services(core):
    # Start the workflow service once the core is up; tear it down after the package's tests.
    p = multiprocessing.Process(target=run_async_workflow)
    p.start()
    time.sleep(5)

    yield

    p.kill()
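Both fixtures rely on a fixed time.sleep(5) to let the spawned processes finish booting. A readiness poll is a less brittle alternative; the helper below is a sketch, not part of this commit, that reuses LlamaDeployClient.list_sessions() (already used by the tests) as a health check and assumes the default ControlPlaneConfig points at the locally deployed control plane.

import time

from llama_deploy import ControlPlaneConfig, LlamaDeployClient


def wait_for_control_plane(retries: int = 50, delay: float = 0.2) -> None:
    # Poll until the control plane answers instead of sleeping a fixed five seconds.
    client = LlamaDeployClient(ControlPlaneConfig(), timeout=1)
    for _ in range(retries):
        try:
            client.list_sessions()  # any successful call means the server is ready
            return
        except Exception:
            time.sleep(delay)
    raise RuntimeError("control plane did not become ready in time")

The core and services fixtures could then call a check like this in place of their time.sleep(5).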
@@ -0,0 +1,75 @@
import asyncio
import time

import pytest

from llama_deploy import AsyncLlamaDeployClient, ControlPlaneConfig, LlamaDeployClient
from llama_index.core.workflow.events import HumanResponseEvent


@pytest.mark.e2ehitl
def test_run_client(services):
    client = LlamaDeployClient(ControlPlaneConfig(), timeout=10)

    # sanity check
    sessions = client.list_sessions()
    assert len(sessions) == 0, "Sessions list is not empty"

    # create a session
    session = client.create_session()

    # kick off the run without blocking
    task_id = session.run_nowait("hitl_workflow")

    # send the human response event
    session.send_event(
        ev=HumanResponseEvent(response="42"),
        service_name="hitl_workflow",
        task_id=task_id,
    )

    # poll for the final result while the workflow finishes after the event is sent
    final_result = None
    while final_result is None:
        final_result = session.get_task_result(task_id)
        time.sleep(0.1)
    assert final_result.result == "42", "The human's response is not consistent."

    # delete the session
    client.delete_session(session.session_id)
    sessions = client.list_sessions()
    assert len(sessions) == 0, "Sessions list is not empty"


@pytest.mark.e2ehitl
@pytest.mark.asyncio
async def test_run_client_async(services):
    client = AsyncLlamaDeployClient(ControlPlaneConfig(), timeout=10)

    # sanity check
    sessions = await client.list_sessions()
    assert len(sessions) == 0, "Sessions list is not empty"

    # create a session
    session = await client.create_session()

    # kick off the run without blocking
    task_id = await session.run_nowait("hitl_workflow")

    # send the human response event
    await session.send_event(
        ev=HumanResponseEvent(response="42"),
        service_name="hitl_workflow",
        task_id=task_id,
    )

    # poll for the final result while the workflow finishes after the event is sent
    final_result = None
    while final_result is None:
        final_result = await session.get_task_result(task_id)
        await asyncio.sleep(0.1)
    assert final_result.result == "42", "The human's response is not consistent."

    # delete the session
    await client.delete_session(session.session_id)
    sessions = await client.list_sessions()
    assert len(sessions) == 0, "Sessions list is not empty"
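Both tests are selected through a custom e2ehitl marker, and the async variant depends on the pytest-asyncio plugin. How the marker is registered is not shown in this commit; one possible approach, sketched below, is a pytest_configure hook in the package's conftest (exact wording and placement are assumptions).

def pytest_configure(config):
    # Register the custom marker so pytest does not emit unknown-marker warnings.
    config.addinivalue_line(
        "markers", "e2ehitl: end-to-end human-in-the-loop deployment tests"
    )

With the marker registered, this suite can be run selectively with pytest -m e2ehitl.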
@@ -0,0 +1,20 @@
from llama_index.core.workflow import (
    StartEvent,
    StopEvent,
    Workflow,
    step,
)
from llama_index.core.workflow.events import (
    HumanResponseEvent,
    InputRequiredEvent,
)


class HumanInTheLoopWorkflow(Workflow):
    @step
    async def step1(self, ev: StartEvent) -> InputRequiredEvent:
        # Pause the run by asking the outside world (a human) for input.
        return InputRequiredEvent(prefix="Enter a number: ")

    @step
    async def step2(self, ev: HumanResponseEvent) -> StopEvent:
        # Resume once the human's response arrives and return it as the final result.
        return StopEvent(result=ev.response)
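For reference, the same workflow can also be driven locally, without llama_deploy, by answering InputRequiredEvent instances on the handler's event stream. The sketch below is an assumption based on the llama-index workflow streaming API (handler.stream_events and ctx.send_event), not part of this commit; it mirrors what the e2e test does through the deploy client.

import asyncio

from llama_index.core.workflow.events import HumanResponseEvent, InputRequiredEvent

from workflow import HumanInTheLoopWorkflow  # assumed import path for a local script


async def main() -> None:
    handler = HumanInTheLoopWorkflow(timeout=60).run()
    async for ev in handler.stream_events():
        if isinstance(ev, InputRequiredEvent):
            # Answer the workflow's question, just as the test answers with "42".
            handler.ctx.send_event(HumanResponseEvent(response=input(ev.prefix)))
    result = await handler
    print(result)


if __name__ == "__main__":
    asyncio.run(main())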