ux: wrap httpx to uniform connection errors (#278)
* wrap httpx to uniform connection errors

* Update utils.py
masci authored Sep 27, 2024
1 parent 6cac2b7 commit 7ce73df
Showing 5 changed files with 37 additions and 21 deletions.
5 changes: 3 additions & 2 deletions llama_deploy/cli/deploy.py
@@ -1,7 +1,8 @@
 from typing import IO
 
 import click
-import httpx
+
+from .utils import request
 
 
 @click.command()
@@ -12,7 +13,7 @@ def deploy(global_config: tuple, deployment_config_file: IO) -> None:
     deploy_url = f"{server_url}/deployments/create/"
 
     files = {"file": deployment_config_file.read()}
-    resp = httpx.post(deploy_url, files=files, verify=not disable_ssl)
+    resp = request("POST", deploy_url, files=files, verify=not disable_ssl)
 
     if resp.status_code >= 400:
         raise click.ClickException(resp.json().get("detail"))
12 changes: 4 additions & 8 deletions llama_deploy/cli/status.py
@@ -1,5 +1,7 @@
 import click
-import httpx
+
+
+from .utils import request
 
 
 @click.command()
@@ -8,13 +10,7 @@ def status(global_config: tuple) -> None:
     server_url, disable_ssl = global_config
     status_url = f"{server_url}/status/"
 
-    try:
-        r = httpx.get(status_url, verify=not disable_ssl)
-    except httpx.ConnectError:
-        raise click.ClickException(
-            f"Llama Deploy is not responding, check the apiserver address {server_url} is correct and try again."
-        )
-
+    r = request("GET", status_url, verify=not disable_ssl)
     if r.status_code >= 400:
         body = r.json()
         click.echo(
18 changes: 18 additions & 0 deletions llama_deploy/cli/utils.py
@@ -0,0 +1,18 @@
+from typing import Any
+from urllib.parse import urlparse
+
+import click
+import httpx
+
+
+def request(
+    method: str, url: str | httpx.URL, *args: Any, **kwargs: Any
+) -> httpx.Response:
+    try:
+        return httpx.request(method, url, *args, **kwargs)
+    except httpx.ConnectError:
+        parsed_url = urlparse(str(url))
+        raise click.ClickException(
+            "Llama Deploy is not responding, check that the apiserver "
+            f"is running at {parsed_url.scheme}://{parsed_url.netloc} and try again."
+        )
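
The new helper centralizes connection-failure handling: any httpx.ConnectError raised while talking to the apiserver becomes a click.ClickException, which click prints as a single friendly error line instead of a traceback. A minimal sketch of how a call site uses it; the `ping` command below is hypothetical and not part of this commit, and the default apiserver address is taken from the tests in this diff:

import click

from llama_deploy.cli.utils import request


@click.command()
def ping() -> None:
    # If the apiserver at localhost:4501 is unreachable, `request` raises a
    # ClickException, so the user sees
    # "Error: Llama Deploy is not responding, check that the apiserver ..."
    # rather than an httpx traceback.
    resp = request("GET", "http://localhost:4501/status/", verify=True)
    click.echo(f"apiserver answered with HTTP {resp.status_code}")
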
11 changes: 6 additions & 5 deletions tests/cli/test_deploy.py
@@ -9,13 +9,14 @@
 def test_deploy(runner: CliRunner, data_path: Path) -> None:
     test_config_file = data_path / "deployment.yaml"
     mocked_response = mock.MagicMock(status_code=200, json=lambda: {})
-    with mock.patch("llama_deploy.cli.deploy.httpx") as mocked_httpx:
-        mocked_httpx.post.return_value = mocked_response
+    with mock.patch("llama_deploy.cli.deploy.request") as mocked_httpx:
+        mocked_httpx.return_value = mocked_response
         result = runner.invoke(llamactl, ["deploy", str(test_config_file)])
 
         assert result.exit_code == 0
         with open(test_config_file, "rb") as f:
-            mocked_httpx.post.assert_called_with(
+            mocked_httpx.assert_called_with(
+                "POST",
                 "http://localhost:4501/deployments/create/",
                 files={"file": f.read()},
                 verify=True,
@@ -27,8 +28,8 @@ def test_deploy_failed(runner: CliRunner, data_path: Path) -> None:
     mocked_response = mock.MagicMock(
         status_code=401, json=lambda: {"detail": "Unauthorized!"}
     )
-    with mock.patch("llama_deploy.cli.deploy.httpx") as mocked_httpx:
-        mocked_httpx.post.return_value = mocked_response
+    with mock.patch("llama_deploy.cli.deploy.request") as mocked_httpx:
+        mocked_httpx.return_value = mocked_response
         result = runner.invoke(llamactl, ["deploy", str(test_config_file)])
         assert result.exit_code == 1
         assert result.output == "Error: Unauthorized!\n"
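
The existing test_status_server_down (visible in the next file's hunk context) already covers the down-server path for `status`. A comparable sketch for `deploy`, not part of this commit, could patch the underlying httpx call to raise; it assumes the same `runner` and `data_path` fixtures and that `llamactl` is importable as in the existing tests:

from pathlib import Path
from unittest import mock

import httpx
from click.testing import CliRunner

from llama_deploy.cli import llamactl  # assumed import path, as in the existing tests


def test_deploy_server_down(runner: CliRunner, data_path: Path) -> None:
    test_config_file = data_path / "deployment.yaml"
    # Make the wrapped httpx call fail as if the apiserver were unreachable.
    with mock.patch(
        "llama_deploy.cli.utils.httpx.request",
        side_effect=httpx.ConnectError("connection refused"),
    ):
        result = runner.invoke(llamactl, ["deploy", str(test_config_file)])
    assert result.exit_code == 1
    assert "Llama Deploy is not responding" in result.output
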
12 changes: 6 additions & 6 deletions tests/cli/test_status.py
@@ -13,17 +13,17 @@ def test_status_server_down(runner: CliRunner) -> None:

 def test_status_unhealthy(runner: CliRunner) -> None:
     mocked_response = mock.MagicMock(status_code=500)
-    with mock.patch("llama_deploy.cli.status.httpx") as mocked_httpx:
-        mocked_httpx.get.return_value = mocked_response
+    with mock.patch("llama_deploy.cli.status.request") as mocked_httpx:
+        mocked_httpx.return_value = mocked_response
         result = runner.invoke(llamactl, ["status"])
         assert result.exit_code == 0
         assert "Llama Deploy is unhealthy: [500]" in result.output
 
 
 def test_status(runner: CliRunner) -> None:
     mocked_response = mock.MagicMock(status_code=200, json=lambda: {})
-    with mock.patch("llama_deploy.cli.status.httpx") as mocked_httpx:
-        mocked_httpx.get.return_value = mocked_response
+    with mock.patch("llama_deploy.cli.status.request") as mocked_httpx:
+        mocked_httpx.return_value = mocked_response
         result = runner.invoke(llamactl, ["status"])
         assert result.exit_code == 0
         assert (
@@ -35,8 +35,8 @@ def test_status(runner: CliRunner) -> None:
 def test_status_with_deployments(runner: CliRunner) -> None:
     mocked_response = mock.MagicMock(status_code=200)
     mocked_response.json.return_value = {"deployments": ["foo", "bar"]}
-    with mock.patch("llama_deploy.cli.status.httpx") as mocked_httpx:
-        mocked_httpx.get.return_value = mocked_response
+    with mock.patch("llama_deploy.cli.status.request") as mocked_httpx:
+        mocked_httpx.return_value = mocked_response
         result = runner.invoke(llamactl, ["status"])
         assert result.exit_code == 0
         assert result.output == (