From b020bd74ebcca8b8ca84de7332165eb434b0435f Mon Sep 17 00:00:00 2001
From: Fabian Gebhart <16943048+fgebhart@users.noreply.github.com>
Date: Wed, 4 Dec 2024 11:07:35 +0100
Subject: [PATCH] drop default API URL for host of clients

---
 .env.example                             |  2 +-
 .github/workflows/integration.yml        |  2 +-
 Changelog.md                             |  5 +++++
 Contributing.md                          |  4 ++--
 README.md                                |  4 ++--
 aleph_alpha_client/aleph_alpha_client.py | 17 ++++++++---------
 pyproject.toml                           |  2 +-
 tests/common.py                          |  2 +-
 tests/test_embed.py                      |  4 ++--
 tests/test_qa.py                         |  1 -
 10 files changed, 23 insertions(+), 20 deletions(-)

diff --git a/.env.example b/.env.example
index 0be3022..99eb740 100644
--- a/.env.example
+++ b/.env.example
@@ -1,2 +1,2 @@
-TEST_API_URL=https://test.api.aleph-alpha.com
+TEST_API_URL=https://inference-api.your-domain.com
 TEST_TOKEN=
\ No newline at end of file
diff --git a/.github/workflows/integration.yml b/.github/workflows/integration.yml
index c9c021f..abbd38d 100644
--- a/.github/workflows/integration.yml
+++ b/.github/workflows/integration.yml
@@ -26,5 +26,5 @@ jobs:
         run: |
           poetry run pytest
         env:
-          TEST_API_URL: https://api.aleph-alpha.com
+          TEST_API_URL: ${{ secrets.TEST_API_URL }}
           TEST_TOKEN: ${{ secrets.AA_API_TOKEN }}
diff --git a/Changelog.md b/Changelog.md
index dd1d9d6..03aa5a2 100644
--- a/Changelog.md
+++ b/Changelog.md
@@ -1,5 +1,10 @@
 # Changelog
 
+## 8.0.0
+
+- Remove the default value for the `host` parameter in `Client` and `AsyncClient`. Passing a
+  value for `host` is now required.
+
 ## 7.6.0
 
 - Add `instructable_embed` to `Client` and `AsyncClient`
diff --git a/Contributing.md b/Contributing.md
index 8a2e526..001b165 100644
--- a/Contributing.md
+++ b/Contributing.md
@@ -16,7 +16,7 @@ Tests can be run using pytest. Make sure to create a `.env` file with the follow
 
 ```env
 # test settings
-TEST_API_URL=https://test.api.aleph-alpha.com
+TEST_API_URL=https://inference-api.your-domain.com
 TEST_TOKEN=your_token
 ```
 
@@ -24,7 +24,7 @@ Instead of a token username and password can be used.
 
 ```env
 # test settings
-TEST_API_URL=https://api.aleph-alpha.com
+TEST_API_URL=https://inference-api.your-domain.com
 TEST_USERNAME=your_username
 TEST_PASSWORD=your_password
 ```
diff --git a/README.md b/README.md
index 363f7ef..89ff4ae 100644
--- a/README.md
+++ b/README.md
@@ -21,9 +21,9 @@ from aleph_alpha_client import Client, CompletionRequest, Prompt
-client = Client(token=os.getenv("AA_TOKEN"))
+client = Client(token=os.getenv("AA_TOKEN"), host="https://inference-api.your-domain.com")
 request = CompletionRequest(
     prompt=Prompt.from_text("Provide a short description of AI:"),
     maximum_tokens=64,
 )
-response = client.complete(request, model="luminous-extended")
+response = client.complete(request, model="pharia-1-llm-7b-control")
 print(response.completions[0].completion)
 ```
 
@@ -38,9 +38,9 @@ from aleph_alpha_client import AsyncClient, CompletionRequest, Prompt
-async with AsyncClient(token=os.environ["AA_TOKEN"]) as client:
+async with AsyncClient(token=os.environ["AA_TOKEN"], host="https://inference-api.your-domain.com") as client:
     request = CompletionRequest(
         prompt=Prompt.from_text("Provide a short description of AI:"),
         maximum_tokens=64,
     )
-    response = client.complete_with_streaming(request, model="luminous-base")
+    response = client.complete_with_streaming(request, model="pharia-1-llm-7b-control")
     async for stream_item in response:
         print(stream_item)
 
diff --git a/aleph_alpha_client/aleph_alpha_client.py b/aleph_alpha_client/aleph_alpha_client.py
index 985a07c..b96a2bf 100644
--- a/aleph_alpha_client/aleph_alpha_client.py
+++ b/aleph_alpha_client/aleph_alpha_client.py
@@ -129,7 +129,7 @@ class Client:
         token (string, required):
             The API token that will be used for authentication.
 
-        host (string, required, default "https://api.aleph-alpha.com"):
+        host (string, required):
             The hostname of the API host.
 
         hosting(string, optional, default None):
@@ -164,16 +164,16 @@
 
     Example usage:
         >>> request = CompletionRequest(
                 prompt=Prompt.from_text(f"Request"), maximum_tokens=64
             )
-        >>> client = Client(token=os.environ["AA_TOKEN"])
-        >>> response: CompletionResponse = client.complete(request, "luminous-base")
+        >>> client = Client(token=os.environ["AA_TOKEN"], host="https://inference-api.your-domain.com")
+        >>> response: CompletionResponse = client.complete(request, "pharia-1-llm-7b-control")
     """
 
     def __init__(
         self,
         token: str,
-        host: str = "https://api.aleph-alpha.com",
+        host: str,
         hosting: Optional[str] = None,
         request_timeout_seconds: int = DEFAULT_REQUEST_TIMEOUT,
         total_retries: int = 8,
@@ -706,7 +706,7 @@ class AsyncClient:
         token (string, required):
             The API token that will be used for authentication.
 
-        host (string, required, default "https://api.aleph-alpha.com"):
+        host (string, required):
             The hostname of the API host.
 
         hosting(string, optional, default None):
@@ -740,15 +740,15 @@
             Internal feature.
 
     Example usage:
         >>> request = CompletionRequest(prompt=Prompt.from_text(f"Request"), maximum_tokens=64)
-        >>> async with AsyncClient(token=os.environ["AA_TOKEN"]) as client:
-                response: CompletionResponse = await client.complete(request, "luminous-base")
+        >>> async with AsyncClient(token=os.environ["AA_TOKEN"], host="https://inference-api.your-domain.com") as client:
+                response: CompletionResponse = await client.complete(request, "pharia-1-llm-7b-control")
     """
 
     def __init__(
         self,
         token: str,
-        host: str = "https://api.aleph-alpha.com",
+        host: str,
         hosting: Optional[str] = None,
         request_timeout_seconds: int = DEFAULT_REQUEST_TIMEOUT,
         total_retries: int = 8,
@@ -846,7 +846,6 @@ async def _post_request(
         json_body = self._build_json_body(request, model)
         query_params = self._build_query_parameters()
 
-
         async with self.session.post(
             self.host + endpoint, json=json_body, params=query_params
         ) as response:
diff --git a/pyproject.toml b/pyproject.toml
index 620919e..191945c 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "aleph-alpha-client"
-version = "7.6.0"
+version = "8.0.0"
 description = "python client to interact with Aleph Alpha api endpoints"
 authors = ["Aleph Alpha "]
 license = "MIT"
diff --git a/tests/common.py b/tests/common.py
index 9390638..80f48ae 100644
--- a/tests/common.py
+++ b/tests/common.py
@@ -34,7 +34,7 @@ def model_name() -> str:
 
 @pytest.fixture(scope="session")
 def chat_model_name() -> str:
-    return "llama-3.1-70b-instruct"
+    return "llama-3.1-8b-instruct"
 
 
 @pytest.fixture(scope="session")
diff --git a/tests/test_embed.py b/tests/test_embed.py
index 91344d3..49f3647 100644
--- a/tests/test_embed.py
+++ b/tests/test_embed.py
@@ -115,7 +115,7 @@ async def test_can_instructable_embed_with_async_client(
     )
 
     response = await async_client.instructable_embed(
-        request, model="Pharia-1-Embedding-4608-control"
+        request, model="pharia-1-embedding-4608-control"
     )
     assert response.model_version is not None
     assert response.embedding
@@ -239,7 +239,7 @@ def test_embed_instructable(sync_client: Client):
     )
 
     result = sync_client.instructable_embed(
-        request=request, model="Pharia-1-Embedding-4608-control"
+        request=request, model="pharia-1-embedding-4608-control"
     )
 
     assert result.model_version is not None
diff --git a/tests/test_qa.py b/tests/test_qa.py
index e09cb64..8b362b9 100644
--- a/tests/test_qa.py
+++ b/tests/test_qa.py
@@ -18,7 +18,6 @@ async def test_can_qa_with_async_client(async_client: AsyncClient):
         query="Who likes pizza?",
         documents=[Document.from_text("Andreas likes pizza.")],
     )
-
     response = await async_client.qa(request)
     assert len(response.answers) == 1
     assert response.answers[0].score > 0.0
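
Reviewer note (not part of the patch): a minimal migration sketch of what this breaking change asks of callers, assuming the placeholder endpoint `https://inference-api.your-domain.com` used throughout the patch and the `pharia-1-llm-7b-control` model from the README examples; adjust both for your deployment.

```python
import asyncio
import os

from aleph_alpha_client import AsyncClient, Client, CompletionRequest, Prompt

# Placeholder endpoint taken from this patch; point it at your own inference API.
HOST = "https://inference-api.your-domain.com"

request = CompletionRequest(
    prompt=Prompt.from_text("Provide a short description of AI:"),
    maximum_tokens=64,
)

# Synchronous client: `host` no longer has a default and must be passed explicitly.
client = Client(token=os.environ["AA_TOKEN"], host=HOST)
response = client.complete(request, model="pharia-1-llm-7b-control")
print(response.completions[0].completion)


# Asynchronous client: the same requirement applies to AsyncClient.
async def main() -> None:
    async with AsyncClient(token=os.environ["AA_TOKEN"], host=HOST) as async_client:
        async_response = await async_client.complete(request, model="pharia-1-llm-7b-control")
        print(async_response.completions[0].completion)


asyncio.run(main())
```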