diff --git a/.github/workflows/integration.yml b/.github/workflows/integration.yml index 5580637..bdd78dd 100644 --- a/.github/workflows/integration.yml +++ b/.github/workflows/integration.yml @@ -5,13 +5,18 @@ on: push jobs: lint-and-test: runs-on: ubuntu-latest + strategy: + matrix: + # We support latest 3.x version and 3.7 because + # Google Colab uses 3.7 by default. + python-version: [3.7, 3.x] steps: - uses: actions/checkout@v3 - name: Set up Python uses: actions/setup-python@v3 with: - python-version: "3.x" + python-version: ${{ matrix.python-version }} cache: "pip" cache-dependency-path: | **/setup.py diff --git a/Changelog.md b/Changelog.md index c2bd9f9..0a0c8c6 100644 --- a/Changelog.md +++ b/Changelog.md @@ -1,5 +1,12 @@ # Changelog +## 2.4.1 + +### Python support + +- Minimum supported Python version is now 3.7 +- Previously we only supported version 3.8 + ## 2.4.0 ### New feature diff --git a/aleph_alpha_client/aleph_alpha_client.py b/aleph_alpha_client/aleph_alpha_client.py index 481ec02..657bbbf 100644 --- a/aleph_alpha_client/aleph_alpha_client.py +++ b/aleph_alpha_client/aleph_alpha_client.py @@ -127,8 +127,22 @@ def available_models(self): ) return self._translate_errors(response).json() + def available_checkpoints(self): + """ + Queries all checkpoints which are currently available. + """ + response = self.get_request( + self.host + "checkpoints_available", headers=self.request_headers + ) + return self._translate_errors(response).json() + def tokenize( - self, model: Optional[str], prompt: str, tokens: bool = True, token_ids: bool = True, checkpoint: Optional[str] = None + self, + model: Optional[str], + prompt: str, + tokens: bool = True, + token_ids: bool = True, + checkpoint: Optional[str] = None, ): """ Tokenizes the given prompt for the given model. 
@@ -153,7 +167,12 @@ def tokenize( ) return self._translate_errors(response).json() - def detokenize(self, model: Optional[str], token_ids: List[int], checkpoint: Optional[str] = None): + def detokenize( + self, + model: Optional[str], + token_ids: List[int], + checkpoint: Optional[str] = None, + ): """ Detokenizes the given tokens. """ diff --git a/aleph_alpha_client/embedding.py b/aleph_alpha_client/embedding.py index 6d78345..6762422 100644 --- a/aleph_alpha_client/embedding.py +++ b/aleph_alpha_client/embedding.py @@ -3,7 +3,6 @@ Any, Dict, List, - Literal, NamedTuple, Optional, Tuple, @@ -113,7 +112,7 @@ class SemanticEmbeddingRequest(NamedTuple): prompt: Prompt representation: SemanticRepresentation - compress_to_size: Optional[Literal[128]] = None + compress_to_size: Optional[int] = None class SemanticEmbeddingResponse(NamedTuple): diff --git a/aleph_alpha_client/version.py b/aleph_alpha_client/version.py index 3d67cd6..54499df 100644 --- a/aleph_alpha_client/version.py +++ b/aleph_alpha_client/version.py @@ -1 +1 @@ -__version__ = "2.4.0" +__version__ = "2.4.1" diff --git a/tests/test_qa.py b/tests/test_qa.py index 43890fb..2d4ffea 100644 --- a/tests/test_qa.py +++ b/tests/test_qa.py @@ -80,9 +80,10 @@ def test_qa_with_client(client: AlephAlphaClient): def test_qa_with_client_against_checkpoint( client: AlephAlphaClient, qa_checkpoint_name: str ): - model_name = "luminous-extended" # given a client - assert model_name in map(lambda model: model["name"], client.available_models()) + assert qa_checkpoint_name in map( + lambda checkpoint: checkpoint["name"], client.available_checkpoints() + ) # when posting a QA request with explicit parameters response = client.qa(