Skip to content

Commit

Permalink
refactor error ergonomics
Browse files Browse the repository at this point in the history
  • Loading branch information
amoffat committed Aug 4, 2024
1 parent bd7d27e commit 5d98fd9
Show file tree
Hide file tree
Showing 6 changed files with 136 additions and 26 deletions.
4 changes: 4 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,3 +1,7 @@
# 0.2.1 - 8/4/24

- Make startup error messages much nicer

# 0.2.0 - 8/2/24

- Support for `tuple` serde datatype
Expand Down
30 changes: 16 additions & 14 deletions README.md
Original file line number Diff line number Diff line change
@@ -1,26 +1,21 @@
# Manifest
# Manifest

```
man·i·fest [verb]
: to make something happen by imagining it and consciously thinking that it will happen
```

Manifest is an experiment in letting an LLM provide the return value for a
function. It allows you to manifest powerful behaviors into existence merely by
defining the function's facade.

Shout out to [@dstufft](https://github.com/dstufft) for gifting me the PyPI repo
name [`manifest`](https://pypi.org/project/manifest/) 🙏

```
pip install manifest
```
Want to easily use an LLM in your code without writing prompts or setting up an
LLM client? Manifest makes it as easy as writing a function that describes what
you want it to do.

# Examples

## Sentiment analysis

Classify some text as positive or not.

```python
from manifest import ai

Expand All @@ -33,6 +28,8 @@ assert is_optimistic("This is amazing!")

## Translation

Translate text from one language to another.

```python
from manifest import ai

Expand All @@ -45,8 +42,7 @@ assert translate("Hello", "fr") == "Bonjour"

## Image analysis

You can pass in a file to make use of a model's multimodal abilities. It
supports `Path`, `io.BytesIO` and `io.BufferedReader`
Analyze images by passing in a `Path` to an image file.

```python
from pathlib import Path
Expand All @@ -62,7 +58,7 @@ print(breed_of_dog(image))

## Complex objects

Your function can use fairly complex data structures.
For advanced uses, you can return complex data structures.

```python
from dataclasses import dataclass
Expand Down Expand Up @@ -90,6 +86,12 @@ print(like_inception)

```

# Installation

```
pip install manifest
```

# How does it work?

Manifest relies heavily on runtime metadata, such as a function's name,
Expand Down
5 changes: 5 additions & 0 deletions manifest/constants.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
from manifest.types.service import Service

# Maps each LLM service to the name of the environment variable that holds
# its API key. Service.AUTO deliberately has no entry: "auto" means "detect
# whichever key is present", so there is no single variable to look up
# (initialize.py iterates `Service` and skips AUTO when listing these).
ENV_KEY_NAMES = {
    Service.OPENAI: "OPENAI_API_KEY",
}
28 changes: 28 additions & 0 deletions manifest/exc.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
from typing import TYPE_CHECKING

if TYPE_CHECKING:
from manifest.types.service import Service


class NoLLMFoundError(Exception):
    """Raised when automatic LLM initialization scans the environment and
    finds no LLM API keys at all.
    """


class UnknownLLMServiceError(Exception):
    """Raised when the user names an LLM service that is not recognized."""

    def __init__(self, service: str):
        # Keep the offending name around so error-reporting code can list
        # the valid alternatives next to it.
        self.service = service
        message = f"Unknown LLM service: {service}"
        super().__init__(message)


class NoApiKeyError(Exception):
    """Raised when automatic LLM initialization cannot find an API key for
    the specific service the user asked for.
    """

    def __init__(self, service: "Service"):
        # Stash the service so callers can tell the user exactly which
        # environment variable to define.
        self.service = service
        message = f"No API key found for {service}"
        super().__init__(message)
93 changes: 82 additions & 11 deletions manifest/initialize.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,8 @@
import os
import sys
from typing import TYPE_CHECKING, Callable

from manifest import constants, exc
from manifest.types.service import Service

if TYPE_CHECKING:
Expand All @@ -9,8 +11,70 @@

# Will be replaced during manual initialization
def make_llm() -> "LLM":
    """Default LLM factory, used until `manifest.init` installs a real one.

    Attempts to build an LLM client from the environment via `env_init()`.
    If configuration is missing or wrong, prints a human-friendly
    diagnostic to stderr describing how to fix it, then terminates the
    process with exit status 1.

    Returns:
        A constructed LLM client on success. Does not return on failure.
    """
    header = """manifest.py error:"""
    manual_clause = """
For advanced users, you may manually initialize the LLM client in your code by
calling `manifest.init(client_maker)`, where `client_maker` is a function that
returns an LLM client.
""".strip()

    # Pre-render the bullet lists reused by the messages below. AUTO is
    # excluded from the env-var list because it has no key of its own.
    valid_services = "\n".join(f" - {s.value}" for s in Service)
    valid_envs = "\n".join(
        f" - {constants.ENV_KEY_NAMES[s]}" for s in Service if s != Service.AUTO
    )

    try:
        make = env_init()
        return make()
    except exc.UnknownLLMServiceError as e:
        print(
            f"""
{header}
Unknown LLM service: "{e.service}". Please specify one of the following
services instead:
{valid_services}
Exiting.
""",
            file=sys.stderr,
        )

    except exc.NoApiKeyError as e:
        print(
            f"""
{header}
No api key found for {e.service}, try defining the environment variable
{constants.ENV_KEY_NAMES[e.service]} in a .env file or in your environment, then
re-running the program.
{manual_clause}
Exiting.
""",
            file=sys.stderr,
        )

    except exc.NoLLMFoundError:
        print(
            f"""
{header}
No LLM api keys found, try defining one of the following environment variables
in a .env file or in your environment, then re-running the program:
{valid_envs}
{manual_clause}
Exiting.
""",
            file=sys.stderr,
        )

    # Fix: use sys.exit rather than the builtin exit(); the builtin is an
    # interactive-shell convenience injected by the site module and may be
    # absent when Python runs with -S or in embedded interpreters.
    sys.exit(1)


def init(make: Callable[[], "LLM"]) -> None:
Expand All @@ -27,19 +91,26 @@ def env_init() -> Callable[[], "LLM"]:

load_dotenv()

service = Service(os.getenv("LLM_SERVICE", "auto"))
# Do you have multiple LLM keys? Allow the user to choose
service_name = os.getenv("MANIFEST_SERVICE", "auto").lower()
try:
service = Service(service_name)
except ValueError:
raise exc.UnknownLLMServiceError(service_name)

key_names = constants.ENV_KEY_NAMES

if service == Service.AUTO:
openai = os.getenv("OPENAI_API_KEY")
if openai:
openai_key = os.getenv(key_names[Service.OPENAI])
if openai_key:
service = Service.OPENAI
else:
raise ValueError("No LLM service discovered")
raise exc.NoLLMFoundError

elif service == Service.OPENAI:
openai = os.getenv("OPENAI_API_KEY")
if not openai:
raise ValueError("OPENAI_API_KEY is required for OpenAI service")
openai_key = os.getenv(key_names[Service.OPENAI])
if not openai_key:
raise exc.NoApiKeyError(service)

# Now we know what service we want to use

Expand All @@ -49,11 +120,11 @@ def make_llm(**kwargs) -> "LLM":
from manifest.llm.openai import OpenAILLM

return OpenAILLM(
api_key=openai,
api_key=openai_key,
model="gpt-4o",
**kwargs,
)

return make_llm

raise ValueError(f"Unknown service: {service}")
raise exc.UnknownLLMServiceError(service_name)
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[tool.poetry]
name = "manifest"
version = "0.2.0"
version = "0.2.1"
description = "Use an LLM to execute code"
homepage = "https://github.com/amoffat/manifest"
repository = "https://github.com/amoffat/manifest"
Expand Down

0 comments on commit 5d98fd9

Please sign in to comment.