Skip to content

Commit

Permalink
Merge branch 'upstash-chat-store-integration' of https://github.com/fahreddinozcan/llama_index into upstash-chat-store-integration
Browse files Browse the repository at this point in the history
  • Loading branch information
fahreddinozcan committed Sep 27, 2024
2 parents 69d3957 + ec9b8a8 commit ee08624
Show file tree
Hide file tree
Showing 9 changed files with 19 additions and 14 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -99,6 +99,10 @@ class Ollama(FunctionCallingLLM):
default=True,
description="Whether the model is a function calling model.",
)
keep_alive: Optional[Union[float, str]] = Field(
default="5m",
description="controls how long the model will stay loaded into memory following the request(default: 5m)",
)

_client: Optional[Client] = PrivateAttr()
_async_client: Optional[AsyncClient] = PrivateAttr()
Expand All @@ -116,6 +120,7 @@ def __init__(
client: Optional[Client] = None,
async_client: Optional[AsyncClient] = None,
is_function_calling_model: bool = True,
keep_alive: Optional[Union[float, str]] = None,
**kwargs: Any,
) -> None:
super().__init__(
Expand All @@ -128,6 +133,7 @@ def __init__(
json_mode=json_mode,
additional_kwargs=additional_kwargs,
is_function_calling_model=is_function_calling_model,
keep_alive=keep_alive,
**kwargs,
)

Expand Down Expand Up @@ -279,6 +285,7 @@ def chat(self, messages: Sequence[ChatMessage], **kwargs: Any) -> ChatResponse:
format="json" if self.json_mode else "",
tools=tools,
options=self._model_kwargs,
keep_alive=self.keep_alive,
)

tool_calls = response["message"].get("tool_calls", [])
Expand Down Expand Up @@ -311,6 +318,7 @@ def gen() -> ChatResponseGen:
format="json" if self.json_mode else "",
tools=tools,
options=self._model_kwargs,
keep_alive=self.keep_alive,
)

response_txt = ""
Expand Down Expand Up @@ -354,6 +362,7 @@ async def gen() -> ChatResponseAsyncGen:
format="json" if self.json_mode else "",
tools=tools,
options=self._model_kwargs,
keep_alive=self.keep_alive,
)

response_txt = ""
Expand Down Expand Up @@ -396,6 +405,7 @@ async def achat(
format="json" if self.json_mode else "",
tools=tools,
options=self._model_kwargs,
keep_alive=self.keep_alive,
)

tool_calls = response["message"].get("tool_calls", [])
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@ exclude = ["**/BUILD"]
license = "MIT"
name = "llama-index-llms-ollama"
readme = "README.md"
version = "0.3.2"
version = "0.3.3"

[tool.poetry.dependencies]
python = ">=3.8.1,<4.0"
Expand Down
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
poetry_requirements(
name="poetry",
module_mapping={"pyjwt": ["jwt"]},
)
Original file line number Diff line number Diff line change
Expand Up @@ -22,12 +22,7 @@
node_to_metadata_dict,
)

try:
from deeplake.core.vectorstore.deeplake_vectorstore import VectorStore

DEEPLAKE_INSTALLED = True
except ImportError:
DEEPLAKE_INSTALLED = False
from deeplake.core.vectorstore.deeplake_vectorstore import VectorStore

logger = logging.getLogger(__name__)

Expand Down Expand Up @@ -112,12 +107,6 @@ def __init__(
num_workers=ingestion_num_workers,
)

if not DEEPLAKE_INSTALLED:
raise ImportError(
"Could not import deeplake python package. "
"Please install it with `pip install deeplake`."
)

self._vectorstore = VectorStore(
path=dataset_path,
ingestion_batch_size=ingestion_batch_size,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -27,12 +27,13 @@ exclude = ["**/BUILD"]
license = "MIT"
name = "llama-index-vector-stores-deeplake"
readme = "README.md"
version = "0.2.0"
version = "0.2.1"

[tool.poetry.dependencies]
python = ">=3.9,<4.0"
deeplake = ">=3.9.12"
llama-index-core = "^0.11.0"
pyjwt = "*"

[tool.poetry.group.dev.dependencies]
ipython = "8.10.0"
Expand Down
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
python_tests(
interpreter_constraints=["==3.9.*", "==3.10.*"],
dependencies=["llama-index-integrations/vector_stores/llama-index-vector-stores-deeplake:poetry#pyjwt"],
)
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import pytest
import jwt # noqa

from llama_index.core import Document
from llama_index.core.vector_stores.types import (
Expand Down
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
python_tests(
interpreter_constraints=["==3.9.*", "==3.10.*"],
dependencies=["llama-index-integrations/vector_stores/llama-index-vector-stores-deeplake:poetry#pyjwt"],
)
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
python_tests(
interpreter_constraints=["==3.9.*", "==3.10.*"],
dependencies=["llama-index-integrations/vector_stores/llama-index-vector-stores-deeplake:poetry#pyjwt"]
)

0 comments on commit ee08624

Please sign in to comment.