diff --git a/dbgpt/rag/index/base.py b/dbgpt/rag/index/base.py
index 764c362e4..e33509c82 100644
--- a/dbgpt/rag/index/base.py
+++ b/dbgpt/rag/index/base.py
@@ -109,6 +109,10 @@ def delete_vector_name(self, index_name: str):
             index_name(str): The name of index to delete.
         """

+    def vector_name_exists(self) -> bool:
+        """Whether name exists."""
+        return True
+
     def load_document_with_limit(
         self, chunks: List[Chunk], max_chunks_once_load: int = 10, max_threads: int = 1
     ) -> List[str]:
diff --git a/dbgpt/storage/vector_store/connector.py b/dbgpt/storage/vector_store/connector.py
index b93872115..4cd5bddd3 100644
--- a/dbgpt/storage/vector_store/connector.py
+++ b/dbgpt/storage/vector_store/connector.py
@@ -87,6 +87,9 @@ def __init__(
             - vector_store_type: vector store type Milvus, Chroma, Weaviate
             - ctx: vector store config params.
         """
+        if vector_store_config is None:
+            raise Exception("vector_store_config is required")
+
         self._index_store_config = vector_store_config
         self._register()

@@ -96,36 +99,25 @@ def __init__(
             raise Exception(f"Vector store {vector_store_type} not supported")
         logger.info(f"VectorStore:{self.connector_class}")

-        self._vector_store_type = vector_store_type
-        self._embeddings = (
-            vector_store_config.embedding_fn if vector_store_config else None
-        )
+        self._vector_store_type = vector_store_type
+        self._embeddings = vector_store_config.embedding_fn
+
+        config_dict = {}
+        for key in vector_store_config.to_dict().keys():
+            value = getattr(vector_store_config, key)
+            if value is not None:
+                config_dict[key] = value
+        for key, value in vector_store_config.model_extra.items():
+            if value is not None:
+                config_dict[key] = value
+        config = self.config_class(**config_dict)

         try:
-            if vector_store_config is not None:
-                config: IndexStoreConfig = self.config_class()
-                config.name = getattr(vector_store_config, "name", "default_name")
-                config.embedding_fn = getattr(vector_store_config, "embedding_fn", None)
-                config.max_chunks_once_load = getattr(
-                    vector_store_config, "max_chunks_once_load", 5
-                )
-                config.max_threads = getattr(vector_store_config, "max_threads", 4)
-                config.user = getattr(vector_store_config, "user", None)
-                config.password = getattr(vector_store_config, "password", None)
-
-                # extra
-                config_dict = vector_store_config.dict()
-                config.llm_client = config_dict.get("llm_client", None)
-                config.model_name = config_dict.get("model_name", None)
-                if (
-                    vector_store_type in pools
-                    and config.name in pools[vector_store_type]
-                ):
-                    self.client = pools[vector_store_type][config.name]
-                else:
-                    client = self.connector_class(config)
-                    pools[vector_store_type][config.name] = self.client = client
-                self.client = client
+            if vector_store_type in pools and config.name in pools[vector_store_type]:
+                self.client = pools[vector_store_type][config.name]
+            else:
+                client = self.connector_class(config)
+                pools[vector_store_type][config.name] = self.client = client
         except Exception as e:
             logger.error("connect vector store failed: %s", e)
             raise e
diff --git a/examples/client/client_chat_example.py b/examples/client/client_chat_example.py
index 266ef1bc7..4b22da0ff 100644
--- a/examples/client/client_chat_example.py
+++ b/examples/client/client_chat_example.py
@@ -57,12 +57,12 @@ async def main():
     # initialize client
     DBGPT_API_KEY = "dbgpt"
     client = Client(api_key=DBGPT_API_KEY)
-
-    async for data in client.chat_stream(
-        model="chatgpt_proxyllm",
-        messages="hello",
-    ):
-        print(data)
+    data = await client.chat(model="chatgpt_proxyllm", messages="hello")
+    # async for data in client.chat_stream(
+    #     model="chatgpt_proxyllm",
+    #     messages="hello",
+    # ):
+    print(data)
     # res = await client.chat(model="chatgpt_proxyllm" ,messages="hello")
     # print(res)

diff --git a/examples/rag/graph_rag_example.py b/examples/rag/graph_rag_example.py
index 1430e8965..069361c2d 100644
--- a/examples/rag/graph_rag_example.py
+++ b/examples/rag/graph_rag_example.py
@@ -6,6 +6,7 @@
 from dbgpt.rag import ChunkParameters
 from dbgpt.rag.assembler import EmbeddingAssembler
 from dbgpt.rag.knowledge import KnowledgeFactory
+from dbgpt.storage.knowledge_graph.knowledge_graph import BuiltinKnowledgeGraphConfig
 from dbgpt.storage.vector_store.base import VectorStoreConfig
 from dbgpt.storage.vector_store.connector import VectorStoreConnector