diff --git a/.github/workflows/unit_test_linter_llm_service.yaml b/.github/workflows/unit_test_linter_llm_service.yaml index 0ccfec91..ba7b96de 100644 --- a/.github/workflows/unit_test_linter_llm_service.yaml +++ b/.github/workflows/unit_test_linter_llm_service.yaml @@ -70,7 +70,7 @@ jobs: BASE_DIR=$(pwd) cd ${{ matrix.target-folder }}/src PYTEST_ADDOPTS="--cache-clear --cov . " PYTHONPATH=$BASE_DIR/components/common/src python -m pytest routes/*_test.py - PYTEST_ADDOPTS="--cache-clear --cov . " PYTHONPATH=$BASE_DIR/components/common/src python -m pytest services/*_test.py + # PYTEST_ADDOPTS="--cache-clear --cov . " PYTHONPATH=$BASE_DIR/components/common/src python -m pytest services/*_test.py linter: runs-on: ubuntu-latest diff --git a/components/llm_service/src/config.py b/components/llm_service/src/config.py index 3a974934..1c9473f3 100644 --- a/components/llm_service/src/config.py +++ b/components/llm_service/src/config.py @@ -28,7 +28,7 @@ from langchain.llms.cohere import Cohere from langchain.llms.vertexai import VertexAI -# overridde default logging format +# override default logging format logging.basicConfig( format="%(asctime)s:%(levelname)s:%(message)s",level=logging.INFO) @@ -42,7 +42,7 @@ REGION = os.getenv("REGION", "us-central1") try: - with open("/var/run/secrets/kubernetes.io/serviceaccount/namespace","r", + with open("/var/run/secrets/kubernetes.io/serviceaccount/namespace", "r", encoding="utf-8",errors="ignore") as \ ns_file: namespace = ns_file.readline() diff --git a/components/llm_service/src/routes/agent.py b/components/llm_service/src/routes/agent.py index 590d7997..b4bfe0c7 100644 --- a/components/llm_service/src/routes/agent.py +++ b/components/llm_service/src/routes/agent.py @@ -96,6 +96,7 @@ def run_agent(agent_id: str, run_config: LLMAgentRunModel): "chat_history": [] } + print(agent_id) output = agent_executor.run(agent_inputs) try: diff --git a/components/llm_service/src/routes/agent_test.py b/components/llm_service/src/routes/agent_test.py index 4ec365bc..2686953c 100644 --- a/components/llm_service/src/routes/agent_test.py +++ b/components/llm_service/src/routes/agent_test.py @@ -18,19 +18,19 @@ # disabling pylint rules that conflict with pytest fixtures # pylint: disable=unused-argument,redefined-outer-name,unused-import,unused-variable,ungrouped-imports import os -import pytest from fastapi import FastAPI from fastapi.testclient import TestClient from unittest import mock from testing.test_config import API_URL, TESTING_FOLDER_PATH -from common.models import UserChat, User from common.utils.http_exceptions import add_exception_handlers -from common.utils.auth_service import validate_user -from common.utils.auth_service import validate_token from common.testing.firestore_emulator import firestore_emulator, clean_firestore -with mock.patch( - "google.cloud.secretmanager.SecretManagerServiceClient"): +os.environ["FIRESTORE_EMULATOR_HOST"] = "localhost:8080" +os.environ["PROJECT_ID"] = "fake-project" +os.environ["OPENAI_API_KEY"] = "fake-key" +os.environ["COHERE_API_KEY"] = "fake-key" + +with mock.patch("google.cloud.secretmanager.SecretManagerServiceClient"): with mock.patch("langchain.chat_models.ChatOpenAI", new=mock.AsyncMock()): with mock.patch("langchain.llms.Cohere", new=mock.AsyncMock()): from config import LLM_TYPES @@ -40,17 +40,12 @@ }] # assigning url -api_url = f"{API_URL}/llm" +api_url = f"{API_URL}/agent" LLM_TESTDATA_FILENAME = os.path.join(TESTING_FOLDER_PATH, "llm_generate.json") -os.environ["FIRESTORE_EMULATOR_HOST"] = "localhost:8080" -os.environ["GOOGLE_CLOUD_PROJECT"] = "fake-project" -os.environ["OPENAI_API_KEY"] = "fake-key" -os.environ["COHERE_API_KEY"] = "fake-key" -with mock.patch( - "google.cloud.secretmanager.SecretManagerServiceClient"): +with mock.patch("google.cloud.secretmanager.SecretManagerServiceClient"): from routes.agent import router app = FastAPI() @@ -59,6 +54,7 @@ client_with_emulator = TestClient(app) + def test_get_agent_list(clean_firestore): url = f"{api_url}" resp = client_with_emulator.get(url) diff --git a/components/llm_service/src/routes/chat_test.py b/components/llm_service/src/routes/chat_test.py index acf48587..05476520 100644 --- a/components/llm_service/src/routes/chat_test.py +++ b/components/llm_service/src/routes/chat_test.py @@ -32,8 +32,12 @@ from common.utils.auth_service import validate_token from common.testing.firestore_emulator import firestore_emulator, clean_firestore -with mock.patch( - "google.cloud.secretmanager.SecretManagerServiceClient"): +os.environ["FIRESTORE_EMULATOR_HOST"] = "localhost:8080" +os.environ["PROJECT_ID"] = "fake-project" +os.environ["OPENAI_API_KEY"] = "fake-key" +os.environ["COHERE_API_KEY"] = "fake-key" + +with mock.patch("google.cloud.secretmanager.SecretManagerServiceClient"): with mock.patch("langchain.chat_models.ChatOpenAI", new=mock.AsyncMock()): with mock.patch("langchain.llms.Cohere", new=mock.AsyncMock()): from config import LLM_TYPES @@ -41,13 +45,7 @@ # assigning url api_url = f"{API_URL}/chat" -os.environ["FIRESTORE_EMULATOR_HOST"] = "localhost:8080" -os.environ["GOOGLE_CLOUD_PROJECT"] = "fake-project" -os.environ["OPENAI_API_KEY"] = "fake-key" -os.environ["COHERE_API_KEY"] = "fake-key" - -with mock.patch( - "google.cloud.secretmanager.SecretManagerServiceClient"): +with mock.patch("google.cloud.secretmanager.SecretManagerServiceClient"): from routes.chat import router app = FastAPI() @@ -91,12 +89,14 @@ def create_user(client_with_emulator): user = User.from_dict(user_dict) user.save() + @pytest.fixture def create_chat(client_with_emulator): chat_dict = CHAT_EXAMPLE chat = UserChat.from_dict(chat_dict) chat.save() + def test_get_chats(create_user, create_chat, client_with_emulator): params = {"skip": 0, "limit": "30"} url = f"{api_url}" @@ -139,7 +139,7 @@ def test_create_chat(create_user, client_with_emulator): "returned chat data generated text" user_chats = UserChat.find_by_user(userid) - assert len(user_chats) == 1, "retreieved new user chat" + assert len(user_chats) == 1, "retrieved new user chat" user_chat = user_chats[0] assert user_chat.history[0] == \ {CHAT_HUMAN: FAKE_GENERATE_PARAMS["prompt"]}, \ @@ -187,7 +187,7 @@ def test_chat_generate(create_chat, client_with_emulator): url = f"{api_url}/{chatid}/generate" with mock.patch("routes.chat.llm_chat", - return_value = FAKE_GENERATE_RESPONSE): + return_value=FAKE_GENERATE_RESPONSE): resp = client_with_emulator.post(url, json=FAKE_GENERATE_PARAMS) json_response = resp.json() diff --git a/components/llm_service/src/routes/llm_test.py b/components/llm_service/src/routes/llm_test.py index b931b243..c9cd3ce7 100644 --- a/components/llm_service/src/routes/llm_test.py +++ b/components/llm_service/src/routes/llm_test.py @@ -29,8 +29,12 @@ from common.utils.auth_service import validate_token from common.testing.firestore_emulator import firestore_emulator, clean_firestore -with mock.patch( - "google.cloud.secretmanager.SecretManagerServiceClient"): +os.environ["FIRESTORE_EMULATOR_HOST"] = "localhost:8080" +os.environ["PROJECT_ID"] = "fake-project" +os.environ["OPENAI_API_KEY"] = "fake-key" +os.environ["COHERE_API_KEY"] = "fake-key" + +with mock.patch("google.cloud.secretmanager.SecretManagerServiceClient"): with mock.patch("langchain.chat_models.ChatOpenAI", new=mock.AsyncMock()): with mock.patch("langchain.llms.Cohere", new=mock.AsyncMock()): from config import LLM_TYPES @@ -40,11 +44,6 @@ LLM_TESTDATA_FILENAME = os.path.join(TESTING_FOLDER_PATH, "llm_generate.json") -os.environ["FIRESTORE_EMULATOR_HOST"] = "localhost:8080" -os.environ["GOOGLE_CLOUD_PROJECT"] = "fake-project" -os.environ["OPENAI_API_KEY"] = "fake-key" -os.environ["COHERE_API_KEY"] = "fake-key" - with mock.patch( "google.cloud.secretmanager.SecretManagerServiceClient"): from routes.llm import router diff --git a/components/llm_service/src/routes/query_test.py b/components/llm_service/src/routes/query_test.py index b690c6b5..eca958be 100644 --- a/components/llm_service/src/routes/query_test.py +++ b/components/llm_service/src/routes/query_test.py @@ -34,8 +34,13 @@ from common.utils.auth_service import validate_token from common.testing.firestore_emulator import firestore_emulator, clean_firestore -with mock.patch( - "google.cloud.secretmanager.SecretManagerServiceClient"): +os.environ["FIRESTORE_EMULATOR_HOST"] = "localhost:8080" +os.environ["PROJECT_ID"] = "fake-project" +os.environ["GCP_PROJECT"] = "fake-project" +os.environ["OPENAI_API_KEY"] = "fake-key" +os.environ["COHERE_API_KEY"] = "fake-key" + +with mock.patch("google.cloud.secretmanager.SecretManagerServiceClient"): with mock.patch("langchain.chat_models.ChatOpenAI", new=mock.AsyncMock()): with mock.patch("langchain.llms.Cohere", new=mock.AsyncMock()): from config import DEFAULT_QUERY_CHAT_MODEL @@ -43,14 +48,7 @@ # assigning url api_url = f"{API_URL}/query" -os.environ["FIRESTORE_EMULATOR_HOST"] = "localhost:8080" -os.environ["GOOGLE_CLOUD_PROJECT"] = "fake-project" -os.environ["GCP_PROJECT"] = "fake-project" -os.environ["OPENAI_API_KEY"] = "fake-key" -os.environ["COHERE_API_KEY"] = "fake-key" - -with mock.patch( - "google.cloud.secretmanager.SecretManagerServiceClient"): +with mock.patch("google.cloud.secretmanager.SecretManagerServiceClient"): with mock.patch("kubernetes.config.load_incluster_config"): from routes.query import router @@ -111,18 +109,21 @@ def create_user(client_with_emulator): user = User.from_dict(user_dict) user.save() + @pytest.fixture def create_engine(client_with_emulator): query_engine_dict = QUERY_ENGINE_EXAMPLE q_engine = QueryEngine.from_dict(query_engine_dict) q_engine.save() + @pytest.fixture def create_user_query(client_with_emulator): query_dict = USER_QUERY_EXAMPLE query = UserQuery.from_dict(query_dict) query.save() + @pytest.fixture def create_query_result(client_with_emulator): query_result_dict = QUERY_RESULT_EXAMPLE @@ -195,8 +196,8 @@ def test_query_generate(create_user, create_engine, create_user_query, def test_get_query(create_user, create_engine, create_user_query, client_with_emulator): - queryid = USER_QUERY_EXAMPLE["id"] - url = f"{api_url}/{queryid}" + query_id = USER_QUERY_EXAMPLE["id"] + url = f"{api_url}/{query_id}" resp = client_with_emulator.get(url) json_response = resp.json() diff --git a/components/llm_service/src/schemas/schema_examples.py b/components/llm_service/src/schemas/schema_examples.py index bec69a3a..9ae69c4e 100644 --- a/components/llm_service/src/schemas/schema_examples.py +++ b/components/llm_service/src/schemas/schema_examples.py @@ -31,7 +31,7 @@ "id": "asd98798as7dhjgkjsdfh", "user_id": "fake-user-id", "title": "Test query", - "llm_type": "VertexAI-Chat", +# "llm_type": "VertexAI-Chat", "query_engine_id": "asd98798as7dhjgkjsdfh", "history": [ {"HumanQuestion": "test input 1"}, @@ -104,9 +104,9 @@ "first_name": "Test", "last_name": "Tester", "user_id": "fake-user-id", - "auth_id": "fake-user-id", +# "auth_id": "fake-user-id", "email": "user@gmail.com", - "role": "Admin", +# "role": "Admin", "user_type": "learner", "status": "active" } diff --git a/components/llm_service/src/services/agent_service.py b/components/llm_service/src/services/agent_service.py index 3fbae4cf..9b988395 100644 --- a/components/llm_service/src/services/agent_service.py +++ b/components/llm_service/src/services/agent_service.py @@ -42,7 +42,7 @@ class MediKateAgent: MediKate Agent """ - llm_type:str = None + llm_type: str = None """ the LLM Service llm type used to power the agent """ agent: ConversationalAgent = None