diff --git a/api/core/model_runtime/model_providers/oci/llm/llm.py b/api/core/model_runtime/model_providers/oci/llm/llm.py index 329a42f58800a9..937513a58ba6a9 100644 --- a/api/core/model_runtime/model_providers/oci/llm/llm.py +++ b/api/core/model_runtime/model_providers/oci/llm/llm.py @@ -267,7 +267,7 @@ def _generate(self, model: str, credentials: dict, if not valid_value: raise InvokeBadRequestError("Does not support function calling") if model.startswith("cohere"): - print("run cohere " * 10) + #print("run cohere " * 10) for message in prompt_messages[:-1]: text = "" if isinstance(message.content, str): @@ -285,7 +285,7 @@ def _generate(self, model: str, credentials: dict, "chatHistory": chathistory, } request_args["chatRequest"].update(args) elif model.startswith("meta"): - print("run meta " * 10) + #print("run meta " * 10) meta_messages = [] for message in prompt_messages: text = message.content @@ -298,10 +298,10 @@ def _generate(self, model: str, credentials: dict, if stream: request_args["chatRequest"]["isStream"] = True - print("final request" + "|" * 20) - print(request_args) + #print("final request" + "|" * 20) + #print(request_args) response = client.chat(request_args) - print(vars(response)) + #print(vars(response)) if stream: return self._handle_generate_stream_response(model, credentials, response, prompt_messages) @@ -356,7 +356,7 @@ def _handle_generate_stream_response(self, model: str, credentials: dict, respon events = response.data.events() for stream in events: chunk = json.loads(stream.data) - print(chunk) + #print(chunk) #chunk: {'apiFormat': 'COHERE', 'text': 'Hello'} diff --git a/api/core/model_runtime/model_providers/oci/oci.yaml b/api/core/model_runtime/model_providers/oci/oci.yaml index cc5d65274c66a4..977f3ffeebccb6 100644 --- a/api/core/model_runtime/model_providers/oci/oci.yaml +++ b/api/core/model_runtime/model_providers/oci/oci.yaml @@ -38,5 +38,5 @@ provider_credential_schema: type: text-input required: true placeholder: - 
zh_Hans: 在此输入您的 oci api key 文件的内容(base64.b64encode("pem file content replace return".encode('utf-8'))) - en_US: Enter your oci api key file's content(base64.b64encode("pem file content replace return".encode('utf-8')) ) \ No newline at end of file + zh_Hans: 在此输入您的 oci api key 文件的内容(base64.b64encode("pem file content".encode('utf-8'))) + en_US: Enter your oci api key file's content (base64.b64encode("pem file content".encode('utf-8'))) \ No newline at end of file