From d1f9c21e441d3d580b66e736617313b52c90333b Mon Sep 17 00:00:00 2001
From: Dinh Nguyen
Date: Tue, 19 Nov 2024 19:52:33 +0700
Subject: [PATCH] [IN PROGRESS] update mock prompt & response

---
 .../axonivy/connector/openai/mock/MockAI.java | 11 +++--
 ...ist-ask-without-system-promt-response.json | 34 ++++++++++++++
 .../json/assist-ask-without-system-promt.json | 14 ++++++
 ...ist-insert-with-system-promt-response.json | 34 ++++++++++++++
 .../json/assist-insert-with-system-promt.json | 18 ++++++++
 .../openai/test/AiAssistanceTest.java         | 44 +++++++++++++++++++
 6 files changed, 152 insertions(+), 3 deletions(-)
 create mode 100644 openai-connector-test/src/com/axonivy/connector/openai/mock/json/assist-ask-without-system-promt-response.json
 create mode 100644 openai-connector-test/src/com/axonivy/connector/openai/mock/json/assist-ask-without-system-promt.json
 create mode 100644 openai-connector-test/src/com/axonivy/connector/openai/mock/json/assist-insert-with-system-promt-response.json
 create mode 100644 openai-connector-test/src/com/axonivy/connector/openai/mock/json/assist-insert-with-system-promt.json

diff --git a/openai-connector-test/src/com/axonivy/connector/openai/mock/MockAI.java b/openai-connector-test/src/com/axonivy/connector/openai/mock/MockAI.java
index c04c27f..4c309f9 100644
--- a/openai-connector-test/src/com/axonivy/connector/openai/mock/MockAI.java
+++ b/openai-connector-test/src/com/axonivy/connector/openai/mock/MockAI.java
@@ -51,8 +51,11 @@ public class MockAI {
       "completions", json(load("completions.json")),
       "completions-response", json(load("completions-response.json")),
       "mail-generator", json(load("mail-generator.json")),
-      "mail-generator-response", json(load("mail-generator-response.json"))
-  );
+      "mail-generator-response", json(load("mail-generator-response.json")),
+      "assist-ask-without-system-promt", json(load("assist-ask-without-system-promt.json")),
+      "assist-ask-without-system-promt-response", json(load("assist-ask-without-system-promt-response.json")),
+      "assist-insert-with-system-promt", json(load("assist-insert-with-system-promt.json")),
+      "assist-insert-with-system-promt-response", json(load("assist-insert-with-system-promt-response.json")));
 
   @POST
   @Path("completions")
@@ -89,13 +92,15 @@ private String input(JsonNode request, Map examples) {
     return null;
   }
 
-  @POST
+  @POST
   @Path("chat/completions")
   @Consumes(MediaType.APPLICATION_JSON)
   @Produces(MediaType.APPLICATION_JSON)
   public Response chat(JsonNode request) {
+    Ivy.log().warn(request.toPrettyString());
     var in = input(request, openAIExamples);
     var node = openAIExamples.get(in+"-response");
+    Ivy.log().warn(request);
     return Response.ok()
       .entity(node)
       .build();
diff --git a/openai-connector-test/src/com/axonivy/connector/openai/mock/json/assist-ask-without-system-promt-response.json b/openai-connector-test/src/com/axonivy/connector/openai/mock/json/assist-ask-without-system-promt-response.json
new file mode 100644
index 0000000..010c94a
--- /dev/null
+++ b/openai-connector-test/src/com/axonivy/connector/openai/mock/json/assist-ask-without-system-promt-response.json
@@ -0,0 +1,34 @@
+{
+  "id": "chatcmpl-AVHUYbcl2WKpRj0oaLrZm8yNXosBQ",
+  "object": "chat.completion",
+  "created": 1732018770,
+  "model": "gpt-3.5-turbo-0125",
+  "choices": [
+    {
+      "index": 0,
+      "message": {
+        "role": "assistant",
+        "content": "Sure, here is a combobox for you to pick a brand out of Mercedes, BMW, or Tesla:\n\n",
+        "refusal": null
+      },
+      "logprobs": null,
+      "finish_reason": "stop"
+    }
+  ],
+  "usage": {
+    "prompt_tokens": 28,
"completion_tokens": 64, + "total_tokens": 92, + "prompt_tokens_details": { + "cached_tokens": 0, + "audio_tokens": 0 + }, + "completion_tokens_details": { + "reasoning_tokens": 0, + "audio_tokens": 0, + "accepted_prediction_tokens": 0, + "rejected_prediction_tokens": 0 + } + }, + "system_fingerprint": null +} \ No newline at end of file diff --git a/openai-connector-test/src/com/axonivy/connector/openai/mock/json/assist-ask-without-system-promt.json b/openai-connector-test/src/com/axonivy/connector/openai/mock/json/assist-ask-without-system-promt.json new file mode 100644 index 0000000..6111ce0 --- /dev/null +++ b/openai-connector-test/src/com/axonivy/connector/openai/mock/json/assist-ask-without-system-promt.json @@ -0,0 +1,14 @@ +{ + "model": "gpt-3.5-turbo", + "max_tokens": 1024, + "temperature": 1, + "top_p": 1, + "frequency_penalty": 0, + "presence_penalty": 0, + "messages": [ + { + "role": "user", + "content": "insert a combobox to pick a brand out of: Mercedes, BMW or Tesla" + } + ] +} \ No newline at end of file diff --git a/openai-connector-test/src/com/axonivy/connector/openai/mock/json/assist-insert-with-system-promt-response.json b/openai-connector-test/src/com/axonivy/connector/openai/mock/json/assist-insert-with-system-promt-response.json new file mode 100644 index 0000000..11d1211 --- /dev/null +++ b/openai-connector-test/src/com/axonivy/connector/openai/mock/json/assist-insert-with-system-promt-response.json @@ -0,0 +1,34 @@ +{ + "id": "chatcmpl-AVHNuLIeqrGBUWPhDez363Gs5akFe", + "object": "chat.completion", + "created": 1732018358, + "model": "gpt-3.5-turbo-0125", + "choices": [ + { + "index": 0, + "message": { + "role": "assistant", + "content": "", + "refusal": null + }, + "logprobs": null, + "finish_reason": "stop" + } + ], + "usage": { + "prompt_tokens": 179, + "completion_tokens": 46, + "total_tokens": 225, + "prompt_tokens_details": { + "cached_tokens": 0, + "audio_tokens": 0 + }, + "completion_tokens_details": { + "reasoning_tokens": 0, + "audio_tokens": 0, + "accepted_prediction_tokens": 0, + "rejected_prediction_tokens": 0 + } + }, + "system_fingerprint": null +} \ No newline at end of file diff --git a/openai-connector-test/src/com/axonivy/connector/openai/mock/json/assist-insert-with-system-promt.json b/openai-connector-test/src/com/axonivy/connector/openai/mock/json/assist-insert-with-system-promt.json new file mode 100644 index 0000000..c14a84d --- /dev/null +++ b/openai-connector-test/src/com/axonivy/connector/openai/mock/json/assist-insert-with-system-promt.json @@ -0,0 +1,18 @@ +{ + "model": "gpt-3.5-turbo", + "max_tokens": 1024, + "temperature": 1, + "top_p": 1, + "frequency_penalty": 0, + "presence_penalty": 0, + "messages": [ + { + "role": "system", + "content": "SYSTEM_PROMT" + }, + { + "role": "user", + "content": "insert a combobox to pick a brand out of: Mercedes, BMW or Tesla" + } + ] +} \ No newline at end of file diff --git a/openai-connector-test/src_test/com/axonivy/connector/openai/test/AiAssistanceTest.java b/openai-connector-test/src_test/com/axonivy/connector/openai/test/AiAssistanceTest.java index 01c0e0f..07d5cbd 100644 --- a/openai-connector-test/src_test/com/axonivy/connector/openai/test/AiAssistanceTest.java +++ b/openai-connector-test/src_test/com/axonivy/connector/openai/test/AiAssistanceTest.java @@ -15,6 +15,9 @@ import org.junit.jupiter.api.Test; import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.node.ArrayNode; +import com.fasterxml.jackson.databind.node.JsonNodeFactory; +import 
 
 import ch.ivyteam.ivy.application.IApplication;
 import ch.ivyteam.ivy.environment.AppFixture;
@@ -85,6 +88,13 @@ void mailGenerator() {
       .isNotEmpty();
   }
 
+  @Test
+  void askWithOutSystemPromt() {
+    JsonNode result = assistWithQuestion("insert a combobox to pick a brand out of: Mercedes, BMW or Tesla", false);
+    assertThat(result.toPrettyString())
+      .isNotEmpty();
+  }
+
   private static JsonNode assist(JsonNode quest) {
     WebTarget client = Ivy.rest().client(OPEN_AI);
     Entity request = Entity.entity(quest, MediaType.APPLICATION_JSON);
@@ -100,5 +110,39 @@ private static JsonNode chatAssist(JsonNode quest) {
       .post(request).readEntity(JsonNode.class);
     return result;
   }
+
+  private static JsonNode assistWithQuestion(String question, boolean includeSystemPrompt) {
+    WebTarget client = Ivy.rest().client(OPEN_AI);
+    Entity request = buildPayloadFromQuestion(question, includeSystemPrompt);
+    Ivy.log().warn("here");
+    JsonNode result = client.path("chat/completions").request()
+      .post(request).readEntity(JsonNode.class);
+    return result;
+  }
+
+  private static Entity buildPayloadFromQuestion(String question, boolean includeSystemPrompt) {
+    ArrayNode arrayNode = JsonNodeFactory.instance.arrayNode();
+    if (includeSystemPrompt) {
+      arrayNode.add(message("system", "SYSTEM_PROMT"));
+    }
+    arrayNode.add(message("user", question));
+    ObjectNode request = completion().set("messages", arrayNode);
+    return Entity.entity(request, MediaType.APPLICATION_JSON);
+  }
+
+  private static ObjectNode message(String role, String content) {
+    return JsonNodeFactory.instance.objectNode().put("role", role).put("content", content);
+  }
+
+  private static ObjectNode completion() {
+    ObjectNode request = JsonNodeFactory.instance.objectNode();
+    request.put("model", "gpt-3.5-turbo");
+    request.put("temperature", 1);
+    request.put("top_p", 1);
+    request.put("frequency_penalty", 0);
+    request.put("presence_penalty", 0);
+    request.put("max_tokens", 1024);
+    return request;
+  }
 
 }
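
The patch registers the assist-insert-with-system-promt request/response pair in MockAI but does not yet add a test that exercises it. A follow-up test inside AiAssistanceTest might look like the minimal sketch below; the method name is hypothetical, and it assumes MockAI's input() matching resolves the payload built by buildPayloadFromQuestion(question, true) to the new example:

  @Test
  void insertWithSystemPromt() {
    // sketch only: same question as in assist-insert-with-system-promt.json,
    // but with the SYSTEM_PROMT system message included in the payload
    JsonNode result = assistWithQuestion("insert a combobox to pick a brand out of: Mercedes, BMW or Tesla", true);
    assertThat(result.toPrettyString())
      .isNotEmpty();
  }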