From a5a0f0a60fab53daa1188311553e3509729255e7 Mon Sep 17 00:00:00 2001
From: Aleksije Micic
Date: Sat, 6 Jan 2024 20:00:47 +0100
Subject: [PATCH 1/3] Add responseFormat attribute

---
 .../chat/ChatCompletionRequest.java          |  8 ++++++++
 .../completion/chat/ChatResponseFormat.java  | 19 +++++++++++++++++++
 2 files changed, 27 insertions(+)
 create mode 100644 api/src/main/java/com/theokanning/openai/completion/chat/ChatResponseFormat.java

diff --git a/api/src/main/java/com/theokanning/openai/completion/chat/ChatCompletionRequest.java b/api/src/main/java/com/theokanning/openai/completion/chat/ChatCompletionRequest.java
index e4479ff3..55f31ea2 100644
--- a/api/src/main/java/com/theokanning/openai/completion/chat/ChatCompletionRequest.java
+++ b/api/src/main/java/com/theokanning/openai/completion/chat/ChatCompletionRequest.java
@@ -27,6 +27,14 @@ public class ChatCompletionRequest {
      */
     List<ChatMessage> messages;

+    /**
+     * Must be either 'text' or 'json_object'.
+     * When specifying 'json_object' as the request format it's still necessary to instruct the model to return JSON.
+     * You may use {@link ChatResponseFormat} enum.
+     */
+    @JsonProperty("response_format")
+    String responseFormat;
+
     /**
      * What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower
      * values like 0.2 will make it more focused and deterministic.
diff --git a/api/src/main/java/com/theokanning/openai/completion/chat/ChatResponseFormat.java b/api/src/main/java/com/theokanning/openai/completion/chat/ChatResponseFormat.java
new file mode 100644
index 00000000..e1f7cb4e
--- /dev/null
+++ b/api/src/main/java/com/theokanning/openai/completion/chat/ChatResponseFormat.java
@@ -0,0 +1,19 @@
+package com.theokanning.openai.completion.chat;
+
+/**
+ * see {@link ChatCompletionRequest} documentation.
+ */
+public enum ChatResponseFormat {
+    TEXT("text"),
+    JSON("json");
+
+    private final String value;
+
+    ChatResponseFormat(final String value) {
+        this.value = value;
+    }
+
+    public String value() {
+        return value;
+    }
+}

From e580f6325c68545ff2e7be13a5db66ae38b77813 Mon Sep 17 00:00:00 2001
From: Aleksije Micic
Date: Sat, 6 Jan 2024 20:41:03 +0100
Subject: [PATCH 2/3] Add integration test and fix bug

---
 .../chat/ChatCompletionRequest.java          |  4 +--
 .../completion/chat/ChatResponseFormat.java  | 31 +++++++++++------
 .../openai/service/ChatCompletionTest.java   | 34 +++++++++++++++++++
 3 files changed, 57 insertions(+), 12 deletions(-)

diff --git a/api/src/main/java/com/theokanning/openai/completion/chat/ChatCompletionRequest.java b/api/src/main/java/com/theokanning/openai/completion/chat/ChatCompletionRequest.java
index 55f31ea2..5595c10b 100644
--- a/api/src/main/java/com/theokanning/openai/completion/chat/ChatCompletionRequest.java
+++ b/api/src/main/java/com/theokanning/openai/completion/chat/ChatCompletionRequest.java
@@ -30,10 +30,10 @@ public class ChatCompletionRequest {
     /**
      * Must be either 'text' or 'json_object'.
      * When specifying 'json_object' as the request format it's still necessary to instruct the model to return JSON.
-     * You may use {@link ChatResponseFormat} enum.
+     * You may use {@link ChatResponseFormat.ResponseFormat} enum.
      */
     @JsonProperty("response_format")
-    String responseFormat;
+    ChatResponseFormat responseFormat;

     /**
      * What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower

diff --git a/api/src/main/java/com/theokanning/openai/completion/chat/ChatResponseFormat.java b/api/src/main/java/com/theokanning/openai/completion/chat/ChatResponseFormat.java
index e1f7cb4e..8e498b3f 100644
--- a/api/src/main/java/com/theokanning/openai/completion/chat/ChatResponseFormat.java
+++ b/api/src/main/java/com/theokanning/openai/completion/chat/ChatResponseFormat.java
@@ -1,19 +1,30 @@
 package com.theokanning.openai.completion.chat;

+import com.fasterxml.jackson.annotation.JsonValue;
+import lombok.Builder;
+import lombok.Data;
+
 /**
  * see {@link ChatCompletionRequest} documentation.
  */
-public enum ChatResponseFormat {
-    TEXT("text"),
-    JSON("json");
+@Data
+@Builder
+public class ChatResponseFormat {
+    private ResponseFormat type;

-    private final String value;
+    public enum ResponseFormat {
+        TEXT("text"),
+        JSON("json_object");

-    ChatResponseFormat(final String value) {
-        this.value = value;
-    }
+        private final String value;
+
+        ResponseFormat(final String value) {
+            this.value = value;
+        }

-    public String value() {
-        return value;
+        @JsonValue
+        public String value() {
+            return value;
+        }
     }
-}
+}
\ No newline at end of file

diff --git a/service/src/test/java/com/theokanning/openai/service/ChatCompletionTest.java b/service/src/test/java/com/theokanning/openai/service/ChatCompletionTest.java
index 25f0defb..89023ade 100644
--- a/service/src/test/java/com/theokanning/openai/service/ChatCompletionTest.java
+++ b/service/src/test/java/com/theokanning/openai/service/ChatCompletionTest.java
@@ -2,7 +2,9 @@

 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.fasterxml.jackson.annotation.JsonPropertyDescription;
+import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.node.ObjectNode;
 import com.theokanning.openai.completion.chat.*;
 import org.junit.jupiter.api.Test;
@@ -84,6 +86,38 @@ void streamChatCompletion() {
         assertNotNull(chunks.get(0).getChoices().get(0));
     }

+    @Test
+    void createChatCompletionWithJsonMode() {
+        final List<ChatMessage> messages = new ArrayList<>();
+        final ChatMessage systemMessage = new ChatMessage(ChatMessageRole.SYSTEM.value(), "You will generate a random name and return it in JSON format.");
+        messages.add(systemMessage);
+
+        ChatCompletionRequest chatCompletionRequest = ChatCompletionRequest
+                .builder()
+                .model("gpt-3.5-turbo-1106")
+                .messages(messages)
+                .responseFormat(ChatResponseFormat.builder().type(ChatResponseFormat.ResponseFormat.JSON).build())
+                .maxTokens(50)
+                .logitBias(new HashMap<>())
+                .build();
+
+        System.out.println(chatCompletionRequest);
+
+        ChatCompletionChoice choice = service.createChatCompletion(chatCompletionRequest).getChoices().get(0);
+        System.out.println(choice.getMessage().getContent());
+        assertTrue(isValidJson(choice.getMessage().getContent()), "Response is not valid JSON");
+    }
+
+    private boolean isValidJson(String jsonString) {
+        ObjectMapper objectMapper = new ObjectMapper();
+        try {
+            objectMapper.readTree(jsonString);
+            return true;
+        } catch (JsonProcessingException e) {
+            return false;
+        }
+    }
+
     @Test
     void createChatCompletionWithFunctions() {
         final List<ChatFunction> functions = Collections.singletonList(ChatFunction.builder()

From 6b84feef80491fdfe7d1f975efff5af7ddf01945 Mon Sep 17 00:00:00 2001
From: Aleksije Micic
Date: Sat, 6 Jan 2024 20:46:17 +0100
Subject: [PATCH 3/3] Removed useless print statements from test

---
 .../com/theokanning/openai/service/ChatCompletionTest.java | 3 ---
 1 file changed, 3 deletions(-)

diff --git a/service/src/test/java/com/theokanning/openai/service/ChatCompletionTest.java b/service/src/test/java/com/theokanning/openai/service/ChatCompletionTest.java
index 89023ade..cd05751b 100644
--- a/service/src/test/java/com/theokanning/openai/service/ChatCompletionTest.java
+++ b/service/src/test/java/com/theokanning/openai/service/ChatCompletionTest.java
@@ -101,10 +101,7 @@ void createChatCompletionWithJsonMode() {
                 .logitBias(new HashMap<>())
                 .build();

-        System.out.println(chatCompletionRequest);
-
         ChatCompletionChoice choice = service.createChatCompletion(chatCompletionRequest).getChoices().get(0);
-        System.out.println(choice.getMessage().getContent());
         assertTrue(isValidJson(choice.getMessage().getContent()), "Response is not valid JSON");
     }
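
Usage sketch (not part of the patch series): the snippet below shows how a caller could enable JSON mode once these three patches are applied. It simply mirrors the createChatCompletionWithJsonMode test from PATCH 2/3; the JsonModeExample class name, the OPENAI_TOKEN environment variable, and the example prompt are illustrative assumptions rather than part of this change.

    import com.theokanning.openai.completion.chat.*;
    import com.theokanning.openai.service.OpenAiService;

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;

    public class JsonModeExample {
        public static void main(String[] args) {
            // Assumption: the API token is read from the environment; OpenAiService(String) is the library's existing constructor.
            OpenAiService service = new OpenAiService(System.getenv("OPENAI_TOKEN"));

            // JSON mode still requires instructing the model to return JSON (see the javadoc added in PATCH 1/3).
            List<ChatMessage> messages = new ArrayList<>();
            messages.add(new ChatMessage(ChatMessageRole.SYSTEM.value(),
                    "You will generate a random name and return it in JSON format."));

            ChatCompletionRequest request = ChatCompletionRequest.builder()
                    .model("gpt-3.5-turbo-1106")
                    .messages(messages)
                    // New in this series: ask the API for the 'json_object' response format.
                    .responseFormat(ChatResponseFormat.builder()
                            .type(ChatResponseFormat.ResponseFormat.JSON)
                            .build())
                    .maxTokens(50)
                    .logitBias(new HashMap<>())
                    .build();

            String content = service.createChatCompletion(request)
                    .getChoices().get(0).getMessage().getContent();
            System.out.println(content); // expected to be a single JSON object, e.g. {"name":"..."}
        }
    }

Note the wire format this produces: response_format is serialized as an object, {"type":"json_object"}, which is why PATCH 2/3 swaps the plain String field for the ChatResponseFormat wrapper and exposes the enum value through a @JsonValue-annotated accessor.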