From 3cb2c7c275761a24be9403a6a2b41d0725ba8d9b Mon Sep 17 00:00:00 2001
From: Minh Nguyen Cong
Date: Fri, 6 Sep 2024 15:11:56 +0200
Subject: [PATCH] feat: Support AI Agent (#1265)

---
 doc/ai.md                                     |  35 ++-
 src/intTest/java/com/box/sdk/BoxAIIT.java     |  51 ++++
 src/main/java/com/box/sdk/BoxAI.java          | 123 ++++++++-
 src/main/java/com/box/sdk/BoxAIAgent.java     | 116 +++++++++
 src/main/java/com/box/sdk/BoxAIAgentAsk.java  | 161 ++++++++++++
 .../com/box/sdk/BoxAIAgentAskBasicText.java   | 185 ++++++++++++++
 .../com/box/sdk/BoxAIAgentAskLongText.java    | 212 ++++++++++++++++
 .../com/box/sdk/BoxAIAgentEmbeddings.java     |  93 +++++++
 .../box/sdk/BoxAIAgentEmbeddingsStrategy.java |  86 +++++++
 .../box/sdk/BoxAIAgentLLMEndpointParams.java  |  81 ++++++
 .../BoxAIAgentLLMEndpointParamsGoogle.java    | 137 ++++++++++
 .../BoxAIAgentLLMEndpointParamsOpenAI.java    | 193 ++++++++++++++
 .../java/com/box/sdk/BoxAIAgentTextGen.java   |  70 ++++++
 .../box/sdk/BoxAIAgentTextGenBasicGen.java    | 235 ++++++++++++++++++
 .../java/com/box/sdk/BoxAIDialogueEntry.java  |   1 -
 .../java/com/box/sdk/BoxCollaborator.java     |  33 ++-
 .../com/box/sdk/internal/utils/JsonUtils.java |  22 ++
 .../BoxAI/GetAIAgentDefaultConfigAsk200.json  |  73 ++++++
 .../GetAIAgentDefaultConfigTextGen200.json    |  25 ++
 src/test/java/com/box/sdk/BoxAITest.java      | 119 +++++++++
 20 files changed, 2022 insertions(+), 29 deletions(-)
 create mode 100644 src/main/java/com/box/sdk/BoxAIAgent.java
 create mode 100644 src/main/java/com/box/sdk/BoxAIAgentAsk.java
 create mode 100644 src/main/java/com/box/sdk/BoxAIAgentAskBasicText.java
 create mode 100644 src/main/java/com/box/sdk/BoxAIAgentAskLongText.java
 create mode 100644 src/main/java/com/box/sdk/BoxAIAgentEmbeddings.java
 create mode 100644 src/main/java/com/box/sdk/BoxAIAgentEmbeddingsStrategy.java
 create mode 100644 src/main/java/com/box/sdk/BoxAIAgentLLMEndpointParams.java
 create mode 100644 src/main/java/com/box/sdk/BoxAIAgentLLMEndpointParamsGoogle.java
 create mode 100644 src/main/java/com/box/sdk/BoxAIAgentLLMEndpointParamsOpenAI.java
 create mode 100644 src/main/java/com/box/sdk/BoxAIAgentTextGen.java
 create mode 100644 src/main/java/com/box/sdk/BoxAIAgentTextGenBasicGen.java
 create mode 100644 src/test/Fixtures/BoxAI/GetAIAgentDefaultConfigAsk200.json
 create mode 100644 src/test/Fixtures/BoxAI/GetAIAgentDefaultConfigTextGen200.json

diff --git a/doc/ai.md b/doc/ai.md
index b21110f4e..e51f93468 100644
--- a/doc/ai.md
+++ b/doc/ai.md
@@ -7,8 +7,10 @@
 an answer based on the provided prompt and items.
 
-- [Send AI request](#send-ai-request)
-- [Send AI text generation request](#send-ai-text-generation-request)
+- [AI](#ai)
+  - [Send AI request](#send-ai-request)
+  - [Send AI text generation request](#send-ai-text-generation-request)
+  - [Get AI Agent default configuration](#get-ai-agent-default-configuration)
 
 
 
@@ -26,15 +28,17 @@ for a single or multiple items.
 BoxAIResponse response = BoxAI.sendAIRequest(
     api,
     "What is the content of the file?",
-    Collections.singletonList("123456", BoxAIItem.Type.FILE)),
+    Collections.singletonList(new BoxAIItem("123456", BoxAIItem.Type.FILE)),
     BoxAI.Mode.SINGLE_ITEM_QA
 );
 ```
 
+You can also provide a list of dialogue history entries to give the LLM additional context when generating the response, an AI Agent configuration, and a flag indicating whether citations should be returned.
+
 NOTE: The AI endpoint may return a 412 status code if your request uses a file that has just been uploaded to Box.
 It usually takes a few seconds for the file to be indexed and available for the AI endpoint.
 
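+For example, a minimal sketch that fetches the default Ask agent and passes it to `sendAIRequest` together with
+a citations flag (the file ID and model name below are placeholders):
+
+```java
+BoxAIAgentAsk askAgent = (BoxAIAgentAsk) BoxAI.getAiAgentDefaultConfig(
+    api,
+    BoxAIAgent.Mode.ASK,
+    "en",
+    "openai__gpt_3_5_turbo"
+);
+BoxAIResponse response = BoxAI.sendAIRequest(
+    api,
+    "What is the content of the file?",
+    Collections.singletonList(new BoxAIItem("123456", BoxAIItem.Type.FILE)),
+    BoxAI.Mode.SINGLE_ITEM_QA,
+    null,      // no dialogue history
+    askAgent,  // AI Agent configuration
+    true       // include citations in the response
+);
+```
+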
-[send-ai-request]: http://opensource.box.com/box-java-sdk/javadoc/com/box/sdk/BoxAI.html#sendAIRequest-com.box.sdk.BoxAPIConnection-java.lang.String-
+[send-ai-request]: https://opensource.box.com/box-java-sdk/javadoc/com/box/sdk/BoxAI.html#sendAIRequest-com.box.sdk.BoxAPIConnection-java.lang.String-java.util.List-com.box.sdk.BoxAI.Mode-
 
 Send AI text generation request
 --------------
@@ -62,4 +66,25 @@ BoxAIResponse response = BoxAI.sendAITextGenRequest(
 );
 ```
 
-[send-ai-text-gen-request]: http://opensource.box.com/box-java-sdk/javadoc/com/box/sdk/BoxAI.html#sendAITextGenRequest-com.box.sdk.BoxAPIConnection-java.lang.String-
\ No newline at end of file
+You can also provide an AI Agent configuration to customize the behavior of the AI response generation.
+
+[send-ai-text-gen-request]: https://opensource.box.com/box-java-sdk/javadoc/com/box/sdk/BoxAI.html#sendAITextGenRequest-com.box.sdk.BoxAPIConnection-java.lang.String-java.util.List-java.util.List-
+
+Get AI Agent default configuration
+--------------------------
+
+To get the default configuration of the AI Agent, call the static
+[`getAiAgentDefaultConfig(BoxAPIConnection api, BoxAIAgent.Mode mode, String language, String model)`][get-ai-agent-default-config] method.
+In the request you provide the mode of the AI Agent, the language, and the model; the mode is required, while the language and model are optional.
+
+
+```java
+BoxAIAgent agent = BoxAI.getAiAgentDefaultConfig(
+    api,
+    BoxAIAgent.Mode.ASK,
+    "en",
+    "openai__gpt_3_5_turbo"
+);
+```
+
+[get-ai-agent-default-config]: https://opensource.box.com/box-java-sdk/javadoc/com/box/sdk/BoxAI.html#getAiAgentDefaultConfig-com.box.sdk.BoxAPIConnection-com.box.sdk.BoxAIAgent.Mode-java.lang.String-java.lang.String-
\ No newline at end of file
diff --git a/src/intTest/java/com/box/sdk/BoxAIIT.java b/src/intTest/java/com/box/sdk/BoxAIIT.java
index 9fb181884..b0c137aef 100644
--- a/src/intTest/java/com/box/sdk/BoxAIIT.java
+++ b/src/intTest/java/com/box/sdk/BoxAIIT.java
@@ -141,4 +141,55 @@ public void askAITextGenItemWithDialogueHistory() throws ParseException, Interru
             deleteFile(uploadedFile);
         }
     }
+
+    @Test
+    public void getAIAgentDefaultConfiguration() {
+        BoxAPIConnection api = jwtApiForServiceAccount();
+        BoxAIAgent agent = BoxAI.getAiAgentDefaultConfig(api, BoxAIAgent.Mode.ASK,
+            "en", "openai__gpt_3_5_turbo");
+        BoxAIAgentAsk askAgent = (BoxAIAgentAsk) agent;
+
+        assertThat(askAgent.getType(), is(equalTo(BoxAIAgentAsk.TYPE)));
+        assertThat(askAgent.getBasicText().getModel(), is(equalTo("openai__gpt_3_5_turbo")));
+
+        BoxAIAgent agent2 = BoxAI.getAiAgentDefaultConfig(api, BoxAIAgent.Mode.TEXT_GEN,
+            "en", "openai__gpt_3_5_turbo");
+        BoxAIAgentTextGen textGenAgent = (BoxAIAgentTextGen) agent2;
+
+        assertThat(textGenAgent.getType(), is(equalTo(BoxAIAgentTextGen.TYPE)));
+        assertThat(textGenAgent.getBasicGen().getModel(), is(equalTo("openai__gpt_3_5_turbo")));
+    }
+
+    @Test
+    public void askAISingleItemWithAgent() throws InterruptedException {
+        BoxAPIConnection api = jwtApiForServiceAccount();
+        String fileName = "[askAISingleItem] Test File.txt";
+        BoxFile uploadedFile = uploadFileToUniqueFolder(api, fileName, "Test file");
+        BoxAIAgent agent = BoxAI.getAiAgentDefaultConfig(api, BoxAIAgent.Mode.ASK,
+            "en", "openai__gpt_3_5_turbo_16k");
+        BoxAIAgentAsk askAgent = (BoxAIAgentAsk) agent;
+
+        try {
+            BoxFile.Info uploadedFileInfo = uploadedFile.getInfo();
+            // When a file has just been uploaded, the AI service may not be ready to return a text response
+            // and a 412 status code is returned
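+            // Retry the request (see the retry(...) helper call below) so the file has time to be indexed
+            // before the assertions are evaluated.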
+ retry(() -> { + BoxAIResponse response = BoxAI.sendAIRequest( + api, + "What is the name of the file?", + Collections.singletonList(new BoxAIItem(uploadedFileInfo.getID(), BoxAIItem.Type.FILE)), + BoxAI.Mode.SINGLE_ITEM_QA, + null, + askAgent, + true + ); + assertThat(response.getAnswer(), containsString("Test file")); + assert response.getCreatedAt().before(new Date(System.currentTimeMillis())); + assertThat(response.getCompletionReason(), equalTo("done")); + }, 2, 2000); + + } finally { + deleteFile(uploadedFile); + } + } } diff --git a/src/main/java/com/box/sdk/BoxAI.java b/src/main/java/com/box/sdk/BoxAI.java index 843c5a09e..0dc24fa53 100644 --- a/src/main/java/com/box/sdk/BoxAI.java +++ b/src/main/java/com/box/sdk/BoxAI.java @@ -18,6 +18,10 @@ public final class BoxAI { * Text gen AI url. */ public static final URLTemplate SEND_AI_TEXT_GEN_REQUEST_URL = new URLTemplate("ai/text_gen"); + /** + * AI agent default config url. + */ + public static final URLTemplate AI_AGENT_DEFAULT_CONFIG_URL = new URLTemplate("ai_agent_default"); private BoxAI() { } @@ -25,13 +29,34 @@ private BoxAI() { /** * Sends an AI request to supported LLMs and returns an answer specifically focused * on the user's question given the provided items. - * @param api the API connection to be used by the created user. + * + * @param api the API connection to be used by the created user. * @param prompt The prompt provided by the client to be answered by the LLM. - * @param items The items to be processed by the LLM, currently only files are supported. - * @param mode The mode specifies if this request is for a single or multiple items. + * @param items The items to be processed by the LLM, currently only files are supported. + * @param mode The mode specifies if this request is for a single or multiple items. * @return The response from the AI. */ public static BoxAIResponse sendAIRequest(BoxAPIConnection api, String prompt, List items, Mode mode) { + return sendAIRequest(api, prompt, items, mode, null, null, null); + } + + /** + * Sends an AI request to supported LLMs and returns an answer specifically focused + * on the user's question given the provided items. + * + * @param api the API connection to be used by the created user. + * @param prompt The prompt provided by the client to be answered by the LLM. + * @param items The items to be processed by the LLM, currently only files are supported. + * @param mode The mode specifies if this request is for a single or multiple items. + * @param dialogueHistory The history of prompts and answers previously passed to the LLM. + * This provides additional context to the LLM in generating the response. + * @param agent The AI agent configuration to be used for the request. + * @param includeCitations Whether to include citations in the response. + * @return The response from the AI. 
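+     * @throws BoxAPIException If the request cannot be completed, for example when a freshly uploaded file
+     *                         is not yet indexed and the API responds with a 412 status code.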
+ */ + public static BoxAIResponse sendAIRequest(BoxAPIConnection api, String prompt, List items, Mode mode, + List dialogueHistory, BoxAIAgentAsk agent, + Boolean includeCitations) { URL url = SEND_AI_REQUEST_URL.build(api.getBaseURL()); JsonObject requestJSON = new JsonObject(); requestJSON.add("mode", mode.toString()); @@ -43,6 +68,20 @@ public static BoxAIResponse sendAIRequest(BoxAPIConnection api, String prompt, L } requestJSON.add("items", itemsJSON); + if (dialogueHistory != null) { + JsonArray dialogueHistoryJSON = new JsonArray(); + for (BoxAIDialogueEntry dialogueEntry : dialogueHistory) { + dialogueHistoryJSON.add(dialogueEntry.getJSONObject()); + } + requestJSON.add("dialogue_history", dialogueHistoryJSON); + } + if (agent != null) { + requestJSON.add("ai_agent", agent.getJSONObject()); + } + if (includeCitations != null) { + requestJSON.add("include_citations", includeCitations); + } + BoxJSONRequest req = new BoxJSONRequest(api, url, HttpMethod.POST); req.setBody(requestJSON.toString()); @@ -54,9 +93,10 @@ public static BoxAIResponse sendAIRequest(BoxAPIConnection api, String prompt, L /** * Sends an AI request to supported LLMs and returns an answer specifically focused on the creation of new text. - * @param api the API connection to be used by the created user. + * + * @param api the API connection to be used by the created user. * @param prompt The prompt provided by the client to be answered by the LLM. - * @param items The items to be processed by the LLM, currently only files are supported. + * @param items The items to be processed by the LLM, currently only files are supported. * @return The response from the AI. */ public static BoxAIResponse sendAITextGenRequest(BoxAPIConnection api, String prompt, List items) { @@ -65,16 +105,33 @@ public static BoxAIResponse sendAITextGenRequest(BoxAPIConnection api, String pr /** * Sends an AI request to supported LLMs and returns an answer specifically focused on the creation of new text. - * @param api the API connection to be used by the created user. - * @param prompt The prompt provided by the client to be answered by the LLM. - * @param items The items to be processed by the LLM, currently only files are supported. + * + * @param api the API connection to be used by the created user. + * @param prompt The prompt provided by the client to be answered by the LLM. + * @param items The items to be processed by the LLM, currently only files are supported. * @param dialogueHistory The history of prompts and answers previously passed to the LLM. * This provides additional context to the LLM in generating the response. * @return The response from the AI. */ - public static BoxAIResponse sendAITextGenRequest( - BoxAPIConnection api, String prompt, List items, List dialogueHistory - ) { + public static BoxAIResponse sendAITextGenRequest(BoxAPIConnection api, String prompt, List items, + List dialogueHistory) { + return sendAITextGenRequest(api, prompt, items, dialogueHistory, null); + } + + /** + * Sends an AI request to supported LLMs and returns an answer specifically focused on the creation of new text. + * + * @param api the API connection to be used by the created user. + * @param prompt The prompt provided by the client to be answered by the LLM. + * @param items The items to be processed by the LLM, currently only files are supported. + * @param dialogueHistory The history of prompts and answers previously passed to the LLM. + * This provides additional context to the LLM in generating the response. 
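+     *                        May be {@code null} when there is no previous dialogue to pass along.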
+ * @param agent The AI agent configuration to be used for the request. + * @return The response from the AI. + */ + public static BoxAIResponse sendAITextGenRequest(BoxAPIConnection api, String prompt, List items, + List dialogueHistory, + BoxAIAgentTextGen agent) { URL url = SEND_AI_TEXT_GEN_REQUEST_URL.build(api.getBaseURL()); JsonObject requestJSON = new JsonObject(); requestJSON.add("prompt", prompt); @@ -93,6 +150,10 @@ public static BoxAIResponse sendAITextGenRequest( requestJSON.add("dialogue_history", dialogueHistoryJSON); } + if (agent != null) { + requestJSON.add("ai_agent", agent.getJSONObject()); + } + BoxJSONRequest req = new BoxJSONRequest(api, url, HttpMethod.POST); req.setBody(requestJSON.toString()); @@ -102,6 +163,46 @@ public static BoxAIResponse sendAITextGenRequest( } } + /** + * Get the default AI Agent use for the given mode. + * + * @param api The API connection to be used by the created user. + * @param mode The mode to filter the agent config to return. + * @return A successful response including the default agent configuration. + */ + public static BoxAIAgent getAiAgentDefaultConfig(BoxAPIConnection api, BoxAIAgent.Mode mode) { + return getAiAgentDefaultConfig(api, mode, null, null); + } + + /** + * Get the default AI Agent use for the given mode. + * + * @param api The API connection to be used by the created user. + * @param mode The mode to filter the agent config to return. + * @param language The language to filter the agent config to return. + * @param model The model to filter the agent config to return. + * @return A successful response including the default agent configuration. + */ + public static BoxAIAgent getAiAgentDefaultConfig(BoxAPIConnection api, + BoxAIAgent.Mode mode, + String language, + String model) { + QueryStringBuilder builder = new QueryStringBuilder(); + builder.appendParam("mode", mode.toString()); + if (language != null) { + builder.appendParam("language", language); + } + if (model != null) { + builder.appendParam("model", model); + } + URL url = AI_AGENT_DEFAULT_CONFIG_URL.buildWithQuery(api.getBaseURL(), builder.toString()); + BoxAPIRequest req = new BoxAPIRequest(api, url, HttpMethod.GET); + try (BoxJSONResponse response = (BoxJSONResponse) req.send()) { + JsonObject responseJSON = Json.parse(response.getJSON()).asObject(); + return BoxAIAgent.parse(responseJSON); + } + } + public enum Mode { /** * Multiple items diff --git a/src/main/java/com/box/sdk/BoxAIAgent.java b/src/main/java/com/box/sdk/BoxAIAgent.java new file mode 100644 index 000000000..08315cefa --- /dev/null +++ b/src/main/java/com/box/sdk/BoxAIAgent.java @@ -0,0 +1,116 @@ +package com.box.sdk; + +import com.eclipsesource.json.JsonObject; +import com.eclipsesource.json.JsonValue; + +public abstract class BoxAIAgent extends BoxJSONObject { + /** + * The type of the AI agent. + * Value can be "ai_agent_ask" or "ai_agent_text_gen". + */ + private String type; + + /** + * Constructs an AI agent with default settings. + * @param type The type of the AI agent. + * Value can be "ai_agent_ask" or "ai_agent_text_gen". + */ + public BoxAIAgent(String type) { + super(); + this.type = type; + } + + /** + * Constructs an AI agent with default settings. + * @param jsonObject JSON object representing the AI agent. + */ + public BoxAIAgent(JsonObject jsonObject) { + super(jsonObject); + } + + /** + * Constructs an AI agent with default settings. + * @param jsonObject JSON object representing the AI agent. 
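+     * @return A {@link BoxAIAgentAsk} or {@link BoxAIAgentTextGen} instance, depending on the "type" field of the JSON object.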
+ */ + public static BoxAIAgent parse(JsonObject jsonObject) { + String type = jsonObject.get("type").asString(); + if (type.equals(BoxAIAgentAsk.TYPE)) { + return new BoxAIAgentAsk(jsonObject); + } else if (type.equals(BoxAIAgentTextGen.TYPE)) { + return new BoxAIAgentTextGen(jsonObject); + } else { + throw new IllegalArgumentException("Invalid AI agent type: " + type); + } + } + + /** + * Gets the type of the AI agent. + * @return The type of the AI agent. + */ + public String getType() { + return type; + } + + /** + * Sets the type of the AI agent. + * @param type The type of the AI agent. + */ + public void setType(String type) { + this.type = type; + } + + @Override + void parseJSONMember(JsonObject.Member member) { + super.parseJSONMember(member); + String memberName = member.getName(); + JsonValue value = member.getValue(); + if (memberName.equals("type")) { + this.type = value.asString(); + } + } + + public JsonObject getJSONObject() { + JsonObject jsonObject = new JsonObject(); + jsonObject.add("type", this.type); + return jsonObject; + } + + /** + * The type of the AI agent for asking questions. + */ + public enum Mode { + /** + * The type of AI agent used to handle queries. + */ + ASK("ask"), + /** + * The type of AI agent used for generating text. + */ + TEXT_GEN("text_gen"); + + private final String value; + + Mode(String value) { + this.value = value; + } + + static BoxAIAgent.Mode fromJSONValue(String value) { + if (value.equals("ask")) { + return ASK; + } else if (value.equals("text_gen")) { + return TEXT_GEN; + } else { + throw new IllegalArgumentException("Invalid AI agent mode: " + value); + } + } + + String toJSONValue() { + return this.value; + } + + @Override + public String toString() { + return this.value; + } + } +} diff --git a/src/main/java/com/box/sdk/BoxAIAgentAsk.java b/src/main/java/com/box/sdk/BoxAIAgentAsk.java new file mode 100644 index 000000000..882efef9b --- /dev/null +++ b/src/main/java/com/box/sdk/BoxAIAgentAsk.java @@ -0,0 +1,161 @@ +package com.box.sdk; + +import com.box.sdk.internal.utils.JsonUtils; +import com.eclipsesource.json.JsonObject; +import com.eclipsesource.json.JsonValue; + +/** + * Represents an AI Agent used to handle queries. + */ +@BoxResourceType("ai_agent_ask") +public class BoxAIAgentAsk extends BoxAIAgent { + + /** + * The type of the AI Ask agent. + */ + public static final String TYPE = "ai_agent_ask"; + + /** + * AI agent tool used to handle basic text. + */ + private BoxAIAgentAskBasicText basicText; + /** + * AI agent tool used to handle basic text. + */ + private BoxAIAgentAskBasicText basicTextMulti; + /** + * AI agent tool used to handle longer text. + */ + private BoxAIAgentAskLongText longText; + /** + * AI agent tool used to handle longer text. + */ + private BoxAIAgentAskLongText longTextMulti; + + /** + * Constructs an AI agent with default settings. + * @param basicText AI agent tool used to handle basic text. + * @param basicTextMulti AI agent tool used to handle basic text. + * @param longText AI agent tool used to handle longer text. + * @param longTextMulti AI agent tool used to handle longer text. + */ + public BoxAIAgentAsk(BoxAIAgentAskBasicText basicText, BoxAIAgentAskBasicText basicTextMulti, + BoxAIAgentAskLongText longText, BoxAIAgentAskLongText longTextMulti) { + super(TYPE); + this.basicText = basicText; + this.basicTextMulti = basicTextMulti; + this.longText = longText; + this.longTextMulti = longTextMulti; + } + + /** + * Constructs an AI agent with default settings. 
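+     * The agent type and the tool configurations (basic text, long text, and their multi-item variants) are read from the JSON object.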
+ * @param jsonObject JSON object representing the AI agent. + */ + public BoxAIAgentAsk(JsonObject jsonObject) { + super(jsonObject); + } + + /** + * Gets the AI agent tool used to handle basic text. + * @return The AI agent tool used to handle basic text. + */ + public BoxAIAgentAskBasicText getBasicText() { + return basicText; + } + + /** + * Sets the AI agent tool used to handle basic text. + * @param basicText The AI agent tool used to handle basic text. + */ + public void setBasicText(BoxAIAgentAskBasicText basicText) { + this.basicText = basicText; + } + + /** + * Gets the AI agent tool used to handle basic text. + * @return The AI agent tool used to handle basic text. + */ + public BoxAIAgentAskBasicText getBasicTextMulti() { + return basicTextMulti; + } + + /** + * Sets the AI agent tool used to handle basic text. + * @param basicTextMulti The AI agent tool used to handle basic text. + */ + public void setBasicTextMulti(BoxAIAgentAskBasicText basicTextMulti) { + this.basicTextMulti = basicTextMulti; + } + + /** + * Gets the AI agent tool used to handle longer text. + * @return The AI agent tool used to handle longer text. + */ + public BoxAIAgentAskLongText getLongText() { + return longText; + } + + /** + * Sets the AI agent tool used to handle longer text. + * @param longText The AI agent tool used to handle longer text. + */ + public void setLongText(BoxAIAgentAskLongText longText) { + this.longText = longText; + } + + /** + * Gets the AI agent tool used to handle longer text. + * @return The AI agent tool used to handle longer text. + */ + public BoxAIAgentAskLongText getLongTextMulti() { + return longTextMulti; + } + + /** + * Sets the AI agent tool used to handle longer text. + * @param longTextMulti The AI agent tool used to handle longer text. 
+ */ + public void setLongTextMulti(BoxAIAgentAskLongText longTextMulti) { + this.longTextMulti = longTextMulti; + } + + @Override + void parseJSONMember(JsonObject.Member member) { + super.parseJSONMember(member); + String memberName = member.getName(); + JsonValue memberValue = member.getValue(); + try { + switch (memberName) { + case "basic_text": + this.basicText = new BoxAIAgentAskBasicText(memberValue.asObject()); + break; + case "basic_text_multi": + this.basicTextMulti = new BoxAIAgentAskBasicText(memberValue.asObject()); + break; + case "long_text": + this.longText = new BoxAIAgentAskLongText(memberValue.asObject()); + break; + case "long_text_multi": + this.longTextMulti = new BoxAIAgentAskLongText(memberValue.asObject()); + break; + default: + break; + } + } catch (Exception e) { + throw new BoxAPIException("Could not parse JSON response.", e); + } + } + + @Override + public JsonObject getJSONObject() { + JsonObject jsonObject = new JsonObject(); + JsonUtils.addIfNotNull(jsonObject, "type", this.getType()); + JsonUtils.addIfNotNull(jsonObject, "basic_text", this.basicText.getJSONObject()); + JsonUtils.addIfNotNull(jsonObject, "basic_text_multi", this.basicTextMulti.getJSONObject()); + JsonUtils.addIfNotNull(jsonObject, "long_text", this.longText.getJSONObject()); + JsonUtils.addIfNotNull(jsonObject, "long_text_multi", this.longTextMulti.getJSONObject()); + return jsonObject; + } +} + diff --git a/src/main/java/com/box/sdk/BoxAIAgentAskBasicText.java b/src/main/java/com/box/sdk/BoxAIAgentAskBasicText.java new file mode 100644 index 000000000..32c3b354e --- /dev/null +++ b/src/main/java/com/box/sdk/BoxAIAgentAskBasicText.java @@ -0,0 +1,185 @@ +package com.box.sdk; + +import com.box.sdk.internal.utils.JsonUtils; +import com.eclipsesource.json.JsonObject; +import com.eclipsesource.json.JsonValue; + +/** + * AI agent tool used to handle basic text. + */ +public class BoxAIAgentAskBasicText extends BoxJSONObject { + /** + * The parameters for the LLM endpoint specific to OpenAI / Google models. + */ + private BoxAIAgentLLMEndpointParams llmEndpointParams; + /** + * The model used for the AI Agent for basic text. + */ + private String model; + /** + * The number of tokens for completion. + */ + private int numTokensForCompletion; + /** + * The prompt template contains contextual information of the request and the user prompt. + * When passing prompt_template parameters, you must include inputs for {user_question} and {content}. + * Input for {current_date} is optional, depending on the use. + */ + private String promptTemplate; + /** + * System messages try to help the LLM "understand" its role and what it is supposed to do. + */ + private String systemMessage; + + /** + * Constructs an AI agent with default settings. + * @param llmEndpointParams The parameters for the LLM endpoint specific to OpenAI / Google models. + * @param model The model used for the AI Agent for basic text. + * @param numTokensForCompletion The number of tokens for completion. + * @param promptTemplate The prompt template contains contextual information of the request and the user prompt. + * When passing prompt_template parameters, you must include inputs for {user_question} and {content}. + * Input for {current_date} is optional, depending on the use. + * @param systemMessage System messages try to help the LLM "understand" its role and what it is supposed to do. 
+ */ + public BoxAIAgentAskBasicText(BoxAIAgentLLMEndpointParams llmEndpointParams, String model, + int numTokensForCompletion, String promptTemplate, String systemMessage) { + super(); + this.llmEndpointParams = llmEndpointParams; + this.model = model; + this.numTokensForCompletion = numTokensForCompletion; + this.promptTemplate = promptTemplate; + this.systemMessage = systemMessage; + } + + /** + * Constructs an AI agent with default settings. + * @param jsonObject JSON object representing the AI agent. + */ + public BoxAIAgentAskBasicText(JsonObject jsonObject) { + super(jsonObject); + } + + /** + * Gets the parameters for the LLM endpoint specific to OpenAI / Google models. + * @return The parameters for the LLM endpoint specific to OpenAI / Google models. + */ + public BoxAIAgentLLMEndpointParams getLlmEndpointParams() { + return llmEndpointParams; + } + + /** + * Sets the parameters for the LLM endpoint specific to OpenAI / Google models. + * @param llmEndpointParams The parameters for the LLM endpoint specific to OpenAI / Google models. + */ + public void setLlmEndpointParams(BoxAIAgentLLMEndpointParams llmEndpointParams) { + this.llmEndpointParams = llmEndpointParams; + } + + /** + * Gets the model used for the AI Agent for basic text. + * @return The model used for the AI Agent for basic text. + */ + public String getModel() { + return model; + } + + /** + * Sets the model used for the AI Agent for basic text. + * @param model The model used for the AI Agent for basic text. + */ + public void setModel(String model) { + this.model = model; + } + + /** + * Gets the number of tokens for completion. + * @return The number of tokens for completion. + */ + public int getNumTokensForCompletion() { + return numTokensForCompletion; + } + + /** + * Sets the number of tokens for completion. + * @param numTokensForCompletion The number of tokens for completion. + */ + public void setNumTokensForCompletion(int numTokensForCompletion) { + this.numTokensForCompletion = numTokensForCompletion; + } + + /** + * Gets the prompt template contains contextual information of the request and the user prompt. + * When passing prompt_template parameters, you must include inputs for {user_question} and {content}. + * Input for {current_date} is optional, depending on the use. + * @return The prompt template contains contextual information of the request and the user prompt. + */ + public String getPromptTemplate() { + return promptTemplate; + } + + /** + * Sets the prompt template contains contextual information of the request and the user prompt. + * When passing prompt_template parameters, you must include inputs for {user_question} and {content}. + * Input for {current_date} is optional, depending on the use. + * @param promptTemplate The prompt template contains contextual information of the request and the user prompt. + */ + public void setPromptTemplate(String promptTemplate) { + this.promptTemplate = promptTemplate; + } + + /** + * Gets the system messages try to help the LLM "understand" its role and what it is supposed to do. + * @return The system messages try to help the LLM "understand" its role and what it is supposed to do. + */ + public String getSystemMessage() { + return systemMessage; + } + + /** + * Sets the system messages try to help the LLM "understand" its role and what it is supposed to do. + * @param systemMessage The system messages try to help the LLM "understand" its role and what it is supposed to do. 
+ */ + public void setSystemMessage(String systemMessage) { + this.systemMessage = systemMessage; + } + + @Override + void parseJSONMember(JsonObject.Member member) { + super.parseJSONMember(member); + String memberName = member.getName(); + JsonValue memberValue = member.getValue(); + try { + switch (memberName) { + case "llm_endpoint_params": + this.llmEndpointParams = BoxAIAgentLLMEndpointParams.parse(memberValue.asObject()); + break; + case "model": + this.model = memberValue.asString(); + break; + case "num_tokens_for_completion": + this.numTokensForCompletion = memberValue.asInt(); + break; + case "prompt_template": + this.promptTemplate = memberValue.asString(); + break; + case "system_message": + this.systemMessage = memberValue.asString(); + break; + default: + break; + } + } catch (Exception e) { + throw new BoxDeserializationException(memberName, memberValue.toString(), e); + } + } + + public JsonObject getJSONObject() { + JsonObject jsonObject = new JsonObject(); + JsonUtils.addIfNotNull(jsonObject, "llm_endpoint_params", this.llmEndpointParams.getJSONObject()); + JsonUtils.addIfNotNull(jsonObject, "model", this.model); + JsonUtils.addIfNotNull(jsonObject, "num_tokens_for_completion", this.numTokensForCompletion); + JsonUtils.addIfNotNull(jsonObject, "prompt_template", this.promptTemplate); + JsonUtils.addIfNotNull(jsonObject, "system_message", this.systemMessage); + return jsonObject; + } +} diff --git a/src/main/java/com/box/sdk/BoxAIAgentAskLongText.java b/src/main/java/com/box/sdk/BoxAIAgentAskLongText.java new file mode 100644 index 000000000..a73219ddf --- /dev/null +++ b/src/main/java/com/box/sdk/BoxAIAgentAskLongText.java @@ -0,0 +1,212 @@ +package com.box.sdk; + +import com.box.sdk.internal.utils.JsonUtils; +import com.eclipsesource.json.JsonObject; + +/** + * Represents an AI agent tool used to handle longer text. + */ +public class BoxAIAgentAskLongText extends BoxJSONObject { + /** + * Embeddings used by the AI agent. + */ + private BoxAIAgentEmbeddings embeddings; + /** + * The parameters for the LLM endpoint specific to OpenAI / Google models. + */ + private BoxAIAgentLLMEndpointParams llmEndpointParams; + /** + * The model used for the AI Agent for basic text. + */ + private String model; + /** + * The number of tokens for completion. + */ + private int numTokensForCompletion; + /** + * The prompt template contains contextual information of the request and the user prompt. + * When passing prompt_template parameters, you must include inputs for {user_question} and {content}. + * Input for {current_date} is optional, depending on the use. + */ + private String promptTemplate; + /** + * System messages try to help the LLM "understand" its role and what it is supposed to do. + */ + private String systemMessage; + + /** + * Constructs an AI agent with default settings. + * @param embeddings Embeddings used by the AI agent. + * @param llmEndpointParams The parameters for the LLM endpoint specific to OpenAI / Google models. + * Value can be "google_params" or "openai_params". + * @param model The model used for the AI Agent for basic text. + * @param numTokensForCompletion The number of tokens for completion. + * @param promptTemplate The prompt template contains contextual information of the request and the user prompt. + * When passing prompt_template parameters, you must include inputs for {user_question} and {content}. + * Input for {current_date} is optional, depending on the use. 
+ * @param systemMessage System messages try to help the LLM "understand" its role and what it is supposed to do. + */ + public BoxAIAgentAskLongText(BoxAIAgentEmbeddings embeddings, + BoxAIAgentLLMEndpointParams llmEndpointParams, + String model, int numTokensForCompletion, + String promptTemplate, + String systemMessage) { + this.embeddings = embeddings; + this.llmEndpointParams = llmEndpointParams; + this.model = model; + this.numTokensForCompletion = numTokensForCompletion; + this.promptTemplate = promptTemplate; + this.systemMessage = systemMessage; + } + + /** + * Constructs an AI agent with default settings. + * @param jsonObject JSON object representing the AI agent. + */ + public BoxAIAgentAskLongText(JsonObject jsonObject) { + super(jsonObject); + } + + /** + * Gets the embeddings used by the AI agent. + * @return The embeddings used by the AI agent. + */ + public BoxAIAgentEmbeddings getEmbeddings() { + return embeddings; + } + + /** + * Sets the embeddings used by the AI agent. + * @param embeddings The embeddings used by the AI agent. + */ + public void setEmbeddings(BoxAIAgentEmbeddings embeddings) { + this.embeddings = embeddings; + } + + /** + * Gets the parameters for the LLM endpoint specific to OpenAI / Google models. + * @return The parameters for the LLM endpoint specific to OpenAI / Google models. + */ + public BoxAIAgentLLMEndpointParams getLlmEndpointParams() { + return llmEndpointParams; + } + + /** + * Sets the parameters for the LLM endpoint specific to OpenAI / Google models. + * @param llmEndpointParams The parameters for the LLM endpoint specific to OpenAI / Google models. + */ + public void setLlmEndpointParams(BoxAIAgentLLMEndpointParams llmEndpointParams) { + this.llmEndpointParams = llmEndpointParams; + } + + /** + * Gets the model used for the AI Agent for basic text. + * @return The model used for the AI Agent for basic text. + */ + public String getModel() { + return model; + } + + /** + * Sets the model used for the AI Agent for basic text. + * @param model The model used for the AI Agent for basic text. + */ + public void setModel(String model) { + this.model = model; + } + + /** + * Gets the number of tokens for completion. + * @return The number of tokens for completion. + */ + public int getNumTokensForCompletion() { + return numTokensForCompletion; + } + + /** + * Sets the number of tokens for completion. + * @param numTokensForCompletion The number of tokens for completion. + */ + public void setNumTokensForCompletion(int numTokensForCompletion) { + this.numTokensForCompletion = numTokensForCompletion; + } + + /** + * Gets the prompt template contains contextual information of the request and the user prompt. + * When passing prompt_template parameters, you must include inputs for {user_question} and {content}. + * Input for {current_date} is optional, depending on the use. + * @return The prompt template contains contextual information of the request and the user prompt. + */ + public String getPromptTemplate() { + return promptTemplate; + } + + /** + * Sets the prompt template contains contextual information of the request and the user prompt. + * When passing prompt_template parameters, you must include inputs for {user_question} and {content}. + * Input for {current_date} is optional, depending on the use. + * @param promptTemplate The prompt template contains contextual information of the request and the user prompt. 
+ */ + public void setPromptTemplate(String promptTemplate) { + this.promptTemplate = promptTemplate; + } + + /** + * Gets the system messages try to help the LLM "understand" its role and what it is supposed to do. + * @return The system messages try to help the LLM "understand" its role and what it is supposed to do. + */ + public String getSystemMessage() { + return systemMessage; + } + + /** + * Sets the system messages try to help the LLM "understand" its role and what it is supposed to do. + * @param systemMessage The system messages try to help the LLM "understand" its role and what it is supposed to do. + */ + public void setSystemMessage(String systemMessage) { + this.systemMessage = systemMessage; + } + + @Override + void parseJSONMember(JsonObject.Member member) { + super.parseJSONMember(member); + String memberName = member.getName(); + try { + switch (memberName) { + case "embeddings": + this.embeddings = new BoxAIAgentEmbeddings(member.getValue().asObject()); + break; + case "llm_endpoint_params": + this.llmEndpointParams = BoxAIAgentLLMEndpointParams.parse(member.getValue().asObject()); + break; + case "model": + this.model = member.getValue().asString(); + break; + case "num_tokens_for_completion": + this.numTokensForCompletion = member.getValue().asInt(); + break; + case "prompt_template": + this.promptTemplate = member.getValue().asString(); + break; + case "system_message": + this.systemMessage = member.getValue().asString(); + break; + default: + break; + } + } catch (Exception e) { + throw new BoxAPIException("Could not parse JSON response.", e); + } + } + + public JsonObject getJSONObject() { + JsonObject jsonObject = new JsonObject(); + JsonUtils.addIfNotNull(jsonObject, "embeddings", this.embeddings.getJSONObject()); + JsonUtils.addIfNotNull(jsonObject, "llm_endpoint_params", this.llmEndpointParams.getJSONObject()); + JsonUtils.addIfNotNull(jsonObject, "model", this.model); + JsonUtils.addIfNotNull(jsonObject, "num_tokens_for_completion", this.numTokensForCompletion); + JsonUtils.addIfNotNull(jsonObject, "prompt_template", this.promptTemplate); + JsonUtils.addIfNotNull(jsonObject, "system_message", this.systemMessage); + return jsonObject; + } +} diff --git a/src/main/java/com/box/sdk/BoxAIAgentEmbeddings.java b/src/main/java/com/box/sdk/BoxAIAgentEmbeddings.java new file mode 100644 index 000000000..f4ba56e0d --- /dev/null +++ b/src/main/java/com/box/sdk/BoxAIAgentEmbeddings.java @@ -0,0 +1,93 @@ +package com.box.sdk; + +import com.box.sdk.internal.utils.JsonUtils; +import com.eclipsesource.json.JsonObject; +import com.eclipsesource.json.JsonValue; + +/** + * Represents the embeddings used by an AI agent. + */ +public class BoxAIAgentEmbeddings extends BoxJSONObject { + /** + * The model used for the AI Agent for calculating embeddings. + */ + private String model; + /** + * The strategy used for the AI Agent for calculating embeddings. + */ + private BoxAIAgentEmbeddingsStrategy strategy; + + /** + * Constructs an AI agent with default settings. + * @param model The model used for the AI Agent for calculating embeddings. + * @param strategy The strategy used for the AI Agent for calculating embeddings. + */ + public BoxAIAgentEmbeddings(String model, BoxAIAgentEmbeddingsStrategy strategy) { + this.model = model; + this.strategy = strategy; + } + + /** + * Constructs an AI agent with default settings. + * @param jsonObject JSON object representing the AI agent. 
+ */ + public BoxAIAgentEmbeddings(JsonObject jsonObject) { + super(jsonObject); + } + + /** + * Gets the model used for the AI Agent for calculating embeddings. + * @return The model used for the AI Agent for calculating embeddings. + */ + public String getModel() { + return model; + } + + /** + * Sets the model used for the AI Agent for calculating embeddings. + * @param model The model used for the AI Agent for calculating embeddings. + */ + public void setModel(String model) { + this.model = model; + } + + /** + * Gets the strategy used for the AI Agent for calculating embeddings. + * @return The strategy used for the AI Agent for calculating embeddings. + */ + public BoxAIAgentEmbeddingsStrategy getStrategy() { + return strategy; + } + + /** + * Sets the strategy used for the AI Agent for calculating embeddings. + * @param strategy The strategy used for the AI Agent for calculating embeddings. + */ + public void setStrategy(BoxAIAgentEmbeddingsStrategy strategy) { + this.strategy = strategy; + } + + @Override + void parseJSONMember(JsonObject.Member member) { + super.parseJSONMember(member); + String memberName = member.getName(); + JsonValue value = member.getValue(); + switch (memberName) { + case "model": + this.model = member.getValue().asString(); + break; + case "strategy": + this.strategy = new BoxAIAgentEmbeddingsStrategy(value.asObject()); + break; + default: + break; + } + } + + public JsonObject getJSONObject() { + JsonObject jsonObject = new JsonObject(); + JsonUtils.addIfNotNull(jsonObject, "model", this.model); + JsonUtils.addIfNotNull(jsonObject, "strategy", this.strategy.getJSONObject()); + return jsonObject; + } +} diff --git a/src/main/java/com/box/sdk/BoxAIAgentEmbeddingsStrategy.java b/src/main/java/com/box/sdk/BoxAIAgentEmbeddingsStrategy.java new file mode 100644 index 000000000..4d2bbecfe --- /dev/null +++ b/src/main/java/com/box/sdk/BoxAIAgentEmbeddingsStrategy.java @@ -0,0 +1,86 @@ +package com.box.sdk; + +import com.box.sdk.internal.utils.JsonUtils; +import com.eclipsesource.json.JsonObject; + +/** + * Represents the strategy used for the AI Agent for calculating embeddings. + */ +public class BoxAIAgentEmbeddingsStrategy extends BoxJSONObject { + /** + * The ID of the strategy used for the AI Agent for calculating embeddings. + */ + private String id; + /** + * The number of tokens per chunk used for the AI Agent for calculating embeddings. + */ + private int numTokensPerChunk; + + /** + * Constructs an AI agent with default settings. + * @param id The ID of the strategy used for the AI Agent for calculating embeddings. + * @param numTokensPerChunk The number of tokens per chunk used for the AI Agent for calculating embeddings. + */ + public BoxAIAgentEmbeddingsStrategy(String id, int numTokensPerChunk) { + this.id = id; + this.numTokensPerChunk = numTokensPerChunk; + } + + /** + * Constructs an AI agent with default settings. + * @param jsonObject JSON object representing the AI agent. + */ + public BoxAIAgentEmbeddingsStrategy(JsonObject jsonObject) { + super(jsonObject); + } + + /** + * Gets the ID of the strategy used for the AI Agent for calculating embeddings. + * @return The ID of the strategy used for the AI Agent for calculating embeddings. + */ + public String getId() { + return id; + } + + /** + * Sets the ID of the strategy used for the AI Agent for calculating embeddings. + * @param id The ID of the strategy used for the AI Agent for calculating embeddings. 
+ */ + public void setId(String id) { + this.id = id; + } + + /** + * Gets the number of tokens per chunk used for the AI Agent for calculating embeddings. + * @return The number of tokens per chunk used for the AI Agent for calculating embeddings. + */ + public int getNumTokensPerChunk() { + return numTokensPerChunk; + } + + /** + * Sets the number of tokens per chunk used for the AI Agent for calculating embeddings. + * @param numTokensPerChunk The number of tokens per chunk used for the AI Agent for calculating embeddings. + */ + public void setNumTokensPerChunk(int numTokensPerChunk) { + this.numTokensPerChunk = numTokensPerChunk; + } + + @Override + void parseJSONMember(JsonObject.Member member) { + super.parseJSONMember(member); + String memberName = member.getName(); + if (memberName.equals("id")) { + this.id = member.getValue().asString(); + } else if (memberName.equals("num_tokens_per_chunk")) { + this.numTokensPerChunk = member.getValue().asInt(); + } + } + + public JsonObject getJSONObject() { + JsonObject jsonObject = new JsonObject(); + JsonUtils.addIfNotNull(jsonObject, "id", this.id); + JsonUtils.addIfNotNull(jsonObject, "num_tokens_per_chunk", this.numTokensPerChunk); + return jsonObject; + } +} diff --git a/src/main/java/com/box/sdk/BoxAIAgentLLMEndpointParams.java b/src/main/java/com/box/sdk/BoxAIAgentLLMEndpointParams.java new file mode 100644 index 000000000..9d1428e73 --- /dev/null +++ b/src/main/java/com/box/sdk/BoxAIAgentLLMEndpointParams.java @@ -0,0 +1,81 @@ +package com.box.sdk; + +import com.box.sdk.internal.utils.JsonUtils; +import com.eclipsesource.json.JsonObject; + +/** + * The parameters for the LLM endpoint specific to OpenAI / Google models. + */ +public class BoxAIAgentLLMEndpointParams extends BoxJSONObject { + /** + * The type of the LLM endpoint parameters. + * Value can be "google_params" or "openai_params". + */ + private String type; + + /** + * Constructs LLM endpoint parameters with default settings. + */ + public BoxAIAgentLLMEndpointParams(String type) { + super(); + this.type = type; + } + + /** + * Constructs LLM endpoint parameters with default settings. + * @param jsonObject JSON object representing the LLM endpoint parameters. + */ + public BoxAIAgentLLMEndpointParams(JsonObject jsonObject) { + super(jsonObject); + } + + /** + * Parses a JSON object representing LLM endpoint parameters. + * @param jsonObject JSON object representing the LLM endpoint parameters. + * @return The LLM endpoint parameters parsed from the JSON object. + */ + public static BoxAIAgentLLMEndpointParams parse(JsonObject jsonObject) { + String type = jsonObject.get("type").asString(); + switch (type) { + case BoxAIAgentLLMEndpointParamsGoogle.TYPE: + return new BoxAIAgentLLMEndpointParamsGoogle(jsonObject); + case BoxAIAgentLLMEndpointParamsOpenAI.TYPE: + return new BoxAIAgentLLMEndpointParamsOpenAI(jsonObject); + default: + throw new IllegalArgumentException("Invalid LLM endpoint params type: " + type); + } + } + + /** + * Gets the type of the LLM endpoint parameters. + * @return The type of the LLM endpoint parameters. + */ + public String getType() { + return type; + } + + /** + * Sets the type of the LLM endpoint parameters. + * @param type The type of the LLM endpoint parameters. 
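+     *             Value can be "google_params" or "openai_params".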
+ */ + public void setType(String type) { + this.type = type; + } + + @Override + void parseJSONMember(JsonObject.Member member) { + super.parseJSONMember(member); + String memberName = member.getName(); + if (memberName.equals("type")) { + this.type = member.getValue().asString(); + } + } + + public JsonObject getJSONObject() { + JsonObject jsonObject = new JsonObject(); + JsonUtils.addIfNotNull(jsonObject, "type", this.type); + return jsonObject; + } +} + + diff --git a/src/main/java/com/box/sdk/BoxAIAgentLLMEndpointParamsGoogle.java b/src/main/java/com/box/sdk/BoxAIAgentLLMEndpointParamsGoogle.java new file mode 100644 index 000000000..b7d28bfed --- /dev/null +++ b/src/main/java/com/box/sdk/BoxAIAgentLLMEndpointParamsGoogle.java @@ -0,0 +1,137 @@ +package com.box.sdk; + +import com.box.sdk.internal.utils.JsonUtils; +import com.eclipsesource.json.JsonObject; + +/** + * Represents the AI LLM endpoint params Google object. + */ +public class BoxAIAgentLLMEndpointParamsGoogle extends BoxAIAgentLLMEndpointParams { + + /** + * The type of the LLM endpoint parameters. + */ + public static final String TYPE = "google_params"; + + /** + * The temperature is used for sampling during response generation, which occurs when top-P and top-K are applied. + * Temperature controls the degree of randomness in token selection. + */ + private double temperature; + /** + * Top-K changes how the model selects tokens for output. + * A top-K of 1 means the next selected token is the most probable among all tokens in the model's vocabulary + * (also called greedy decoding), while a top-K of 3 means that the next token is selected from among the three + * most probable tokens by using temperature. + */ + private int topK; + /** + * Top-P changes how the model selects tokens for output. + * Tokens are selected from the most (see top-K) to least probable until the sum of their probabilities equals the + * top-P value. + */ + private double topP; + + /** + * Constructs an AI agent with default settings. + * @param temperature The temperature is used for sampling during response generation, which occurs when top-P and top-K are applied. + * Temperature controls the degree of randomness in token selection. + * @param topK Top-K changes how the model selects tokens for output. + * A top-K of 1 means the next selected token is the most probable among all tokens in the model's vocabulary + * (also called greedy decoding), while a top-K of 3 means that the next token is selected from among the three + * most probable tokens by using temperature. + * @param topP Top-P changes how the model selects tokens for output. + * Tokens are selected from the most (see top-K) to least probable until the sum of their probabilities equals the + * top-P value. + */ + public BoxAIAgentLLMEndpointParamsGoogle(double temperature, int topK, double topP) { + super(TYPE); + this.temperature = temperature; + this.topK = topK; + this.topP = topP; + } + + /** + * Constructs an AI agent with default settings. + * @param jsonObject JSON object representing the AI agent. + */ + public BoxAIAgentLLMEndpointParamsGoogle(JsonObject jsonObject) { + super(jsonObject); + } + + /** + * Gets the temperature used for sampling during response generation, which occurs when top-P and top-K are applied. + * @return The temperature used for sampling during response generation, which occurs when top-P and top-K are applied. 
+ */ + public double getTemperature() { + return temperature; + } + + /** + * Sets the temperature used for sampling during response generation, which occurs when top-P and top-K are applied. + * @param temperature The temperature used for sampling during response generation, which occurs when top-P and top-K are applied. + */ + public void setTemperature(double temperature) { + this.temperature = temperature; + } + + /** + * Gets the top-K value. + * @return The top-K value. + */ + public int getTopK() { + return topK; + } + + /** + * Sets the top-K value. + * @param topK The top-K value. + */ + public void setTopK(int topK) { + this.topK = topK; + } + + /** + * Gets the top-P value. + * @return The top-P value. + */ + public double getTopP() { + return topP; + } + + /** + * Sets the top-P value. + * @param topP The top-P value. + */ + public void setTopP(double topP) { + this.topP = topP; + } + + @Override + void parseJSONMember(JsonObject.Member member) { + super.parseJSONMember(member); + String memberName = member.getName(); + switch (memberName) { + case "temperature": + this.temperature = member.getValue().asDouble(); + break; + case "top_k": + this.topK = member.getValue().asInt(); + break; + case "top_p": + this.topP = member.getValue().asDouble(); + break; + default: + break; + } + } + + public JsonObject getJSONObject() { + JsonObject jsonObject = new JsonObject(); + JsonUtils.addIfNotNull(jsonObject, "type", this.getType()); + JsonUtils.addIfNotNull(jsonObject, "temperature", this.temperature); + JsonUtils.addIfNotNull(jsonObject, "top_k", this.topK); + JsonUtils.addIfNotNull(jsonObject, "top_p", this.topP); + return jsonObject; + } +} diff --git a/src/main/java/com/box/sdk/BoxAIAgentLLMEndpointParamsOpenAI.java b/src/main/java/com/box/sdk/BoxAIAgentLLMEndpointParamsOpenAI.java new file mode 100644 index 000000000..ec555ce99 --- /dev/null +++ b/src/main/java/com/box/sdk/BoxAIAgentLLMEndpointParamsOpenAI.java @@ -0,0 +1,193 @@ +package com.box.sdk; + +import com.box.sdk.internal.utils.JsonUtils; +import com.eclipsesource.json.JsonObject; + +/** + * Represents the AI LLM endpoint params OpenAI object. + */ +public class BoxAIAgentLLMEndpointParamsOpenAI extends BoxAIAgentLLMEndpointParams { + + /** + * The type of the LLM endpoint parameters. + */ + public static final String TYPE = "openai_params"; + + /** + * Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text + * so far, decreasing the model's likelihood to repeat the same line verbatim. + */ + private double frequencyPenalty; + /** + * Number between -2.0 and 2.0. Positive values penalize new tokens based on whether they appear in the text so far, + * increasing the model's likelihood to talk about new topics. + */ + private double presencePenalty; + /** + * Up to 4 sequences where the API will stop generating further tokens. + */ + private String stop; + /** + * What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, + * while lower values like 0.2 will make it more focused and deterministic. + * We generally recommend altering this or top_p but not both. + */ + private double temperature; + /** + * An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of + * the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass + * are considered. We generally recommend altering this or temperature but not both. 
+ */ + private double topP; + + /** + * Constructs an AI agent with default settings. + * @param frequencyPenalty Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text + * so far, decreasing the model's likelihood to repeat the same line verbatim. + * @param presencePenalty Number between -2.0 and 2.0. Positive values penalize new tokens based on whether they appear in the text so far, + * increasing the model's likelihood to talk about new topics. + * @param stop Up to 4 sequences where the API will stop generating further tokens. + * @param temperature What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, + * while lower values like 0.2 will make it more focused and deterministic. + * We generally recommend altering this or top_p but not both. + * @param topP An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of + * the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass + * are considered. We generally recommend altering this or temperature but not both. + */ + public BoxAIAgentLLMEndpointParamsOpenAI(double frequencyPenalty, + double presencePenalty, + String stop, + double temperature, + double topP) { + super(TYPE); + this.frequencyPenalty = frequencyPenalty; + this.presencePenalty = presencePenalty; + this.stop = stop; + this.temperature = temperature; + this.topP = topP; + } + + /** + * Constructs an AI agent with default settings. + * @param jsonObject JSON object representing the AI agent. + */ + public BoxAIAgentLLMEndpointParamsOpenAI(JsonObject jsonObject) { + super(jsonObject); + } + + /** + * Gets the frequency penalty. + * @return The frequency penalty. + */ + public double getFrequencyPenalty() { + return frequencyPenalty; + } + + /** + * Sets the frequency penalty. + * @param frequencyPenalty The frequency penalty. + */ + public void setFrequencyPenalty(double frequencyPenalty) { + this.frequencyPenalty = frequencyPenalty; + } + + /** + * Gets the presence penalty. + * @return The presence penalty. + */ + public double getPresencePenalty() { + return presencePenalty; + } + + /** + * Sets the presence penalty. + * @param presencePenalty The presence penalty. + */ + public void setPresencePenalty(double presencePenalty) { + this.presencePenalty = presencePenalty; + } + + /** + * Gets the stop. + * @return The stop. + */ + public String getStop() { + return stop; + } + + /** + * Sets the stop. + * @param stop The stop. + */ + public void setStop(String stop) { + this.stop = stop; + } + + /** + * Gets the temperature. + * @return The temperature. + */ + public double getTemperature() { + return temperature; + } + + /** + * Sets the temperature. + * @param temperature The temperature. + */ + public void setTemperature(double temperature) { + this.temperature = temperature; + } + + /** + * Gets the top-P. + * @return The top-P. + */ + public double getTopP() { + return topP; + } + + /** + * Sets the top-P. + * @param topP The top-P. 
+ */ + public void setTopP(double topP) { + this.topP = topP; + } + + @Override + void parseJSONMember(JsonObject.Member member) { + super.parseJSONMember(member); + String memberName = member.getName(); + switch (memberName) { + case "frequency_penalty": + this.frequencyPenalty = member.getValue().asDouble(); + break; + case "presence_penalty": + this.presencePenalty = member.getValue().asDouble(); + break; + case "stop": + this.stop = member.getValue().asString(); + break; + case "temperature": + this.temperature = member.getValue().asDouble(); + break; + case "top_p": + this.topP = member.getValue().asDouble(); + break; + default: + break; + } + } + + public JsonObject getJSONObject() { + JsonObject jsonObject = new JsonObject(); + JsonUtils.addIfNotNull(jsonObject, "type", this.getType()); + JsonUtils.addIfNotNull(jsonObject, "frequency_penalty", this.frequencyPenalty); + JsonUtils.addIfNotNull(jsonObject, "presence_penalty", this.presencePenalty); + JsonUtils.addIfNotNull(jsonObject, "stop", this.stop); + JsonUtils.addIfNotNull(jsonObject, "temperature", this.temperature); + JsonUtils.addIfNotNull(jsonObject, "top_p", this.topP); + return jsonObject; + } +} diff --git a/src/main/java/com/box/sdk/BoxAIAgentTextGen.java b/src/main/java/com/box/sdk/BoxAIAgentTextGen.java new file mode 100644 index 000000000..fd9128896 --- /dev/null +++ b/src/main/java/com/box/sdk/BoxAIAgentTextGen.java @@ -0,0 +1,70 @@ +package com.box.sdk; + +import com.box.sdk.internal.utils.JsonUtils; +import com.eclipsesource.json.JsonObject; + +/** + * Represents an AI Agent used for generating text. + */ +@BoxResourceType("ai_agent_text_gen") +public class BoxAIAgentTextGen extends BoxAIAgent { + + /** + * The type of the AI Agent for generating text. + */ + public static final String TYPE = "ai_agent_text_gen"; + + /** + * The basic generator used for the AI Agent for generating text. + */ + private BoxAIAgentTextGenBasicGen basicGen; + + /** + * Constructs an AI agent with default settings. + * @param basicGen The basic generator used for the AI Agent for generating text. + */ + public BoxAIAgentTextGen(BoxAIAgentTextGenBasicGen basicGen) { + super(TYPE); + this.basicGen = basicGen; + } + + /** + * Constructs an AI agent with default settings. + * @param jsonObject JSON object representing the AI agent. + */ + public BoxAIAgentTextGen(JsonObject jsonObject) { + super(jsonObject); + } + + /** + * Gets the basic generator used for the AI Agent for generating text. + * @return The basic generator used for the AI Agent for generating text. + */ + public BoxAIAgentTextGenBasicGen getBasicGen() { + return basicGen; + } + + /** + * Sets the basic generator used for the AI Agent for generating text. + * @param basicGen The basic generator used for the AI Agent for generating text. 
+ */ + public void setBasicGen(BoxAIAgentTextGenBasicGen basicGen) { + this.basicGen = basicGen; + } + + @Override + void parseJSONMember(JsonObject.Member member) { + super.parseJSONMember(member); + String memberName = member.getName(); + if (memberName.equals("basic_gen")) { + this.basicGen = new BoxAIAgentTextGenBasicGen(member.getValue().asObject()); + } + } + + public JsonObject getJSONObject() { + JsonObject jsonObject = new JsonObject(); + JsonUtils.addIfNotNull(jsonObject, "type", this.getType()); + JsonUtils.addIfNotNull(jsonObject, "basic_gen", this.basicGen.getJSONObject()); + return jsonObject; + } +} diff --git a/src/main/java/com/box/sdk/BoxAIAgentTextGenBasicGen.java b/src/main/java/com/box/sdk/BoxAIAgentTextGenBasicGen.java new file mode 100644 index 000000000..629b4b072 --- /dev/null +++ b/src/main/java/com/box/sdk/BoxAIAgentTextGenBasicGen.java @@ -0,0 +1,235 @@ +package com.box.sdk; + +import com.box.sdk.internal.utils.JsonUtils; +import com.eclipsesource.json.JsonObject; + +/** + * Represents the AI agent basic tool used to generate text. + */ +public class BoxAIAgentTextGenBasicGen extends BoxJSONObject { + /** + * How the content should be included in a request to the LLM. Input for {content} is optional, depending on the use. + */ + private String contentTemplate; + /** + * Embeddings used by the AI agent. + */ + private BoxAIAgentEmbeddings embeddings; + /** + * The parameters for the LLM endpoint specific to OpenAI / Google models. + */ + private BoxAIAgentLLMEndpointParams llmEndpointParams; + /** + * The model used for the AI Agent for generating text. + */ + private String model; + /** + * The number of tokens for completion. + */ + private int numTokensForCompletion; + /** + * The prompt template contains contextual information of the request and the user prompt. + * When passing prompt_template parameters, you must include inputs for {user_question} and {content}. + * Input for {current_date} is optional, depending on the use. + */ + private String promptTemplate; + /** + * System messages try to help the LLM "understand" its role and what it is supposed to do. + */ + private String systemMessage; + + /** + * Constructs an AI agent with default settings. + * @param contentTemplate How the content should be included in a request to the LLM. Input for {content} is optional, depending on the use. + * @param embeddings Embeddings used by the AI agent. + * @param llmEndpointParams The parameters for the LLM endpoint specific to OpenAI / Google models. + * @param model The model used for the AI Agent for generating text. + * @param numTokensForCompletion The number of tokens for completion. + * @param promptTemplate The prompt template contains contextual information of the request and the user prompt. + * When passing prompt_template parameters, you must include inputs for {user_question} and {content}. + * Input for {current_date} is optional, depending on the use. + * @param systemMessage System messages try to help the LLM "understand" its role and what it is supposed to do. 
+     */
+    public BoxAIAgentTextGenBasicGen(String contentTemplate, BoxAIAgentEmbeddings embeddings,
+                                     BoxAIAgentLLMEndpointParamsOpenAI llmEndpointParams, String model,
+                                     int numTokensForCompletion, String promptTemplate, String systemMessage) {
+        this.contentTemplate = contentTemplate;
+        this.embeddings = embeddings;
+        this.llmEndpointParams = llmEndpointParams;
+        this.model = model;
+        this.numTokensForCompletion = numTokensForCompletion;
+        this.promptTemplate = promptTemplate;
+        this.systemMessage = systemMessage;
+    }
+
+    /**
+     * Constructs an AI agent with default settings.
+     * @param jsonObject JSON object representing the AI agent.
+     */
+    public BoxAIAgentTextGenBasicGen(JsonObject jsonObject) {
+        super(jsonObject);
+    }
+
+    /**
+     * Gets how the content should be included in a request to the LLM. Input for {content} is optional,
+     * depending on the use.
+     * @return How the content should be included in a request to the LLM.
+     * Input for {content} is optional, depending on the use.
+     */
+    public String getContentTemplate() {
+        return contentTemplate;
+    }
+
+    /**
+     * Sets how the content should be included in a request to the LLM. Input for {content} is optional,
+     * depending on the use.
+     * @param contentTemplate How the content should be included in a request to the LLM.
+     * Input for {content} is optional, depending on the use.
+     */
+    public void setContentTemplate(String contentTemplate) {
+        this.contentTemplate = contentTemplate;
+    }
+
+    /**
+     * Gets the embeddings used by the AI agent.
+     * @return The embeddings used by the AI agent.
+     */
+    public BoxAIAgentEmbeddings getEmbeddings() {
+        return embeddings;
+    }
+
+    /**
+     * Sets the embeddings used by the AI agent.
+     * @param embeddings The embeddings used by the AI agent.
+     */
+    public void setEmbeddings(BoxAIAgentEmbeddings embeddings) {
+        this.embeddings = embeddings;
+    }
+
+    /**
+     * Gets the parameters for the LLM endpoint specific to OpenAI / Google models.
+     * @return The parameters for the LLM endpoint specific to OpenAI / Google models.
+     */
+    public BoxAIAgentLLMEndpointParams getLlmEndpointParams() {
+        return llmEndpointParams;
+    }
+
+    /**
+     * Sets the parameters for the LLM endpoint specific to OpenAI / Google models.
+     * @param llmEndpointParams The parameters for the LLM endpoint specific to OpenAI / Google models.
+     */
+    public void setLlmEndpointParams(BoxAIAgentLLMEndpointParamsOpenAI llmEndpointParams) {
+        this.llmEndpointParams = llmEndpointParams;
+    }
+
+    /**
+     * Gets the model used for the AI Agent for generating text.
+     * @return The model used for the AI Agent for generating text.
+     */
+    public String getModel() {
+        return model;
+    }
+
+    /**
+     * Sets the model used for the AI Agent for generating text.
+     * @param model The model used for the AI Agent for generating text.
+     */
+    public void setModel(String model) {
+        this.model = model;
+    }
+
+    /**
+     * Gets the number of tokens for completion.
+     * @return The number of tokens for completion.
+     */
+    public int getNumTokensForCompletion() {
+        return numTokensForCompletion;
+    }
+
+    /**
+     * Sets the number of tokens for completion.
+     * @param numTokensForCompletion The number of tokens for completion.
+     */
+    public void setNumTokensForCompletion(int numTokensForCompletion) {
+        this.numTokensForCompletion = numTokensForCompletion;
+    }
+
+    /**
+     * Gets the prompt template, which contains contextual information of the request and the user prompt.
+     * When passing prompt_template parameters, you must include inputs for {user_question} and {content}.
+     * Input for {current_date} is optional, depending on the use.
+     * @return The prompt template, which contains contextual information of the request and the user prompt.
+     */
+    public String getPromptTemplate() {
+        return promptTemplate;
+    }
+
+    /**
+     * Sets the prompt template, which contains contextual information of the request and the user prompt.
+     * When passing prompt_template parameters, you must include inputs for {user_question} and {content}.
+     * Input for {current_date} is optional, depending on the use.
+     * @param promptTemplate The prompt template, which contains contextual information of the request and the user prompt.
+     */
+    public void setPromptTemplate(String promptTemplate) {
+        this.promptTemplate = promptTemplate;
+    }
+
+    /**
+     * Gets the system message, which tries to help the LLM "understand" its role and what it is supposed to do.
+     * @return The system message, which tries to help the LLM "understand" its role and what it is supposed to do.
+     */
+    public String getSystemMessage() {
+        return systemMessage;
+    }
+
+    /**
+     * Sets the system message, which tries to help the LLM "understand" its role and what it is supposed to do.
+     * @param systemMessage The system message, which tries to help the LLM "understand" its role and what it is supposed to do.
+     */
+    public void setSystemMessage(String systemMessage) {
+        this.systemMessage = systemMessage;
+    }
+
+    @Override
+    void parseJSONMember(JsonObject.Member member) {
+        super.parseJSONMember(member);
+        String memberName = member.getName();
+        switch (memberName) {
+            case "content_template":
+                this.contentTemplate = member.getValue().asString();
+                break;
+            case "embeddings":
+                this.embeddings = new BoxAIAgentEmbeddings(member.getValue().asObject());
+                break;
+            case "llm_endpoint_params":
+                this.llmEndpointParams = BoxAIAgentLLMEndpointParams.parse(member.getValue().asObject());
+                break;
+            case "model":
+                this.model = member.getValue().asString();
+                break;
+            case "num_tokens_for_completion":
+                this.numTokensForCompletion = member.getValue().asInt();
+                break;
+            case "prompt_template":
+                this.promptTemplate = member.getValue().asString();
+                break;
+            case "system_message":
+                this.systemMessage = member.getValue().asString();
+                break;
+            default:
+                break;
+        }
+    }
+
+    public JsonObject getJSONObject() {
+        JsonObject jsonObject = new JsonObject();
+        JsonUtils.addIfNotNull(jsonObject, "content_template", this.contentTemplate);
+        JsonUtils.addIfNotNull(jsonObject, "embeddings", this.embeddings.getJSONObject());
+        JsonUtils.addIfNotNull(jsonObject, "llm_endpoint_params", this.llmEndpointParams.getJSONObject());
+        JsonUtils.addIfNotNull(jsonObject, "model", this.model);
+        JsonUtils.addIfNotNull(jsonObject, "num_tokens_for_completion", this.numTokensForCompletion);
+        JsonUtils.addIfNotNull(jsonObject, "prompt_template", this.promptTemplate);
+        JsonUtils.addIfNotNull(jsonObject, "system_message", this.systemMessage);
+        return jsonObject;
+    }
+}
diff --git a/src/main/java/com/box/sdk/BoxAIDialogueEntry.java b/src/main/java/com/box/sdk/BoxAIDialogueEntry.java
index 1a6e00e6a..072c9a77e 100644
--- a/src/main/java/com/box/sdk/BoxAIDialogueEntry.java
+++ b/src/main/java/com/box/sdk/BoxAIDialogueEntry.java
@@ -8,7 +8,6 @@
  * Represents an entry of the history of prompts and answers previously passed to the LLM.
  * This provides additional context to the LLM in generating the response.
*/ -@BoxResourceType("file_version") public class BoxAIDialogueEntry extends BoxJSONObject { private String prompt; private String answer; diff --git a/src/main/java/com/box/sdk/BoxCollaborator.java b/src/main/java/com/box/sdk/BoxCollaborator.java index 2e1e1fbe5..d2f130fbb 100644 --- a/src/main/java/com/box/sdk/BoxCollaborator.java +++ b/src/main/java/com/box/sdk/BoxCollaborator.java @@ -185,18 +185,27 @@ protected void parseJSONMember(JsonObject.Member member) { try { - if (name.equals("type")) { - this.type = CollaboratorType.fromJSONValue(value.asString()); - } else if (name.equals("name")) { - this.name = value.asString(); - } else if (name.equals("created_at")) { - this.createdAt = BoxDateFormat.parse(value.asString()); - } else if (name.equals("modified_at")) { - this.modifiedAt = BoxDateFormat.parse(value.asString()); - } else if (name.equals("login")) { - this.login = value.asString(); - } else if (name.equals("group_type")) { - this.groupType = GroupType.fromJSONValue(value.asString()); + switch (name) { + case "type": + this.type = CollaboratorType.fromJSONValue(value.asString()); + break; + case "name": + this.name = value.asString(); + break; + case "created_at": + this.createdAt = BoxDateFormat.parse(value.asString()); + break; + case "modified_at": + this.modifiedAt = BoxDateFormat.parse(value.asString()); + break; + case "login": + this.login = value.asString(); + break; + case "group_type": + this.groupType = GroupType.fromJSONValue(value.asString()); + break; + default: + break; } } catch (Exception e) { throw new BoxDeserializationException(name, value.toString(), e); diff --git a/src/main/java/com/box/sdk/internal/utils/JsonUtils.java b/src/main/java/com/box/sdk/internal/utils/JsonUtils.java index d7ef65970..891f62043 100644 --- a/src/main/java/com/box/sdk/internal/utils/JsonUtils.java +++ b/src/main/java/com/box/sdk/internal/utils/JsonUtils.java @@ -92,4 +92,26 @@ public static void addIfNotNull(JsonObject jsonObject, String propertyName, Date jsonObject.add(propertyName, BoxDateFormat.format(propertyValue)); } } + + /** + * Add JsonObject property to json object if it's not null. + * + * @param jsonObject json object that the key/value will be added to. + * @param propertyName name of the property in json (key). + * @param propertyValue value of the property. + */ + public static void addIfNotNull(JsonObject jsonObject, String propertyName, JsonObject propertyValue) { + if (propertyValue != null) { + jsonObject.add(propertyName, propertyValue); + } + } + + /** + * Add double property to json object if it's not null. 
+ */ + public static void addIfNotNull(JsonObject jsonObject, String propertyName, Double propertyValue) { + if (propertyValue != null) { + jsonObject.add(propertyName, propertyValue); + } + } } diff --git a/src/test/Fixtures/BoxAI/GetAIAgentDefaultConfigAsk200.json b/src/test/Fixtures/BoxAI/GetAIAgentDefaultConfigAsk200.json new file mode 100644 index 000000000..9416ee251 --- /dev/null +++ b/src/test/Fixtures/BoxAI/GetAIAgentDefaultConfigAsk200.json @@ -0,0 +1,73 @@ +{ + "type": "ai_agent_ask", + "basic_text": { + "llm_endpoint_params": { + "type": "openai_params", + "frequency_penalty": 1.5, + "presence_penalty": 1.5, + "stop": "<|im_end|>", + "temperature": 0, + "top_p": 1 + }, + "model": "openai__gpt_3_5_turbo", + "num_tokens_for_completion": 8400, + "prompt_template": "It is `{current_date}`, consider these travel options `{content}` and answer `{user_question}`", + "system_message": "You are a helpful travel assistant specialized in budget travel" + }, + "basic_text_multi": { + "llm_endpoint_params": { + "type": "openai_params", + "frequency_penalty": 1.5, + "presence_penalty": 1.5, + "stop": "<|im_end|>", + "temperature": 0, + "top_p": 1 + }, + "model": "openai__gpt_3_5_turbo", + "num_tokens_for_completion": 8400, + "prompt_template": "It is `{current_date}`, consider these travel options `{content}` and answer `{user_question}`", + "system_message": "You are a helpful travel assistant specialized in budget travel" + }, + "long_text": { + "embeddings": { + "model": "openai__text_embedding_ada_002", + "strategy": { + "id": "basic", + "num_tokens_per_chunk": 64 + } + }, + "llm_endpoint_params": { + "type": "openai_params", + "frequency_penalty": 1.5, + "presence_penalty": 1.5, + "stop": "<|im_end|>", + "temperature": 0, + "top_p": 1 + }, + "model": "openai__gpt_3_5_turbo", + "num_tokens_for_completion": 8400, + "prompt_template": "It is `{current_date}`, consider these travel options `{content}` and answer `{user_question}`", + "system_message": "You are a helpful travel assistant specialized in budget travel" + }, + "long_text_multi": { + "embeddings": { + "model": "openai__text_embedding_ada_002", + "strategy": { + "id": "basic", + "num_tokens_per_chunk": 64 + } + }, + "llm_endpoint_params": { + "type": "openai_params", + "frequency_penalty": 1.5, + "presence_penalty": 1.5, + "stop": "<|im_end|>", + "temperature": 0, + "top_p": 1 + }, + "model": "openai__gpt_3_5_turbo", + "num_tokens_for_completion": 8400, + "prompt_template": "It is `{current_date}`, consider these travel options `{content}` and answer `{user_question}`", + "system_message": "You are a helpful travel assistant specialized in budget travel" + } +} \ No newline at end of file diff --git a/src/test/Fixtures/BoxAI/GetAIAgentDefaultConfigTextGen200.json b/src/test/Fixtures/BoxAI/GetAIAgentDefaultConfigTextGen200.json new file mode 100644 index 000000000..f024bb1be --- /dev/null +++ b/src/test/Fixtures/BoxAI/GetAIAgentDefaultConfigTextGen200.json @@ -0,0 +1,25 @@ +{ + "type": "ai_agent_text_gen", + "basic_gen": { + "content_template": "---{content}---", + "embeddings": { + "model": "openai__text_embedding_ada_002", + "strategy": { + "id": "basic", + "num_tokens_per_chunk": 64 + } + }, + "llm_endpoint_params": { + "type": "openai_params", + "frequency_penalty": 1.5, + "presence_penalty": 1.5, + "stop": "<|im_end|>", + "temperature": 0, + "top_p": 1 + }, + "model": "openai__gpt_3_5_turbo", + "num_tokens_for_completion": 8400, + "prompt_template": "It is `{current_date}`, and I have $8000 and want to spend a week in the 
Azores. `{user_question}`",
+    "system_message": "You are a helpful travel assistant specialized in budget travel"
+  }
+}
diff --git a/src/test/java/com/box/sdk/BoxAITest.java b/src/test/java/com/box/sdk/BoxAITest.java
index 9bfc2ccc2..11af93666 100644
--- a/src/test/java/com/box/sdk/BoxAITest.java
+++ b/src/test/java/com/box/sdk/BoxAITest.java
@@ -1,5 +1,7 @@
 package com.box.sdk;
 
+import com.eclipsesource.json.Json;
+import com.eclipsesource.json.JsonObject;
 import com.github.tomakehurst.wiremock.client.WireMock;
 import com.github.tomakehurst.wiremock.junit.WireMockRule;
 import org.junit.Before;
@@ -117,4 +119,121 @@ public void testSendAITexGenRequestWithDialogueHistorySuccess() throws ParseExce
         assertThat(response.getCreatedAt(), equalTo(new Date(1355338423123L)));
         assertThat(response.getCompletionReason(), equalTo("done"));
     }
+
+    @Test
+    public void testSendAIRequestWithAgentSuccess() throws ParseException {
+        final String fileId = "12345";
+        final String prompt = "What is the name of the file?";
+        List<BoxAIDialogueEntry> dialogueHistory = new ArrayList<>();
+        Date date1 = BoxDateFormat.parse("2013-05-16T15:27:57-07:00");
+        Date date2 = BoxDateFormat.parse("2013-05-16T15:26:57-07:00");
+        dialogueHistory.add(
+            new BoxAIDialogueEntry("What is the name of the file?", "Test file", date1)
+        );
+        dialogueHistory.add(
+            new BoxAIDialogueEntry("What is the size of the file?", "10kb", date2)
+        );
+
+        String result = TestUtils.getFixture("BoxAI/SendAIRequest200");
+        String agentText = TestUtils.getFixture("BoxAI/GetAIAgentDefaultConfigAsk200");
+        wireMockRule.stubFor(WireMock.post(WireMock.urlPathEqualTo("/2.0/ai/ask"))
+            .willReturn(WireMock.aResponse()
+                .withHeader("Content-Type", APPLICATION_JSON)
+                .withBody(result)));
+
+        BoxAIAgentAsk agent = new BoxAIAgentAsk(Json.parse(agentText).asObject());
+        BoxAIResponse response = BoxAI.sendAIRequest(
+            api,
+            prompt,
+            Collections.singletonList(new BoxAIItem(fileId, BoxAIItem.Type.FILE)),
+            BoxAI.Mode.SINGLE_ITEM_QA,
+            dialogueHistory,
+            agent,
+            false
+        );
+
+        assertThat(
+            response.getAnswer(), equalTo("Public APIs are important because of key and important reasons.")
+        );
+        assertThat(response.getCreatedAt(), equalTo(new Date(1355338423123L)));
+        assertThat(response.getCompletionReason(), equalTo("done"));
+    }
+
+    @Test
+    public void testSendAITextGenRequestWithAgentSuccess() throws ParseException {
+        final String fileId = "12345";
+        final String prompt = "What is the name of the file?";
+        Date date1 = BoxDateFormat.parse("2013-05-16T15:27:57-07:00");
+        Date date2 = BoxDateFormat.parse("2013-05-16T15:26:57-07:00");
+        List<BoxAIDialogueEntry> dialogueHistory = new ArrayList<>();
+        dialogueHistory.add(
+            new BoxAIDialogueEntry("What is the name of the file?", "Test file", date1)
+        );
+        dialogueHistory.add(
+            new BoxAIDialogueEntry("What is the size of the file?", "10kb", date2)
+        );
+
+        String result = TestUtils.getFixture("BoxAI/SendAITextGen200");
+        String agentText = TestUtils.getFixture("BoxAI/GetAIAgentDefaultConfigTextGen200");
+        BoxAIAgentTextGen agent = new BoxAIAgentTextGen(Json.parse(agentText).asObject());
+
+        wireMockRule.stubFor(WireMock.post(WireMock.urlPathEqualTo("/2.0/ai/text_gen"))
+            .willReturn(WireMock.aResponse()
+                .withHeader("Content-Type", APPLICATION_JSON)
+                .withBody(result)));
+
+        BoxAIResponse response = BoxAI.sendAITextGenRequest(
+            api,
+            prompt,
+            Collections.singletonList(new BoxAIItem(fileId, BoxAIItem.Type.FILE)),
+            dialogueHistory,
+            agent
+        );
+
+        assertThat(
+            response.getAnswer(), equalTo("Public APIs are important because of key and important 
reasons.") + ); + assertThat(response.getCreatedAt(), equalTo(new Date(1355338423123L))); + assertThat(response.getCompletionReason(), equalTo("done")); + } + + @Test + public void testGetAIAgentDefaultConfigAskSuccess() { + String result = TestUtils.getFixture("BoxAI/GetAIAgentDefaultConfigAsk200"); + String urlPath = "/2.0/ai_agent_default"; + + wireMockRule.stubFor(WireMock.get(WireMock.urlPathEqualTo(urlPath)) + .willReturn(WireMock.aResponse() + .withHeader("Content-Type", APPLICATION_JSON) + .withBody(result))); + + BoxAIAgent agent = BoxAI.getAiAgentDefaultConfig(api, BoxAIAgent.Mode.ASK, "en", "openai__gpt_3_5_turbo"); + BoxAIAgentAsk agentAsk = (BoxAIAgentAsk) agent; + + assertThat(agent.getType(), equalTo("ai_agent_ask")); + assertThat(agentAsk.getBasicText().getModel(), equalTo("openai__gpt_3_5_turbo")); + + JsonObject jsonResult = Json.parse(result).asObject(); + assertThat(agent.getJSONObject().toString(), equalTo(jsonResult.toString())); + } + + @Test + public void testGetAIAgentDefaultConfigTextGenSuccess() { + String result = TestUtils.getFixture("BoxAI/GetAIAgentDefaultConfigTextGen200"); + String urlPath = "/2.0/ai_agent_default"; + + wireMockRule.stubFor(WireMock.get(WireMock.urlPathEqualTo(urlPath)) + .willReturn(WireMock.aResponse() + .withHeader("Content-Type", APPLICATION_JSON) + .withBody(result))); + + BoxAIAgent agent = BoxAI.getAiAgentDefaultConfig(api, BoxAIAgent.Mode.TEXT_GEN, "en", "openai__gpt_3_5_turbo"); + BoxAIAgentTextGen agentTextGen = (BoxAIAgentTextGen) agent; + + assertThat(agent.getType(), equalTo("ai_agent_text_gen")); + assertThat(agentTextGen.getBasicGen().getModel(), equalTo("openai__gpt_3_5_turbo")); + + JsonObject jsonResult = Json.parse(result).asObject(); + assertThat(agent.getJSONObject().toString(), equalTo(jsonResult.toString())); + } }
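
Taken together, the pieces added in this patch can be combined roughly as follows. This is a minimal sketch mirroring the new unit tests above, not part of the patch itself: the file ID, prompt, and dialogue entry are placeholders, and the final boolean is the citations flag described in the doc changes.

```java
// Minimal usage sketch (assumes the usual com.box.sdk, java.util.Collections, and java.util.Date imports).
// Fetch the default ask agent, then pass it as an override on a single-item question.
BoxAIAgentAsk agent = (BoxAIAgentAsk) BoxAI.getAiAgentDefaultConfig(
        api, BoxAIAgent.Mode.ASK, "en", "openai__gpt_3_5_turbo");

// Placeholder dialogue history giving the LLM extra context.
List<BoxAIDialogueEntry> dialogueHistory = Collections.singletonList(
        new BoxAIDialogueEntry("What is the name of the file?", "Test file", new Date()));

BoxAIResponse response = BoxAI.sendAIRequest(
        api,
        "What is the content of the file?",                                       // placeholder prompt
        Collections.singletonList(new BoxAIItem("123456", BoxAIItem.Type.FILE)),  // placeholder file ID
        BoxAI.Mode.SINGLE_ITEM_QA,
        dialogueHistory,
        agent,
        true                                                                       // request citations
);
```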