package com.box.sdk;

import com.box.sdk.internal.utils.JsonUtils;
import com.eclipsesource.json.JsonObject;

/**
 * Represents an AI agent tool used to handle longer text.
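 *
 * <p>Illustrative usage sketch: the embeddings and LLM endpoint parameters are assumed to be
 * built elsewhere, and the model name, token count, prompt template, and system message below
 * are placeholder values rather than SDK defaults.</p>
 * <pre>{@code
 * BoxAIAgentEmbeddings embeddings = ...;
 * BoxAIAgentLLMEndpointParams llmEndpointParams = ...;
 * BoxAIAgentAskLongText longTextTool = new BoxAIAgentAskLongText(
 *     embeddings,
 *     llmEndpointParams,
 *     "azure__openai__gpt_3_5_turbo_16k",
 *     8400,
 *     "It is {current_date}. Use {content} to answer the question: {user_question}",
 *     "You are a helpful assistant."
 * );
 * }</pre>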
 */
public class BoxAIAgentAskLongText extends BoxJSONObject {
    /**
     * Embeddings used by the AI agent.
     */
    private BoxAIAgentEmbeddings embeddings;
    /**
     * The parameters for the LLM endpoint specific to OpenAI / Google models.
     */
    private BoxAIAgentLLMEndpointParams llmEndpointParams;
    /**
     * The model used for the AI Agent for longer text.
     */
    private String model;
    /**
     * The number of tokens for completion.
     */
    private int numTokensForCompletion;
    /**
     * The prompt template contains contextual information of the request and the user prompt.
     * When passing prompt_template parameters, you must include inputs for {user_question} and {content}.
     * Input for {current_date} is optional, depending on the use.
     */
    private String promptTemplate;
    /**
     * System messages try to help the LLM "understand" its role and what it is supposed to do.
     */
    private String systemMessage;

    /**
     * Constructs an AI agent tool for handling longer text with the specified settings.
     * @param embeddings Embeddings used by the AI agent.
     * @param llmEndpointParams The parameters for the LLM endpoint specific to OpenAI / Google models.
     *                          Value can be "google_params" or "openai_params".
     * @param model The model used for the AI Agent for longer text.
     * @param numTokensForCompletion The number of tokens for completion.
     * @param promptTemplate The prompt template containing contextual information of the request and the user prompt.
     *                       When passing prompt_template parameters, you must include inputs for {user_question} and {content}.
     *                       Input for {current_date} is optional, depending on the use.
     * @param systemMessage The system message, which tries to help the LLM "understand" its role and what it is supposed to do.
     */
    public BoxAIAgentAskLongText(BoxAIAgentEmbeddings embeddings,
                                 BoxAIAgentLLMEndpointParams llmEndpointParams,
                                 String model, int numTokensForCompletion,
                                 String promptTemplate,
                                 String systemMessage) {
        this.embeddings = embeddings;
        this.llmEndpointParams = llmEndpointParams;
        this.model = model;
        this.numTokensForCompletion = numTokensForCompletion;
        this.promptTemplate = promptTemplate;
        this.systemMessage = systemMessage;
    }

    /**
     * Constructs an AI agent tool for handling longer text from a JSON object.
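     *
     * <p>Illustrative sketch (the field values below are placeholders, and {@code Json} refers to
     * {@code com.eclipsesource.json.Json}):</p>
     * <pre>{@code
     * JsonObject json = Json.parse(
     *     "{\"model\": \"azure__openai__gpt_3_5_turbo_16k\","
     *     + " \"num_tokens_for_completion\": 8400,"
     *     + " \"system_message\": \"You are a helpful assistant.\"}").asObject();
     * BoxAIAgentAskLongText longTextTool = new BoxAIAgentAskLongText(json);
     * }</pre>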
     * @param jsonObject JSON object representing the AI agent.
     */
    public BoxAIAgentAskLongText(JsonObject jsonObject) {
        super(jsonObject);
    }

    /**
     * Gets the embeddings used by the AI agent.
     * @return The embeddings used by the AI agent.
     */
    public BoxAIAgentEmbeddings getEmbeddings() {
        return embeddings;
    }

    /**
     * Sets the embeddings used by the AI agent.
     * @param embeddings The embeddings used by the AI agent.
     */
    public void setEmbeddings(BoxAIAgentEmbeddings embeddings) {
        this.embeddings = embeddings;
    }

    /**
     * Gets the parameters for the LLM endpoint specific to OpenAI / Google models.
     * @return The parameters for the LLM endpoint specific to OpenAI / Google models.
     */
    public BoxAIAgentLLMEndpointParams getLlmEndpointParams() {
        return llmEndpointParams;
    }

    /**
     * Sets the parameters for the LLM endpoint specific to OpenAI / Google models.
     * @param llmEndpointParams The parameters for the LLM endpoint specific to OpenAI / Google models.
     */
    public void setLlmEndpointParams(BoxAIAgentLLMEndpointParams llmEndpointParams) {
        this.llmEndpointParams = llmEndpointParams;
    }

    /**
     * Gets the model used for the AI Agent for longer text.
     * @return The model used for the AI Agent for longer text.
     */
    public String getModel() {
        return model;
    }

    /**
     * Sets the model used for the AI Agent for longer text.
     * @param model The model used for the AI Agent for longer text.
     */
    public void setModel(String model) {
        this.model = model;
    }

    /**
     * Gets the number of tokens for completion.
     * @return The number of tokens for completion.
     */
    public int getNumTokensForCompletion() {
        return numTokensForCompletion;
    }

    /**
     * Sets the number of tokens for completion.
     * @param numTokensForCompletion The number of tokens for completion.
     */
    public void setNumTokensForCompletion(int numTokensForCompletion) {
        this.numTokensForCompletion = numTokensForCompletion;
    }

    /**
     * Gets the prompt template, which contains contextual information of the request and the user prompt.
     * When passing prompt_template parameters, you must include inputs for {user_question} and {content}.
     * Input for {current_date} is optional, depending on the use.
     * @return The prompt template.
     */
    public String getPromptTemplate() {
        return promptTemplate;
    }

    /**
     * Sets the prompt template, which contains contextual information of the request and the user prompt.
     * When passing prompt_template parameters, you must include inputs for {user_question} and {content}.
     * Input for {current_date} is optional, depending on the use.
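     *
     * <p>Illustrative sketch, assuming an existing instance named {@code agent}; the template wording
     * itself is a placeholder, while the {user_question}, {content}, and optional {current_date}
     * inputs are the ones described above:</p>
     * <pre>{@code
     * agent.setPromptTemplate(
     *     "It is {current_date}. Use {content} to answer the question: {user_question}");
     * }</pre>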
     * @param promptTemplate The prompt template.
     */
    public void setPromptTemplate(String promptTemplate) {
        this.promptTemplate = promptTemplate;
    }

    /**
     * Gets the system message. System messages try to help the LLM "understand" its role and what it is supposed to do.
     * @return The system message.
     */
    public String getSystemMessage() {
        return systemMessage;
    }

    /**
     * Sets the system message. System messages try to help the LLM "understand" its role and what it is supposed to do.
     * @param systemMessage The system message.
     */
    public void setSystemMessage(String systemMessage) {
        this.systemMessage = systemMessage;
    }

    @Override
    void parseJSONMember(JsonObject.Member member) {
        super.parseJSONMember(member);
        String memberName = member.getName();
        try {
            switch (memberName) {
                case "embeddings":
                    this.embeddings = new BoxAIAgentEmbeddings(member.getValue().asObject());
                    break;
                case "llm_endpoint_params":
                    this.llmEndpointParams = BoxAIAgentLLMEndpointParams.parse(member.getValue().asObject());
                    break;
                case "model":
                    this.model = member.getValue().asString();
                    break;
                case "num_tokens_for_completion":
                    this.numTokensForCompletion = member.getValue().asInt();
                    break;
                case "prompt_template":
                    this.promptTemplate = member.getValue().asString();
                    break;
                case "system_message":
                    this.systemMessage = member.getValue().asString();
                    break;
                default:
                    break;
            }
        } catch (Exception e) {
            throw new BoxAPIException("Could not parse JSON response.", e);
        }
    }

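    /**
     * Returns a JSON representation of this AI agent tool, using the same snake_case field names
     * that {@code parseJSONMember} reads (for example {@code num_tokens_for_completion} and
     * {@code prompt_template}).
     * @return The JSON object representing this AI agent tool.
     */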
    public JsonObject getJSONObject() {
        JsonObject jsonObject = new JsonObject();
        // Nested objects may be null when this tool was constructed from partial JSON, so guard
        // against a NullPointerException before serializing them.
        if (this.embeddings != null) {
            JsonUtils.addIfNotNull(jsonObject, "embeddings", this.embeddings.getJSONObject());
        }
        if (this.llmEndpointParams != null) {
            JsonUtils.addIfNotNull(jsonObject, "llm_endpoint_params", this.llmEndpointParams.getJSONObject());
        }
        JsonUtils.addIfNotNull(jsonObject, "model", this.model);
        JsonUtils.addIfNotNull(jsonObject, "num_tokens_for_completion", this.numTokensForCompletion);
        JsonUtils.addIfNotNull(jsonObject, "prompt_template", this.promptTemplate);
        JsonUtils.addIfNotNull(jsonObject, "system_message", this.systemMessage);
        return jsonObject;
    }
}