box / box-java-sdk / #3992

06 Sep 2024, 01:11 PM UTC. Coverage: 71.703% (-0.7%) from 72.441%
Push via GitHub (web-flow): feat: Support AI Agent (#1265)

248 of 452 new or added lines in 14 files covered (54.87%).
3 existing lines in 3 files are now uncovered.
7921 of 11047 relevant lines covered (71.7%).
0.72 hits per line.
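For orientation, the headline figures follow directly from the line counts: 7921 / 11047 ≈ 71.7% of all relevant lines are covered, while only 248 / 452 ≈ 54.9% of the lines added or changed by this commit are. That, together with the 3 existing lines that became uncovered, accounts for the roughly 0.7-point drop from the previous 72.441%.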

Source File

53.06% covered: /src/main/java/com/box/sdk/BoxAIAgentAskBasicText.java
package com.box.sdk;

import com.box.sdk.internal.utils.JsonUtils;
import com.eclipsesource.json.JsonObject;
import com.eclipsesource.json.JsonValue;

/**
 * AI agent tool used to handle basic text.
 */
public class BoxAIAgentAskBasicText extends BoxJSONObject {
    /**
     * The parameters for the LLM endpoint specific to OpenAI / Google models.
     */
    private BoxAIAgentLLMEndpointParams llmEndpointParams;
    /**
     * The model used for the AI Agent for basic text.
     */
    private String model;
    /**
     * The number of tokens for completion.
     */
    private int numTokensForCompletion;
    /**
     * The prompt template contains contextual information of the request and the user prompt.
     * When passing prompt_template parameters, you must include inputs for {user_question} and {content}.
     * Input for {current_date} is optional, depending on the use.
     */
    private String promptTemplate;
    /**
     * System messages try to help the LLM "understand" its role and what it is supposed to do.
     */
    private String systemMessage;

    /**
     * Constructs an AI agent with default settings.
     * @param llmEndpointParams The parameters for the LLM endpoint specific to OpenAI / Google models.
     * @param model The model used for the AI Agent for basic text.
     * @param numTokensForCompletion The number of tokens for completion.
     * @param promptTemplate The prompt template contains contextual information of the request and the user prompt.
     *                       When passing prompt_template parameters, you must include inputs for {user_question} and {content}.
     *                       Input for {current_date} is optional, depending on the use.
     * @param systemMessage System messages try to help the LLM "understand" its role and what it is supposed to do.
     */
    public BoxAIAgentAskBasicText(BoxAIAgentLLMEndpointParams llmEndpointParams, String model,
                                  int numTokensForCompletion, String promptTemplate, String systemMessage) {
        super();
        this.llmEndpointParams = llmEndpointParams;
        this.model = model;
        this.numTokensForCompletion = numTokensForCompletion;
        this.promptTemplate = promptTemplate;
        this.systemMessage = systemMessage;
    }

    /**
     * Constructs an AI agent from its JSON representation.
     * @param jsonObject JSON object representing the AI agent.
     */
    public BoxAIAgentAskBasicText(JsonObject jsonObject) {
        super(jsonObject);
    }

    /**
     * Gets the parameters for the LLM endpoint specific to OpenAI / Google models.
     * @return The parameters for the LLM endpoint specific to OpenAI / Google models.
     */
    public BoxAIAgentLLMEndpointParams getLlmEndpointParams() {
        return llmEndpointParams;
    }

    /**
     * Sets the parameters for the LLM endpoint specific to OpenAI / Google models.
     * @param llmEndpointParams The parameters for the LLM endpoint specific to OpenAI / Google models.
     */
    public void setLlmEndpointParams(BoxAIAgentLLMEndpointParams llmEndpointParams) {
        this.llmEndpointParams = llmEndpointParams;
    }

    /**
     * Gets the model used for the AI Agent for basic text.
     * @return The model used for the AI Agent for basic text.
     */
    public String getModel() {
        return model;
    }

    /**
     * Sets the model used for the AI Agent for basic text.
     * @param model The model used for the AI Agent for basic text.
     */
    public void setModel(String model) {
        this.model = model;
    }

    /**
     * Gets the number of tokens for completion.
     * @return The number of tokens for completion.
     */
    public int getNumTokensForCompletion() {
        return numTokensForCompletion;
    }

    /**
     * Sets the number of tokens for completion.
     * @param numTokensForCompletion The number of tokens for completion.
     */
    public void setNumTokensForCompletion(int numTokensForCompletion) {
        this.numTokensForCompletion = numTokensForCompletion;
    }

    /**
     * Gets the prompt template, which contains contextual information of the request and the user prompt.
     * When passing prompt_template parameters, you must include inputs for {user_question} and {content}.
     * Input for {current_date} is optional, depending on the use.
     * @return The prompt template.
     */
    public String getPromptTemplate() {
        return promptTemplate;
    }

    /**
     * Sets the prompt template, which contains contextual information of the request and the user prompt.
     * When passing prompt_template parameters, you must include inputs for {user_question} and {content}.
     * Input for {current_date} is optional, depending on the use.
     * @param promptTemplate The prompt template.
     */
    public void setPromptTemplate(String promptTemplate) {
        this.promptTemplate = promptTemplate;
    }

    /**
     * Gets the system message, which tries to help the LLM "understand" its role and what it is supposed to do.
     * @return The system message.
     */
    public String getSystemMessage() {
        return systemMessage;
    }

    /**
     * Sets the system message, which tries to help the LLM "understand" its role and what it is supposed to do.
     * @param systemMessage The system message.
     */
    public void setSystemMessage(String systemMessage) {
        this.systemMessage = systemMessage;
    }

    @Override
    void parseJSONMember(JsonObject.Member member) {
        super.parseJSONMember(member);
        String memberName = member.getName();
        JsonValue memberValue = member.getValue();
        try {
            switch (memberName) {
                case "llm_endpoint_params":
                    this.llmEndpointParams = BoxAIAgentLLMEndpointParams.parse(memberValue.asObject());
                    break;
                case "model":
                    this.model = memberValue.asString();
                    break;
                case "num_tokens_for_completion":
                    this.numTokensForCompletion = memberValue.asInt();
                    break;
                case "prompt_template":
                    this.promptTemplate = memberValue.asString();
                    break;
                case "system_message":
                    this.systemMessage = memberValue.asString();
                    break;
                default:
                    break;
            }
        } catch (Exception e) {
            throw new BoxDeserializationException(memberName, memberValue.toString(), e);
        }
    }

    public JsonObject getJSONObject() {
        JsonObject jsonObject = new JsonObject();
        JsonUtils.addIfNotNull(jsonObject, "llm_endpoint_params", this.llmEndpointParams.getJSONObject());
        JsonUtils.addIfNotNull(jsonObject, "model", this.model);
        JsonUtils.addIfNotNull(jsonObject, "num_tokens_for_completion", this.numTokensForCompletion);
        JsonUtils.addIfNotNull(jsonObject, "prompt_template", this.promptTemplate);
        JsonUtils.addIfNotNull(jsonObject, "system_message", this.systemMessage);
        return jsonObject;
    }
}
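For context, a minimal sketch of using this class on its own, based only on the constructors and accessors shown above. The JSON member names match the cases handled in parseJSONMember; the values (model name, prompt text, token count) are illustrative placeholders, not SDK defaults, and llm_endpoint_params is deliberately omitted so no assumptions are made about the shape of BoxAIAgentLLMEndpointParams. getJSONObject() is not called here, since it dereferences llmEndpointParams, which stays null in this sketch.

import com.box.sdk.BoxAIAgentAskBasicText;
import com.eclipsesource.json.JsonObject;

public class BoxAIAgentAskBasicTextSketch {
    public static void main(String[] args) {
        // Illustrative payload built with the minimal-json API the SDK already imports.
        JsonObject json = new JsonObject()
            .add("model", "example-model")
            .add("num_tokens_for_completion", 8192)
            .add("prompt_template", "Answer {user_question} using {content}.")
            .add("system_message", "You are a helpful assistant.");

        // The JsonObject constructor routes each member through parseJSONMember.
        BoxAIAgentAskBasicText basicText = new BoxAIAgentAskBasicText(json);

        System.out.println(basicText.getModel());                  // example-model
        System.out.println(basicText.getNumTokensForCompletion()); // 8192

        // Settings can still be adjusted through the setters afterwards.
        basicText.setSystemMessage("Summarize the attached document.");
    }
}

In practice this object is not used standalone: the commit this report covers ("Support AI Agent") wires it into a larger AI-agent configuration that is eventually serialized via getJSONObject(), at which point llmEndpointParams must be set, because the serializer dereferences it unconditionally.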