box / box-python-sdk-gen / build 13133091889

04 Feb 2025 10:11AM UTC coverage: 92.753% (-0.01%) from 92.766%

Pull Request #466: feat: add hubs support to /ai/ask (box/box-openapi#506)
Merge 8f41ab742 into 9779bd9d2 (via github / web-flow)

20 of 21 new or added lines in 4 files covered. (95.24%)

2 existing lines in 1 file now uncovered.

11557 of 12460 relevant lines covered (92.75%)

0.93 hits per line

Source File: /box_sdk_gen/managers/ai.py (98.32% covered)
from enum import Enum

from typing import Optional

from box_sdk_gen.internal.base_object import BaseObject

from typing import List

from typing import Dict

from box_sdk_gen.serialization.json import serialize

from box_sdk_gen.internal.utils import to_string

from box_sdk_gen.serialization.json import deserialize

from typing import Union

from box_sdk_gen.schemas.ai_item_ask import AiItemAsk

from box_sdk_gen.schemas.ai_dialogue_history import AiDialogueHistory

from box_sdk_gen.networking.fetch_options import ResponseFormat

from box_sdk_gen.schemas.ai_item_base import AiItemBase

from box_sdk_gen.schemas.ai_response_full import AiResponseFull

from box_sdk_gen.schemas.client_error import ClientError

from box_sdk_gen.schemas.ai_ask import AiAsk

from box_sdk_gen.schemas.ai_response import AiResponse

from box_sdk_gen.schemas.ai_text_gen import AiTextGen

from box_sdk_gen.schemas.ai_agent_ask import AiAgentAsk

from box_sdk_gen.schemas.ai_agent_text_gen import AiAgentTextGen

from box_sdk_gen.schemas.ai_agent_extract import AiAgentExtract

from box_sdk_gen.schemas.ai_agent_extract_structured import AiAgentExtractStructured

from box_sdk_gen.schemas.ai_extract import AiExtract

from box_sdk_gen.schemas.ai_extract_structured_response import (
    AiExtractStructuredResponse,
)

from box_sdk_gen.schemas.ai_extract_structured import AiExtractStructured

from box_sdk_gen.box.errors import BoxSDKError

from box_sdk_gen.networking.auth import Authentication

from box_sdk_gen.networking.network import NetworkSession

from box_sdk_gen.networking.fetch_options import FetchOptions

from box_sdk_gen.networking.fetch_response import FetchResponse

from box_sdk_gen.internal.utils import prepare_params

from box_sdk_gen.internal.utils import to_string

from box_sdk_gen.internal.utils import ByteStream

from box_sdk_gen.serialization.json import SerializedData

from box_sdk_gen.serialization.json import sd_to_json

class CreateAiAskMode(str, Enum):
    MULTIPLE_ITEM_QA = 'multiple_item_qa'
    SINGLE_ITEM_QA = 'single_item_qa'


class CreateAiTextGenItemsTypeField(str, Enum):
    FILE = 'file'


class CreateAiTextGenItems(BaseObject):
    _discriminator = 'type', {'file'}

    def __init__(
        self,
        id: str,
        *,
        type: CreateAiTextGenItemsTypeField = CreateAiTextGenItemsTypeField.FILE,
        content: Optional[str] = None,
        **kwargs
    ):
        """
        :param id: The ID of the item.
        :type id: str
        :param type: The type of the item., defaults to CreateAiTextGenItemsTypeField.FILE
        :type type: CreateAiTextGenItemsTypeField, optional
        :param content: The content to use as context for generating new text or editing existing text., defaults to None
        :type content: Optional[str], optional
        """
        super().__init__(**kwargs)
        self.id = id
        self.type = type
        self.content = content


class GetAiAgentDefaultConfigMode(str, Enum):
    ASK = 'ask'
    TEXT_GEN = 'text_gen'
    EXTRACT = 'extract'
    EXTRACT_STRUCTURED = 'extract_structured'


class CreateAiExtractStructuredMetadataTemplateTypeField(str, Enum):
    METADATA_TEMPLATE = 'metadata_template'


class CreateAiExtractStructuredMetadataTemplate(BaseObject):
    _discriminator = 'type', {'metadata_template'}

    def __init__(
        self,
        *,
        template_key: Optional[str] = None,
        type: Optional[CreateAiExtractStructuredMetadataTemplateTypeField] = None,
        scope: Optional[str] = None,
        **kwargs
    ):
        """
        :param template_key: The name of the metadata template., defaults to None
        :type template_key: Optional[str], optional
        :param type: Value is always `metadata_template`., defaults to None
        :type type: Optional[CreateAiExtractStructuredMetadataTemplateTypeField], optional
        :param scope: The scope of the metadata template, which can be either global or enterprise.
            * The **global** scope is used for templates that are available to any Box enterprise.
            * The **enterprise** scope represents templates created within a specific enterprise,
              containing the ID of that enterprise., defaults to None
        :type scope: Optional[str], optional
        """
        super().__init__(**kwargs)
        self.template_key = template_key
        self.type = type
        self.scope = scope


class CreateAiExtractStructuredFieldsOptionsField(BaseObject):
    def __init__(self, key: str, **kwargs):
        """
        :param key: A unique identifier for the field.
        :type key: str
        """
        super().__init__(**kwargs)
        self.key = key


class CreateAiExtractStructuredFields(BaseObject):
    _fields_to_json_mapping: Dict[str, str] = {
        'display_name': 'displayName',
        **BaseObject._fields_to_json_mapping,
    }
    _json_to_fields_mapping: Dict[str, str] = {
        'displayName': 'display_name',
        **BaseObject._json_to_fields_mapping,
    }

    def __init__(
        self,
        key: str,
        *,
        description: Optional[str] = None,
        display_name: Optional[str] = None,
        prompt: Optional[str] = None,
        type: Optional[str] = None,
        options: Optional[List[CreateAiExtractStructuredFieldsOptionsField]] = None,
        **kwargs
    ):
        """
        :param key: A unique identifier for the field.
        :type key: str
        :param description: A description of the field., defaults to None
        :type description: Optional[str], optional
        :param display_name: The display name of the field., defaults to None
        :type display_name: Optional[str], optional
        :param prompt: The context about the key that may include how to find and format it., defaults to None
        :type prompt: Optional[str], optional
        :param type: The type of the field. It includes but is not limited to string, float, date, enum, and multiSelect., defaults to None
        :type type: Optional[str], optional
        :param options: A list of options for this field. This is most often used in combination with the enum and multiSelect field types., defaults to None
        :type options: Optional[List[CreateAiExtractStructuredFieldsOptionsField]], optional
        """
        super().__init__(**kwargs)
        self.key = key
        self.description = description
        self.display_name = display_name
        self.prompt = prompt
        self.type = type
        self.options = options


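# Illustration only, not part of the generated module: a structured-extraction
# field with enum options could be built from the classes above roughly like this
# (the key and option values shown here are hypothetical):
#
#     hosting_provider_field = CreateAiExtractStructuredFields(
#         key='hosting_provider',
#         type='enum',
#         display_name='Hosting provider',
#         options=[
#             CreateAiExtractStructuredFieldsOptionsField(key='AWS'),
#             CreateAiExtractStructuredFieldsOptionsField(key='Azure'),
#         ],
#     )

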
class AiManager:
    def __init__(
        self,
        *,
        auth: Optional[Authentication] = None,
        network_session: NetworkSession = None
    ):
        if network_session is None:
            network_session = NetworkSession()
        self.auth = auth
        self.network_session = network_session

    def create_ai_ask(
        self,
        mode: CreateAiAskMode,
        prompt: str,
        items: List[AiItemAsk],
        *,
        dialogue_history: Optional[List[AiDialogueHistory]] = None,
        include_citations: Optional[bool] = None,
        ai_agent: Optional[AiAgentAsk] = None,
        extra_headers: Optional[Dict[str, Optional[str]]] = None
    ) -> Optional[AiResponseFull]:
        """
        Sends an AI request to supported LLMs and returns an answer specifically focused on the user's question given the provided context.
        :param mode: The mode specifies if this request is for a single or multiple items. If you select `single_item_qa`, the `items` array can have one element only. Selecting `multiple_item_qa` allows you to provide up to 25 items.
        :type mode: CreateAiAskMode
        :param prompt: The prompt provided by the client to be answered by the LLM. The prompt's length is limited to 10000 characters.
        :type prompt: str
        :param items: The items to be processed by the LLM, often files.

            **Note**: Box AI handles documents with text representations up to 1MB in size, or a maximum of 25 files, whichever comes first.
            If the file size exceeds 1MB, the first 1MB of text representation will be processed.
            If you set the `mode` parameter to `single_item_qa`, the `items` array can have one element only.
        :type items: List[AiItemAsk]
        :param dialogue_history: The history of prompts and answers previously passed to the LLM. This provides additional context to the LLM in generating the response., defaults to None
        :type dialogue_history: Optional[List[AiDialogueHistory]], optional
        :param include_citations: A flag to indicate whether citations should be returned., defaults to None
        :type include_citations: Optional[bool], optional
        :param extra_headers: Extra headers that will be included in the HTTP request., defaults to None
        :type extra_headers: Optional[Dict[str, Optional[str]]], optional
        """
        if extra_headers is None:
            extra_headers = {}
        request_body: Dict = {
            'mode': mode,
            'prompt': prompt,
            'items': items,
            'dialogue_history': dialogue_history,
            'include_citations': include_citations,
            'ai_agent': ai_agent,
        }
        headers_map: Dict[str, str] = prepare_params({**extra_headers})
        response: FetchResponse = self.network_session.network_client.fetch(
            FetchOptions(
                url=''.join([self.network_session.base_urls.base_url, '/2.0/ai/ask']),
                method='POST',
                headers=headers_map,
                data=serialize(request_body),
                content_type='application/json',
                response_format=ResponseFormat.JSON,
                auth=self.auth,
                network_session=self.network_session,
            )
        )
        if to_string(response.status) == '204':
            return None
        return deserialize(response.data, AiResponseFull)
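
    # Illustration only, not part of the generated module: given an AiManager
    # instance with configured auth (here called `ai_manager`, a hypothetical name),
    # a single-item question might be sent roughly like this:
    #
    #     answer = ai_manager.create_ai_ask(
    #         mode=CreateAiAskMode.SINGLE_ITEM_QA,
    #         prompt='Summarize this document in two sentences.',
    #         items=[AiItemAsk(id='1234567890')],
    #     )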

    def create_ai_text_gen(
        self,
        prompt: str,
        items: List[CreateAiTextGenItems],
        *,
        dialogue_history: Optional[List[AiDialogueHistory]] = None,
        ai_agent: Optional[AiAgentTextGen] = None,
        extra_headers: Optional[Dict[str, Optional[str]]] = None
    ) -> AiResponse:
        """
        Sends an AI request to supported Large Language Models (LLMs) and returns generated text based on the provided prompt.
        :param prompt: The prompt provided by the client to be answered by the LLM. The prompt's length is limited to 10000 characters.
        :type prompt: str
        :param items: The items to be processed by the LLM, often files.
            The array can include **exactly one** element.

            **Note**: Box AI handles documents with text representations up to 1MB in size.
            If the file size exceeds 1MB, the first 1MB of text representation will be processed.
        :type items: List[CreateAiTextGenItems]
        :param dialogue_history: The history of prompts and answers previously passed to the LLM. This parameter provides the additional context to the LLM when generating the response., defaults to None
        :type dialogue_history: Optional[List[AiDialogueHistory]], optional
        :param extra_headers: Extra headers that will be included in the HTTP request., defaults to None
        :type extra_headers: Optional[Dict[str, Optional[str]]], optional
        """
        if extra_headers is None:
            extra_headers = {}
        request_body: Dict = {
            'prompt': prompt,
            'items': items,
            'dialogue_history': dialogue_history,
            'ai_agent': ai_agent,
        }
        headers_map: Dict[str, str] = prepare_params({**extra_headers})
        response: FetchResponse = self.network_session.network_client.fetch(
            FetchOptions(
                url=''.join(
                    [self.network_session.base_urls.base_url, '/2.0/ai/text_gen']
                ),
                method='POST',
                headers=headers_map,
                data=serialize(request_body),
                content_type='application/json',
                response_format=ResponseFormat.JSON,
                auth=self.auth,
                network_session=self.network_session,
            )
        )
        return deserialize(response.data, AiResponse)

    def get_ai_agent_default_config(
        self,
        mode: GetAiAgentDefaultConfigMode,
        *,
        language: Optional[str] = None,
        model: Optional[str] = None,
        extra_headers: Optional[Dict[str, Optional[str]]] = None
    ) -> Union[AiAgentAsk, AiAgentTextGen, AiAgentExtract, AiAgentExtractStructured]:
        """
        Get the AI agent default config.
        :param mode: The mode to filter the agent config to return.
        :type mode: GetAiAgentDefaultConfigMode
        :param language: The ISO language code to return the agent config for.
            If the language is not supported, the default agent config is returned., defaults to None
        :type language: Optional[str], optional
        :param model: The model to return the default agent config for., defaults to None
        :type model: Optional[str], optional
        :param extra_headers: Extra headers that will be included in the HTTP request., defaults to None
        :type extra_headers: Optional[Dict[str, Optional[str]]], optional
        """
        if extra_headers is None:
            extra_headers = {}
        query_params_map: Dict[str, str] = prepare_params(
            {
                'mode': to_string(mode),
                'language': to_string(language),
                'model': to_string(model),
            }
        )
        headers_map: Dict[str, str] = prepare_params({**extra_headers})
        response: FetchResponse = self.network_session.network_client.fetch(
            FetchOptions(
                url=''.join(
                    [self.network_session.base_urls.base_url, '/2.0/ai_agent_default']
                ),
                method='GET',
                params=query_params_map,
                headers=headers_map,
                response_format=ResponseFormat.JSON,
                auth=self.auth,
                network_session=self.network_session,
            )
        )
        return deserialize(
            response.data,
            Union[AiAgentAsk, AiAgentTextGen, AiAgentExtract, AiAgentExtractStructured],
        )
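
    # Illustration only: the default-config call above returns one of four agent
    # config types, so callers typically narrow the result before using it, e.g.:
    #
    #     config = ai_manager.get_ai_agent_default_config(
    #         mode=GetAiAgentDefaultConfigMode.ASK, language='en'
    #     )
    #     if isinstance(config, AiAgentAsk):
    #         ...  # adjust ask-specific settings before passing it as `ai_agent`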

    def create_ai_extract(
        self,
        prompt: str,
        items: List[AiItemBase],
        *,
        ai_agent: Optional[AiAgentExtract] = None,
        extra_headers: Optional[Dict[str, Optional[str]]] = None
    ) -> AiResponse:
        """
        Sends an AI request to supported Large Language Models (LLMs) and extracts metadata in the form of key-value pairs.

        In this request, both the prompt and the output can be freeform.

        Metadata template setup before sending the request is not required.

        :param prompt: The prompt provided to a Large Language Model (LLM) in the request. The prompt can be up to 10000 characters long and it can be an XML or a JSON schema.
        :type prompt: str
        :param items: The items that the LLM will process. Currently, you can use files only.
        :type items: List[AiItemBase]
        :param extra_headers: Extra headers that will be included in the HTTP request., defaults to None
        :type extra_headers: Optional[Dict[str, Optional[str]]], optional
        """
        if extra_headers is None:
            extra_headers = {}
        request_body: Dict = {'prompt': prompt, 'items': items, 'ai_agent': ai_agent}
        headers_map: Dict[str, str] = prepare_params({**extra_headers})
        response: FetchResponse = self.network_session.network_client.fetch(
            FetchOptions(
                url=''.join(
                    [self.network_session.base_urls.base_url, '/2.0/ai/extract']
                ),
                method='POST',
                headers=headers_map,
                data=serialize(request_body),
                content_type='application/json',
                response_format=ResponseFormat.JSON,
                auth=self.auth,
                network_session=self.network_session,
            )
        )
        return deserialize(response.data, AiResponse)

    def create_ai_extract_structured(
        self,
        items: List[AiItemBase],
        *,
        metadata_template: Optional[CreateAiExtractStructuredMetadataTemplate] = None,
        fields: Optional[List[CreateAiExtractStructuredFields]] = None,
        ai_agent: Optional[AiAgentExtractStructured] = None,
        extra_headers: Optional[Dict[str, Optional[str]]] = None
    ) -> AiExtractStructuredResponse:
        """
        Sends an AI request to supported Large Language Models (LLMs) and returns extracted metadata as a set of key-value pairs.

        For this request, you need either a metadata template or a list of fields you want to extract.

        Input is **either** a metadata template or a list of fields to ensure the structure.

        To learn more about creating templates, see [Creating metadata templates in the Admin Console](https://support.box.com/hc/en-us/articles/360044194033-Customizing-Metadata-Templates)
        or use the [metadata template API](g://metadata/templates/create).

        :param items: The items to be processed by the LLM. Currently you can use files only.
        :type items: List[AiItemBase]
        :param metadata_template: The metadata template containing the fields to extract.
            For your request to work, you must provide either `metadata_template` or `fields`, but not both., defaults to None
        :type metadata_template: Optional[CreateAiExtractStructuredMetadataTemplate], optional
        :param fields: The fields to be extracted from the provided items.
            For your request to work, you must provide either `metadata_template` or `fields`, but not both., defaults to None
        :type fields: Optional[List[CreateAiExtractStructuredFields]], optional
        :param extra_headers: Extra headers that will be included in the HTTP request., defaults to None
        :type extra_headers: Optional[Dict[str, Optional[str]]], optional
        """
        if extra_headers is None:
            extra_headers = {}
        request_body: Dict = {
            'items': items,
            'metadata_template': metadata_template,
            'fields': fields,
            'ai_agent': ai_agent,
        }
        headers_map: Dict[str, str] = prepare_params({**extra_headers})
        response: FetchResponse = self.network_session.network_client.fetch(
            FetchOptions(
                url=''.join(
                    [
                        self.network_session.base_urls.base_url,
                        '/2.0/ai/extract_structured',
                    ]
                ),
                method='POST',
                headers=headers_map,
                data=serialize(request_body),
                content_type='application/json',
                response_format=ResponseFormat.JSON,
                auth=self.auth,
                network_session=self.network_session,
            )
        )
        return deserialize(response.data, AiExtractStructuredResponse)
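
For context on how the covered code is exercised, a minimal usage sketch follows. It assumes a BoxClient configured with developer-token auth that exposes this manager as `client.ai`; the token and file ID are placeholders, and the exact client wiring may differ from what is shown here.

from box_sdk_gen import BoxClient, BoxDeveloperTokenAuth
from box_sdk_gen.managers.ai import CreateAiAskMode
from box_sdk_gen.schemas.ai_item_ask import AiItemAsk

# Hypothetical credentials and file ID; replace with real values.
auth = BoxDeveloperTokenAuth(token='DEVELOPER_TOKEN')
client = BoxClient(auth=auth)

# Ask a question about a single file (single_item_qa allows exactly one item).
answer = client.ai.create_ai_ask(
    mode=CreateAiAskMode.SINGLE_ITEM_QA,
    prompt='What are the key risks called out in this contract?',
    items=[AiItemAsk(id='1234567890')],
    include_citations=True,
)

# create_ai_ask returns None when the service responds with 204 (no content).
if answer is not None:
    print(answer.answer)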