
Commit 29f0364

feat: Add hubs support to /ai/ask (box/box-openapi#506) (#466)
1 parent 9779bd9 commit 29f0364

7 files changed: +74 -27 lines changed

.codegen.json

Lines changed: 1 addition & 1 deletion
@@ -1 +1 @@
-{ "engineHash": "7874ac3", "specHash": "1fdcbef", "version": "1.10.0" }
+{ "engineHash": "7874ac3", "specHash": "764e12c", "version": "1.10.0" }

box_sdk_gen/managers/ai.py

Lines changed: 10 additions & 6 deletions
@@ -10,18 +10,20 @@
 
 from box_sdk_gen.serialization.json import serialize
 
-from box_sdk_gen.serialization.json import deserialize
-
 from box_sdk_gen.internal.utils import to_string
 
+from box_sdk_gen.serialization.json import deserialize
+
 from typing import Union
 
-from box_sdk_gen.schemas.ai_item_base import AiItemBase
+from box_sdk_gen.schemas.ai_item_ask import AiItemAsk
 
 from box_sdk_gen.schemas.ai_dialogue_history import AiDialogueHistory
 
 from box_sdk_gen.networking.fetch_options import ResponseFormat
 
+from box_sdk_gen.schemas.ai_item_base import AiItemBase
+
 from box_sdk_gen.schemas.ai_response_full import AiResponseFull
 
 from box_sdk_gen.schemas.client_error import ClientError
@@ -214,13 +216,13 @@ def create_ai_ask(
         self,
         mode: CreateAiAskMode,
         prompt: str,
-        items: List[AiItemBase],
+        items: List[AiItemAsk],
         *,
         dialogue_history: Optional[List[AiDialogueHistory]] = None,
         include_citations: Optional[bool] = None,
         ai_agent: Optional[AiAgentAsk] = None,
         extra_headers: Optional[Dict[str, Optional[str]]] = None
-    ) -> AiResponseFull:
+    ) -> Optional[AiResponseFull]:
         """
         Sends an AI request to supported LLMs and returns an answer specifically focused on the user's question given the provided context.
         :param mode: The mode specifies if this request is for a single or multiple items. If you select `single_item_qa` the `items` array can have one element only. Selecting `multiple_item_qa` allows you to provide up to 25 items.
@@ -232,7 +234,7 @@ def create_ai_ask(
         **Note**: Box AI handles documents with text representations up to 1MB in size, or a maximum of 25 files, whichever comes first.
         If the file size exceeds 1MB, the first 1MB of text representation will be processed.
         If you set `mode` parameter to `single_item_qa`, the `items` array can have one element only.
-        :type items: List[AiItemBase]
+        :type items: List[AiItemAsk]
         :param dialogue_history: The history of prompts and answers previously passed to the LLM. This provides additional context to the LLM in generating the response., defaults to None
         :type dialogue_history: Optional[List[AiDialogueHistory]], optional
         :param include_citations: A flag to indicate whether citations should be returned., defaults to None
@@ -263,6 +265,8 @@ def create_ai_ask(
                 network_session=self.network_session,
             )
         )
+        if to_string(response.status) == '204':
+            return None
         return deserialize(response.data, AiResponseFull)
 
     def create_ai_text_gen(
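With this change `create_ai_ask` returns `Optional[AiResponseFull]`: when the API answers with HTTP 204 (no content available, for example an unindexed hub), the method now returns `None` instead of trying to deserialize an empty body. A minimal usage sketch, where the developer token and the file ID `'12345'` are placeholders and the result's `answer` field is read as defined on `AiResponse`:

from typing import Optional

from box_sdk_gen import BoxClient, BoxDeveloperTokenAuth
from box_sdk_gen.managers.ai import CreateAiAskMode
from box_sdk_gen.schemas.ai_item_ask import AiItemAsk, AiItemAskTypeField
from box_sdk_gen.schemas.ai_response_full import AiResponseFull

# Placeholder credentials and file ID, for illustration only.
auth = BoxDeveloperTokenAuth(token='DEVELOPER_TOKEN')
client = BoxClient(auth=auth)

response: Optional[AiResponseFull] = client.ai.create_ai_ask(
    CreateAiAskMode.SINGLE_ITEM_QA,
    'Summarize this document in one sentence',
    [AiItemAsk(id='12345', type=AiItemAskTypeField.FILE)],
)
if response is None:
    # HTTP 204: the API had no content with which to answer the question.
    print('No answer available')
else:
    print(response.answer)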

box_sdk_gen/schemas/__init__.py

Lines changed: 2 additions & 0 deletions
@@ -14,6 +14,8 @@
 
 from box_sdk_gen.schemas.ai_item_base import *
 
+from box_sdk_gen.schemas.ai_item_ask import *
+
 from box_sdk_gen.schemas.ai_llm_endpoint_params_aws import *
 
 from box_sdk_gen.schemas.ai_llm_endpoint_params_google import *

box_sdk_gen/schemas/ai_ask.py

Lines changed: 3 additions & 3 deletions
@@ -6,7 +6,7 @@
 
 from box_sdk_gen.internal.base_object import BaseObject
 
-from box_sdk_gen.schemas.ai_item_base import AiItemBase
+from box_sdk_gen.schemas.ai_item_ask import AiItemAsk
 
 from box_sdk_gen.schemas.ai_dialogue_history import AiDialogueHistory
 
@@ -25,7 +25,7 @@ def __init__(
         self,
         mode: AiAskModeField,
         prompt: str,
-        items: List[AiItemBase],
+        items: List[AiItemAsk],
         *,
         dialogue_history: Optional[List[AiDialogueHistory]] = None,
         include_citations: Optional[bool] = None,
@@ -42,7 +42,7 @@ def __init__(
         **Note**: Box AI handles documents with text representations up to 1MB in size, or a maximum of 25 files, whichever comes first.
         If the file size exceeds 1MB, the first 1MB of text representation will be processed.
         If you set `mode` parameter to `single_item_qa`, the `items` array can have one element only.
-        :type items: List[AiItemBase]
+        :type items: List[AiItemAsk]
         :param dialogue_history: The history of prompts and answers previously passed to the LLM. This provides additional context to the LLM in generating the response., defaults to None
         :type dialogue_history: Optional[List[AiDialogueHistory]], optional
        :param include_citations: A flag to indicate whether citations should be returned., defaults to None

box_sdk_gen/schemas/ai_item_ask.py

Lines changed: 37 additions & 0 deletions
@@ -0,0 +1,37 @@
+from enum import Enum
+
+from typing import Optional
+
+from box_sdk_gen.internal.base_object import BaseObject
+
+from box_sdk_gen.box.errors import BoxSDKError
+
+
+class AiItemAskTypeField(str, Enum):
+    FILE = 'file'
+    HUBS = 'hubs'
+
+
+class AiItemAsk(BaseObject):
+    _discriminator = 'type', {'file', 'hubs'}
+
+    def __init__(
+        self,
+        id: str,
+        type: AiItemAskTypeField,
+        *,
+        content: Optional[str] = None,
+        **kwargs
+    ):
+        """
+        :param id: The ID of the file.
+        :type id: str
+        :param type: The type of the item. A `hubs` item must be used as a single item.
+        :type type: AiItemAskTypeField
+        :param content: The content of the item, often the text representation., defaults to None
+        :type content: Optional[str], optional
+        """
+        super().__init__(**kwargs)
+        self.id = id
+        self.type = type
+        self.content = content
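The new `AiItemAsk` schema is what carries the hubs support: its `type` discriminator accepts `hubs` in addition to `file`, and per the docstring a `hubs` item must be used as a single item. A short sketch of asking about a hub, where the hub ID `'987654'` is a placeholder and `client` is assumed to be an authenticated `BoxClient` as in the earlier sketch:

from box_sdk_gen.managers.ai import CreateAiAskMode
from box_sdk_gen.schemas.ai_item_ask import AiItemAsk, AiItemAskTypeField

# A `hubs` item must be the only element in `items`, so pair it with single_item_qa mode.
hub_item = AiItemAsk(id='987654', type=AiItemAskTypeField.HUBS)
response = client.ai.create_ai_ask(
    CreateAiAskMode.SINGLE_ITEM_QA,
    'What topics does this hub cover?',
    [hub_item],
)
# `response` is None (HTTP 204) when the hub's content has not been indexed yet.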

docs/ai.md

Lines changed: 7 additions & 7 deletions
@@ -22,14 +22,14 @@ client.ai.create_ai_ask(
     CreateAiAskMode.MULTIPLE_ITEM_QA,
     "Which direction sun rises?",
     [
-        AiItemBase(
+        AiItemAsk(
             id=file_to_ask_1.id,
-            type=AiItemBaseTypeField.FILE,
+            type=AiItemAskTypeField.FILE,
             content="Earth goes around the sun",
         ),
-        AiItemBase(
+        AiItemAsk(
             id=file_to_ask_2.id,
-            type=AiItemBaseTypeField.FILE,
+            type=AiItemAskTypeField.FILE,
             content="Sun rises in the East in the morning",
         ),
     ],
@@ -42,7 +42,7 @@ client.ai.create_ai_ask(
   - The mode specifies if this request is for a single or multiple items. If you select `single_item_qa` the `items` array can have one element only. Selecting `multiple_item_qa` allows you to provide up to 25 items.
 - prompt `str`
   - The prompt provided by the client to be answered by the LLM. The prompt's length is limited to 10000 characters.
-- items `List[AiItemBase]`
+- items `List[AiItemAsk]`
   - The items to be processed by the LLM, often files. **Note**: Box AI handles documents with text representations up to 1MB in size, or a maximum of 25 files, whichever comes first. If the file size exceeds 1MB, the first 1MB of text representation will be processed. If you set `mode` parameter to `single_item_qa`, the `items` array can have one element only.
 - dialogue_history `Optional[List[AiDialogueHistory]]`
   - The history of prompts and answers previously passed to the LLM. This provides additional context to the LLM in generating the response.
@@ -55,9 +55,9 @@ client.ai.create_ai_ask(
 
 ### Returns
 
-This function returns a value of type `AiResponseFull`.
+This function returns a value of type `Optional[AiResponseFull]`.
 
-A successful response including the answer from the LLM.
+A successful response including the answer from the LLM. No content is available to answer the question. This is returned when the request item is a hub, but content in the hubs is not indexed. To ensure content in the hub is indexed, make sure Box AI for Hubs in the Admin Console was enabled before hub creation.
 
 ## Generate text
 
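The parameter list above also documents `dialogue_history`, which lets a caller pass earlier prompt/answer pairs as extra context for a follow-up question. A sketch of that pattern, assuming `client` and `file_to_ask_1` exist as in the example above and that `AiDialogueHistory` exposes `prompt` and `answer` fields:

from box_sdk_gen.managers.ai import CreateAiAskMode
from box_sdk_gen.schemas.ai_dialogue_history import AiDialogueHistory
from box_sdk_gen.schemas.ai_item_ask import AiItemAsk, AiItemAskTypeField

# Pass the previous exchange so the LLM can resolve "it" in the follow-up prompt.
follow_up = client.ai.create_ai_ask(
    CreateAiAskMode.SINGLE_ITEM_QA,
    'Does it rise in the East every day?',
    [AiItemAsk(id=file_to_ask_1.id, type=AiItemAskTypeField.FILE)],
    dialogue_history=[
        AiDialogueHistory(
            prompt='Which direction sun rises?',
            answer='The sun rises in the East.',
        )
    ],
)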

test/ai.py

Lines changed: 14 additions & 10 deletions
@@ -1,3 +1,5 @@
+from typing import Optional
+
 from typing import Union
 
 from box_sdk_gen.internal.utils import to_string
@@ -12,9 +14,9 @@
 
 from box_sdk_gen.managers.ai import CreateAiAskMode
 
-from box_sdk_gen.schemas.ai_item_base import AiItemBase
+from box_sdk_gen.schemas.ai_item_ask import AiItemAsk
 
-from box_sdk_gen.schemas.ai_item_base import AiItemBaseTypeField
+from box_sdk_gen.schemas.ai_item_ask import AiItemAskTypeField
 
 from box_sdk_gen.schemas.ai_response import AiResponse
 
@@ -30,6 +32,8 @@
 
 from box_sdk_gen.managers.uploads import UploadFileAttributesParentField
 
+from box_sdk_gen.schemas.ai_item_base import AiItemBase
+
 from box_sdk_gen.schemas.ai_extract_structured_response import (
     AiExtractStructuredResponse,
 )
@@ -88,13 +92,13 @@ def testAskAISingleItem():
         GetAiAgentDefaultConfigMode.ASK, language='en-US'
     )
     file_to_ask: FileFull = upload_new_file()
-    response: AiResponseFull = client.ai.create_ai_ask(
+    response: Optional[AiResponseFull] = client.ai.create_ai_ask(
         CreateAiAskMode.SINGLE_ITEM_QA,
         'which direction sun rises',
         [
-            AiItemBase(
+            AiItemAsk(
                 id=file_to_ask.id,
-                type=AiItemBaseTypeField.FILE,
+                type=AiItemAskTypeField.FILE,
                 content='Sun rises in the East',
             )
         ],
@@ -108,18 +112,18 @@ def testAskAISingleItem():
 def testAskAIMultipleItems():
     file_to_ask_1: FileFull = upload_new_file()
     file_to_ask_2: FileFull = upload_new_file()
-    response: AiResponseFull = client.ai.create_ai_ask(
+    response: Optional[AiResponseFull] = client.ai.create_ai_ask(
         CreateAiAskMode.MULTIPLE_ITEM_QA,
         'Which direction sun rises?',
         [
-            AiItemBase(
+            AiItemAsk(
                 id=file_to_ask_1.id,
-                type=AiItemBaseTypeField.FILE,
+                type=AiItemAskTypeField.FILE,
                 content='Earth goes around the sun',
             ),
-            AiItemBase(
+            AiItemAsk(
                 id=file_to_ask_2.id,
-                type=AiItemBaseTypeField.FILE,
+                type=AiItemAskTypeField.FILE,
                 content='Sun rises in the East in the morning',
             ),
         ],
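The updated tests still exercise file items only. A hypothetical test for the new hub path might look like the sketch below, where `HUB_ID` is an assumed environment variable pointing at a pre-created hub (the suite does not create hubs itself):

import os

def testAskAIHubItem():
    # Hypothetical fixture: HUB_ID must reference an existing hub in the test enterprise.
    hub_id: str = os.environ['HUB_ID']
    response: Optional[AiResponseFull] = client.ai.create_ai_ask(
        CreateAiAskMode.SINGLE_ITEM_QA,
        'What is this hub about?',
        [AiItemAsk(id=hub_id, type=AiItemAskTypeField.HUBS)],
    )
    # None is a valid outcome: the API returns 204 when the hub content is not indexed.
    assert response is None or response.answer != ''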
