feat: Add new parameters to Box AI methods and introduce `AiResponseFull` variant (box/box-openapi#446) (#277)
box-sdk-build authored Aug 20, 2024
1 parent b10fc93 commit 1267a21
Showing 13 changed files with 167 additions and 77 deletions.
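Taken together, these changes let create_ai_ask carry multi-turn context and return citations. A minimal usage sketch follows; the client setup and the CreateAiAskMode/BoxDeveloperTokenAuth names are assumptions based on the SDK's generated naming conventions, not part of this diff.

# Sketch of the updated create_ai_ask surface; names outside this diff
# (BoxClient, BoxDeveloperTokenAuth, CreateAiAskMode) are assumptions.
from box_sdk_gen import BoxClient, BoxDeveloperTokenAuth
from box_sdk_gen.managers.ai import CreateAiAskMode, CreateAiAskItems
from box_sdk_gen.schemas.ai_dialogue_history import AiDialogueHistory

auth = BoxDeveloperTokenAuth(token='DEVELOPER_TOKEN')  # placeholder token
client = BoxClient(auth=auth)

# New in this commit: dialogue_history and include_citations; the method
# now returns AiResponseFull instead of AiResponse.
response = client.ai.create_ai_ask(
    mode=CreateAiAskMode.SINGLE_ITEM_QA,
    prompt='What changed since the last revision?',
    items=[CreateAiAskItems(id='1234567890')],  # hypothetical file ID
    dialogue_history=[
        AiDialogueHistory(
            prompt='Summarize this document.',
            answer='The document outlines the Q3 release plan.',
        )
    ],
    include_citations=True,
)
print(response.answer)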
2 changes: 1 addition & 1 deletion .codegen.json
@@ -1 +1 @@
{ "engineHash": "98d8b60", "specHash": "9919482", "version": "1.3.0" }
{ "engineHash": "4ca165c", "specHash": "8d1ca31", "version": "1.2.0" }
45 changes: 16 additions & 29 deletions box_sdk_gen/managers/ai.py
@@ -16,14 +16,16 @@

from typing import Union

-from box_sdk_gen.internal.utils import DateTime
+from box_sdk_gen.schemas.ai_dialogue_history import AiDialogueHistory

-from box_sdk_gen.schemas.ai_response import AiResponse
+from box_sdk_gen.schemas.ai_response_full import AiResponseFull

from box_sdk_gen.schemas.client_error import ClientError

from box_sdk_gen.schemas.ai_ask import AiAsk

+from box_sdk_gen.schemas.ai_response import AiResponse

from box_sdk_gen.schemas.ai_text_gen import AiTextGen

from box_sdk_gen.schemas.ai_agent_ask import AiAgentAsk
@@ -114,29 +116,6 @@ def __init__(
self.content = content


-class CreateAiTextGenDialogueHistory(BaseObject):
-    def __init__(
-        self,
-        *,
-        prompt: Optional[str] = None,
-        answer: Optional[str] = None,
-        created_at: Optional[DateTime] = None,
-        **kwargs
-    ):
-        """
-        :param prompt: The prompt previously provided by the client and answered by the LLM., defaults to None
-        :type prompt: Optional[str], optional
-        :param answer: The answer previously provided by the LLM., defaults to None
-        :type answer: Optional[str], optional
-        :param created_at: The ISO date formatted timestamp of when the previous answer to the prompt was created., defaults to None
-        :type created_at: Optional[DateTime], optional
-        """
-        super().__init__(**kwargs)
-        self.prompt = prompt
-        self.answer = answer
-        self.created_at = created_at


class GetAiAgentDefaultConfigMode(str, Enum):
ASK = 'ask'
TEXT_GEN = 'text_gen'
@@ -160,9 +139,11 @@ def create_ai_ask(
prompt: str,
items: List[CreateAiAskItems],
*,
+        dialogue_history: Optional[List[AiDialogueHistory]] = None,
+        include_citations: Optional[bool] = None,
ai_agent: Optional[AiAgentAsk] = None,
extra_headers: Optional[Dict[str, Optional[str]]] = None
-    ) -> AiResponse:
+    ) -> AiResponseFull:
"""
Sends an AI request to supported LLMs and returns an answer specifically focused on the user's question given the provided context.
:param mode: The mode specifies if this request is for a single or multiple items. If you select `single_item_qa` the `items` array can have one element only. Selecting `multiple_item_qa` allows you to provide up to 25 items.
@@ -175,6 +156,10 @@
If the file size exceeds 1MB, the first 1MB of text representation will be processed.
If you set `mode` parameter to `single_item_qa`, the `items` array can have one element only.
:type items: List[CreateAiAskItems]
+        :param dialogue_history: The history of prompts and answers previously passed to the LLM. This provides additional context to the LLM in generating the response., defaults to None
+        :type dialogue_history: Optional[List[AiDialogueHistory]], optional
+        :param include_citations: A flag to indicate whether citations should be returned., defaults to None
+        :type include_citations: Optional[bool], optional
:param extra_headers: Extra headers that will be included in the HTTP request., defaults to None
:type extra_headers: Optional[Dict[str, Optional[str]]], optional
"""
@@ -184,6 +169,8 @@
'mode': mode,
'prompt': prompt,
'items': items,
+            'dialogue_history': dialogue_history,
+            'include_citations': include_citations,
'ai_agent': ai_agent,
}
headers_map: Dict[str, str] = prepare_params({**extra_headers})
@@ -199,14 +186,14 @@
network_session=self.network_session,
),
)
-        return deserialize(response.data, AiResponse)
+        return deserialize(response.data, AiResponseFull)

def create_ai_text_gen(
self,
prompt: str,
items: List[CreateAiTextGenItems],
*,
-        dialogue_history: Optional[List[CreateAiTextGenDialogueHistory]] = None,
+        dialogue_history: Optional[List[AiDialogueHistory]] = None,
ai_agent: Optional[AiAgentTextGen] = None,
extra_headers: Optional[Dict[str, Optional[str]]] = None
) -> AiResponse:
Expand All @@ -221,7 +208,7 @@ def create_ai_text_gen(
If the file size exceeds 1MB, the first 1MB of text representation will be processed.
:type items: List[CreateAiTextGenItems]
:param dialogue_history: The history of prompts and answers previously passed to the LLM. This provides additional context to the LLM in generating the response., defaults to None
-        :type dialogue_history: Optional[List[CreateAiTextGenDialogueHistory]], optional
+        :type dialogue_history: Optional[List[AiDialogueHistory]], optional
:param extra_headers: Extra headers that will be included in the HTTP request., defaults to None
:type extra_headers: Optional[Dict[str, Optional[str]]], optional
"""
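With the manager-local dialogue-history class removed (see the ai_text_gen.py diff below), both AI endpoints now share one schema. A sketch of the text-gen side, reusing the client from the sketch above; the CreateAiTextGenItems type default is assumed:

from box_sdk_gen.managers.ai import CreateAiTextGenItems
from box_sdk_gen.schemas.ai_dialogue_history import AiDialogueHistory

# dialogue_history is now typed as List[AiDialogueHistory], the same schema
# create_ai_ask uses; this method still returns the plain AiResponse.
result = client.ai.create_ai_text_gen(
    prompt='Draft a closing paragraph in the same tone.',
    items=[CreateAiTextGenItems(id='1234567890')],  # hypothetical file ID
    dialogue_history=[
        AiDialogueHistory(
            prompt='Draft an opening paragraph.',
            answer='Our Q3 plan focuses on reliability.',
        )
    ],
)
print(result.answer)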
10 changes: 8 additions & 2 deletions box_sdk_gen/schemas/__init__.py
@@ -340,12 +340,18 @@

from box_sdk_gen.schemas.ai_agent_text_gen import *

-from box_sdk_gen.schemas.ai_text_gen import *

from box_sdk_gen.schemas.ai_agent_basic_text_tool_ask import *

from box_sdk_gen.schemas.ai_agent_ask import *

+from box_sdk_gen.schemas.ai_citation import *

+from box_sdk_gen.schemas.ai_response_full import *

+from box_sdk_gen.schemas.ai_dialogue_history import *

+from box_sdk_gen.schemas.ai_text_gen import *

from box_sdk_gen.schemas.ai_ask import *

from box_sdk_gen.schemas.completion_rule_variable import *
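Because the schemas package re-exports every module with a wildcard, the new types should also be importable at package level, assuming no name collisions in this version:

# Package-level imports made available by the new wildcard re-exports.
from box_sdk_gen.schemas import AiCitation, AiDialogueHistory, AiResponseFull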
6 changes: 3 additions & 3 deletions box_sdk_gen/schemas/ai_agent_ask.py
@@ -19,16 +19,16 @@ class AiAgentAsk(BaseObject):
def __init__(
self,
*,
-        type: Optional[AiAgentAskTypeField] = None,
+        type: AiAgentAskTypeField = AiAgentAskTypeField.AI_AGENT_ASK.value,
long_text: Optional[AiAgentLongTextTool] = None,
basic_text: Optional[AiAgentBasicTextToolAsk] = None,
long_text_multi: Optional[AiAgentLongTextTool] = None,
basic_text_multi: Optional[AiAgentBasicTextToolAsk] = None,
**kwargs
):
"""
-        :param type: The type of AI agent used to handle queries., defaults to None
-        :type type: Optional[AiAgentAskTypeField], optional
+        :param type: The type of AI agent used to handle queries., defaults to AiAgentAskTypeField.AI_AGENT_ASK.value
+        :type type: AiAgentAskTypeField, optional
"""
super().__init__(**kwargs)
self.type = type
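With type now defaulting to the discriminator value instead of None, an ask-agent override can be constructed bare; a small sketch:

from box_sdk_gen.schemas.ai_agent_ask import AiAgentAsk

# type is pre-filled with the discriminator value 'ai_agent_ask'.
agent = AiAgentAsk()
assert agent.type == 'ai_agent_ask'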
6 changes: 3 additions & 3 deletions box_sdk_gen/schemas/ai_agent_text_gen.py
@@ -17,13 +17,13 @@ class AiAgentTextGen(BaseObject):
def __init__(
self,
*,
-        type: Optional[AiAgentTextGenTypeField] = None,
+        type: AiAgentTextGenTypeField = AiAgentTextGenTypeField.AI_AGENT_TEXT_GEN.value,
basic_gen: Optional[AiAgentBasicGenTool] = None,
**kwargs
):
"""
-        :param type: The type of AI agent used for generating text., defaults to None
-        :type type: Optional[AiAgentTextGenTypeField], optional
+        :param type: The type of AI agent used for generating text., defaults to AiAgentTextGenTypeField.AI_AGENT_TEXT_GEN.value
+        :type type: AiAgentTextGenTypeField, optional
"""
super().__init__(**kwargs)
self.type = type
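The text-gen agent gets the same default; a one-line sketch:

from box_sdk_gen.schemas.ai_agent_text_gen import AiAgentTextGen

# Same pattern: the discriminator defaults to 'ai_agent_text_gen'.
assert AiAgentTextGen().type == 'ai_agent_text_gen'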
10 changes: 10 additions & 0 deletions box_sdk_gen/schemas/ai_ask.py
@@ -6,6 +6,8 @@

from typing import List

+from box_sdk_gen.schemas.ai_dialogue_history import AiDialogueHistory

from box_sdk_gen.schemas.ai_agent_ask import AiAgentAsk


@@ -50,6 +52,8 @@ def __init__(
prompt: str,
items: List[AiAskItemsField],
*,
+        dialogue_history: Optional[List[AiDialogueHistory]] = None,
+        include_citations: Optional[bool] = None,
ai_agent: Optional[AiAgentAsk] = None,
**kwargs
):
@@ -64,9 +68,15 @@
If the file size exceeds 1MB, the first 1MB of text representation will be processed.
If you set `mode` parameter to `single_item_qa`, the `items` array can have one element only.
:type items: List[AiAskItemsField]
+        :param dialogue_history: The history of prompts and answers previously passed to the LLM. This provides additional context to the LLM in generating the response., defaults to None
+        :type dialogue_history: Optional[List[AiDialogueHistory]], optional
+        :param include_citations: A flag to indicate whether citations should be returned., defaults to None
+        :type include_citations: Optional[bool], optional
"""
super().__init__(**kwargs)
self.mode = mode
self.prompt = prompt
self.items = items
+        self.dialogue_history = dialogue_history
+        self.include_citations = include_citations
self.ai_agent = ai_agent
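The same two fields land on the AiAsk request schema for callers that build the body directly. A sketch; AiAskModeField and the AiAskItemsField type default are assumed from the generated naming:

from box_sdk_gen.schemas.ai_ask import AiAsk, AiAskModeField, AiAskItemsField
from box_sdk_gen.schemas.ai_dialogue_history import AiDialogueHistory

# Request body equivalent to the manager call in the first sketch.
body = AiAsk(
    mode=AiAskModeField.SINGLE_ITEM_QA,
    prompt='What changed since the last revision?',
    items=[AiAskItemsField(id='1234567890')],  # hypothetical file ID
    dialogue_history=[
        AiDialogueHistory(prompt='Summarize this document.', answer='...')
    ],
    include_citations=True,
)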
38 changes: 38 additions & 0 deletions box_sdk_gen/schemas/ai_citation.py
@@ -0,0 +1,38 @@
from enum import Enum

from typing import Optional

from box_sdk_gen.internal.base_object import BaseObject


class AiCitationTypeField(str, Enum):
    FILE = 'file'


class AiCitation(BaseObject):
    _discriminator = 'type', {'file'}

    def __init__(
        self,
        *,
        content: Optional[str] = None,
        id: Optional[str] = None,
        type: Optional[AiCitationTypeField] = None,
        name: Optional[str] = None,
        **kwargs
    ):
        """
        :param content: The specific content from where the answer was referenced., defaults to None
        :type content: Optional[str], optional
        :param id: The id of the item., defaults to None
        :type id: Optional[str], optional
        :param type: The type of the item., defaults to None
        :type type: Optional[AiCitationTypeField], optional
        :param name: The name of the item., defaults to None
        :type name: Optional[str], optional
        """
        super().__init__(**kwargs)
        self.content = content
        self.id = id
        self.type = type
        self.name = name
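Continuing the first sketch above: with include_citations=True, citations come back as AiCitation objects on the AiResponseFull:

# citations is Optional, so guard against None before iterating.
for citation in response.citations or []:
    print(citation.type, citation.id, citation.name)
    print(citation.content)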
28 changes: 28 additions & 0 deletions box_sdk_gen/schemas/ai_dialogue_history.py
@@ -0,0 +1,28 @@
from typing import Optional

from box_sdk_gen.internal.base_object import BaseObject

from box_sdk_gen.internal.utils import DateTime


class AiDialogueHistory(BaseObject):
    def __init__(
        self,
        *,
        prompt: Optional[str] = None,
        answer: Optional[str] = None,
        created_at: Optional[DateTime] = None,
        **kwargs
    ):
        """
        :param prompt: The prompt previously provided by the client and answered by the LLM., defaults to None
        :type prompt: Optional[str], optional
        :param answer: The answer previously provided by the LLM., defaults to None
        :type answer: Optional[str], optional
        :param created_at: The ISO date formatted timestamp of when the previous answer to the prompt was created., defaults to None
        :type created_at: Optional[DateTime], optional
        """
        super().__init__(**kwargs)
        self.prompt = prompt
        self.answer = answer
        self.created_at = created_at
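A prior turn can be replayed by pairing the earlier prompt with the earlier response's answer and created_at, continuing the first sketch:

from box_sdk_gen.schemas.ai_dialogue_history import AiDialogueHistory

# created_at reuses the DateTime from the earlier response as-is.
next_turn_history = AiDialogueHistory(
    prompt='What changed since the last revision?',
    answer=response.answer,
    created_at=response.created_at,
)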
38 changes: 38 additions & 0 deletions box_sdk_gen/schemas/ai_response_full.py
@@ -0,0 +1,38 @@
from typing import Optional

from typing import List

from box_sdk_gen.internal.utils import DateTime

from box_sdk_gen.schemas.ai_response import AiResponse

from box_sdk_gen.schemas.ai_citation import AiCitation


class AiResponseFull(AiResponse):
    def __init__(
        self,
        answer: str,
        created_at: DateTime,
        *,
        citations: Optional[List[AiCitation]] = None,
        completion_reason: Optional[str] = None,
        **kwargs
    ):
        """
        :param answer: The answer provided by the LLM.
        :type answer: str
        :param created_at: The ISO date formatted timestamp of when the answer to the prompt was created.
        :type created_at: DateTime
        :param citations: The citations of the LLM's answer reference., defaults to None
        :type citations: Optional[List[AiCitation]], optional
        :param completion_reason: The reason the response finishes., defaults to None
        :type completion_reason: Optional[str], optional
        """
        super().__init__(
            answer=answer,
            created_at=created_at,
            completion_reason=completion_reason,
            **kwargs
        )
        self.citations = citations
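Since AiResponseFull subclasses AiResponse, callers typed against the old return value of create_ai_ask keep working; citations is purely additive:

from box_sdk_gen.schemas.ai_response import AiResponse
from box_sdk_gen.schemas.ai_response_full import AiResponseFull

# Backward compatible: every AiResponseFull is an AiResponse.
assert issubclass(AiResponseFull, AiResponse)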
31 changes: 4 additions & 27 deletions box_sdk_gen/schemas/ai_text_gen.py
@@ -6,9 +6,9 @@

from typing import List

-from box_sdk_gen.schemas.ai_agent_text_gen import AiAgentTextGen
+from box_sdk_gen.schemas.ai_dialogue_history import AiDialogueHistory

-from box_sdk_gen.internal.utils import DateTime
+from box_sdk_gen.schemas.ai_agent_text_gen import AiAgentTextGen


class AiTextGenItemsTypeField(str, Enum):
@@ -40,36 +40,13 @@ def __init__(
self.content = content


-class AiTextGenDialogueHistoryField(BaseObject):
-    def __init__(
-        self,
-        *,
-        prompt: Optional[str] = None,
-        answer: Optional[str] = None,
-        created_at: Optional[DateTime] = None,
-        **kwargs
-    ):
-        """
-        :param prompt: The prompt previously provided by the client and answered by the LLM., defaults to None
-        :type prompt: Optional[str], optional
-        :param answer: The answer previously provided by the LLM., defaults to None
-        :type answer: Optional[str], optional
-        :param created_at: The ISO date formatted timestamp of when the previous answer to the prompt was created., defaults to None
-        :type created_at: Optional[DateTime], optional
-        """
-        super().__init__(**kwargs)
-        self.prompt = prompt
-        self.answer = answer
-        self.created_at = created_at


class AiTextGen(BaseObject):
def __init__(
self,
prompt: str,
items: List[AiTextGenItemsField],
*,
-        dialogue_history: Optional[List[AiTextGenDialogueHistoryField]] = None,
+        dialogue_history: Optional[List[AiDialogueHistory]] = None,
ai_agent: Optional[AiAgentTextGen] = None,
**kwargs
):
@@ -83,7 +60,7 @@
If the file size exceeds 1MB, the first 1MB of text representation will be processed.
:type items: List[AiTextGenItemsField]
:param dialogue_history: The history of prompts and answers previously passed to the LLM. This provides additional context to the LLM in generating the response., defaults to None
-        :type dialogue_history: Optional[List[AiTextGenDialogueHistoryField]], optional
+        :type dialogue_history: Optional[List[AiDialogueHistory]], optional
"""
super().__init__(**kwargs)
self.prompt = prompt
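For code still on the removed classes, the migration is mechanical: CreateAiTextGenDialogueHistory (manager) and AiTextGenDialogueHistoryField (schema) carried the same three fields as the shared type:

# Before (removed in this commit):
#   from box_sdk_gen.managers.ai import CreateAiTextGenDialogueHistory
#   history = CreateAiTextGenDialogueHistory(prompt=..., answer=...)
# After: same fields, one shared schema.
from box_sdk_gen.schemas.ai_dialogue_history import AiDialogueHistory

history = AiDialogueHistory(
    prompt='Draft an opening paragraph.',
    answer='Our Q3 plan focuses on reliability.',
)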
