Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .codegen.json
Original file line number Diff line number Diff line change
@@ -1 +1 @@
{ "engineHash": "c2a365c", "specHash": "6a332e7", "version": "1.14.0" }
{ "engineHash": "c2a365c", "specHash": "4e677e3", "version": "1.14.0" }
4 changes: 4 additions & 0 deletions box_sdk_gen/schemas/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,8 +22,12 @@

from box_sdk_gen.schemas.ai_llm_endpoint_params_google import *

from box_sdk_gen.schemas.ai_llm_endpoint_params_ibm import *

from box_sdk_gen.schemas.ai_llm_endpoint_params_open_ai import *

from box_sdk_gen.schemas.ai_llm_endpoint_params import *

from box_sdk_gen.schemas.ai_agent_basic_text_tool_base import *

from box_sdk_gen.schemas.ai_agent_basic_text_tool_text_gen import *
Expand Down
18 changes: 2 additions & 16 deletions box_sdk_gen/schemas/ai_agent_basic_gen_tool.py
Original file line number Diff line number Diff line change
@@ -1,12 +1,6 @@
from typing import Optional

from typing import Union

from box_sdk_gen.schemas.ai_llm_endpoint_params_open_ai import AiLlmEndpointParamsOpenAi

from box_sdk_gen.schemas.ai_llm_endpoint_params_google import AiLlmEndpointParamsGoogle

from box_sdk_gen.schemas.ai_llm_endpoint_params_aws import AiLlmEndpointParamsAws
from box_sdk_gen.schemas.ai_llm_endpoint_params import AiLlmEndpointParams

from box_sdk_gen.schemas.ai_agent_basic_text_tool_base import AiAgentBasicTextToolBase

Expand Down Expand Up @@ -35,13 +29,7 @@ def __init__(
prompt_template: Optional[str] = None,
model: Optional[str] = None,
num_tokens_for_completion: Optional[int] = None,
llm_endpoint_params: Optional[
Union[
AiLlmEndpointParamsOpenAi,
AiLlmEndpointParamsGoogle,
AiLlmEndpointParamsAws,
]
] = None,
llm_endpoint_params: Optional[AiLlmEndpointParams] = None,
**kwargs
):
"""
Expand All @@ -60,8 +48,6 @@ def __init__(
:type model: Optional[str], optional
:param num_tokens_for_completion: The number of tokens for completion., defaults to None
:type num_tokens_for_completion: Optional[int], optional
:param llm_endpoint_params: The parameters for the LLM endpoint specific to OpenAI / Google models., defaults to None
:type llm_endpoint_params: Optional[Union[AiLlmEndpointParamsOpenAi, AiLlmEndpointParamsGoogle, AiLlmEndpointParamsAws]], optional
"""
super().__init__(
embeddings=embeddings,
Expand Down
18 changes: 2 additions & 16 deletions box_sdk_gen/schemas/ai_agent_basic_text_tool.py
Original file line number Diff line number Diff line change
@@ -1,12 +1,6 @@
from typing import Optional

from typing import Union

from box_sdk_gen.schemas.ai_llm_endpoint_params_open_ai import AiLlmEndpointParamsOpenAi

from box_sdk_gen.schemas.ai_llm_endpoint_params_google import AiLlmEndpointParamsGoogle

from box_sdk_gen.schemas.ai_llm_endpoint_params_aws import AiLlmEndpointParamsAws
from box_sdk_gen.schemas.ai_llm_endpoint_params import AiLlmEndpointParams

from box_sdk_gen.schemas.ai_agent_basic_text_tool_base import AiAgentBasicTextToolBase

Expand All @@ -21,13 +15,7 @@ def __init__(
prompt_template: Optional[str] = None,
model: Optional[str] = None,
num_tokens_for_completion: Optional[int] = None,
llm_endpoint_params: Optional[
Union[
AiLlmEndpointParamsOpenAi,
AiLlmEndpointParamsGoogle,
AiLlmEndpointParamsAws,
]
] = None,
llm_endpoint_params: Optional[AiLlmEndpointParams] = None,
**kwargs
):
"""
Expand All @@ -41,8 +29,6 @@ def __init__(
:type model: Optional[str], optional
:param num_tokens_for_completion: The number of tokens for completion., defaults to None
:type num_tokens_for_completion: Optional[int], optional
:param llm_endpoint_params: The parameters for the LLM endpoint specific to OpenAI / Google models., defaults to None
:type llm_endpoint_params: Optional[Union[AiLlmEndpointParamsOpenAi, AiLlmEndpointParamsGoogle, AiLlmEndpointParamsAws]], optional
"""
super().__init__(
model=model,
Expand Down
18 changes: 2 additions & 16 deletions box_sdk_gen/schemas/ai_agent_basic_text_tool_base.py
Original file line number Diff line number Diff line change
@@ -1,14 +1,8 @@
from typing import Optional

from typing import Union

from box_sdk_gen.internal.base_object import BaseObject

from box_sdk_gen.schemas.ai_llm_endpoint_params_open_ai import AiLlmEndpointParamsOpenAi

from box_sdk_gen.schemas.ai_llm_endpoint_params_google import AiLlmEndpointParamsGoogle

from box_sdk_gen.schemas.ai_llm_endpoint_params_aws import AiLlmEndpointParamsAws
from box_sdk_gen.schemas.ai_llm_endpoint_params import AiLlmEndpointParams

from box_sdk_gen.box.errors import BoxSDKError

Expand All @@ -19,22 +13,14 @@ def __init__(
*,
model: Optional[str] = None,
num_tokens_for_completion: Optional[int] = None,
llm_endpoint_params: Optional[
Union[
AiLlmEndpointParamsOpenAi,
AiLlmEndpointParamsGoogle,
AiLlmEndpointParamsAws,
]
] = None,
llm_endpoint_params: Optional[AiLlmEndpointParams] = None,
**kwargs
):
"""
:param model: The model used for the AI agent for basic text. For specific model values, see the [available models list](g://box-ai/supported-models)., defaults to None
:type model: Optional[str], optional
:param num_tokens_for_completion: The number of tokens for completion., defaults to None
:type num_tokens_for_completion: Optional[int], optional
:param llm_endpoint_params: The parameters for the LLM endpoint specific to OpenAI / Google models., defaults to None
:type llm_endpoint_params: Optional[Union[AiLlmEndpointParamsOpenAi, AiLlmEndpointParamsGoogle, AiLlmEndpointParamsAws]], optional
"""
super().__init__(**kwargs)
self.model = model
Expand Down
18 changes: 2 additions & 16 deletions box_sdk_gen/schemas/ai_agent_basic_text_tool_text_gen.py
Original file line number Diff line number Diff line change
@@ -1,12 +1,6 @@
from typing import Optional

from typing import Union

from box_sdk_gen.schemas.ai_llm_endpoint_params_open_ai import AiLlmEndpointParamsOpenAi

from box_sdk_gen.schemas.ai_llm_endpoint_params_google import AiLlmEndpointParamsGoogle

from box_sdk_gen.schemas.ai_llm_endpoint_params_aws import AiLlmEndpointParamsAws
from box_sdk_gen.schemas.ai_llm_endpoint_params import AiLlmEndpointParams

from box_sdk_gen.schemas.ai_agent_basic_text_tool_base import AiAgentBasicTextToolBase

Expand All @@ -21,13 +15,7 @@ def __init__(
prompt_template: Optional[str] = None,
model: Optional[str] = None,
num_tokens_for_completion: Optional[int] = None,
llm_endpoint_params: Optional[
Union[
AiLlmEndpointParamsOpenAi,
AiLlmEndpointParamsGoogle,
AiLlmEndpointParamsAws,
]
] = None,
llm_endpoint_params: Optional[AiLlmEndpointParams] = None,
**kwargs
):
"""
Expand All @@ -43,8 +31,6 @@ def __init__(
:type model: Optional[str], optional
:param num_tokens_for_completion: The number of tokens for completion., defaults to None
:type num_tokens_for_completion: Optional[int], optional
:param llm_endpoint_params: The parameters for the LLM endpoint specific to OpenAI / Google models., defaults to None
:type llm_endpoint_params: Optional[Union[AiLlmEndpointParamsOpenAi, AiLlmEndpointParamsGoogle, AiLlmEndpointParamsAws]], optional
"""
super().__init__(
model=model,
Expand Down
18 changes: 2 additions & 16 deletions box_sdk_gen/schemas/ai_agent_long_text_tool.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,13 +2,7 @@

from box_sdk_gen.internal.base_object import BaseObject

from typing import Union

from box_sdk_gen.schemas.ai_llm_endpoint_params_open_ai import AiLlmEndpointParamsOpenAi

from box_sdk_gen.schemas.ai_llm_endpoint_params_google import AiLlmEndpointParamsGoogle

from box_sdk_gen.schemas.ai_llm_endpoint_params_aws import AiLlmEndpointParamsAws
from box_sdk_gen.schemas.ai_llm_endpoint_params import AiLlmEndpointParams

from box_sdk_gen.schemas.ai_agent_basic_text_tool_base import AiAgentBasicTextToolBase

Expand Down Expand Up @@ -62,13 +56,7 @@ def __init__(
prompt_template: Optional[str] = None,
model: Optional[str] = None,
num_tokens_for_completion: Optional[int] = None,
llm_endpoint_params: Optional[
Union[
AiLlmEndpointParamsOpenAi,
AiLlmEndpointParamsGoogle,
AiLlmEndpointParamsAws,
]
] = None,
llm_endpoint_params: Optional[AiLlmEndpointParams] = None,
**kwargs
):
"""
Expand All @@ -82,8 +70,6 @@ def __init__(
:type model: Optional[str], optional
:param num_tokens_for_completion: The number of tokens for completion., defaults to None
:type num_tokens_for_completion: Optional[int], optional
:param llm_endpoint_params: The parameters for the LLM endpoint specific to OpenAI / Google models., defaults to None
:type llm_endpoint_params: Optional[Union[AiLlmEndpointParamsOpenAi, AiLlmEndpointParamsGoogle, AiLlmEndpointParamsAws]], optional
"""
super().__init__(
system_message=system_message,
Expand Down
18 changes: 2 additions & 16 deletions box_sdk_gen/schemas/ai_agent_long_text_tool_text_gen.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,13 +2,7 @@

from box_sdk_gen.internal.base_object import BaseObject

from typing import Union

from box_sdk_gen.schemas.ai_llm_endpoint_params_open_ai import AiLlmEndpointParamsOpenAi

from box_sdk_gen.schemas.ai_llm_endpoint_params_google import AiLlmEndpointParamsGoogle

from box_sdk_gen.schemas.ai_llm_endpoint_params_aws import AiLlmEndpointParamsAws
from box_sdk_gen.schemas.ai_llm_endpoint_params import AiLlmEndpointParams

from box_sdk_gen.schemas.ai_agent_basic_text_tool_base import AiAgentBasicTextToolBase

Expand Down Expand Up @@ -64,13 +58,7 @@ def __init__(
prompt_template: Optional[str] = None,
model: Optional[str] = None,
num_tokens_for_completion: Optional[int] = None,
llm_endpoint_params: Optional[
Union[
AiLlmEndpointParamsOpenAi,
AiLlmEndpointParamsGoogle,
AiLlmEndpointParamsAws,
]
] = None,
llm_endpoint_params: Optional[AiLlmEndpointParams] = None,
**kwargs
):
"""
Expand All @@ -86,8 +74,6 @@ def __init__(
:type model: Optional[str], optional
:param num_tokens_for_completion: The number of tokens for completion., defaults to None
:type num_tokens_for_completion: Optional[int], optional
:param llm_endpoint_params: The parameters for the LLM endpoint specific to OpenAI / Google models., defaults to None
:type llm_endpoint_params: Optional[Union[AiLlmEndpointParamsOpenAi, AiLlmEndpointParamsGoogle, AiLlmEndpointParamsAws]], optional
"""
super().__init__(
system_message=system_message,
Expand Down
18 changes: 18 additions & 0 deletions box_sdk_gen/schemas/ai_llm_endpoint_params.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
from typing import Union

from box_sdk_gen.schemas.ai_llm_endpoint_params_open_ai import AiLlmEndpointParamsOpenAi

from box_sdk_gen.schemas.ai_llm_endpoint_params_google import AiLlmEndpointParamsGoogle

from box_sdk_gen.schemas.ai_llm_endpoint_params_aws import AiLlmEndpointParamsAws

from box_sdk_gen.schemas.ai_llm_endpoint_params_ibm import AiLlmEndpointParamsIbm

from box_sdk_gen.box.errors import BoxSDKError

# Union of all provider-specific LLM endpoint parameter variants accepted by
# the AI agent tool schemas. NOTE(review): the IBM variant carries a
# `_discriminator = 'type'` — presumably the other variants do too, so this
# acts as a discriminated union; confirm against the other variant classes.
AiLlmEndpointParams = Union[
    AiLlmEndpointParamsOpenAi,
    AiLlmEndpointParamsGoogle,
    AiLlmEndpointParamsAws,
    AiLlmEndpointParamsIbm,
]
47 changes: 47 additions & 0 deletions box_sdk_gen/schemas/ai_llm_endpoint_params_ibm.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,47 @@
from enum import Enum

from typing import Optional

from box_sdk_gen.internal.base_object import BaseObject

from box_sdk_gen.box.errors import BoxSDKError


class AiLlmEndpointParamsIbmTypeField(str, Enum):
    """Allowed values for the ``type`` discriminator of :class:`AiLlmEndpointParamsIbm`."""

    # The only valid discriminator value for IBM endpoint params.
    IBM_PARAMS = 'ibm_params'


class AiLlmEndpointParamsIbm(BaseObject):
    """LLM endpoint parameters specific to IBM models.

    One of the variants of the ``AiLlmEndpointParams`` union; identified by
    the ``type`` discriminator value ``'ibm_params'``.
    """

    _discriminator = 'type', {'ibm_params'}

    def __init__(
        self,
        *,
        type: AiLlmEndpointParamsIbmTypeField = AiLlmEndpointParamsIbmTypeField.IBM_PARAMS,
        temperature: Optional[float] = None,
        top_p: Optional[float] = None,
        top_k: Optional[float] = None,
        **kwargs
    ):
        """
        :param type: Discriminator for this params object; always ``ibm_params``.
            This parameter is **required**., defaults to AiLlmEndpointParamsIbmTypeField.IBM_PARAMS
        :type type: AiLlmEndpointParamsIbmTypeField, optional
        :param temperature: Sampling temperature between 0 and 1. Larger values
            (e.g. 0.8) make the output more random, smaller values (e.g. 0.2)
            make it more focused and deterministic. Prefer tuning either this
            or ``top_p``, not both., defaults to None
        :type temperature: Optional[float], optional
        :param top_p: Nucleus-sampling alternative to ``temperature``: only the
            tokens comprising the top ``top_p`` probability mass are considered
            (0.1 keeps the top 10%). Prefer tuning either this or
            ``temperature``, not both., defaults to None
        :type top_p: Optional[float], optional
        :param top_k: Restricts token selection to the ``top_k`` most probable
            candidates — 1 means greedy decoding, 3 means the next token is
            drawn from the three most likely using temperature., defaults to None
        :type top_k: Optional[float], optional
        """
        # Forward unrecognized keys to BaseObject, then record each parameter
        # as an instance attribute (order preserved from the generated schema).
        super().__init__(**kwargs)
        self.type = type
        self.temperature = temperature
        self.top_p = top_p
        self.top_k = top_k
18 changes: 2 additions & 16 deletions box_sdk_gen/schemas/ai_studio_agent_basic_gen_tool.py
Original file line number Diff line number Diff line change
@@ -1,12 +1,6 @@
from typing import Optional

from typing import Union

from box_sdk_gen.schemas.ai_llm_endpoint_params_open_ai import AiLlmEndpointParamsOpenAi

from box_sdk_gen.schemas.ai_llm_endpoint_params_google import AiLlmEndpointParamsGoogle

from box_sdk_gen.schemas.ai_llm_endpoint_params_aws import AiLlmEndpointParamsAws
from box_sdk_gen.schemas.ai_llm_endpoint_params import AiLlmEndpointParams

from box_sdk_gen.schemas.ai_agent_basic_text_tool_base import AiAgentBasicTextToolBase

Expand Down Expand Up @@ -38,13 +32,7 @@ def __init__(
prompt_template: Optional[str] = None,
model: Optional[str] = None,
num_tokens_for_completion: Optional[int] = None,
llm_endpoint_params: Optional[
Union[
AiLlmEndpointParamsOpenAi,
AiLlmEndpointParamsGoogle,
AiLlmEndpointParamsAws,
]
] = None,
llm_endpoint_params: Optional[AiLlmEndpointParams] = None,
**kwargs
):
"""
Expand All @@ -65,8 +53,6 @@ def __init__(
:type model: Optional[str], optional
:param num_tokens_for_completion: The number of tokens for completion., defaults to None
:type num_tokens_for_completion: Optional[int], optional
:param llm_endpoint_params: The parameters for the LLM endpoint specific to OpenAI / Google models., defaults to None
:type llm_endpoint_params: Optional[Union[AiLlmEndpointParamsOpenAi, AiLlmEndpointParamsGoogle, AiLlmEndpointParamsAws]], optional
"""
super().__init__(
content_template=content_template,
Expand Down
18 changes: 2 additions & 16 deletions box_sdk_gen/schemas/ai_studio_agent_basic_gen_tool_response.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,13 +2,7 @@

from typing import List

from typing import Union

from box_sdk_gen.schemas.ai_llm_endpoint_params_open_ai import AiLlmEndpointParamsOpenAi

from box_sdk_gen.schemas.ai_llm_endpoint_params_google import AiLlmEndpointParamsGoogle

from box_sdk_gen.schemas.ai_llm_endpoint_params_aws import AiLlmEndpointParamsAws
from box_sdk_gen.schemas.ai_llm_endpoint_params import AiLlmEndpointParams

from box_sdk_gen.schemas.ai_agent_basic_text_tool_base import AiAgentBasicTextToolBase

Expand Down Expand Up @@ -43,13 +37,7 @@ def __init__(
prompt_template: Optional[str] = None,
model: Optional[str] = None,
num_tokens_for_completion: Optional[int] = None,
llm_endpoint_params: Optional[
Union[
AiLlmEndpointParamsOpenAi,
AiLlmEndpointParamsGoogle,
AiLlmEndpointParamsAws,
]
] = None,
llm_endpoint_params: Optional[AiLlmEndpointParams] = None,
**kwargs
):
"""
Expand All @@ -72,8 +60,6 @@ def __init__(
:type model: Optional[str], optional
:param num_tokens_for_completion: The number of tokens for completion., defaults to None
:type num_tokens_for_completion: Optional[int], optional
:param llm_endpoint_params: The parameters for the LLM endpoint specific to OpenAI / Google models., defaults to None
:type llm_endpoint_params: Optional[Union[AiLlmEndpointParamsOpenAi, AiLlmEndpointParamsGoogle, AiLlmEndpointParamsAws]], optional
"""
super().__init__(
is_custom_instructions_included=is_custom_instructions_included,
Expand Down
Loading
Loading