from typing import Optional

from typing import List

from typing import Union

from box_sdk_gen.schemas.ai_llm_endpoint_params_open_ai import AiLlmEndpointParamsOpenAi

from box_sdk_gen.schemas.ai_llm_endpoint_params_google import AiLlmEndpointParamsGoogle

from box_sdk_gen.schemas.ai_llm_endpoint_params_aws import AiLlmEndpointParamsAws

from box_sdk_gen.schemas.ai_agent_basic_text_tool_base import AiAgentBasicTextToolBase

from box_sdk_gen.schemas.ai_agent_basic_text_tool_text_gen import (
    AiAgentBasicTextToolTextGen,
)

from box_sdk_gen.schemas.ai_agent_long_text_tool_text_gen import (
    AiAgentLongTextToolTextGenEmbeddingsField,
)

from box_sdk_gen.schemas.ai_agent_long_text_tool_text_gen import (
    AiAgentLongTextToolTextGen,
)

from box_sdk_gen.schemas.ai_agent_basic_gen_tool import AiAgentBasicGenTool

from box_sdk_gen.schemas.ai_studio_agent_basic_gen_tool import AiStudioAgentBasicGenTool

from box_sdk_gen.box.errors import BoxSDKError


class AiStudioAgentBasicGenToolResponse(AiStudioAgentBasicGenTool):
    def __init__(
        self,
        *,
        warnings: Optional[List[str]] = None,
        is_custom_instructions_included: Optional[bool] = None,
        content_template: Optional[str] = None,
        embeddings: Optional[AiAgentLongTextToolTextGenEmbeddingsField] = None,
        system_message: Optional[str] = None,
        prompt_template: Optional[str] = None,
        model: Optional[str] = None,
        num_tokens_for_completion: Optional[int] = None,
        llm_endpoint_params: Optional[
            Union[
                AiLlmEndpointParamsOpenAi,
                AiLlmEndpointParamsGoogle,
                AiLlmEndpointParamsAws,
            ]
        ] = None,
        **kwargs
    ):
        """
        :param warnings: Warnings concerning the tool, defaults to None
        :type warnings: Optional[List[str]], optional
        :param is_custom_instructions_included: True if the system message contains a custom instructions placeholder, false otherwise, defaults to None
        :type is_custom_instructions_included: Optional[bool], optional
        :param content_template: How the content should be included in a request to the LLM.
            Input for `{content}` is optional, depending on the use., defaults to None
        :type content_template: Optional[str], optional
        :param system_message: System messages aim at helping the LLM understand its role and what it is supposed to do.
            The input for `{current_date}` is optional, depending on the use., defaults to None
        :type system_message: Optional[str], optional
        :param prompt_template: The prompt template contains contextual information of the request and the user prompt.

            When using the `prompt_template` parameter, you **must include** input for `{user_question}`.
            Inputs for `{current_date}` and `{content}` are optional, depending on the use., defaults to None
        :type prompt_template: Optional[str], optional
        :param model: The model used for the AI agent for basic text. For specific model values, see the [available models list](g://box-ai/supported-models)., defaults to None
        :type model: Optional[str], optional
        :param num_tokens_for_completion: The number of tokens for completion., defaults to None
        :type num_tokens_for_completion: Optional[int], optional
        :param llm_endpoint_params: The parameters for the LLM endpoint specific to OpenAI, Google, and AWS models., defaults to None
        :type llm_endpoint_params: Optional[Union[AiLlmEndpointParamsOpenAi, AiLlmEndpointParamsGoogle, AiLlmEndpointParamsAws]], optional
        """
        super().__init__(
            is_custom_instructions_included=is_custom_instructions_included,
            content_template=content_template,
            embeddings=embeddings,
            system_message=system_message,
            prompt_template=prompt_template,
            model=model,
            num_tokens_for_completion=num_tokens_for_completion,
            llm_endpoint_params=llm_endpoint_params,
            **kwargs
        )
        self.warnings = warnings
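

# --- Usage sketch (illustrative only, not part of the generated schema) ------
# A minimal, hypothetical example of constructing this response object directly.
# In normal use the SDK deserializes it from a Box AI Studio API response; the
# field values and the OpenAI endpoint parameters shown below are assumptions
# chosen purely for illustration.
#
#     tool_response = AiStudioAgentBasicGenToolResponse(
#         warnings=["Model override was ignored"],
#         is_custom_instructions_included=True,
#         system_message="You are a helpful writing assistant.",
#         prompt_template="{user_question}",
#         model="openai__gpt_4o_mini",
#         num_tokens_for_completion=8400,
#         llm_endpoint_params=AiLlmEndpointParamsOpenAi(temperature=0.2),
#     )
#     print(tool_response.warnings)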