From 707d005aad8f9391a25ca6198e76f4ea9d63e93b Mon Sep 17 00:00:00 2001
From: kingbri <8082010+kingbri1@users.noreply.github.com>
Date: Fri, 11 Jul 2025 01:11:09 -0400
Subject: [PATCH] API: Default tool call ID and type

Doing this helps reduce the model's burden of generating the tool call ID
and type (which is always "function"). Follow Mistral's spec for tool call
IDs by using a 9-character alphanumeric string.

Signed-off-by: kingbri <8082010+kingbri1@users.noreply.github.com>
---
 endpoints/OAI/types/tools.py           | 7 ++++---
 endpoints/OAI/utils/chat_completion.py | 8 +++-----
 endpoints/OAI/utils/tools.py           | 4 +---
 3 files changed, 8 insertions(+), 11 deletions(-)

diff --git a/endpoints/OAI/types/tools.py b/endpoints/OAI/types/tools.py
index c9ccd8b..b5b9611 100644
--- a/endpoints/OAI/types/tools.py
+++ b/endpoints/OAI/types/tools.py
@@ -1,5 +1,6 @@
-from pydantic import BaseModel
+from pydantic import BaseModel, Field
 from typing import Dict, Literal
+from uuid import uuid4
 
 
 class Function(BaseModel):
@@ -29,6 +30,6 @@ class Tool(BaseModel):
 class ToolCall(BaseModel):
     """Represents an OAI tool description."""
 
-    id: str
+    id: str = Field(default_factory=lambda: str(uuid4()).replace("-", "")[:9])
     function: Tool
-    type: Literal["function"]
+    type: Literal["function"] = "function"
diff --git a/endpoints/OAI/utils/chat_completion.py b/endpoints/OAI/utils/chat_completion.py
index edd9b34..a9bd888 100644
--- a/endpoints/OAI/utils/chat_completion.py
+++ b/endpoints/OAI/utils/chat_completion.py
@@ -70,12 +70,11 @@ def _create_response(
 
         logprob_response = ChatCompletionLogprobs(content=collected_token_probs)
 
-    # Initialize finish_reason with a default value or from generation data
-    finish_reason = generation.get("finish_reason", "stop")
-
-    # If a tool call is present, mark the finish reason as such
+    # Set finish reason
     if message.tool_calls:
         finish_reason = "tool_calls"
+    else:
+        finish_reason = generation.get("finish_reason", "stop")
 
     choice = ChatCompletionRespChoice(
         index=index,
@@ -152,7 +151,6 @@ def _create_stream_chunk(
             choice.finish_reason = "tool_calls"
 
             choices.append(choice)
-
     else:
         message = ChatCompletionMessage(
             role="assistant", content=unwrap(generation.get("text"), "")
diff --git a/endpoints/OAI/utils/tools.py b/endpoints/OAI/utils/tools.py
index 8473d60..c1ebded 100644
--- a/endpoints/OAI/utils/tools.py
+++ b/endpoints/OAI/utils/tools.py
@@ -11,7 +11,6 @@ TOOL_CALL_SCHEMA = {
     "items": {
         "type": "object",
         "properties": {
-            "id": {"type": "string"},
             "function": {
                 "type": "object",
                 "properties": {
@@ -23,9 +22,8 @@ TOOL_CALL_SCHEMA = {
                 },
                 "required": ["name", "arguments"],
             },
-            "type": {"type": "string", "enum": ["function"]},
         },
-        "required": ["id", "function", "type"],
+        "required": ["function"],
     },
 }
 
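
Reviewer note, not part of the patch: a minimal sketch of what the new defaults
produce, assuming pydantic v2. ToolCallDemo is a hypothetical stand-in for the
patched ToolCall in endpoints/OAI/types/tools.py, with the "function" field
omitted for brevity.

# Sketch only: reproduces the default id/type behavior added by the patch.
from uuid import uuid4

from pydantic import BaseModel, Field


class ToolCallDemo(BaseModel):
    # Same default_factory as the patch: strip dashes from a UUID4 string and
    # keep the first 9 characters (hex digits, so alphanumeric)
    id: str = Field(default_factory=lambda: str(uuid4()).replace("-", "")[:9])
    type: str = "function"


call = ToolCallDemo()
print(len(call.id))  # 9
print(call.id)       # e.g. "a3f91b04c"
print(call.type)     # "function"

Because both fields now have defaults, a generation that omits id and type
still yields a valid ToolCall, which is why the JSON schema in
endpoints/OAI/utils/tools.py no longer needs to require them.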