Sampling: Add max_completion_tokens
Conforms with OAI's updated spec

Signed-off-by: kingbri <8082010+bdashore3@users.noreply.github.com>
parent bc3c154c96
commit c23e406f2d
1 changed file with 3 additions and 1 deletion
@@ -25,7 +25,9 @@ class BaseSamplerRequest(BaseModel):
     max_tokens: Optional[int] = Field(
         default_factory=lambda: get_default_sampler_value("max_tokens"),
-        validation_alias=AliasChoices("max_tokens", "max_length"),
+        validation_alias=AliasChoices(
+            "max_tokens", "max_completion_tokens", "max_length"
+        ),
         description="Aliases: max_length",
         examples=[150],
         ge=0,
     )
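With this change, a request body may use any of max_tokens, max_completion_tokens, or the older max_length, and all three resolve to the same field. Below is a minimal, self-contained sketch of that behavior, not part of the commit itself: get_default_sampler_value is stubbed here purely for illustration, since the real helper is not shown in this diff.

# Minimal sketch of the aliasing behavior added by this commit.
from typing import Optional

from pydantic import AliasChoices, BaseModel, Field


def get_default_sampler_value(key: str):
    # Hypothetical stub; the project's real helper returns server-side sampler defaults.
    return None


class BaseSamplerRequest(BaseModel):
    max_tokens: Optional[int] = Field(
        default_factory=lambda: get_default_sampler_value("max_tokens"),
        validation_alias=AliasChoices(
            "max_tokens", "max_completion_tokens", "max_length"
        ),
        description="Aliases: max_length",
        examples=[150],
        ge=0,
    )


# All three request shapes populate the same field:
assert BaseSamplerRequest.model_validate({"max_tokens": 150}).max_tokens == 150
assert BaseSamplerRequest.model_validate({"max_completion_tokens": 150}).max_tokens == 150
assert BaseSamplerRequest.model_validate({"max_length": 150}).max_tokens == 150

Using an extra AliasChoices entry rather than renaming the field keeps existing max_tokens and max_length clients working while also accepting the max_completion_tokens key from OAI's updated spec.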