Model: Fix logit bias handling
If the token doesn't exist, gracefully warn instead of erroring out.

Signed-off-by: kingbri <bdashore3@proton.me>
parent aa34b2e5fd
commit 7def32e4de
2 changed files with 8 additions and 2 deletions
```diff
@@ -750,7 +750,13 @@ class ExllamaV2Container:
         # Map logits to the tensor with their biases
         for token, bias in logit_bias.items():
-            gen_settings.token_bias[token] = bias
+            if token in gen_settings.token_bias:
+                gen_settings.token_bias[token] = bias
+            else:
+                logger.warning(
+                    f"Logit bias: Token {token} not present "
+                    "in the model's vocab. Skipping."
+                )

         # Ban the EOS token if specified. If not, append to stop conditions
         # as well.
```
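For context, a minimal sketch of the new lookup-then-warn behavior, runnable outside tabbyAPI. The dict-backed `token_bias` and the stdlib logger are hypothetical stand-ins for `gen_settings.token_bias` (a vocab-sized container in ExllamaV2) and the project's logger:

```python
import logging

logging.basicConfig(level=logging.WARNING)
logger = logging.getLogger(__name__)


def apply_logit_bias(
    logit_bias: dict[int, float], token_bias: dict[int, float]
) -> None:
    """Apply biases for tokens the model knows; warn and skip the rest."""
    for token, bias in logit_bias.items():
        if token in token_bias:
            # Token id exists in the vocab-sized container: apply the bias
            token_bias[token] = bias
        else:
            # Unknown token id: warn instead of raising, per the commit
            logger.warning(
                f"Logit bias: Token {token} not present "
                "in the model's vocab. Skipping."
            )


# Example: token 5 falls outside the 3-token "vocab", so it only warns
vocab_bias = {0: 0.0, 1: 0.0, 2: 0.0}
apply_logit_bias({1: 10.0, 5: -5.0}, vocab_bias)
```

With this change, a request whose logit_bias map contains some invalid token ids still generates; only the unknown entries are skipped.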
```diff
@@ -111,7 +111,7 @@ class BaseSamplerRequest(BaseModel):

     logit_bias: Optional[Dict[int, float]] = Field(
         default_factory=lambda: get_default_sampler_value("logit_bias"),
-        examples=[[{"1": 10}]],
+        examples=[{"1": 10, "2": 50}],
     )

     negative_prompt: Optional[str] = Field(
```
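The second hunk fixes the schema example's shape: Pydantic v2's `Field(examples=...)` takes a list of example values, so the old value wrapped the example dict in one list too many. A minimal sketch, assuming Pydantic v2 (`SamplerSketch` is an illustrative stand-in for `BaseSamplerRequest`):

```python
from typing import Dict, Optional

from pydantic import BaseModel, Field


class SamplerSketch(BaseModel):
    """Hypothetical stand-in for BaseSamplerRequest."""

    # `examples` is a list of example values, so a single example dict
    # goes in one list, not two.
    logit_bias: Optional[Dict[int, float]] = Field(
        default=None,
        examples=[{"1": 10, "2": 50}],
    )


# The example dict now lands in the JSON schema as-is
schema = SamplerSketch.model_json_schema()
print(schema["properties"]["logit_bias"]["examples"])  # [{'1': 10, '2': 50}]
```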