Sampling: Remove skip_special_tokens

This parameter is way too confusing and does not make sense in
the modern LLM space.

Change approved by all maintainers.

Signed-off-by: kingbri <8082010+kingbri1@users.noreply.github.com>
This commit is contained in:
kingbri 2025-05-09 22:11:05 -04:00
parent 25c77ebf77
commit 42346c6b39
3 changed files with 0 additions and 9 deletions

View file

@@ -1373,7 +1373,6 @@ class ExllamaV2Container(BaseModelContainer):
min_new_tokens=params.min_tokens,
gen_settings=gen_settings,
stop_conditions=stop_conditions,
decode_special_tokens=not params.skip_special_tokens,
filters=grammar_handler.filters,
filter_prefer_eos=bool(grammar_handler.filters),
return_probs=params.logprobs > 0,

View file

@@ -215,11 +215,6 @@ class BaseSamplerRequest(BaseModel):
examples=[False],
)
skip_special_tokens: Optional[bool] = Field(
default_factory=lambda: get_default_sampler_value("skip_special_tokens", True),
examples=[True],
)
logit_bias: Optional[Dict[int, float]] = Field(
default_factory=lambda: get_default_sampler_value("logit_bias"),
examples=[{"1": 10, "2": 50}],

View file

@@ -136,9 +136,6 @@ add_bos_token:
ban_eos_token:
override: false
force: false
skip_special_tokens:
override: true
force: false
logit_bias:
override:
force: false