Model: Remove generate_window

Not required, since we raise an error when the max_seq_len is exceeded

Signed-off-by: kingbri <8082010+kingbri1@users.noreply.github.com>
This commit is contained in:
kingbri 2025-04-16 12:58:23 -04:00
parent 2f5235e1a3
commit 1afc9b983e
3 changed files with 0 additions and 12 deletions

View file

@@ -1242,9 +1242,6 @@ class ExllamaV2Container:
grammar_handler = ExLlamaV2Grammar()
banned_strings = []
# TODO: Not used for some reason?
generate_window = max(params.generate_window, self.config.max_seq_len // 8)
self.assign_gen_params(
params,
gen_settings,

View file

@@ -41,12 +41,6 @@ class BaseSamplerRequest(BaseModel):
ge=0,
)
generate_window: Optional[int] = Field(
default_factory=lambda: get_default_sampler_value("generate_window", 512),
examples=[512],
ge=0,
)
stop: Optional[Union[str, List[Union[str, int]]]] = Field(
default_factory=lambda: get_default_sampler_value("stop", []),
validation_alias=AliasChoices("stop", "stop_sequence"),

View file

@@ -14,9 +14,6 @@ max_tokens:
min_tokens:
override: 0
force: false
generate_window:
override: 512
force: false
stop:
override: []
force: false