Model: Remove generate_window
Not required, since we already raise an error when max_seq_len is exceeded Signed-off-by: kingbri <8082010+kingbri1@users.noreply.github.com>
This commit is contained in:
parent
2f5235e1a3
commit
1afc9b983e
3 changed files with 0 additions and 12 deletions
|
|
@ -1242,9 +1242,6 @@ class ExllamaV2Container:
|
|||
grammar_handler = ExLlamaV2Grammar()
|
||||
banned_strings = []
|
||||
|
||||
# TODO: Not used for some reason?
|
||||
generate_window = max(params.generate_window, self.config.max_seq_len // 8)
|
||||
|
||||
self.assign_gen_params(
|
||||
params,
|
||||
gen_settings,
|
||||
|
|
|
|||
|
|
@ -41,12 +41,6 @@ class BaseSamplerRequest(BaseModel):
|
|||
ge=0,
|
||||
)
|
||||
|
||||
generate_window: Optional[int] = Field(
|
||||
default_factory=lambda: get_default_sampler_value("generate_window", 512),
|
||||
examples=[512],
|
||||
ge=0,
|
||||
)
|
||||
|
||||
stop: Optional[Union[str, List[Union[str, int]]]] = Field(
|
||||
default_factory=lambda: get_default_sampler_value("stop", []),
|
||||
validation_alias=AliasChoices("stop", "stop_sequence"),
|
||||
|
|
|
|||
|
|
@ -14,9 +14,6 @@ max_tokens:
|
|||
min_tokens:
|
||||
override: 0
|
||||
force: false
|
||||
generate_window:
|
||||
override: 512
|
||||
force: false
|
||||
stop:
|
||||
override: []
|
||||
force: false
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue