diff --git a/backends/exllamav2/version.py b/backends/exllamav2/version.py
index fc4afb1..cdaeaeb 100644
--- a/backends/exllamav2/version.py
+++ b/backends/exllamav2/version.py
@@ -22,7 +22,7 @@ def check_exllama_version():
         "pip install --upgrade .[amd]\n\n"
     )
 
-    if not dependencies.exl2:
+    if not dependencies.exllamav2:
         raise SystemExit(("Exllamav2 is not installed.\n" + install_message))
 
     required_version = version.parse("0.2.2")
diff --git a/common/model.py b/common/model.py
index 296f684..87b06ad 100644
--- a/common/model.py
+++ b/common/model.py
@@ -15,7 +15,7 @@ from common.networking import handle_request_error
 from common.tabby_config import config
 from common.optional_dependencies import dependencies
 
-if dependencies.exl2:
+if dependencies.exllamav2:
     from backends.exllamav2.model import ExllamaV2Container
 
 # Global model container
diff --git a/common/optional_dependencies.py b/common/optional_dependencies.py
index fd2efe3..e98a668 100644
--- a/common/optional_dependencies.py
+++ b/common/optional_dependencies.py
@@ -25,7 +25,7 @@ class DependenciesModel(BaseModel):
 
     @computed_field
     @property
-    def exl2(self) -> bool:
+    def inference(self) -> bool:
         return self.torch and self.exllamav2 and self.flash_attn