tabbyAPI-ollama/backends/exllamav2/version.py
kingbri e0ffa90865 Dependencies: Change handling of exllamav2 checks
The ExllamaV2 check should look for the exllamav2 package alone; otherwise
the resulting errors don't make sense. Migrate the combined "exl2" computed
property to "inference", since those are the required dependencies for
minimal inference.

Signed-off-by: kingbri <bdashore3@proton.me>
2024-09-22 12:57:28 -04:00
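
The "inference" computed property mentioned in the commit message lives in common/optional_dependencies.py, which is not shown here. The sketch below is only a guess at its shape after this change: it assumes the flags are plain booleans detected with importlib and that "inference" means exllamav2 plus torch; the real package list and model type may differ.

# Hypothetical sketch of common/optional_dependencies.py after this commit
# (assumed structure, not taken from the repository).
from importlib.util import find_spec
from types import SimpleNamespace


def _installed(package: str) -> bool:
    """Return True if the package can be imported in the current environment."""
    return find_spec(package) is not None


_exllamav2 = _installed("exllamav2")
_torch = _installed("torch")

dependencies = SimpleNamespace(
    # Per-package flags, so callers such as check_exllama_version() can name
    # exactly which dependency is missing
    exllamav2=_exllamav2,
    torch=_torch,
    # Replaces the old combined "exl2" property: everything needed for minimal
    # inference (the exact package list here is an assumption)
    inference=_exllamav2 and _torch,
)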


import platform

from packaging import version
from importlib.metadata import version as package_version
from loguru import logger

from common.optional_dependencies import dependencies


def check_exllama_version():
    """Verifies the exllama version"""

    install_message = (
        "Please update your environment by running an update script "
        "(update_scripts/"
        f"update_deps.{'bat' if platform.system() == 'Windows' else 'sh'})\n\n"
        "Or you can manually run a requirements update "
        "using one of the following commands:\n\n"
        "For CUDA 12.1:\n"
        "pip install --upgrade .[cu121]\n\n"
        "For CUDA 11.8:\n"
        "pip install --upgrade .[cu118]\n\n"
        "For ROCm:\n"
        "pip install --upgrade .[amd]\n\n"
    )

    # Check only for the exllamav2 package itself so the error names the
    # actual missing dependency
    if not dependencies.exllamav2:
        raise SystemExit("Exllamav2 is not installed.\n" + install_message)

    required_version = version.parse("0.2.2")
    # Strip any local build tag (e.g. "0.2.2+cu121") before comparing
    current_version = version.parse(package_version("exllamav2").split("+")[0])

    unsupported_message = (
        f"ERROR: TabbyAPI requires ExLlamaV2 {required_version} "
        f"or greater. Your current version is {current_version}.\n" + install_message
    )

    if current_version < required_version:
        raise SystemExit(unsupported_message)
    else:
        logger.info(f"ExllamaV2 version: {current_version}")