tabbyAPI-ollama/tests/wheel_test.py

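"""Wheel import smoke test.

Tries to import each optional dependency wheel (flash_attn, exllamav2, torch,
fastchat), reports the installed version on success, and prints the traceback
on failure so users can check whether their environment is set up correctly.
"""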
import traceback
from importlib.metadata import version

# Track which packages imported cleanly and which failed
successful_packages = []
errored_packages = []

# flash_attn: FlashAttention kernels
try:
    import flash_attn

    print(f"Flash attention on version {version('flash_attn')} successfully imported")
    successful_packages.append("flash_attn")
except Exception:
    print("Flash attention could not be loaded because:")
    print(traceback.format_exc())
    errored_packages.append("flash_attn")

# exllamav2: the ExLlamaV2 inference backend
try:
    import exllamav2

    print(f"Exllamav2 on version {version('exllamav2')} successfully imported")
    successful_packages.append("exllamav2")
except Exception:
    print("Exllamav2 could not be loaded because:")
    print(traceback.format_exc())
    errored_packages.append("exllamav2")

# torch: PyTorch
try:
    import torch

    print(f"Torch on version {version('torch')} successfully imported")
    successful_packages.append("torch")
except Exception:
    print("Torch could not be loaded because:")
    print(traceback.format_exc())
    errored_packages.append("torch")

# fastchat: only needed for chat completions with message arrays
try:
    import fastchat

    # Note: if FastChat is installed under a different distribution name (e.g. "fschat"),
    # this version lookup may raise PackageNotFoundError even though the import succeeded.
    print(f"Fastchat on version {version('fastchat')} successfully imported")
    successful_packages.append("fastchat")
except Exception:
    print(
        "Fastchat is only needed for chat completions with message arrays. "
        "Ignore this error if this isn't your use case."
    )
    print("Fastchat could not be loaded because:")
    print(traceback.format_exc())
    errored_packages.append("fastchat")

# Summarize which imports succeeded and which failed
print(
    f"\nSuccessful imports: {', '.join(successful_packages)}",
    f"\nErrored imports: {', '.join(errored_packages)}",
)
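
# Run directly (e.g. `python tests/wheel_test.py` from the repository root)
# to verify that the installed wheels import correctly.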