Requirements: Pin versions for some dependencies

Pydantic and Jinja2 need pinned versions.

Signed-off-by: kingbri <bdashore3@proton.me>
This commit is contained in:
kingbri 2023-12-19 21:48:04 -05:00
parent 1fd38c61de
commit da69ad8cd3
5 changed files with 9 additions and 9 deletions

View file

@@ -192,7 +192,7 @@ async def unload_model():
@app.get("/v1/lora/list", dependencies=[Depends(check_api_key)])
async def get_all_loras():
model_config = unwrap(config.get("model"), {})
lora_config = unwrap(model_config.get("lora"), {})
lora_config = unwrap(model_config.get("lora"), {})
lora_path = pathlib.Path(unwrap(lora_config.get("lora_dir"), "loras"))
loras = get_lora_list(lora_path.resolve())

View file

@@ -8,8 +8,8 @@ https://github.com/turboderp/exllamav2/releases/download/v0.0.11/exllamav2-0.0.1
# Pip dependencies
fastapi
pydantic
pydantic >= 2.0.0
PyYAML
progress
uvicorn
jinja2
jinja2 >= 3.0.0

View file

@@ -14,11 +14,11 @@ https://github.com/turboderp/exllamav2/releases/download/v0.0.11/exllamav2-0.0.1
# Pip dependencies
fastapi
pydantic
pydantic >= 2.0.0
PyYAML
progress
uvicorn
jinja2
jinja2 >= 3.0.0
# Linux FA2 from https://github.com/Dao-AILab/flash-attention/releases
https://github.com/Dao-AILab/flash-attention/releases/download/v2.3.6/flash_attn-2.3.6+cu118torch2.1cxx11abiFALSE-cp310-cp310-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.10"

View file

@@ -1,7 +1,7 @@
# Pip dependencies
fastapi
pydantic
pydantic >= 2.0.0
PyYAML
progress
uvicorn
jinja2
jinja2 >= 3.0.0

View file

@@ -14,11 +14,11 @@ https://github.com/turboderp/exllamav2/releases/download/v0.0.11/exllamav2-0.0.1
# Pip dependencies
fastapi
pydantic
pydantic >= 2.0.0
PyYAML
progress
uvicorn
jinja2
jinja2 >= 3.0.0
# Flash attention v2