tabbyAPI-ollama/utils.py
kingbri 8ba3bfa6b3 API: Fix load exception handling
Models do not fully unload if an exception is caught in load. Therefore,
leave it to the client to unload on cancel.

Also add handlers in the event an SSE stream is cancelled. These packets
can't be sent back to the client since the client has severed the
connection, so print them in terminal.

Signed-off-by: kingbri <bdashore3@proton.me>
2023-12-05 00:23:15 -05:00

32 lines
832 B
Python

import traceback
from pydantic import BaseModel
from typing import Optional
# Wrapper callback for load progress
def load_progress(module, modules):
    """Yield a single (current, total) progress pair for a model load."""
    progress = (module, modules)
    yield progress
# Common error types
class TabbyGeneratorErrorMessage(BaseModel):
    """Error payload for a failed generation: a human-readable message
    plus an optional formatted traceback."""

    # Human-readable description of the failure
    message: str
    # Formatted traceback text (from traceback.format_exc()), if available
    trace: Optional[str] = None
class TabbyGeneratorError(BaseModel):
    """Top-level error envelope sent to the client; wraps the message/trace
    payload under an "error" key."""

    # The nested error details (message + optional trace)
    error: TabbyGeneratorErrorMessage
def get_generator_error(message: str):
    """Build an SSE error packet for a failed generation.

    Captures the currently-handled exception's traceback, prints it to the
    terminal (the client may have already severed the connection, so the
    terminal is the only guaranteed destination), and returns the error
    serialized as an SSE "data:" packet.

    Args:
        message: Human-readable description of the failure.

    Returns:
        The error model serialized to JSON and wrapped in SSE framing.
    """
    generator_error = TabbyGeneratorError(
        error=TabbyGeneratorErrorMessage(
            message=message,
            # NOTE(review): assumes an exception is being handled when this
            # is called; otherwise format_exc() yields "NoneType: None".
            trace=traceback.format_exc(),
        )
    )

    # Log and send the exception
    print(f"\n{generator_error.error.trace}")
    return get_sse_packet(generator_error.json(ensure_ascii = False))
def get_sse_packet(json_data: str):
    """Wrap a JSON string in Server-Sent Events framing.

    Args:
        json_data: Pre-serialized JSON payload.

    Returns:
        The payload prefixed with "data: " and terminated by a blank line,
        per the SSE wire format.
    """
    return "data: " + json_data + "\n\n"