From 10d9419f908321f5fdc9c9054a41ad187f37256f Mon Sep 17 00:00:00 2001 From: kingbri Date: Thu, 29 Aug 2024 21:12:36 -0400 Subject: [PATCH] Model: Add BOS token to prompt logs If add_bos_token is enabled, the BOS token gets prepended to the logged prompt if logging is enabled. Signed-off-by: kingbri --- backends/exllamav2/model.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/backends/exllamav2/model.py b/backends/exllamav2/model.py index e991a87..9fe9519 100644 --- a/backends/exllamav2/model.py +++ b/backends/exllamav2/model.py @@ -1163,8 +1163,12 @@ class ExllamaV2Container: # This is an inverse of skip_special_tokens decode_special_tokens = unwrap(not kwargs.get("skip_special_tokens"), False) - # Log prompt to console - log_prompt(prompt, request_id, negative_prompt) + # Log prompt to console. Add the BOS token if specified + log_prompt( + f"{self.tokenizer.bos_token if add_bos_token else ''}{prompt}", + request_id, + negative_prompt + ) # Create and add a new job # Don't use the request ID here as there can be multiple jobs per request