Config: Deep merge model overrides

Values below the first level of kwargs were not being merged properly.
A more bulletproof solution would be to refactor the loading code
to separate draft and normal model parameters.

Signed-off-by: kingbri <8082010+kingbri1@users.noreply.github.com>
This commit is contained in:
kingbri 2025-07-03 12:17:09 -04:00
parent 0152a1665b
commit d339139fb6
3 changed files with 22 additions and 10 deletions

View file

@ -18,7 +18,7 @@ from common.networking import handle_request_error
from common.tabby_config import config from common.tabby_config import config
from common.optional_dependencies import dependencies from common.optional_dependencies import dependencies
from common.transformers_utils import HFModel from common.transformers_utils import HFModel
from common.utils import unwrap from common.utils import deep_merge_dict, unwrap
# Global variables for model container # Global variables for model container
container: Optional[BaseModelContainer] = None container: Optional[BaseModelContainer] = None
@ -104,7 +104,8 @@ async def apply_inline_overrides(model_dir: pathlib.Path, **kwargs):
overrides["draft_model"] = {**draft_inline_config} overrides["draft_model"] = {**draft_inline_config}
# Merge the override and model kwargs # Merge the override and model kwargs
merged_kwargs = {**overrides, **kwargs} # No need to preserve the original overrides dict
merged_kwargs = deep_merge_dict(overrides, kwargs)
return merged_kwargs return merged_kwargs

View file

@ -11,7 +11,7 @@ from ruamel.yaml.comments import CommentedMap, CommentedSeq
from ruamel.yaml.scalarstring import PreservedScalarString from ruamel.yaml.scalarstring import PreservedScalarString
from common.config_models import BaseConfigModel, TabbyConfigModel from common.config_models import BaseConfigModel, TabbyConfigModel
from common.utils import merge_dicts, filter_none_values, unwrap from common.utils import deep_merge_dicts, filter_none_values, unwrap
yaml = YAML(typ=["rt", "safe"]) yaml = YAML(typ=["rt", "safe"])
@ -36,7 +36,7 @@ class TabbyConfig(TabbyConfigModel):
# Remove None (aka unset) values from the configs and merge them together # Remove None (aka unset) values from the configs and merge them together
# This should be less expensive than pruning the entire merged dictionary # This should be less expensive than pruning the entire merged dictionary
configs = filter_none_values(configs) configs = filter_none_values(configs)
merged_config = merge_dicts(*configs) merged_config = deep_merge_dicts(*configs)
# validate and update config # validate and update config
merged_config_model = TabbyConfigModel.model_validate(merged_config) merged_config_model = TabbyConfigModel.model_validate(merged_config)

View file

@ -32,21 +32,32 @@ def filter_none_values(collection: Union[dict, list]) -> Union[dict, list]:
return collection return collection
def deep_merge_dict(dict1: Dict, dict2: Dict, copy: bool = False) -> Dict:
    """
    Recursively merge dict2 into dict1 and return the result.

    Values from dict2 take precedence. When both sides hold a dict under
    the same key, the two are merged one level deeper instead of being
    overwritten wholesale.

    Args:
        dict1: Base dictionary. Modified in place unless copy is True.
        dict2: Overriding dictionary. Never modified.
        copy: If True, dict1 is not modified; copies are made of the top
            level AND of every nested dict that gets merged into.

    Returns:
        The merged dictionary (dict1 itself when copy is False).
    """

    if copy:
        dict1 = dict1.copy()

    for key, value in dict2.items():
        if isinstance(value, dict) and key in dict1 and isinstance(dict1[key], dict):
            if copy:
                # Copy each nested level as well. A single top-level
                # .copy() is shallow: the nested dicts would still be
                # shared with (and mutated in) the caller's original.
                dict1[key] = deep_merge_dict(dict1[key], value, copy=True)
            else:
                deep_merge_dict(dict1[key], value, copy=False)
        else:
            dict1[key] = value

    return dict1
def deep_merge_dicts(*dicts: Dict) -> Dict:
    """
    Deep-merge any number of dictionaries into a single new dictionary.

    Later arguments take precedence over earlier ones. Merging is done
    in place on the accumulator at every level — inputs are deliberately
    not copied.
    """

    merged: Dict = {}
    for source in dicts:
        deep_merge_dict(merged, source)
    return merged