Remove LLMConfig, pass LLM settings directly to LLMClient

LLMConfig was an unnecessary intermediary — LLMClient now takes
model, host, and port directly as constructor args.

https://claude.ai/code/session_01AKXQBuVBsW7J1YbukDiQ7A
This commit is contained in:
Claude
2026-03-08 20:52:45 +00:00
committed by Richie Cahill
parent ab2d8dbd51
commit f11c9bed58
4 changed files with 16 additions and 38 deletions

View File

@@ -62,20 +62,6 @@ class InventoryUpdate(BaseModel):
source_type: str = "" # "receipt_photo" or "text_list"
class LLMConfig(BaseModel):
    """Connection and generation settings for one Ollama LLM backend.

    Bundles the model name, server address, and sampling temperature
    so a backend can be described as a single validated object.
    """

    model: str  # name of the Ollama model to use
    host: str  # hostname or IP address of the Ollama server
    port: int = 11434  # Ollama's default API port
    temperature: float = 0.1  # sampling temperature passed to the model

    @property
    def base_url(self) -> str:
        """Base URL of this backend's Ollama HTTP API."""
        return "http://" + self.host + ":" + str(self.port)
class BotConfig(BaseModel):
"""Top-level bot configuration."""