feat: configurable llm

Daniel Eder 2025-09-27 20:53:10 +02:00
parent bd884d938f
commit 2b419a41c9
5 changed files with 32 additions and 9 deletions

.env (3 changes)

@@ -6,4 +6,5 @@ MISTLE_LOGIN_PROMPT=Wie heisst Du denn ("neu" fuer neuen Spieler) ?
 MISTLE_EXIT_COMMAND=schlaf ein
 MISTLE_AGENT_MODE=true
 MISTLE_AGENT=intelligent
-MISTRAL_API_KEY=eohe7Di7vuznINFqW5VyQIPenbyPX3le
+MISTRAL_API_KEY=eohe7Di7vuznINFqW5VyQIPenbyPX3le
+MISTLE_LLM_MODEL=mistral/mistral-small
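The new `MISTLE_LLM_MODEL` key only takes effect if something loads `.env` into the process environment. A minimal sketch, assuming `python-dotenv` as the loader (an assumption; this diff does not show how `.env` is read):

```python
import os

from dotenv import load_dotenv  # assumed loader; any .env mechanism works

load_dotenv()  # copies KEY=value pairs from .env into os.environ
# Mirrors the fallback used in app.py below.
model = os.environ.get("MISTLE_LLM_MODEL", "mistral/mistral-tiny")
print(f"Configured LLM model: {model}")
```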

.vscode/settings.json (vendored, new file, 3 changes)

@@ -0,0 +1,3 @@
+{
+    "wolf.disableHotModeWarning": true
+}

README.md

@@ -55,6 +55,7 @@ All variables can be placed in the `.env` file (one `KEY=value` per line) or pro
 | `MISTLE_EXIT_COMMAND` | ❌ | Command issued during graceful shutdown (after pressing `Ctrl-C`). Useful for `quit`/`save` macros. |
 | `MISTLE_AGENT_MODE` | ❌ | Enables the full-time agent thread when set to a truthy value (`1`, `true`, `yes`, `on`; see the sketch below). Defaults to interactive-only mode. |
 | `MISTLE_AGENT` | ❌ | Selects which agent class to instantiate when agent mode is active. Accepted values: `simple` (default), `explore`, `communication`, `intelligent`/`intelligentcommunication` (LLM-backed), or a custom `module:ClassName` spec. |
+| `MISTLE_LLM_MODEL` | ❌ | Overrides the `litellm` model used by the intelligent agent (falls back to `mistral/mistral-tiny` in `app.py`). |
 | `MISTRAL_API_KEY` | ❌ | API key used by `IntelligentCommunicationAgent` (via `litellm`) when calling the configured Mistral model. |
 
 ## Agent Development
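As referenced in the `MISTLE_AGENT_MODE` row above, a sketch of the documented truthy parsing (the helper name `_env_flag` is hypothetical, not taken from this diff):

```python
import os

def _env_flag(name: str, default: bool = False) -> bool:
    # Hypothetical helper mirroring the documented truthy values.
    value = os.environ.get(name)
    if value is None:
        return default
    return value.strip().lower() in {"1", "true", "yes", "on"}

agent_mode = _env_flag("MISTLE_AGENT_MODE")  # True for 1/true/yes/on
```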

app.py (26 changes)

@@ -153,23 +153,32 @@ def build_agent(agent_spec: str) -> Agent:
         return SimpleAgent()
 
     builtin_agents = {
-        "explore": ("agent", "ExploreAgent"),
-        "communication": ("agent", "CommunicationAgent"),
-        "intelligent": ("intelligent_agent", "IntelligentCommunicationAgent"),
+        "explore": ("agent", "ExploreAgent", {}),
+        "communication": ("agent", "CommunicationAgent", {}),
+        "intelligent": (
+            "intelligent_agent",
+            "IntelligentCommunicationAgent",
+            {"model": os.environ.get("MISTLE_LLM_MODEL", "mistral/mistral-tiny")},
+        ),
+        "intelligentcommunication": (
+            "intelligent_agent",
+            "IntelligentCommunicationAgent",
+            {"model": os.environ.get("MISTLE_LLM_MODEL", "mistral/mistral-tiny")},
+        ),
     }
 
     if key in builtin_agents:
-        module_name, class_name = builtin_agents[key]
+        module_name, class_name, kwargs = builtin_agents[key]
         try:
             module = import_module(module_name)
             agent_cls = getattr(module, class_name)
         except AttributeError as exc:  # pragma: no cover - optional dependency
             raise RuntimeError(f"{class_name} is not available in agent module") from exc
-        return _instantiate_agent(agent_cls, normalized)
+        agent = _instantiate_agent(agent_cls, normalized, kwargs)
+        model_name = kwargs.get("model") if kwargs else None
+        if model_name:
+            print(f"[Agent] Using LLM model: {model_name}")
+        return agent
 
     if ":" in normalized:
         module_name, class_name = normalized.split(":", 1)
@@ -182,11 +191,14 @@ def build_agent(agent_spec: str) -> Agent:
     raise RuntimeError(f"Unknown agent spec '{agent_spec}'.")
 
 
-def _instantiate_agent(agent_cls: Type[Agent], agent_spec: str) -> Agent:
+def _instantiate_agent(
+    agent_cls: Type[Agent], agent_spec: str, kwargs: Optional[dict] = None
+) -> Agent:
     if not issubclass(agent_cls, Agent):
         raise RuntimeError(f"{agent_spec} is not an Agent subclass")
     try:
-        return agent_cls()
+        kwargs = kwargs or {}
+        return agent_cls(**kwargs)
     except TypeError as exc:
         raise RuntimeError(f"Failed to instantiate {agent_spec}: {exc}") from exc

intelligent_agent.py

@@ -54,7 +54,7 @@ class IntelligentCommunicationAgent(Agent):
         if not self.pending_replies:
             return None
         player, _ = self.pending_replies.popleft()
-        reply_text = self._generate_reply(player)
+        reply_text = self._sanitize_reply(self._generate_reply(player))
         self._append_history(player, "assistant", reply_text)
         reply = f"teile {player} mit {reply_text}"
         print(f"[Agent] Replying to {player} with model output")
@@ -86,6 +86,12 @@ class IntelligentCommunicationAgent(Agent):
         return content or self.fallback_reply
 
+    def _sanitize_reply(self, text: str) -> str:
+        if not text:
+            return self.fallback_reply
+        collapsed = " ".join(text.split())
+        return collapsed or self.fallback_reply
+
     def _build_messages(self, player: str) -> list[dict[str, str]]:
         history = self.conversation_history.get(player)
         messages: list[dict[str, str]] = [{"role": "system", "content": self.system_prompt}]
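To make the new guard concrete, a standalone sketch of the same collapsing logic (the `fallback` default here is a placeholder, not the class's actual `fallback_reply`):

```python
def sanitize_reply(text: str, fallback: str = "...") -> str:
    # Empty or whitespace-only model output falls back to a canned reply;
    # runs of whitespace (including newlines) collapse to single spaces so
    # the reply fits on one "teile <player> mit ..." command line.
    if not text:
        return fallback
    collapsed = " ".join(text.split())
    return collapsed or fallback

assert sanitize_reply("Hallo,\n  wie geht's?") == "Hallo, wie geht's?"
assert sanitize_reply("   ") == "..."
```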