feat: intelligent AI-enabled communication agent

This commit is contained in:
Daniel Eder 2025-09-27 17:21:25 +02:00
parent 1daffc871e
commit 77fcb26a38
6 changed files with 1968 additions and 8 deletions

3
.env
View file

@@ -5,4 +5,5 @@ MISTLE_PASSWORD=sl-mudbot
MISTLE_LOGIN_PROMPT=Wie heisst Du denn ("neu" fuer neuen Spieler) ?
MISTLE_EXIT_COMMAND=schlaf ein
MISTLE_AGENT_MODE=true
MISTLE_AGENT=communication
MISTLE_AGENT=intelligent
MISTRAL_API_KEY=eohe7Di7vuznINFqW5VyQIPenbyPX3le

View file

@@ -8,7 +8,7 @@ Python-based Telnet helper for connecting to MUD servers, handling login flows,
- Loads credentials and connection settings from a local `.env` file.
- Interactive console session that mirrors server output and lets you type commands directly.
- Optional always-on agent mode plus an on-demand `#execute <agent>` escape hatch for ad-hoc automations.
- Built-in agents (`SimpleAgent`, `ExploreAgent`) with a pluggable interface for custom behaviours.
- Built-in agents (`SimpleAgent`, `ExploreAgent`, `CommunicationAgent`, `IntelligentCommunicationAgent`) with a pluggable interface for custom behaviours.
## Requirements
@@ -54,7 +54,8 @@ All variables can be placed in the `.env` file (one `KEY=value` per line) or pro
| `MISTLE_LOGIN_PROMPT` | ❌ | Prompt string that signals the client to send credentials (e.g., `"Name:"`). When omitted, the client just waits for the initial banner. |
| `MISTLE_EXIT_COMMAND` | ❌ | Command issued during graceful shutdown (after pressing `Ctrl-C`). Useful for `quit`/`save` macros. |
| `MISTLE_AGENT_MODE` | ❌ | Enable full-time agent thread when set to truthy values (`1`, `true`, `yes`, `on`). Defaults to interactive-only mode. |
| `MISTLE_AGENT` | ❌ | Select which agent class to instantiate when agent mode is active. Accepted values: `simple` (default), `explore` (requires `ExploreAgent` inside `agent.py`), or custom spec `module:ClassName`. |
| `MISTLE_AGENT` | ❌ | Select which agent class to instantiate when agent mode is active. Accepted values: `simple` (default), `explore`, `communication`, `intelligent`/`intelligentcommunication` (LLM-backed), or custom spec `module:ClassName`. |
| `MISTRAL_API_KEY` | ❌ | API key used by `IntelligentCommunicationAgent` (via `litellm`) when calling the `mistral/mistral-small-2407` model. |
## Agent Development
@@ -65,11 +66,12 @@ All variables can be placed in the `.env` file (one `KEY=value` per line) or pro
- `observe(output)` receives the latest server text; `decide()` returns the next command string or `None` to stay idle.
- Commands issued by the agent are throttled to one per second so manual commands can still interleave smoothly.
- `ExploreAgent` showcases a richer workflow: it sends `schau`, identifies German nouns, inspects each with `untersuche`, and prints `[Agent]` progress updates like `Explored 3/7 — untersuche Tisch`.
- `CommunicationAgent` auto-replies to every direct tell with a canned greeting, while `IntelligentCommunicationAgent` routes each tell through `litellm` (default model `mistral/mistral-small-2407`) to craft a contextual answer.
## On-Demand Agents
- When `MISTLE_AGENT_MODE` is **off**, you can trigger an ephemeral agent at any time with `#execute <agent_spec>`.
- The syntax accepts the same values as `MISTLE_AGENT` and reuses the `build_agent` helper, so `#execute simple`, `#execute explore`, or `#execute mypackage.mymodule:CustomAgent` are all valid.
- The syntax accepts the same values as `MISTLE_AGENT` and reuses the `build_agent` helper, so `#execute simple`, `#execute explore`, `#execute intelligent`, or `#execute mypackage.mymodule:CustomAgent` are all valid.
- On-demand runs share the current session, respect the one-command-per-second limit, and stop automatically after a few seconds of inactivity.
## Danger Zone

View file

@@ -1,11 +1,17 @@
from __future__ import annotations
import re
import sys
from abc import ABC, abstractmethod
from collections import deque
from dataclasses import dataclass, field
from typing import Deque, Optional, Pattern, Set, Tuple
try:
from litellm import completion
except ImportError: # pragma: no cover - optional dependency
completion = None # type: ignore[assignment]
class Agent(ABC):
"""Interface for autonomous Telnet actors."""
@@ -114,3 +120,81 @@ class CommunicationAgent(Agent):
reply = self.reply_template.format(player=player)
print(f"[Agent] Replying to {player}")
return reply
@dataclass
class IntelligentCommunicationAgent(Agent):
    """Agent that routes private tells through a language model.

    Server output is scanned for German direct tells of the form
    ``<player> teilt Dir mit: <message>``; each match is queued and answered
    via ``litellm.completion``. If ``litellm`` is not installed, or the model
    call or response parsing fails, ``fallback_reply`` is sent instead so the
    agent never goes silent.
    """

    # Default aligned with the model documented in the README
    # (`mistral/mistral-small-2407`); the previous default
    # (`mistral/mistral-tiny`) contradicted the documentation.
    model: str = "mistral/mistral-small-2407"
    system_prompt: str = (
        "Du bist Mistle, ein hilfsbereiter MUD-Bot. "
        "Antworte freundlich und knapp in deutscher Sprache."
    )
    temperature: float = 0.7
    max_output_tokens: int = 120
    fallback_reply: str = "Hallo! Ich bin Mistle und ein Bot."
    # Matches lines like "Gandalf teilt Dir mit: Hallo" ("dir"/"Dir" both
    # accepted); the named groups feed the reply queue. Non-capturing group
    # avoids an unused positional capture.
    tell_pattern: Pattern[str] = field(
        default_factory=lambda: re.compile(
            r"^(?P<player>[^\s]+) teilt (?:d|D)ir mit: (?P<message>.+)$",
            re.MULTILINE,
        )
    )
    # Most recent raw server output, retained for inspection/debugging.
    last_output: str = field(default="", init=False)
    # FIFO of (player, message) pairs awaiting a reply.
    pending_replies: Deque[Tuple[str, str]] = field(default_factory=deque, init=False)

    def observe(self, output: str) -> None:
        """Scan *output* for direct tells and enqueue each for a reply."""
        if not output:
            return
        self.last_output = output
        for match in self.tell_pattern.finditer(output):
            player = match.group("player").strip()
            message = match.group("message").strip()
            if not player:
                continue
            self.pending_replies.append((player, message))
            print(f"[Agent] Received message from {player}: {message}")

    def decide(self) -> Optional[str]:
        """Return the next MUD command to send, or ``None`` when idle."""
        if not self.pending_replies:
            return None
        player, message = self.pending_replies.popleft()
        reply_text = self._generate_reply(player, message)
        reply = f"teile {player} mit {reply_text}"
        print(f"[Agent] Replying to {player} with model output")
        return reply

    def _generate_reply(self, player: str, message: str) -> str:
        """Ask the LLM for a short answer; fall back to the canned reply on any failure."""
        if completion is None:
            print(
                "[Agent] litellm is not installed; falling back to default reply",
                file=sys.stderr,
            )
            return self.fallback_reply
        try:
            response = completion(
                model=self.model,
                messages=[
                    {"role": "system", "content": self.system_prompt},
                    {
                        "role": "user",
                        "content": (
                            f"Spieler {player} schreibt: {message}\n"
                            "Formuliere eine kurze, freundliche Antwort."
                        ),
                    },
                ],
                temperature=self.temperature,
                max_tokens=self.max_output_tokens,
            )
        except Exception as exc:  # pragma: no cover - network/runtime errors
            print(f"[Agent] Model call failed: {exc}", file=sys.stderr)
            return self.fallback_reply
        try:
            # AttributeError guards a None ``content`` (e.g. tool-call
            # responses), which previously escaped as a crash.
            content = response["choices"][0]["message"]["content"].strip()
        except (KeyError, IndexError, TypeError, AttributeError):  # pragma: no cover - defensive
            return self.fallback_reply
        return content or self.fallback_reply

14
app.py
View file

@@ -152,12 +152,20 @@ def build_agent(agent_spec: str) -> Agent:
if key == "simple":
return SimpleAgent()
if key in {"explore", "communication"}:
builtin_agents = {
"explore": "ExploreAgent",
"communication": "CommunicationAgent",
"intelligent": "IntelligentCommunicationAgent",
"intelligentcommunication": "IntelligentCommunicationAgent",
}
if key in builtin_agents:
class_name = builtin_agents[key]
try:
module = import_module("agent")
agent_cls = getattr(module, f"{key.capitalize()}Agent")
agent_cls = getattr(module, class_name)
except AttributeError as exc: # pragma: no cover - optional dependency
raise RuntimeError(f"{key.capitalize()}Agent is not available in agent module") from exc
raise RuntimeError(f"{class_name} is not available in agent module") from exc
return _instantiate_agent(agent_cls, normalized)
if ":" in normalized:

View file

@@ -4,4 +4,6 @@ version = "0.1.0"
description = "Add your description here"
readme = "README.md"
requires-python = ">=3.9"
dependencies = []
dependencies = [
"litellm>=1.77.4",
]

1863
uv.lock generated Normal file

File diff suppressed because it is too large Load diff