refactor: moved intelligent communication agent to its own file

Daniel Eder 2025-09-27 19:22:24 +02:00
parent 77fcb26a38
commit 81f6c38df1
3 changed files with 101 additions and 89 deletions

agent.py

@@ -1,17 +1,11 @@
from __future__ import annotations

import re
import sys
from abc import ABC, abstractmethod
from collections import deque
from dataclasses import dataclass, field
from typing import Deque, Optional, Pattern, Set, Tuple

try:
    from litellm import completion
except ImportError:  # pragma: no cover - optional dependency
    completion = None  # type: ignore[assignment]


class Agent(ABC):
    """Interface for autonomous Telnet actors."""
@@ -121,80 +115,3 @@ class CommunicationAgent(Agent):
        print(f"[Agent] Replying to {player}")
        return reply


@dataclass
class IntelligentCommunicationAgent(Agent):
    """Agent that uses a language model to answer private tells."""

    model: str = "mistral/mistral-tiny"
    system_prompt: str = (
        "Du bist Mistle, ein hilfsbereiter MUD-Bot. "
        "Antworte freundlich und knapp in deutscher Sprache."
    )
    temperature: float = 0.7
    max_output_tokens: int = 120
    fallback_reply: str = "Hallo! Ich bin Mistle und ein Bot."
    tell_pattern: Pattern[str] = field(
        default_factory=lambda: re.compile(
            r"^(?P<player>[^\s]+) teilt (d|D)ir mit: (?P<message>.+)$",
            re.MULTILINE,
        )
    )
    last_output: str = field(default="", init=False)
    pending_replies: Deque[Tuple[str, str]] = field(default_factory=deque, init=False)

    def observe(self, output: str) -> None:
        if not output:
            return
        self.last_output = output
        for match in self.tell_pattern.finditer(output):
            player = match.group("player").strip()
            message = match.group("message").strip()
            if not player:
                continue
            self.pending_replies.append((player, message))
            print(f"[Agent] Received message from {player}: {message}")

    def decide(self) -> Optional[str]:
        if not self.pending_replies:
            return None
        player, message = self.pending_replies.popleft()
        reply_text = self._generate_reply(player, message)
        reply = f"teile {player} mit {reply_text}"
        print(f"[Agent] Replying to {player} with model output")
        return reply

    def _generate_reply(self, player: str, message: str) -> str:
        if completion is None:
            print(
                "[Agent] litellm is not installed; falling back to default reply",
                file=sys.stderr,
            )
            return self.fallback_reply
        try:
            response = completion(
                model=self.model,
                messages=[
                    {"role": "system", "content": self.system_prompt},
                    {
                        "role": "user",
                        "content": (
                            f"Spieler {player} schreibt: {message}\n"
                            "Formuliere eine kurze, freundliche Antwort."
                        ),
                    },
                ],
                temperature=self.temperature,
                max_tokens=self.max_output_tokens,
            )
        except Exception as exc:  # pragma: no cover - network/runtime errors
            print(f"[Agent] Model call failed: {exc}", file=sys.stderr)
            return self.fallback_reply
        try:
            content = response["choices"][0]["message"]["content"].strip()
        except (KeyError, IndexError, TypeError):  # pragma: no cover - defensive
            return self.fallback_reply
        return content or self.fallback_reply

app.py (15 changes)

@@ -153,16 +153,19 @@ def build_agent(agent_spec: str) -> Agent:
         return SimpleAgent()
     builtin_agents = {
-        "explore": "ExploreAgent",
-        "communication": "CommunicationAgent",
-        "intelligent": "IntelligentCommunicationAgent",
-        "intelligentcommunication": "IntelligentCommunicationAgent",
+        "explore": ("agent", "ExploreAgent"),
+        "communication": ("agent", "CommunicationAgent"),
+        "intelligent": ("intelligent_agent", "IntelligentCommunicationAgent"),
+        "intelligentcommunication": (
+            "intelligent_agent",
+            "IntelligentCommunicationAgent",
+        ),
     }
     if key in builtin_agents:
-        class_name = builtin_agents[key]
+        module_name, class_name = builtin_agents[key]
         try:
-            module = import_module("agent")
+            module = import_module(module_name)
             agent_cls = getattr(module, class_name)
         except AttributeError as exc:  # pragma: no cover - optional dependency
             raise RuntimeError(f"{class_name} is not available in agent module") from exc
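
For illustration only (not part of this commit), a minimal sketch of how the new (module, class) mapping resolves an agent spec; it assumes agent.py and intelligent_agent.py from this repository are importable and reuses the names from the diff above:

from importlib import import_module

# Each spec now names the defining module as well as the class, so optional
# agents such as IntelligentCommunicationAgent can live in their own file.
builtin_agents = {
    "communication": ("agent", "CommunicationAgent"),
    "intelligent": ("intelligent_agent", "IntelligentCommunicationAgent"),
}

module_name, class_name = builtin_agents["intelligent"]
agent_cls = getattr(import_module(module_name), class_name)  # resolved lazily
agent = agent_cls()  # an IntelligentCommunicationAgent instance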

intelligent_agent.py (new file, 92 lines)

@@ -0,0 +1,92 @@
from __future__ import annotations

import re
import sys
from collections import deque
from dataclasses import dataclass, field
from typing import Deque, Optional, Pattern, Tuple

try:
    from litellm import completion
except ImportError:  # pragma: no cover - optional dependency
    completion = None  # type: ignore[assignment]

from agent import Agent


@dataclass
class IntelligentCommunicationAgent(Agent):
    """Agent that uses a language model to answer private tells."""

    model: str = "mistral/mistral-tiny"
    system_prompt: str = (
        "Du bist Mistle, ein hilfsbereiter MUD-Bot. "
        "Antworte freundlich und knapp in deutscher Sprache."
    )
    temperature: float = 0.7
    max_output_tokens: int = 120
    fallback_reply: str = "Hallo! Ich bin Mistle und ein Bot."
    tell_pattern: Pattern[str] = field(
        default_factory=lambda: re.compile(
            r"^(?P<player>[^\s]+) teilt (d|D)ir mit: (?P<message>.+)$",
            re.MULTILINE,
        )
    )
    last_output: str = field(default="", init=False)
    pending_replies: Deque[Tuple[str, str]] = field(default_factory=deque, init=False)

    def observe(self, output: str) -> None:
        if not output:
            return
        self.last_output = output
        for match in self.tell_pattern.finditer(output):
            player = match.group("player").strip()
            message = match.group("message").strip()
            if not player:
                continue
            self.pending_replies.append((player, message))
            print(f"[Agent] Received message from {player}: {message}")

    def decide(self) -> Optional[str]:
        if not self.pending_replies:
            return None
        player, message = self.pending_replies.popleft()
        reply_text = self._generate_reply(player, message)
        reply = f"teile {player} mit {reply_text}"
        print(f"[Agent] Replying to {player} with model output")
        return reply

    def _generate_reply(self, player: str, message: str) -> str:
        if completion is None:
            print(
                "[Agent] litellm is not installed; falling back to default reply",
                file=sys.stderr,
            )
            return self.fallback_reply
        try:
            response = completion(
                model=self.model,
                messages=[
                    {"role": "system", "content": self.system_prompt},
                    {
                        "role": "user",
                        "content": (
                            f"Spieler {player} schreibt: {message}\n"
                            "Formuliere eine kurze, freundliche Antwort."
                        ),
                    },
                ],
                temperature=self.temperature,
                max_tokens=self.max_output_tokens,
            )
        except Exception as exc:  # pragma: no cover - network/runtime errors
            print(f"[Agent] Model call failed: {exc}", file=sys.stderr)
            return self.fallback_reply
        try:
            content = response["choices"][0]["message"]["content"].strip()
        except (KeyError, IndexError, TypeError):  # pragma: no cover - defensive
            return self.fallback_reply
        return content or self.fallback_reply
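
For illustration only (not part of this commit), a minimal usage sketch of the relocated agent, assuming agent.Agent declares only observe and decide as abstract; without litellm installed, _generate_reply falls back to fallback_reply, so the snippet also runs offline:

from intelligent_agent import IntelligentCommunicationAgent

agent = IntelligentCommunicationAgent()

# A private tell as the MUD would deliver it ("Gandalf tells you: ...").
agent.observe("Gandalf teilt dir mit: Hallo, bist du ein Bot?")

# Pops the queued tell and returns the outgoing command, e.g.
# "teile Gandalf mit Hallo! Ich bin Mistle und ein Bot." when falling back.
print(agent.decide())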