From 81f6c38df17c5ef50f82ed195dd73fe9162ea07a Mon Sep 17 00:00:00 2001
From: Daniel Eder
Date: Sat, 27 Sep 2025 19:22:24 +0200
Subject: [PATCH] refactor: move intelligent communication agent to its own
 file

---
 agent.py             | 83 ----------------------------------------
 app.py               | 19 ++++++-----
 intelligent_agent.py | 92 ++++++++++++++++++++++++++++++++++++++++++++
 3 files changed, 103 insertions(+), 91 deletions(-)
 create mode 100644 intelligent_agent.py

diff --git a/agent.py b/agent.py
index 747d9f7..9d59cf8 100644
--- a/agent.py
+++ b/agent.py
@@ -1,17 +1,11 @@
 from __future__ import annotations
 
 import re
-import sys
 from abc import ABC, abstractmethod
 from collections import deque
 from dataclasses import dataclass, field
 from typing import Deque, Optional, Pattern, Set, Tuple
 
-try:
-    from litellm import completion
-except ImportError:  # pragma: no cover - optional dependency
-    completion = None  # type: ignore[assignment]
-
 
 class Agent(ABC):
     """Interface for autonomous Telnet actors."""
@@ -121,80 +115,3 @@ class CommunicationAgent(Agent):
 
         print(f"[Agent] Replying to {player}")
         return reply
-
-@dataclass
-class IntelligentCommunicationAgent(Agent):
-    """Agent that uses a language model to answer private tells."""
-
-    model: str = "mistral/mistral-tiny"
-    system_prompt: str = (
-        "Du bist Mistle, ein hilfsbereiter MUD-Bot. "
-        "Antworte freundlich und knapp in deutscher Sprache."
-    )
-    temperature: float = 0.7
-    max_output_tokens: int = 120
-    fallback_reply: str = "Hallo! Ich bin Mistle und ein Bot."
-    tell_pattern: Pattern[str] = field(
-        default_factory=lambda: re.compile(
-            r"^(?P<player>[^\s]+) teilt (d|D)ir mit: (?P<message>.+)$",
-            re.MULTILINE,
-        )
-    )
-    last_output: str = field(default="", init=False)
-    pending_replies: Deque[Tuple[str, str]] = field(default_factory=deque, init=False)
-
-    def observe(self, output: str) -> None:
-        if not output:
-            return
-        self.last_output = output
-        for match in self.tell_pattern.finditer(output):
-            player = match.group("player").strip()
-            message = match.group("message").strip()
-            if not player:
-                continue
-            self.pending_replies.append((player, message))
-            print(f"[Agent] Received message from {player}: {message}")
-
-    def decide(self) -> Optional[str]:
-        if not self.pending_replies:
-            return None
-        player, message = self.pending_replies.popleft()
-        reply_text = self._generate_reply(player, message)
-        reply = f"teile {player} mit {reply_text}"
-        print(f"[Agent] Replying to {player} with model output")
-        return reply
-
-    def _generate_reply(self, player: str, message: str) -> str:
-        if completion is None:
-            print(
-                "[Agent] litellm is not installed; falling back to default reply",
-                file=sys.stderr,
-            )
-            return self.fallback_reply
-
-        try:
-            response = completion(
-                model=self.model,
-                messages=[
-                    {"role": "system", "content": self.system_prompt},
-                    {
-                        "role": "user",
-                        "content": (
-                            f"Spieler {player} schreibt: {message}\n"
-                            "Formuliere eine kurze, freundliche Antwort."
-                        ),
-                    },
-                ],
-                temperature=self.temperature,
-                max_tokens=self.max_output_tokens,
-            )
-        except Exception as exc:  # pragma: no cover - network/runtime errors
-            print(f"[Agent] Model call failed: {exc}", file=sys.stderr)
-            return self.fallback_reply
-
-        try:
-            content = response["choices"][0]["message"]["content"].strip()
-        except (KeyError, IndexError, TypeError):  # pragma: no cover - defensive
-            return self.fallback_reply
-
-        return content or self.fallback_reply
diff --git a/app.py b/app.py
index 705b2c8..1401753 100644
--- a/app.py
+++ b/app.py
@@ -153,16 +153,19 @@ def build_agent(agent_spec: str) -> Agent:
         return SimpleAgent()
 
     builtin_agents = {
-        "explore": "ExploreAgent",
-        "communication": "CommunicationAgent",
-        "intelligent": "IntelligentCommunicationAgent",
-        "intelligentcommunication": "IntelligentCommunicationAgent",
+        "explore": ("agent", "ExploreAgent"),
+        "communication": ("agent", "CommunicationAgent"),
+        "intelligent": ("intelligent_agent", "IntelligentCommunicationAgent"),
+        "intelligentcommunication": (
+            "intelligent_agent",
+            "IntelligentCommunicationAgent",
+        ),
     }
 
     if key in builtin_agents:
-        class_name = builtin_agents[key]
+        module_name, class_name = builtin_agents[key]
         try:
-            module = import_module("agent")
+            module = import_module(module_name)
             agent_cls = getattr(module, class_name)
-        except AttributeError as exc:  # pragma: no cover - optional dependency
-            raise RuntimeError(f"{class_name} is not available in agent module") from exc
+        except (ImportError, AttributeError) as exc:  # pragma: no cover - optional dependency
+            raise RuntimeError(f"{class_name} is not available in {module_name}") from exc
diff --git a/intelligent_agent.py b/intelligent_agent.py
new file mode 100644
index 0000000..6e2c734
--- /dev/null
+++ b/intelligent_agent.py
@@ -0,0 +1,92 @@
+from __future__ import annotations
+
+import re
+import sys
+from collections import deque
+from dataclasses import dataclass, field
+from typing import Deque, Optional, Pattern, Tuple
+
+try:
+    from litellm import completion
+except ImportError:  # pragma: no cover - optional dependency
+    completion = None  # type: ignore[assignment]
+
+from agent import Agent
+
+
+@dataclass
+class IntelligentCommunicationAgent(Agent):
+    """Agent that uses a language model to answer private tells."""
+
+    model: str = "mistral/mistral-tiny"
+    system_prompt: str = (
+        "Du bist Mistle, ein hilfsbereiter MUD-Bot. "
+        "Antworte freundlich und knapp in deutscher Sprache."
+    )
+    temperature: float = 0.7
+    max_output_tokens: int = 120
+    fallback_reply: str = "Hallo! Ich bin Mistle und ein Bot."
+    tell_pattern: Pattern[str] = field(
+        default_factory=lambda: re.compile(
+            r"^(?P<player>[^\s]+) teilt (d|D)ir mit: (?P<message>.+)$",
+            re.MULTILINE,
+        )
+    )
+    last_output: str = field(default="", init=False)
+    pending_replies: Deque[Tuple[str, str]] = field(default_factory=deque, init=False)
+
+    def observe(self, output: str) -> None:
+        if not output:
+            return
+        self.last_output = output
+        for match in self.tell_pattern.finditer(output):
+            player = match.group("player").strip()
+            message = match.group("message").strip()
+            if not player:
+                continue
+            self.pending_replies.append((player, message))
+            print(f"[Agent] Received message from {player}: {message}")
+
+    def decide(self) -> Optional[str]:
+        if not self.pending_replies:
+            return None
+        player, message = self.pending_replies.popleft()
+        reply_text = self._generate_reply(player, message)
+        reply = f"teile {player} mit {reply_text}"
+        print(f"[Agent] Replying to {player} with model output")
+        return reply
+
+    def _generate_reply(self, player: str, message: str) -> str:
+        if completion is None:
+            print(
+                "[Agent] litellm is not installed; falling back to default reply",
+                file=sys.stderr,
+            )
+            return self.fallback_reply
+
+        try:
+            response = completion(
+                model=self.model,
+                messages=[
+                    {"role": "system", "content": self.system_prompt},
+                    {
+                        "role": "user",
+                        "content": (
+                            f"Spieler {player} schreibt: {message}\n"
+                            "Formuliere eine kurze, freundliche Antwort."
+                        ),
+                    },
+                ],
+                temperature=self.temperature,
+                max_tokens=self.max_output_tokens,
+            )
+        except Exception as exc:  # pragma: no cover - network/runtime errors
+            print(f"[Agent] Model call failed: {exc}", file=sys.stderr)
+            return self.fallback_reply
+
+        try:
+            content = response["choices"][0]["message"]["content"].strip()
+        except (KeyError, IndexError, TypeError):  # pragma: no cover - defensive
+            return self.fallback_reply
+
+        return content or self.fallback_reply
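
Not part of the patch: a minimal smoke test for the move. This is a sketch
under assumptions the diff does not confirm: that build_agent("intelligent")
instantiates the resolved class with default arguments (the app.py hunk
suggests it, but the return statement falls outside the shown context), and
that litellm is not installed, so _generate_reply() takes the fallback path.
The player name "Gandalf" is invented for the example.

    from app import build_agent
    from intelligent_agent import IntelligentCommunicationAgent

    # The registry should now resolve "intelligent" via the new module.
    agent = build_agent("intelligent")
    assert isinstance(agent, IntelligentCommunicationAgent)

    # Feed one private tell in the MUD's German format; observe() parses it
    # with tell_pattern and queues (player, message) for decide().
    agent.observe("Gandalf teilt dir mit: Hallo Mistle!")
    assert agent.pending_replies[-1] == ("Gandalf", "Hallo Mistle!")

    # decide() pops the queue and wraps the reply in a "teile ... mit" tell;
    # without litellm the reply text is fallback_reply.
    assert agent.decide() == f"teile Gandalf mit {agent.fallback_reply}"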