diff --git a/README.md b/README.md index 1c77b43..b31223c 100644 --- a/README.md +++ b/README.md @@ -90,6 +90,12 @@ All variables can be placed in the `.env` file (one `KEY=value` per line) or pro | `MISTLE_LLM_MODEL` | ❌ | Override the `litellm` model used by the intelligent tool (defaults to `mistral/mistral-small-2407`). | | `MISTRAL_API_KEY` | ❌ | API key used by `IntelligentCommunicationTool` (via `litellm`) when calling the `mistral/mistral-small-2407` model. | +## System Prompt File + +- LLM-backed components now load their system prompt from `SYSTEM.md` in the project root. +- This applies to both the intelligent agent and the intelligent communication tool. +- If `SYSTEM.md` is missing or empty, the app falls back to built-in defaults and logs a warning. + ## Tool Development - Implement new tools by subclassing `tools.Tool` and overriding `observe()` and `decide()`. diff --git a/SYSTEM.md b/SYSTEM.md new file mode 100644 index 0000000..a514404 --- /dev/null +++ b/SYSTEM.md @@ -0,0 +1,18 @@ +You are Mistle, an autonomous Agent playing the German-speaking MUD silberland. +Your goal is to level up as much as possible and explore the world. + +## Behavior +- Be concise, friendly, and informal. +- When replying to players, answer in German. +- Keep answers practical and in-character for a MUD helper bot. +- Use emoticons only when appropriate: :) ;) 8) :( ;( :P X) XD :D + +## Tool Usage +Use tools only when appropriate. Think about how you can solve problems without +using a tool. Do not use the "exploration" tool multiple times in a row. 
+ +## MUD Commands +The following MUD commands may be helpful to you: +schau - get a description of the current environment +info - examine your own stats +hilfe - get additional help in general and about specific commands \ No newline at end of file diff --git a/intelligent_agent.py b/intelligent_agent.py index 9d15aa6..30baba2 100644 --- a/intelligent_agent.py +++ b/intelligent_agent.py @@ -14,19 +14,24 @@ except ImportError: # pragma: no cover completion = None # type: ignore[assignment] from agents import Agent +from system_prompt import load_system_prompt ToolInvoker = Callable[[str], bool] CommandExecutor = Callable[[str], None] +DEFAULT_AGENT_SYSTEM_PROMPT = ( + "You are Mistle, a helpful MUD assistant. " + "You can either call tools or send plain commands to the MUD." +) + @dataclass class IntelligentAgent(Agent): """LLM-driven agent that decides between tools and raw commands.""" model: str = "mistral/mistral-large-2407" - system_prompt: str = ( - "You are Mistle, a helpful MUD assistant. " - "You can either call tools or send plain commands to the MUD." + system_prompt: str = field( + default_factory=lambda: load_system_prompt(default=DEFAULT_AGENT_SYSTEM_PROMPT) ) temperature: float = 0.7 max_output_tokens: int = 200 diff --git a/intelligent_tool.py b/intelligent_tool.py index f359f83..51f7ee3 100644 --- a/intelligent_tool.py +++ b/intelligent_tool.py @@ -11,20 +11,27 @@ try: except ImportError: # pragma: no cover - optional dependency completion = None # type: ignore[assignment] +from system_prompt import load_system_prompt from tools import Tool +DEFAULT_COMMUNICATION_SYSTEM_PROMPT = ( + "Du bist Mistle, ein hilfsbereiter MUD-Bot. " + "Antworte freundlich und knapp in deutscher Sprache." + "Sprich informell und freundlich." + "Antworte immer auf Deutsch." 
+ "Verwende emoticons, wenn es angebracht ist: :) ;) 8) :( ;( :P X) XD :D" +) + @dataclass class IntelligentCommunicationTool(Tool): """Tool that uses a language model to answer private tells.""" model: str = "mistral/mistral-tiny" - system_prompt: str = ( - "Du bist Mistle, ein hilfsbereiter MUD-Bot. " - "Antworte freundlich und knapp in deutscher Sprache." - "Sprich informell und freundlich." - "Antworte immer auf Deutsch." - "Verwende emoticons, wenn es angebracht ist: :) ;) 8) :( ;( :P X) XD :D" + system_prompt: str = field( + default_factory=lambda: load_system_prompt( + default=DEFAULT_COMMUNICATION_SYSTEM_PROMPT + ) ) temperature: float = 0.7 max_output_tokens: int = 120 diff --git a/system_prompt.py b/system_prompt.py new file mode 100644 index 0000000..097689d --- /dev/null +++ b/system_prompt.py @@ -0,0 +1,34 @@ +from __future__ import annotations + +import sys +from pathlib import Path + +_WARNED_PATHS: set[Path] = set() + + +def load_system_prompt(*, default: str, path: str = "SYSTEM.md") -> str: + """Load the system prompt from disk, with a fallback default.""" + prompt_path = Path(path) + try: + content = prompt_path.read_text(encoding="utf-8").strip() + except FileNotFoundError: + _warn_once(prompt_path, "not found") + return default + except OSError as exc: # pragma: no cover - environment specific + _warn_once(prompt_path, f"unreadable ({exc})") + return default + + if not content: + _warn_once(prompt_path, "empty") + return default + return content + + +def _warn_once(path: Path, reason: str) -> None: + if path in _WARNED_PATHS: + return + _WARNED_PATHS.add(path) + print( + f"[Prompt] {path} is {reason}; using built-in default prompt.", + file=sys.stderr, + )