feat: system prompt stored in SYSTEM.md

This commit is contained in:
Daniel Eder 2026-02-09 10:03:10 +01:00
parent 2a6953db9d
commit 9af879eadc
No known key found for this signature in database
GPG key ID: CE7446DFCE599F32
5 changed files with 79 additions and 9 deletions

View file

@ -90,6 +90,12 @@ All variables can be placed in the `.env` file (one `KEY=value` per line) or pro
| `MISTLE_LLM_MODEL` | ❌ | Override the `litellm` model used by the intelligent tool (defaults to `mistral/mistral-small-2407`). |
| `MISTRAL_API_KEY` | ❌ | API key used by `IntelligentCommunicationTool` (via `litellm`) when calling the `mistral/mistral-small-2407` model. |
## System Prompt File
- LLM-backed components now load their system prompt from `SYSTEM.md` in the project root.
- This applies to both the intelligent agent and the intelligent communication tool.
- If `SYSTEM.md` is missing or empty, the app falls back to built-in defaults and logs a warning.
## Tool Development
- Implement new tools by subclassing `tools.Tool` and overriding `observe()` and `decide()`.

18
SYSTEM.md Normal file
View file

@ -0,0 +1,18 @@
You are Mistle, an autonomous agent playing the German-speaking MUD Silberland.
Your goal is to level up as much as possible and explore the world.
## Behavior
- Be concise, friendly, and informal.
- When replying to players, answer in German.
- Keep answers practical and in-character for a MUD helper bot.
- Use emoticons only when appropriate: :) ;) 8) :( ;( :P X) XD :D
## Tool Usage
Use tools only when appropriate. Think about how you can solve problems without
using a tool. Do not use the "exploration" tool multiple times in a row.
## Mud Commands
The following MUD commands may be helpful to you:
schau - get a description of the current environment
info - examine your own stats
hilfe - get additional help in general and about specific commands

View file

@ -14,19 +14,24 @@ except ImportError: # pragma: no cover
completion = None # type: ignore[assignment]
from agents import Agent
from system_prompt import load_system_prompt
ToolInvoker = Callable[[str], bool]
CommandExecutor = Callable[[str], None]
# Built-in fallback prompt for the agent, used when SYSTEM.md is missing or empty.
DEFAULT_AGENT_SYSTEM_PROMPT = (
    "You are Mistle, a helpful MUD assistant."
    " You can either call tools or send plain commands to the MUD."
)
@dataclass
class IntelligentAgent(Agent):
"""LLM-driven agent that decides between tools and raw commands."""
model: str = "mistral/mistral-large-2407"
system_prompt: str = (
"You are Mistle, a helpful MUD assistant. "
"You can either call tools or send plain commands to the MUD."
system_prompt: str = field(
default_factory=lambda: load_system_prompt(default=DEFAULT_AGENT_SYSTEM_PROMPT)
)
temperature: float = 0.7
max_output_tokens: int = 200

View file

@ -11,20 +11,27 @@ try:
except ImportError: # pragma: no cover - optional dependency
completion = None # type: ignore[assignment]
from system_prompt import load_system_prompt
from tools import Tool
# Built-in fallback prompt for the communication tool, used when SYSTEM.md
# is missing or empty. NOTE: adjacent string literals are concatenated with
# no implicit separator, so each sentence must end with an explicit space —
# the previous version produced run-together text like "Sprache.Sprich".
DEFAULT_COMMUNICATION_SYSTEM_PROMPT = (
    "Du bist Mistle, ein hilfsbereiter MUD-Bot. "
    "Antworte freundlich und knapp in deutscher Sprache. "
    "Sprich informell und freundlich. "
    "Antworte immer auf Deutsch. "
    "Verwende emoticons, wenn es angebracht ist: :) ;) 8) :( ;( :P X) XD :D"
)
@dataclass
class IntelligentCommunicationTool(Tool):
"""Tool that uses a language model to answer private tells."""
model: str = "mistral/mistral-tiny"
system_prompt: str = (
"Du bist Mistle, ein hilfsbereiter MUD-Bot. "
"Antworte freundlich und knapp in deutscher Sprache."
"Sprich informell und freundlich."
"Antworte immer auf Deutsch."
"Verwende emoticons, wenn es angebracht ist: :) ;) 8) :( ;( :P X) XD :D"
system_prompt: str = field(
default_factory=lambda: load_system_prompt(
default=DEFAULT_COMMUNICATION_SYSTEM_PROMPT
)
)
temperature: float = 0.7
max_output_tokens: int = 120

34
system_prompt.py Normal file
View file

@ -0,0 +1,34 @@
from __future__ import annotations
import sys
from pathlib import Path
_WARNED_PATHS: set[Path] = set()
def load_system_prompt(*, default: str, path: str = "SYSTEM.md") -> str:
    """Load the system prompt from *path*, falling back to *default*.

    The file is read as UTF-8 and stripped of surrounding whitespace.
    If it is missing, unreadable, not valid UTF-8, or empty, *default*
    is returned instead and a warning is emitted once per path.

    Args:
        default: Prompt text used when the file cannot supply one.
        path: Prompt file location, relative to the current working
            directory (defaults to ``SYSTEM.md`` in the project root).

    Returns:
        The file's stripped content, or *default* on any failure.
    """
    prompt_path = Path(path)
    try:
        content = prompt_path.read_text(encoding="utf-8").strip()
    except FileNotFoundError:
        _warn_once(prompt_path, "not found")
        return default
    except UnicodeDecodeError:
        # A corrupt or binary prompt file must not crash startup;
        # previously this exception propagated to the caller.
        _warn_once(prompt_path, "not valid UTF-8")
        return default
    except OSError as exc:  # pragma: no cover - environment specific
        _warn_once(prompt_path, f"unreadable ({exc})")
        return default
    if not content:
        _warn_once(prompt_path, "empty")
        return default
    return content
def _warn_once(path: Path, reason: str) -> None:
    """Print a stderr warning for *path* once; repeat calls are no-ops."""
    if path not in _WARNED_PATHS:
        _WARNED_PATHS.add(path)
        message = f"[Prompt] {path} is {reason}; using built-in default prompt."
        print(message, file=sys.stderr)