Strip leaked metadata from LLM responses

The local LLM was echoing back [Server context: ...] metadata lines
in its responses despite prompt instructions not to. These leaked lines
are now stripped with a regex before the response is sent to Discord.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
2026-02-23 10:23:49 -05:00
parent c3274dc702
commit e1dea84d08

View File

@@ -1,6 +1,7 @@
import asyncio
import logging
import random
import re
from collections import deque
from pathlib import Path
@@ -160,7 +161,13 @@ class ChatCog(commands.Cog):
if typing_ctx:
await typing_ctx.__aexit__(None, None, None)
if response is None:
# Strip leaked metadata the LLM may echo back
if response:
response = re.sub(r"\[Server context:[^\]]*\]\n?", "", response)
response = re.sub(r"\[Replying to bot's message:[^\]]*\]\n?", "", response)
response = response.strip()
if not response:
log_channel = discord.utils.get(message.guild.text_channels, name="bcs-log")
if log_channel:
try: