Include replied-to message text in LLM context

When a user replies to one of the bot's messages, the text of that
original bot message is now included in the context sent to the LLM.
This prevents the LLM from misinterpreting follow-up questions such as
"what does this even mean?", because it can now see which message is
being referenced.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
2026-02-23 09:59:51 -05:00
parent 66ca97760b
commit b6cdea7329

View File

@@ -48,6 +48,7 @@ class ChatCog(commands.Cog):
should_reply = False
is_proactive = False
reply_context = "" # Text of the message being replied to
# Check if bot is @mentioned
if self.bot.user in message.mentions:
@@ -63,6 +64,8 @@ class ChatCog(commands.Cog):
)
if ref_msg.author.id == self.bot.user.id:
should_reply = True
if ref_msg.content:
reply_context = f"[Replying to bot's message: {ref_msg.content[:300]}]\n"
except discord.HTTPException:
pass
@@ -143,7 +146,7 @@ class ChatCog(commands.Cog):
score_context = f"[Server context: {message.author.display_name}{', '.join(context_parts)}]"
self._chat_history[ch_id].append(
{"role": "user", "content": f"{score_context}\n{message.author.display_name}: {content}"}
{"role": "user", "content": f"{score_context}\n{reply_context}{message.author.display_name}: {content}"}
)
active_prompt = self._get_active_prompt()