Add scoreboard roast feature via image analysis
When @mentioned with an image attachment, the bot now roasts players based on scoreboard screenshots using the vision model. Text-only mentions continue to work as before.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
@@ -1,4 +1,5 @@
|
||||
import asyncio
|
||||
import base64
|
||||
import json
|
||||
import logging
|
||||
from pathlib import Path
|
||||
@@ -238,6 +239,55 @@ class LLMClient:
|
||||
logger.error("LLM chat error: %s", e)
|
||||
return None
|
||||
|
||||
async def analyze_image(
|
||||
self,
|
||||
image_bytes: bytes,
|
||||
system_prompt: str,
|
||||
user_text: str = "",
|
||||
on_first_token=None,
|
||||
) -> str | None:
|
||||
"""Send an image to the vision model with a system prompt.
|
||||
|
||||
Returns the generated text response, or None on failure.
|
||||
"""
|
||||
b64 = base64.b64encode(image_bytes).decode()
|
||||
data_url = f"data:image/png;base64,{b64}"
|
||||
|
||||
user_content: list[dict] = [
|
||||
{"type": "image_url", "image_url": {"url": data_url}},
|
||||
]
|
||||
if user_text:
|
||||
user_content.append({"type": "text", "text": user_text})
|
||||
|
||||
async with self._semaphore:
|
||||
try:
|
||||
stream = await self._client.chat.completions.create(
|
||||
model=self.model,
|
||||
messages=[
|
||||
{"role": "system", "content": system_prompt},
|
||||
{"role": "user", "content": user_content},
|
||||
],
|
||||
temperature=0.8,
|
||||
max_tokens=500,
|
||||
stream=True,
|
||||
)
|
||||
|
||||
chunks: list[str] = []
|
||||
notified = False
|
||||
async for chunk in stream:
|
||||
delta = chunk.choices[0].delta if chunk.choices else None
|
||||
if delta and delta.content:
|
||||
if not notified and on_first_token:
|
||||
await on_first_token()
|
||||
notified = True
|
||||
chunks.append(delta.content)
|
||||
|
||||
content = "".join(chunks).strip()
|
||||
return content if content else None
|
||||
except Exception as e:
|
||||
logger.error("LLM image analysis error: %s", e)
|
||||
return None
|
||||
|
||||
async def raw_analyze(self, message: str, context: str = "", user_notes: str = "") -> tuple[str, dict | None]:
|
||||
"""Return the raw LLM response string AND parsed result for /bcs-test (single LLM call)."""
|
||||
user_content = f"=== CONTEXT (other users' recent messages, for background only) ===\n{context}\n\n"
|
||||
|
||||
Reference in New Issue
Block a user