Bump health check max_completion_tokens to 16

gpt-5-nano can't produce output with max_completion_tokens=1.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
2026-02-24 17:08:32 -05:00
parent a0edf90ebd
commit 01b7a6b240

2
bot.py
View File

@@ -146,7 +146,7 @@ class BCSBot(commands.Bot):
await self.llm._client.chat.completions.create(
model=self.llm.model,
messages=[{"role": "user", "content": "hi"}],
-                    max_completion_tokens=1,
+                    max_completion_tokens=16,
)
logger.info("LLM connectivity check passed.")
except Exception as e: