Refactor: Consolidate AI endpoint config to AI:ModelsEndpoint
- Simplify model dropdown to single flat list with local models first
- Show loaded/unloaded status with bullet indicators
- Remove separate Ollama:BaseUrl and LlamaCpp:BaseUrl configs
- All AI vision clients now use AI:ModelsEndpoint (default: athena.lan:11434)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
@@ -165,27 +165,23 @@
|
||||
<div class="mb-2">
|
||||
<label for="model" class="form-label small">AI Model</label>
|
||||
<select name="model" id="model" class="form-select form-select-sm">
|
||||
@if (Model.AvailableModels.Any())
|
||||
{
|
||||
<optgroup label="Local (llama.cpp)">
|
||||
@foreach (var m in Model.AvailableModels)
|
||||
{
|
||||
var modelValue = $"llamacpp:{m.Id}";
|
||||
<option value="@modelValue" selected="@(Model.SelectedModel == modelValue)">
|
||||
@(m.IsLoaded ? "[Loaded] " : "")@m.Id
|
||||
var isSelected = Model.SelectedModel == modelValue;
|
||||
<option value="@modelValue" selected="@isSelected">
|
||||
@(m.IsLoaded ? "● " : "○ ")@m.Id
|
||||
</option>
|
||||
}
|
||||
</optgroup>
|
||||
}
|
||||
<optgroup label="OpenAI">
|
||||
<option value="gpt-4o-mini" selected="@(Model.SelectedModel == "gpt-4o-mini")">GPT-4o Mini (Fast & Cheap)</option>
|
||||
<option value="gpt-4o" selected="@(Model.SelectedModel == "gpt-4o")">GPT-4o (Smarter)</option>
|
||||
</optgroup>
|
||||
<optgroup label="Anthropic">
|
||||
<option value="claude-3-5-haiku-20241022" selected="@(Model.SelectedModel == "claude-3-5-haiku-20241022")">Claude 3.5 Haiku (Fast)</option>
|
||||
<option value="claude-3-5-sonnet-20241022" selected="@(Model.SelectedModel == "claude-3-5-sonnet-20241022")">Claude 3.5 Sonnet (Best)</option>
|
||||
</optgroup>
|
||||
<option value="gpt-4o-mini" selected="@(Model.SelectedModel == "gpt-4o-mini")">GPT-4o Mini</option>
|
||||
<option value="gpt-4o" selected="@(Model.SelectedModel == "gpt-4o")">GPT-4o</option>
|
||||
<option value="claude-3-5-haiku-20241022" selected="@(Model.SelectedModel == "claude-3-5-haiku-20241022")">Claude 3.5 Haiku</option>
|
||||
<option value="claude-3-5-sonnet-20241022" selected="@(Model.SelectedModel == "claude-3-5-sonnet-20241022")">Claude 3.5 Sonnet</option>
|
||||
</select>
|
||||
<div class="form-text small">
|
||||
<span style="color: #28a745;">●</span> Loaded
|
||||
<span class="ms-2" style="color: #6c757d;">○</span> Not loaded
|
||||
</div>
|
||||
</div>
|
||||
<button type="submit" class="btn btn-primary btn-sm w-100">
|
||||
Parse Receipt
|
||||
|
||||
@@ -240,7 +240,7 @@ namespace MoneyMap.Services
|
||||
/// </summary>
|
||||
public async Task<List<LlamaCppModel>> GetAvailableModelsAsync()
|
||||
{
|
||||
var baseUrl = _configuration["LlamaCpp:BaseUrl"] ?? "http://athena.lan:11434";
|
||||
var baseUrl = _configuration["AI:ModelsEndpoint"] ?? "http://athena.lan:11434";
|
||||
|
||||
try
|
||||
{
|
||||
@@ -278,7 +278,7 @@ namespace MoneyMap.Services
|
||||
/// </summary>
|
||||
public async Task<VisionApiResult> SendTextPromptAsync(string prompt, string? model = null)
|
||||
{
|
||||
var baseUrl = _configuration["LlamaCpp:BaseUrl"] ?? "http://athena.lan:11434";
|
||||
var baseUrl = _configuration["AI:ModelsEndpoint"] ?? "http://athena.lan:11434";
|
||||
var llamaModel = model ?? "GLM-4.6V-UD-Q4_K_XL-00001-of-00002";
|
||||
if (llamaModel.StartsWith("llamacpp:"))
|
||||
llamaModel = llamaModel[9..];
|
||||
@@ -340,7 +340,7 @@ namespace MoneyMap.Services
|
||||
|
||||
public async Task<VisionApiResult> AnalyzeImageAsync(string base64Image, string mediaType, string prompt, string model)
|
||||
{
|
||||
var baseUrl = _configuration["LlamaCpp:BaseUrl"] ?? "http://athena.lan:8080";
|
||||
var baseUrl = _configuration["AI:ModelsEndpoint"] ?? "http://athena.lan:11434";
|
||||
|
||||
// Strip "llamacpp:" prefix if present
|
||||
var llamaModel = model.StartsWith("llamacpp:") ? model[9..] : model;
|
||||
@@ -441,7 +441,7 @@ namespace MoneyMap.Services
|
||||
|
||||
public async Task<VisionApiResult> AnalyzeImageAsync(string base64Image, string mediaType, string prompt, string model)
|
||||
{
|
||||
var baseUrl = _configuration["Ollama:BaseUrl"] ?? "http://athena.lan:11434";
|
||||
var baseUrl = _configuration["AI:ModelsEndpoint"] ?? "http://athena.lan:11434";
|
||||
|
||||
// Strip "ollama:" prefix if present
|
||||
var ollamaModel = model.StartsWith("ollama:") ? model[7..] : model;
|
||||
|
||||
Reference in New Issue
Block a user