Feature: Add dynamic model selection to ViewReceipt page
Enhance receipt parsing model selection:

- Fetch available models from the LlamaCpp server dynamically
- Show loaded/unloaded status in the model dropdown
- Persist the selected model to appsettings.json
- Read the default model from the AI:ReceiptParsingModel config key
- Inject LlamaCppVisionClient and IConfiguration dependencies

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
@@ -4,6 +4,8 @@ using Microsoft.EntityFrameworkCore;
|
||||
using MoneyMap.Data;
|
||||
using MoneyMap.Models;
|
||||
using MoneyMap.Services;
|
||||
using System.Text.Json;
|
||||
using System.Text.Json.Nodes;
|
||||
|
||||
namespace MoneyMap.Pages
|
||||
{
|
||||
@@ -12,22 +14,33 @@ namespace MoneyMap.Pages
|
||||
// Injected services and configuration; all assigned once in the constructor.
private readonly MoneyMapContext _db;
private readonly IReceiptManager _receiptManager;
private readonly IEnumerable<IReceiptParser> _parsers;
private readonly LlamaCppVisionClient _llamaClient;
private readonly IConfiguration _config;
private readonly IWebHostEnvironment _env;

/// <summary>
/// Creates the page model with its dependencies supplied by the DI container.
/// </summary>
/// <param name="db">Application database context.</param>
/// <param name="receiptManager">Service managing receipt records and files.</param>
/// <param name="parsers">All registered <see cref="IReceiptParser"/> implementations.</param>
/// <param name="llamaClient">Client used to query the LlamaCpp server for available models.</param>
/// <param name="config">Application configuration (reads the AI:ReceiptParsingModel key).</param>
/// <param name="env">Hosting environment, used to locate appsettings.json on disk.</param>
public ViewReceiptModel(
    MoneyMapContext db,
    IReceiptManager receiptManager,
    IEnumerable<IReceiptParser> parsers,
    LlamaCppVisionClient llamaClient,
    IConfiguration config,
    IWebHostEnvironment env)
{
    _db = db;
    _receiptManager = receiptManager;
    _parsers = parsers;
    _llamaClient = llamaClient;
    _config = config;
    _env = env;
}
|
||||
|
||||
/// <summary>The receipt being viewed, or null when not found.</summary>
public Receipt? Receipt { get; set; }

/// <summary>Line items associated with the receipt.</summary>
public List<ReceiptLineItem> LineItems { get; set; } = new();

/// <summary>History of parse attempts for this receipt.</summary>
public List<ReceiptParseLog> ParseLogs { get; set; } = new();

/// <summary>Parser implementations the user can choose from.</summary>
public List<ParserOption> AvailableParsers { get; set; } = new();

/// <summary>Models reported by the LlamaCpp server.</summary>
public List<LlamaCppModel> AvailableModels { get; set; } = new();

/// <summary>URL used to render the receipt image on the page.</summary>
public string ReceiptUrl { get; set; } = "";

/// <summary>
/// Currently selected parsing model, read live from the AI:ReceiptParsingModel
/// configuration key; falls back to "gpt-4o-mini" when the key is absent.
/// </summary>
public string SelectedModel => _config["AI:ReceiptParsingModel"] ?? "gpt-4o-mini";

/// <summary>Flash message surfaced after a successful action.</summary>
[TempData]
public string? SuccessMessage { get; set; }
|
||||
@@ -59,6 +72,9 @@ namespace MoneyMap.Pages
|
||||
FullName = p.GetType().Name
|
||||
}).ToList();
|
||||
|
||||
// Get available LlamaCpp models
|
||||
AvailableModels = await _llamaClient.GetAvailableModelsAsync();
|
||||
|
||||
return Page();
|
||||
}
|
||||
|
||||
@@ -94,6 +110,12 @@ namespace MoneyMap.Pages
|
||||
return RedirectToPage(new { id });
|
||||
}
|
||||
|
||||
// Save selected model to config if it changed
|
||||
if (!string.IsNullOrEmpty(model) && model != SelectedModel)
|
||||
{
|
||||
await SaveSelectedModelAsync(model);
|
||||
}
|
||||
|
||||
var result = await selectedParser.ParseReceiptAsync(id, model);
|
||||
|
||||
if (result.IsSuccess)
|
||||
@@ -108,6 +130,33 @@ namespace MoneyMap.Pages
|
||||
return RedirectToPage(new { id });
|
||||
}
|
||||
|
||||
/// <summary>
/// Persists the chosen parsing model to appsettings.json under
/// AI:ReceiptParsingModel so it becomes the default on subsequent runs.
/// Best-effort: any failure (missing file, malformed JSON, locked file) is
/// swallowed because losing the preference is not critical.
/// NOTE(review): rewriting appsettings.json at runtime will retrigger a
/// configuration reload when reloadOnChange is enabled — confirm intended.
/// </summary>
/// <param name="model">Model identifier to store.</param>
private async Task SaveSelectedModelAsync(string model)
{
    try
    {
        var settingsFile = Path.Combine(_env.ContentRootPath, "appsettings.json");
        var root = JsonNode.Parse(await System.IO.File.ReadAllTextAsync(settingsFile));

        if (root == null)
        {
            return;
        }

        // Create the AI section on first use, then set the key.
        root["AI"] ??= new JsonObject();
        root["AI"]!["ReceiptParsingModel"] = model;

        var serialized = root.ToJsonString(new JsonSerializerOptions { WriteIndented = true });
        await System.IO.File.WriteAllTextAsync(settingsFile, serialized);
    }
    catch
    {
        // Deliberately ignored: persisting the preference is best-effort.
    }
}
|
||||
|
||||
public class ParserOption
|
||||
{
|
||||
public string Name { get; set; } = "";
|
||||
|
||||
Reference in New Issue
Block a user