Feature: Add Settings page with centralized AI model configuration

- Add Settings page for AI model selection with load status indicators
- Add ModelWarmupService to preload configured model on app startup
- Consolidate AI model config to single AI:ReceiptParsingModel setting
- Simplify ViewReceipt and AICategorizePreview to use Settings model
- Improve AI categorization prompt to produce more varied confidence scores

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-01-17 18:06:02 -05:00
parent 29d26b4771
commit c5fad34658
9 changed files with 306 additions and 139 deletions
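Note: every page and service touched below reads a single configuration key, AI:ReceiptParsingModel, plus AI:ModelsEndpoint for discovering local models. A minimal sketch of that lookup, using the fallback values that appear in the diff; the llamacpp: model id in the comment is hypothetical:

// Minimal sketch of the consolidated lookup; fallbacks mirror the ones used in the pages below.
// The matching appsettings.json section would look roughly like:
//   "AI": {
//     "ReceiptParsingModel": "llamacpp:qwen2.5-vl-7b",  // hypothetical local id; hosted ids are plain, e.g. "gpt-4o-mini"
//     "ModelsEndpoint": "http://athena.lan:11434"
//   }
var builder = WebApplication.CreateBuilder(args);

string model = builder.Configuration["AI:ReceiptParsingModel"] ?? "gpt-4o-mini";
string endpoint = builder.Configuration["AI:ModelsEndpoint"] ?? "http://athena.lan:11434";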

View File

@@ -36,25 +36,10 @@
{
<input type="hidden" name="transactionIds" value="@id" />
}
@if (Model.AIProvider.Equals("LlamaCpp", StringComparison.OrdinalIgnoreCase) && Model.AvailableModels.Any())
{
<div class="mb-3" style="max-width: 400px;">
<label for="model" class="form-label">Model</label>
<select name="model" id="model" class="form-select">
@foreach (var m in Model.AvailableModels)
{
var isSelected = m.Id == Model.SelectedModel;
<option value="@m.Id" selected="@isSelected">
@(m.IsLoaded ? "● " : "○ ")@m.Id
</option>
}
</select>
<div class="form-text">
<span style="color: #28a745;">●</span> Loaded
<span class="ms-2" style="color: #6c757d;">○</span> Not loaded
</div>
</div>
}
<p class="text-muted small mb-3">
Using: <strong>@Model.SelectedModel</strong>
<a href="/Settings" class="ms-2 small">Change</a>
</p>
<button type="submit" class="btn btn-primary">
Generate Suggestions for @Model.SelectedTransactionCount Transaction(s)
</button>
@@ -66,25 +51,10 @@
<form method="post" asp-page-handler="Generate"
onsubmit="this.querySelector('button[type=submit]').disabled = true; this.querySelector('button[type=submit]').innerHTML = '<span class=\'spinner-border spinner-border-sm me-2\'></span>Analyzing transactions...';">
@if (Model.AIProvider.Equals("LlamaCpp", StringComparison.OrdinalIgnoreCase) && Model.AvailableModels.Any())
{
<div class="mb-3" style="max-width: 400px;">
<label for="model" class="form-label">Model</label>
<select name="model" id="model" class="form-select">
@foreach (var m in Model.AvailableModels)
{
var isSelected = m.Id == Model.SelectedModel;
<option value="@m.Id" selected="@isSelected">
@(m.IsLoaded ? "● " : "○ ")@m.Id
</option>
}
</select>
<div class="form-text">
<span style="color: #28a745;">●</span> Loaded
<span class="ms-2" style="color: #6c757d;">○</span> Not loaded
</div>
</div>
}
<p class="text-muted small mb-3">
Using: <strong>@Model.SelectedModel</strong>
<a href="/Settings" class="ms-2 small">Change</a>
</p>
<button type="submit" class="btn btn-primary">
Generate Suggestions (up to 50 uncategorized)
</button>

View File

@@ -12,26 +12,22 @@ namespace MoneyMap.Pages
{
private readonly MoneyMapContext _db;
private readonly ITransactionAICategorizer _aiCategorizer;
private readonly LlamaCppVisionClient _llamaClient;
private readonly IConfiguration _config;
public AICategorizePreviewModel(
MoneyMapContext db,
ITransactionAICategorizer aiCategorizer,
LlamaCppVisionClient llamaClient,
IConfiguration config)
{
_db = db;
_aiCategorizer = aiCategorizer;
_llamaClient = llamaClient;
_config = config;
}
public List<ProposalViewModel> Proposals { get; set; } = new();
public string ModelUsed { get; set; } = "";
public string AIProvider => _config["AI:CategorizationProvider"] ?? "OpenAI";
public List<LlamaCppModel> AvailableModels { get; set; } = new();
public string SelectedModel => _config["AI:CategorizationModel"] ?? "qwen2.5-coder-32b-instruct-q6_k";
public string SelectedModel => _config["AI:ReceiptParsingModel"] ?? "gpt-4o-mini";
[TempData]
public string? StoredTransactionIds { get; set; }
@@ -51,12 +47,6 @@ namespace MoneyMap.Pages
public async Task<IActionResult> OnGetAsync()
{
// Load models for the dropdown
if (AIProvider.Equals("LlamaCpp", StringComparison.OrdinalIgnoreCase))
{
AvailableModels = await _llamaClient.GetAvailableModelsAsync();
}
// Load transaction IDs from TempData if available
if (!string.IsNullOrEmpty(StoredTransactionIds))
{
@@ -96,7 +86,7 @@ namespace MoneyMap.Pages
return RedirectToPage();
}
public async Task<IActionResult> OnPostGenerateAsync(string? model)
public async Task<IActionResult> OnPostGenerateAsync()
{
var uncategorized = await _db.Transactions
.Include(t => t.Card)
@@ -114,7 +104,7 @@ namespace MoneyMap.Pages
return RedirectToPage("/Recategorize");
}
var proposals = await _aiCategorizer.ProposeBatchCategorizationAsync(uncategorized, model);
var proposals = await _aiCategorizer.ProposeBatchCategorizationAsync(uncategorized, SelectedModel);
if (proposals.Count == 0)
{
@@ -136,12 +126,12 @@ namespace MoneyMap.Pages
}).ToList();
ProposalsJson = JsonSerializer.Serialize(storedProposals);
ModelUsed = model ?? SelectedModel;
ModelUsed = SelectedModel;
return RedirectToPage();
}
public async Task<IActionResult> OnPostGenerateForIdsAsync(long[]? transactionIds, string? model)
public async Task<IActionResult> OnPostGenerateForIdsAsync(long[]? transactionIds)
{
// Try to get IDs from form first, then from TempData
if ((transactionIds == null || transactionIds.Length == 0) && !string.IsNullOrEmpty(StoredTransactionIds))
@@ -171,7 +161,7 @@ namespace MoneyMap.Pages
return RedirectToPage("/Transactions");
}
var proposals = await _aiCategorizer.ProposeBatchCategorizationAsync(transactions, model);
var proposals = await _aiCategorizer.ProposeBatchCategorizationAsync(transactions, SelectedModel);
if (proposals.Count == 0)
{
@@ -193,7 +183,7 @@ namespace MoneyMap.Pages
}).ToList();
ProposalsJson = JsonSerializer.Serialize(storedProposals);
ModelUsed = model ?? SelectedModel;
ModelUsed = SelectedModel;
return RedirectToPage();
}

View File

@@ -0,0 +1,93 @@
@page
@model MoneyMap.Pages.SettingsModel
@{
ViewData["Title"] = "Settings";
}
<h2 class="mb-4">Settings</h2>
@if (!string.IsNullOrEmpty(Model.SuccessMessage))
{
<div class="alert alert-success alert-dismissible fade show" role="alert">
@Model.SuccessMessage
<button type="button" class="btn-close" data-bs-dismiss="alert" aria-label="Close"></button>
</div>
}
@if (!string.IsNullOrEmpty(Model.ErrorMessage))
{
<div class="alert alert-danger alert-dismissible fade show" role="alert">
@Model.ErrorMessage
<button type="button" class="btn-close" data-bs-dismiss="alert" aria-label="Close"></button>
</div>
}
<div class="row">
<div class="col-lg-6">
<div class="card shadow-sm mb-4">
<div class="card-header">
<strong>AI Model Configuration</strong>
</div>
<div class="card-body">
<p class="text-muted small mb-3">
Select the AI model to use for receipt parsing and other AI features.
Models with <span style="color: #28a745;">●</span> are currently loaded and ready.
</p>
<form method="post" asp-page-handler="SaveModel">
<div class="mb-3">
<label for="model" class="form-label">Receipt Parsing Model</label>
<select name="model" id="model" class="form-select">
<optgroup label="Local Models (LlamaCpp)">
@foreach (var m in Model.AvailableModels)
{
var modelValue = $"llamacpp:{m.Id}";
var isSelected = Model.SelectedModel == modelValue;
<option value="@modelValue" selected="@isSelected">
@(m.IsLoaded ? "● " : "○ ")@m.Id
</option>
}
</optgroup>
<optgroup label="OpenAI">
<option value="gpt-4o-mini" selected="@(Model.SelectedModel == "gpt-4o-mini")">GPT-4o Mini</option>
<option value="gpt-4o" selected="@(Model.SelectedModel == "gpt-4o")">GPT-4o</option>
</optgroup>
<optgroup label="Anthropic">
<option value="claude-3-5-haiku-20241022" selected="@(Model.SelectedModel == "claude-3-5-haiku-20241022")">Claude 3.5 Haiku</option>
<option value="claude-3-5-sonnet-20241022" selected="@(Model.SelectedModel == "claude-3-5-sonnet-20241022")">Claude 3.5 Sonnet</option>
</optgroup>
</select>
<div class="form-text">
<span style="color: #28a745;">●</span> Loaded
<span class="ms-2" style="color: #6c757d;">○</span> Not loaded
</div>
</div>
<div class="d-flex gap-2">
<button type="submit" class="btn btn-primary">
Save Selection
</button>
<button type="submit" formaction="/Settings?handler=LoadModel" class="btn btn-outline-secondary">
Save & Load Model
</button>
</div>
</form>
</div>
</div>
<div class="card shadow-sm">
<div class="card-header">
<strong>Connection Settings</strong>
</div>
<div class="card-body">
<dl class="row mb-0">
<dt class="col-sm-4">Models Endpoint</dt>
<dd class="col-sm-8"><code>@Model.ModelsEndpoint</code></dd>
</dl>
<p class="text-muted small mt-2 mb-0">
Configure in <code>appsettings.json</code> under <code>AI:ModelsEndpoint</code>
</p>
</div>
</div>
</div>
</div>

View File

@@ -0,0 +1,114 @@
using Microsoft.AspNetCore.Mvc;
using Microsoft.AspNetCore.Mvc.RazorPages;
using MoneyMap.Services;
using System.Text.Json;
using System.Text.Json.Nodes;
namespace MoneyMap.Pages
{
public class SettingsModel : PageModel
{
private readonly LlamaCppVisionClient _llamaClient;
private readonly IConfiguration _config;
private readonly IWebHostEnvironment _env;
private readonly ILogger<SettingsModel> _logger;
public SettingsModel(
LlamaCppVisionClient llamaClient,
IConfiguration config,
IWebHostEnvironment env,
ILogger<SettingsModel> logger)
{
_llamaClient = llamaClient;
_config = config;
_env = env;
_logger = logger;
}
public List<LlamaCppModel> AvailableModels { get; set; } = new();
public string SelectedModel => _config["AI:ReceiptParsingModel"] ?? "gpt-4o-mini";
public string ModelsEndpoint => _config["AI:ModelsEndpoint"] ?? "http://athena.lan:11434";
[TempData]
public string? SuccessMessage { get; set; }
[TempData]
public string? ErrorMessage { get; set; }
public async Task OnGetAsync()
{
AvailableModels = await _llamaClient.GetAvailableModelsAsync();
}
public async Task<IActionResult> OnPostSaveModelAsync(string model)
{
if (string.IsNullOrEmpty(model))
{
ErrorMessage = "No model selected.";
return RedirectToPage();
}
await SaveSelectedModelAsync(model);
SuccessMessage = $"AI model updated to: {model}";
return RedirectToPage();
}
public async Task<IActionResult> OnPostLoadModelAsync(string model)
{
if (string.IsNullOrEmpty(model))
{
ErrorMessage = "No model selected.";
return RedirectToPage();
}
// Save the model first
await SaveSelectedModelAsync(model);
// Fire a warmup request in the background (don't await)
_ = WarmupModelAsync(model);
SuccessMessage = $"Model {model} is being loaded. This may take a moment.";
return RedirectToPage();
}
private async Task WarmupModelAsync(string model)
{
try
{
_logger.LogInformation("Warming up model: {Model}", model);
await _llamaClient.SendTextPromptAsync("Hello", model);
_logger.LogInformation("Model warmup completed: {Model}", model);
}
catch (Exception ex)
{
_logger.LogWarning(ex, "Model warmup failed for {Model}: {Message}", model, ex.Message);
}
}
private async Task SaveSelectedModelAsync(string model)
{
try
{
var appSettingsPath = Path.Combine(_env.ContentRootPath, "appsettings.json");
var json = await System.IO.File.ReadAllTextAsync(appSettingsPath);
var jsonNode = JsonNode.Parse(json);
if (jsonNode == null) return;
if (jsonNode["AI"] == null)
{
jsonNode["AI"] = new JsonObject();
}
jsonNode["AI"]!["ReceiptParsingModel"] = model;
var options = new JsonSerializerOptions { WriteIndented = true };
await System.IO.File.WriteAllTextAsync(appSettingsPath, jsonNode.ToJsonString(options));
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to save model selection: {Message}", ex.Message);
}
}
}
}
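Note: SaveSelectedModelAsync persists the selection by rewriting appsettings.json, and the page models read the key back through IConfiguration on later requests. That round trip assumes the JSON configuration provider reloads the file on change, which is the default for the standard builder; a sketch of that assumption, not code from this commit:

var builder = WebApplication.CreateBuilder(args);
// CreateBuilder already registers appsettings.json with reloadOnChange enabled; the
// explicit call below only makes that assumption visible.
builder.Configuration.AddJsonFile("appsettings.json", optional: true, reloadOnChange: true);
// With reload enabled, the value written by SaveSelectedModelAsync becomes visible to
// _config["AI:ReceiptParsingModel"] on subsequent requests without an app restart.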

View File

@@ -144,7 +144,7 @@
</div>
</div>
<!-- Parser Selection -->
<!-- Parse Receipt -->
<div class="card shadow-sm mb-3">
<div class="card-header">
<strong>Parse Receipt</strong>
@@ -153,36 +153,11 @@
@if (Model.AvailableParsers.Any())
{
<form method="post" asp-page-handler="Parse" asp-route-id="@Model.Receipt.Id">
<div class="mb-2">
<label for="parser" class="form-label small">Select Parser</label>
<select name="parser" id="parser" class="form-select form-select-sm">
@foreach (var parser in Model.AvailableParsers)
{
<option value="@parser.FullName">@parser.Name</option>
}
</select>
</div>
<div class="mb-2">
<label for="model" class="form-label small">AI Model</label>
<select name="model" id="model" class="form-select form-select-sm">
@foreach (var m in Model.AvailableModels)
{
var modelValue = $"llamacpp:{m.Id}";
var isSelected = Model.SelectedModel == modelValue;
<option value="@modelValue" selected="@isSelected">
@(m.IsLoaded ? "● " : "○ ")@m.Id
</option>
}
<option value="gpt-4o-mini" selected="@(Model.SelectedModel == "gpt-4o-mini")">GPT-4o Mini</option>
<option value="gpt-4o" selected="@(Model.SelectedModel == "gpt-4o")">GPT-4o</option>
<option value="claude-3-5-haiku-20241022" selected="@(Model.SelectedModel == "claude-3-5-haiku-20241022")">Claude 3.5 Haiku</option>
<option value="claude-3-5-sonnet-20241022" selected="@(Model.SelectedModel == "claude-3-5-sonnet-20241022")">Claude 3.5 Sonnet</option>
</select>
<div class="form-text small">
<span style="color: #28a745;">●</span> Loaded
<span class="ms-2" style="color: #6c757d;">○</span> Not loaded
</div>
</div>
<input type="hidden" name="parser" value="@Model.AvailableParsers.First().FullName" />
<p class="text-muted small mb-2">
Using: <strong>@Model.SelectedModel</strong>
<a href="/Settings" class="ms-2 small">Change</a>
</p>
<button type="submit" class="btn btn-primary btn-sm w-100">
Parse Receipt
</button>

View File

@@ -4,8 +4,6 @@ using Microsoft.EntityFrameworkCore;
using MoneyMap.Data;
using MoneyMap.Models;
using MoneyMap.Services;
using System.Text.Json;
using System.Text.Json.Nodes;
namespace MoneyMap.Pages
{
@@ -14,31 +12,24 @@ namespace MoneyMap.Pages
private readonly MoneyMapContext _db;
private readonly IReceiptManager _receiptManager;
private readonly IEnumerable<IReceiptParser> _parsers;
private readonly LlamaCppVisionClient _llamaClient;
private readonly IConfiguration _config;
private readonly IWebHostEnvironment _env;
public ViewReceiptModel(
MoneyMapContext db,
IReceiptManager receiptManager,
IEnumerable<IReceiptParser> parsers,
LlamaCppVisionClient llamaClient,
IConfiguration config,
IWebHostEnvironment env)
IConfiguration config)
{
_db = db;
_receiptManager = receiptManager;
_parsers = parsers;
_llamaClient = llamaClient;
_config = config;
_env = env;
}
public Receipt? Receipt { get; set; }
public List<ReceiptLineItem> LineItems { get; set; } = new();
public List<ReceiptParseLog> ParseLogs { get; set; } = new();
public List<ParserOption> AvailableParsers { get; set; } = new();
public List<LlamaCppModel> AvailableModels { get; set; } = new();
public string ReceiptUrl { get; set; } = "";
public string SelectedModel => _config["AI:ReceiptParsingModel"] ?? "gpt-4o-mini";
@@ -72,9 +63,6 @@ namespace MoneyMap.Pages
FullName = p.GetType().Name
}).ToList();
// Get available LlamaCpp models
AvailableModels = await _llamaClient.GetAvailableModelsAsync();
return Page();
}
@@ -100,7 +88,7 @@ namespace MoneyMap.Pages
return File(fileBytes, receipt.ContentType);
}
public async Task<IActionResult> OnPostParseAsync(long id, string parser, string? model = null)
public async Task<IActionResult> OnPostParseAsync(long id, string parser)
{
var selectedParser = _parsers.FirstOrDefault(p => p.GetType().Name == parser);
@@ -110,13 +98,8 @@ namespace MoneyMap.Pages
return RedirectToPage(new { id });
}
// Save selected model to config if it changed
if (!string.IsNullOrEmpty(model) && model != SelectedModel)
{
await SaveSelectedModelAsync(model);
}
var result = await selectedParser.ParseReceiptAsync(id, model);
// Use the configured model from settings
var result = await selectedParser.ParseReceiptAsync(id, SelectedModel);
if (result.IsSuccess)
{
@@ -130,33 +113,6 @@ namespace MoneyMap.Pages
return RedirectToPage(new { id });
}
private async Task SaveSelectedModelAsync(string model)
{
try
{
var appSettingsPath = Path.Combine(_env.ContentRootPath, "appsettings.json");
var json = await System.IO.File.ReadAllTextAsync(appSettingsPath);
var jsonNode = JsonNode.Parse(json);
if (jsonNode == null) return;
// Ensure AI section exists
if (jsonNode["AI"] == null)
{
jsonNode["AI"] = new JsonObject();
}
jsonNode["AI"]!["ReceiptParsingModel"] = model;
var options = new JsonSerializerOptions { WriteIndented = true };
await System.IO.File.WriteAllTextAsync(appSettingsPath, jsonNode.ToJsonString(options));
}
catch
{
// Silently fail - not critical if we can't save the preference
}
}
public class ParserOption
{
public string Name { get; set; } = "";

View File

@@ -71,6 +71,9 @@ builder.Services.AddScoped<IReceiptParser, AIReceiptParser>();
// AI categorization service
builder.Services.AddHttpClient<ITransactionAICategorizer, TransactionAICategorizer>();
// Model warmup service - preloads the configured AI model on startup
builder.Services.AddHostedService<ModelWarmupService>();
// Financial audit API service
builder.Services.AddScoped<IFinancialAuditService, FinancialAuditService>();

View File

@@ -0,0 +1,63 @@
namespace MoneyMap.Services
{
/// <summary>
/// Background service that warms up the configured AI model on application startup.
/// Sends a simple prompt to preload the model without blocking the UI.
/// </summary>
public class ModelWarmupService : BackgroundService
{
private readonly IServiceProvider _serviceProvider;
private readonly IConfiguration _configuration;
private readonly ILogger<ModelWarmupService> _logger;
public ModelWarmupService(
IServiceProvider serviceProvider,
IConfiguration configuration,
ILogger<ModelWarmupService> logger)
{
_serviceProvider = serviceProvider;
_configuration = configuration;
_logger = logger;
}
protected override async Task ExecuteAsync(CancellationToken stoppingToken)
{
var model = _configuration["AI:ReceiptParsingModel"];
// Only warm up local models (llamacpp: prefix)
if (string.IsNullOrEmpty(model) || !model.StartsWith("llamacpp:"))
{
_logger.LogInformation("Model warmup skipped - configured model is not a local model: {Model}", model ?? "(none)");
return;
}
// Small delay to let the app fully start
await Task.Delay(TimeSpan.FromSeconds(2), stoppingToken);
_logger.LogInformation("Starting model warmup for: {Model}", model);
try
{
using var scope = _serviceProvider.CreateScope();
var llamaClient = scope.ServiceProvider.GetRequiredService<LlamaCppVisionClient>();
// Send a simple prompt to load the model
var result = await llamaClient.SendTextPromptAsync("Hello", model);
if (result.IsSuccess)
{
_logger.LogInformation("Model warmup completed successfully for: {Model}", model);
}
else
{
_logger.LogWarning("Model warmup failed for {Model}: {Error}", model, result.ErrorMessage);
}
}
catch (Exception ex)
{
// Don't crash the app if warmup fails - just log it
_logger.LogWarning(ex, "Model warmup failed for {Model}: {Message}", model, ex.Message);
}
}
}
}
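Note: two details are easy to miss here. The client is resolved through a scope because a hosted service is a singleton and LlamaCppVisionClient is presumably registered with a shorter lifetime, and only values carrying the llamacpp: prefix trigger a warmup. A small sketch of that gate with hypothetical model ids:

// Mirrors the StartsWith("llamacpp:") check in ExecuteAsync; the model ids are hypothetical.
static bool ShouldWarmUp(string? model) =>
    !string.IsNullOrEmpty(model) && model.StartsWith("llamacpp:", StringComparison.Ordinal);

Console.WriteLine(ShouldWarmUp("llamacpp:qwen2.5-vl-7b")); // True  -> warmup request is sent
Console.WriteLine(ShouldWarmUp("gpt-4o-mini"));            // False -> hosted model, skipped
Console.WriteLine(ShouldWarmUp(null));                     // False -> nothing configured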

View File

@@ -243,6 +243,8 @@ public class TransactionAICategorizer : ITransactionAICategorizer
if (transaction.IsTransfer)
sb.AppendLine($"- Transfer to: {transaction.TransferToAccount?.DisplayLabel ?? "Unknown"}");
sb.AppendLine();
sb.AppendLine($"Existing categories in this system: {categoryList}");
sb.AppendLine();
sb.AppendLine("Provide your analysis in JSON format:");
sb.AppendLine("{");
@@ -250,13 +252,14 @@ public class TransactionAICategorizer : ITransactionAICategorizer
sb.AppendLine(" \"canonical_merchant\": \"Clean merchant name (e.g., 'Walmart' from 'WAL-MART #1234')\",");
sb.AppendLine(" \"pattern\": \"Pattern to match future transactions (e.g., 'WALMART' or 'SUBWAY')\",");
sb.AppendLine(" \"priority\": 0,");
sb.AppendLine(" \"confidence\": 0.95,");
sb.AppendLine(" \"confidence\": 0.85,");
sb.AppendLine(" \"reasoning\": \"Brief explanation\"");
sb.AppendLine("}");
sb.AppendLine();
sb.AppendLine($"Existing categories in this system: {categoryList}");
sb.AppendLine();
sb.AppendLine("Prefer using existing categories when appropriate. Return ONLY valid JSON, no additional text.");
sb.AppendLine("Guidelines:");
sb.AppendLine("- Prefer using existing categories when appropriate");
sb.AppendLine("- confidence: Your certainty in this categorization (0.0-1.0). Use ~0.9+ for obvious matches like 'WALMART' -> Groceries. Use ~0.7-0.8 for likely matches. Use ~0.5-0.6 for uncertain/ambiguous transactions.");
sb.AppendLine("- Return ONLY valid JSON, no additional text.");
return sb.ToString();
}
@@ -325,7 +328,7 @@ public class TransactionAICategorizer : ITransactionAICategorizer
{
try
{
var selectedModel = model ?? _config["AI:CategorizationModel"] ?? "qwen2.5-coder-32b-instruct-q6_k";
var selectedModel = model ?? _config["AI:ReceiptParsingModel"] ?? "gpt-4o-mini";
var systemPrompt = "You are a financial transaction categorization expert. Always respond with valid JSON only.";
var fullPrompt = $"{systemPrompt}\n\n{prompt}";
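Note: the prompt asks the model for a fixed JSON shape, so the varied-confidence guidance shows up directly in the parsed result. A minimal sketch of a matching DTO plus an example response; the type name and sample values are hypothetical, not part of this commit:

using System;
using System.Text.Json;
using System.Text.Json.Serialization;

// Hypothetical example response illustrating the guidance: ~0.9+ for an obvious
// "WALMART" -> Groceries match.
var json = """
{
  "category": "Groceries",
  "canonical_merchant": "Walmart",
  "pattern": "WALMART",
  "priority": 0,
  "confidence": 0.92,
  "reasoning": "Well-known grocery retailer"
}
""";
var proposal = JsonSerializer.Deserialize<CategorizationProposal>(json);
Console.WriteLine($"{proposal!.Category} ({proposal.Confidence:0.00})");

// Hypothetical DTO matching the fields the prompt requests; not taken from this commit.
public sealed class CategorizationProposal
{
    [JsonPropertyName("category")] public string Category { get; set; } = "";
    [JsonPropertyName("canonical_merchant")] public string CanonicalMerchant { get; set; } = "";
    [JsonPropertyName("pattern")] public string Pattern { get; set; } = "";
    [JsonPropertyName("priority")] public int Priority { get; set; }
    [JsonPropertyName("confidence")] public double Confidence { get; set; }  // 0.0-1.0 per the guidelines
    [JsonPropertyName("reasoning")] public string Reasoning { get; set; } = "";
}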