From 4a9b00111d89c1c33609717c1158829ffce3245a Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 22 Mar 2026 11:55:06 -0700 Subject: [PATCH] Settings: replace llama-server with Ollama, remove Local AI tab, rename Developer to Debug - AI Provider: "Local (llama-server)" changed to "Ollama" with URL and model fields (defaults to localhost:11434, llama3.2) - Ollama connects via its OpenAI-compatible API (/v1 endpoint) - Removed empty "Local AI" tab - Renamed "Developer" tab to "Debug" Co-Authored-By: Claude Opus 4.6 --- src/lib/components/SettingsModal.svelte | 41 ++++++++++++------------- src/lib/stores/settings.ts | 10 +++--- 2 files changed, 24 insertions(+), 27 deletions(-) diff --git a/src/lib/components/SettingsModal.svelte b/src/lib/components/SettingsModal.svelte index b7a23f8..ffdc557 100644 --- a/src/lib/components/SettingsModal.svelte +++ b/src/lib/components/SettingsModal.svelte @@ -11,7 +11,7 @@ let { visible, onClose }: Props = $props(); let localSettings = $state({ ...$settings }); - let activeTab = $state<'transcription' | 'speakers' | 'ai' | 'local' | 'developer'>('transcription'); + let activeTab = $state<'transcription' | 'speakers' | 'ai' | 'debug'>('transcription'); let modelStatus = $state<'idle' | 'downloading' | 'success' | 'error'>('idle'); let modelError = $state(''); let revealedFields = $state<Set<string>>(new Set()); @@ -81,11 +81,8 @@ - - @@ -184,14 +181,27 @@
- {#if localSettings.ai_provider === 'openai'} + {#if localSettings.ai_provider === 'local'} +
+ + +
+
+ + +
+

+ Install Ollama from ollama.com, then pull a model with ollama pull llama3.2. + The app connects via Ollama's OpenAI-compatible API. +

+ {:else if localSettings.ai_provider === 'openai'}
@@ -232,20 +242,7 @@
{/if} - {:else} -
- - -
-
- - -
-

- Place GGUF model files in ~/.voicetonotes/models/ for auto-detection. - The local AI server uses the OpenAI-compatible API from llama.cpp. -

- {:else if activeTab === 'developer'} + {:else if activeTab === 'debug'}