Settings: replace llama-server with Ollama, remove Local AI tab, rename Developer to Debug
Some checks failed
Release / Bump version and tag (push) Has been cancelled
Release / Build App (Linux) (push) Has been cancelled
Release / Build App (Windows) (push) Has been cancelled
Release / Build App (macOS) (push) Has been cancelled

- AI Provider: "Local (llama-server)" changed to "Ollama" with URL and
  model fields (defaults: http://localhost:11434, model llama3.2)
- The app connects to Ollama via its OpenAI-compatible API (/v1 endpoint)
- Removed empty "Local AI" tab
- Renamed "Developer" tab to "Debug"

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
Claude
2026-03-22 11:55:06 -07:00
parent 5af27f379d
commit 4a9b00111d
2 changed files with 24 additions and 27 deletions

View File

@@ -11,7 +11,7 @@
let { visible, onClose }: Props = $props();
let localSettings = $state<AppSettings>({ ...$settings });
let activeTab = $state<'transcription' | 'speakers' | 'ai' | 'local' | 'developer'>('transcription');
let activeTab = $state<'transcription' | 'speakers' | 'ai' | 'debug'>('transcription');
let modelStatus = $state<'idle' | 'downloading' | 'success' | 'error'>('idle');
let modelError = $state('');
let revealedFields = $state<Set<string>>(new Set());
@@ -81,11 +81,8 @@
<button class="tab" class:active={activeTab === 'ai'} onclick={() => activeTab = 'ai'}>
AI Provider
</button>
<button class="tab" class:active={activeTab === 'local'} onclick={() => activeTab = 'local'}>
Local AI
</button>
<button class="tab" class:active={activeTab === 'developer'} onclick={() => activeTab = 'developer'}>
Developer
<button class="tab" class:active={activeTab === 'debug'} onclick={() => activeTab = 'debug'}>
Debug
</button>
</div>
@@ -184,14 +181,27 @@
<div class="field">
<label for="ai-provider">AI Provider</label>
<select id="ai-provider" bind:value={localSettings.ai_provider}>
<option value="local">Local (llama-server)</option>
<option value="local">Ollama</option>
<option value="openai">OpenAI</option>
<option value="anthropic">Anthropic</option>
<option value="litellm">OpenAI Compatible</option>
</select>
</div>
{#if localSettings.ai_provider === 'openai'}
{#if localSettings.ai_provider === 'local'}
<div class="field">
<label for="ollama-url">Ollama URL</label>
<input id="ollama-url" type="text" bind:value={localSettings.ollama_url} placeholder="http://localhost:11434" />
</div>
<div class="field">
<label for="ollama-model">Model</label>
<input id="ollama-model" type="text" bind:value={localSettings.ollama_model} placeholder="llama3.2" />
</div>
<p class="hint">
Install Ollama from ollama.com, then pull a model with <code>ollama pull llama3.2</code>.
The app connects via Ollama's OpenAI-compatible API.
</p>
{:else if localSettings.ai_provider === 'openai'}
<div class="field">
<label for="openai-key">OpenAI API Key</label>
<div class="input-reveal">
@@ -232,20 +242,7 @@
<input id="litellm-model" type="text" bind:value={localSettings.litellm_model} placeholder="provider/model-name" />
</div>
{/if}
{:else}
<div class="field">
<label for="llama-binary">llama-server Binary Path</label>
<input id="llama-binary" type="text" bind:value={localSettings.local_binary_path} placeholder="llama-server" />
</div>
<div class="field">
<label for="llama-model">GGUF Model Path</label>
<input id="llama-model" type="text" bind:value={localSettings.local_model_path} placeholder="~/.voicetonotes/models/model.gguf" />
</div>
<p class="hint">
Place GGUF model files in ~/.voicetonotes/models/ for auto-detection.
The local AI server uses the OpenAI-compatible API from llama.cpp.
</p>
{:else if activeTab === 'developer'}
{:else if activeTab === 'debug'}
<div class="field checkbox">
<label>
<input