Settings: replace llama-server with Ollama, remove Local AI tab, rename Developer to Debug

- AI Provider: "Local (llama-server)" replaced with "Ollama", with URL and
  model fields (defaults: http://localhost:11434, llama3.2)
- Ollama connects via its OpenAI-compatible API (/v1 endpoint); see the request sketch after this list
- Removed empty "Local AI" tab
- Renamed "Developer" tab to "Debug"
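
The /v1 point is easiest to see as a request. A minimal sketch of a call against the new settings, assuming Ollama's stock OpenAI-compatible routes; the OllamaSettings type and ollamaChat helper are illustrative and not part of this change:

// Illustrative only: hits Ollama's OpenAI-compatible chat endpoint
// using the new ollama_url / ollama_model settings.
interface OllamaSettings {
  ollama_url: string;   // e.g. 'http://localhost:11434'
  ollama_model: string; // e.g. 'llama3.2'
}

async function ollamaChat(s: OllamaSettings, prompt: string): Promise<string> {
  // Ollama serves OpenAI-compatible routes under /v1.
  const res = await fetch(`${s.ollama_url}/v1/chat/completions`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      model: s.ollama_model,
      messages: [{ role: 'user', content: prompt }],
    }),
  });
  if (!res.ok) throw new Error(`Ollama request failed: ${res.status}`);
  const data = await res.json();
  return data.choices[0].message.content;
}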

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
Claude authored 2026-03-22 11:55:06 -07:00
parent 5af27f379d
commit 4a9b00111d
2 changed files with 24 additions and 27 deletions


@@ -10,8 +10,8 @@ export interface AppSettings {
   litellm_model: string;
   litellm_api_key: string;
   litellm_api_base: string;
-  local_model_path: string;
-  local_binary_path: string;
+  ollama_url: string;
+  ollama_model: string;
   transcription_model: string;
   transcription_device: string;
   transcription_language: string;
@@ -30,8 +30,8 @@ const defaults: AppSettings = {
   litellm_model: 'gpt-4o-mini',
   litellm_api_key: '',
   litellm_api_base: '',
-  local_model_path: '',
-  local_binary_path: 'llama-server',
+  ollama_url: 'http://localhost:11434',
+  ollama_model: 'llama3.2',
   transcription_model: 'base',
   transcription_device: 'cpu',
   transcription_language: '',
@@ -61,7 +61,7 @@ export async function saveSettings(s: AppSettings): Promise<void> {
     openai: { api_key: s.openai_api_key, model: s.openai_model },
     anthropic: { api_key: s.anthropic_api_key, model: s.anthropic_model },
     litellm: { api_key: s.litellm_api_key, api_base: s.litellm_api_base, model: s.litellm_model },
-    local: { model: s.local_model_path, base_url: 'http://localhost:8080' },
+    local: { model: s.ollama_model, base_url: s.ollama_url + '/v1' },
   };
   const config = configMap[s.ai_provider];
   if (config) {
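
The local entry now gets its '/v1' suffix at save time, so whatever consumes the provider config can treat Ollama like any other OpenAI-compatible backend. A hedged illustration of that design choice (the consuming code is not part of this diff; ProviderConfig and listModels are hypothetical names):

// Hypothetical consumer of the saved 'local' provider config.
// Because saveSettings already appends '/v1' to base_url, no
// Ollama-specific handling is needed downstream.
interface ProviderConfig {
  model: string;
  base_url: string; // e.g. 'http://localhost:11434/v1'
}

async function listModels(cfg: ProviderConfig): Promise<string[]> {
  // Standard OpenAI-compatible route; Ollama exposes it under /v1.
  const res = await fetch(`${cfg.base_url}/models`);
  if (!res.ok) throw new Error(`model list failed: ${res.status}`);
  const data = await res.json();
  return data.data.map((m: { id: string }) => m.id);
}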