Fix Ollama: remove duplicate stale configMap in AIChatPanel
AIChatPanel had its own hardcoded configMap with the old llama-server URL (localhost:8080) and field name (local_model_path). Every chat message reconfigured the provider with these wrong values, overriding the correct settings applied at startup. Fix: replace the duplicate with a call to the shared configureAIProvider(). Also strip trailing slashes from ollama_url before appending /v1 to prevent double-slash URLs (http://localhost:11434//v1).

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
@@ -1,7 +1,7 @@
|
|||||||
<script lang="ts">
|
<script lang="ts">
|
||||||
import { invoke } from '@tauri-apps/api/core';
|
import { invoke } from '@tauri-apps/api/core';
|
||||||
import { segments, speakers } from '$lib/stores/transcript';
|
import { segments, speakers } from '$lib/stores/transcript';
|
||||||
import { settings } from '$lib/stores/settings';
|
import { settings, configureAIProvider } from '$lib/stores/settings';
|
||||||
|
|
||||||
interface ChatMessage {
|
interface ChatMessage {
|
||||||
role: 'user' | 'assistant';
|
role: 'user' | 'assistant';
|
||||||
@@ -45,17 +45,7 @@
|
|||||||
}));
|
}));
|
||||||
|
|
||||||
// Ensure the provider is configured with current credentials before chatting
|
// Ensure the provider is configured with current credentials before chatting
|
||||||
const s = $settings;
|
await configureAIProvider($settings);
|
||||||
const configMap: Record<string, Record<string, string>> = {
|
|
||||||
openai: { api_key: s.openai_api_key, model: s.openai_model },
|
|
||||||
anthropic: { api_key: s.anthropic_api_key, model: s.anthropic_model },
|
|
||||||
litellm: { api_key: s.litellm_api_key, api_base: s.litellm_api_base, model: s.litellm_model },
|
|
||||||
local: { model: s.local_model_path, base_url: 'http://localhost:8080' },
|
|
||||||
};
|
|
||||||
const config = configMap[s.ai_provider];
|
|
||||||
if (config) {
|
|
||||||
await invoke('ai_configure', { provider: s.ai_provider, config });
|
|
||||||
}
|
|
||||||
|
|
||||||
const result = await invoke<{ response: string }>('ai_chat', {
|
const result = await invoke<{ response: string }>('ai_chat', {
|
||||||
messages: chatMessages,
|
messages: chatMessages,
|
||||||
|
|||||||
@@ -57,7 +57,7 @@ export async function configureAIProvider(s: AppSettings): Promise<void> {
|
|||||||
openai: { api_key: s.openai_api_key, model: s.openai_model },
|
openai: { api_key: s.openai_api_key, model: s.openai_model },
|
||||||
anthropic: { api_key: s.anthropic_api_key, model: s.anthropic_model },
|
anthropic: { api_key: s.anthropic_api_key, model: s.anthropic_model },
|
||||||
litellm: { api_key: s.litellm_api_key, api_base: s.litellm_api_base, model: s.litellm_model },
|
litellm: { api_key: s.litellm_api_key, api_base: s.litellm_api_base, model: s.litellm_model },
|
||||||
local: { model: s.ollama_model, base_url: s.ollama_url + '/v1' },
|
local: { model: s.ollama_model, base_url: s.ollama_url.replace(/\/+$/, '') + '/v1' },
|
||||||
};
|
};
|
||||||
const config = configMap[s.ai_provider];
|
const config = configMap[s.ai_provider];
|
||||||
if (config) {
|
if (config) {
|
||||||
|
|||||||
Reference in New Issue
Block a user