Phase 5: AI provider system with local and cloud support
- Implement AIProvider base interface with chat() and is_available()
- Add LocalProvider connecting to bundled llama-server via OpenAI SDK
- Add OpenAIProvider for direct OpenAI API access
- Add AnthropicProvider for Anthropic Claude API
- Add LiteLLMProvider for multi-provider gateway
- Build AIProviderService with provider routing, auto-selection, and transcript context injection
- Add ai.chat IPC handler supporting chat, list_providers, set_provider, and configure actions
- Add ai_chat, ai_list_providers, ai_configure Tauri commands
- Build interactive AIChatPanel with message history, quick actions (Summarize, Action Items), and transcript context awareness
- Tests: 30 Python, 6 Rust, 0 Svelte errors

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
@@ -135,6 +135,83 @@ def make_export_handler() -> HandlerFunc:
|
||||
return handler
|
||||
|
||||
|
||||
def make_ai_chat_handler() -> HandlerFunc:
    """Create an AI chat handler backed by a persistent AIProviderService.

    The returned handler dispatches on ``payload["action"]``:

    - ``"list_providers"`` -> ``ai.providers`` message listing registered providers
    - ``"set_provider"``   -> ``ai.provider_set`` after switching the active provider
    - ``"configure"``      -> ``ai.configured`` after (re-)registering one provider
    - anything else        -> ``ai.response`` carrying a chat completion (default)

    Returns:
        A ``HandlerFunc`` mapping an ``IPCMessage`` request to an ``IPCMessage``
        reply, closing over a single shared ``AIProviderService`` so provider
        registration and selection persist across messages.
    """
    from voice_to_notes.services.ai_provider import create_default_service

    # One service instance is shared by every message this handler processes;
    # creating it here (not per-message) is what makes configuration sticky.
    service = create_default_service()

    def _configure_provider(provider_name: str, config: dict) -> None:
        """Re-create and register one provider from user-supplied settings.

        NOTE(review): an unknown ``provider_name`` is silently ignored (the
        caller still receives an ``ai.configured`` reply) — this preserves the
        original behavior; confirm whether an error reply would be preferable.
        """
        if provider_name == "local":
            from voice_to_notes.providers.local_provider import LocalProvider

            service.register_provider("local", LocalProvider(
                base_url=config.get("base_url", "http://localhost:8080"),
                model=config.get("model", "local"),
            ))
        elif provider_name == "openai":
            from voice_to_notes.providers.openai_provider import OpenAIProvider

            service.register_provider("openai", OpenAIProvider(
                api_key=config.get("api_key"),
                model=config.get("model", "gpt-4o-mini"),
            ))
        elif provider_name == "anthropic":
            from voice_to_notes.providers.anthropic_provider import AnthropicProvider

            service.register_provider("anthropic", AnthropicProvider(
                api_key=config.get("api_key"),
                model=config.get("model", "claude-sonnet-4-6"),
            ))
        elif provider_name == "litellm":
            from voice_to_notes.providers.litellm_provider import LiteLLMProvider

            service.register_provider("litellm", LiteLLMProvider(
                model=config.get("model", "gpt-4o-mini"),
            ))

    def handler(msg: IPCMessage) -> IPCMessage:
        payload = msg.payload
        action = payload.get("action", "chat")

        if action == "list_providers":
            return IPCMessage(
                id=msg.id,
                type="ai.providers",
                payload={"providers": service.list_providers()},
            )

        if action == "set_provider":
            # A missing "provider" key raises KeyError, surfacing the
            # malformed request to the IPC layer rather than hiding it.
            service.set_active(payload["provider"])
            return IPCMessage(
                id=msg.id,
                type="ai.provider_set",
                payload={"provider": payload["provider"]},
            )

        if action == "configure":
            provider_name = payload.get("provider", "")
            _configure_provider(provider_name, payload.get("config", {}))
            return IPCMessage(
                id=msg.id,
                type="ai.configured",
                payload={"provider": provider_name},
            )

        # Default action: chat. Any payload keys beyond the reserved ones are
        # forwarded verbatim as provider-specific chat options.
        response = service.chat(
            messages=payload.get("messages", []),
            transcript_context=payload.get("transcript_context", ""),
            **{k: v for k, v in payload.items()
               if k not in ("action", "messages", "transcript_context")},
        )
        return IPCMessage(
            id=msg.id,
            type="ai.response",
            payload={"response": response},
        )

    return handler
|
||||
|
||||
|
||||
def hardware_detect_handler(msg: IPCMessage) -> IPCMessage:
|
||||
"""Detect hardware capabilities and return recommendations."""
|
||||
from voice_to_notes.hardware.detect import detect_hardware
|
||||
|
||||
Reference in New Issue
Block a user