Files
voice-to-notes/python/voice_to_notes/services/ai_provider.py
Josh Knapp d67625cd5a Phase 5: AI provider system with local and cloud support
- Implement AIProvider base interface with chat() and is_available()
- Add LocalProvider connecting to bundled llama-server via OpenAI SDK
- Add OpenAIProvider for direct OpenAI API access
- Add AnthropicProvider for Anthropic Claude API
- Add LiteLLMProvider for multi-provider gateway
- Build AIProviderService with provider routing, auto-selection,
  and transcript context injection
- Add ai.chat IPC handler supporting chat, list_providers, set_provider,
  and configure actions
- Add ai_chat, ai_list_providers, ai_configure Tauri commands
- Build interactive AIChatPanel with message history, quick actions
  (Summarize, Action Items), and transcript context awareness
- Tests: 30 Python, 6 Rust, 0 Svelte errors

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-26 16:25:10 -08:00

105 lines
3.7 KiB
Python

"""AI provider service — routes requests to configured provider."""
from __future__ import annotations
import sys
from typing import Any
from voice_to_notes.providers.base import AIProvider
class AIProviderService:
    """Manages AI provider selection and routes chat requests."""

    def __init__(self) -> None:
        # Registry of name -> provider; dict insertion order drives auto-selection.
        self._providers: dict[str, AIProvider] = {}
        # Name of the explicitly chosen (or auto-selected) provider, if any.
        self._active_provider: str | None = None

    def register_provider(self, name: str, provider: AIProvider) -> None:
        """Register an AI provider."""
        self._providers[name] = provider

    def set_active(self, name: str) -> None:
        """Set the active provider by name."""
        if name not in self._providers:
            raise ValueError(f"Unknown provider: {name}. Available: {list(self._providers.keys())}")
        self._active_provider = name

    def get_active(self) -> AIProvider | None:
        """Get the currently active provider."""
        chosen = self._active_provider
        if chosen:
            return self._providers.get(chosen)
        # No explicit choice yet: fall back to the first registered provider
        # that reports itself ready, and remember it for later calls.
        for candidate, impl in self._providers.items():
            if not impl.is_available():
                continue
            self._active_provider = candidate
            return impl
        return None

    def list_providers(self) -> list[dict[str, Any]]:
        """List all registered providers with their status."""
        statuses: list[dict[str, Any]] = []
        for key, impl in self._providers.items():
            statuses.append({
                "name": key,
                "display_name": impl.name,
                "available": impl.is_available(),
                "active": key == self._active_provider,
            })
        return statuses

    def chat(
        self,
        messages: list[dict[str, str]],
        transcript_context: str = "",
        **kwargs: Any,
    ) -> str:
        """Send a chat request to the active provider.

        Automatically prepends transcript context as a system message if provided.

        Raises:
            RuntimeError: if no provider is configured or available.
        """
        target = self.get_active()
        if target is None:
            raise RuntimeError(
                "No AI provider available. Configure a provider in settings or start the local AI server."
            )
        # Assemble the outgoing message list, optionally led by a system
        # message that embeds the transcript for context.
        payload: list[dict[str, str]] = []
        if transcript_context:
            context_prompt = (
                "You are a helpful assistant analyzing a transcript. "
                "Here is the transcript for context:\n\n"
                f"{transcript_context}\n\n"
                "Answer the user's questions about this transcript. "
                "Be concise and helpful."
            )
            payload.append({"role": "system", "content": context_prompt})
        payload += messages
        print(
            f"[sidecar] AI chat via {target.name}, {len(payload)} messages",
            file=sys.stderr,
            flush=True,
        )
        return target.chat(payload, **kwargs)
def create_default_service() -> AIProviderService:
    """Create an AIProviderService with all supported providers registered."""
    # Imported lazily so merely loading this module doesn't pull in every SDK.
    from voice_to_notes.providers.anthropic_provider import AnthropicProvider
    from voice_to_notes.providers.litellm_provider import LiteLLMProvider
    from voice_to_notes.providers.local_provider import LocalProvider
    from voice_to_notes.providers.openai_provider import OpenAIProvider

    # Registration order matters: it is also the auto-selection fallback order.
    factories = (
        ("local", LocalProvider),
        ("openai", OpenAIProvider),
        ("anthropic", AnthropicProvider),
        ("litellm", LiteLLMProvider),
    )
    service = AIProviderService()
    for key, factory in factories:
        service.register_provider(key, factory())
    return service