Phase 5: AI provider system with local and cloud support
- Implement AIProvider base interface with chat() and is_available() - Add LocalProvider connecting to bundled llama-server via OpenAI SDK - Add OpenAIProvider for direct OpenAI API access - Add AnthropicProvider for Anthropic Claude API - Add LiteLLMProvider for multi-provider gateway - Build AIProviderService with provider routing, auto-selection, and transcript context injection - Add ai.chat IPC handler supporting chat, list_providers, set_provider, and configure actions - Add ai_chat, ai_list_providers, ai_configure Tauri commands - Build interactive AIChatPanel with message history, quick actions (Summarize, Action Items), and transcript context awareness - Tests: 30 Python, 6 Rust, 0 Svelte errors Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
@@ -2,4 +2,68 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
# Anthropic (Claude) provider implementation.
|
||||
import os
|
||||
from typing import Any
|
||||
|
||||
from voice_to_notes.providers.base import AIProvider
|
||||
|
||||
|
||||
class AnthropicProvider(AIProvider):
    """AI provider backed by the Anthropic Messages API.

    The ``anthropic`` SDK is imported lazily and the client is created on
    first use, so the package is only required when this provider is
    actually selected.
    """

    def __init__(
        self,
        api_key: str | None = None,
        model: str = "claude-sonnet-4-6",
    ) -> None:
        """Store credentials and model; no network or SDK work happens here.

        Args:
            api_key: Anthropic API key. Falls back to the ``ANTHROPIC_API_KEY``
                environment variable when not provided.
            model: Default model identifier passed to the Messages API.
        """
        self._api_key = api_key or os.environ.get("ANTHROPIC_API_KEY", "")
        self._model = model
        # Created lazily by _ensure_client(); typed Any because the
        # anthropic package may not be importable at type-checking time.
        self._client: Any = None

    def _ensure_client(self) -> Any:
        """Return a cached ``anthropic.Anthropic`` client, creating it on first call.

        Raises:
            RuntimeError: If no API key is configured, or the ``anthropic``
                package is not installed.
        """
        if self._client is not None:
            return self._client

        if not self._api_key:
            raise RuntimeError(
                "Anthropic API key not configured. Set ANTHROPIC_API_KEY or provide it in settings."
            )

        # Keep only the import inside the try so a failure while constructing
        # the client is not misreported as a missing package.
        try:
            import anthropic
        except ImportError as err:
            # Chain the original ImportError so the install hint keeps context.
            raise RuntimeError(
                "anthropic package is required. Install with: pip install anthropic"
            ) from err

        self._client = anthropic.Anthropic(api_key=self._api_key)
        return self._client

    def chat(self, messages: list[dict[str, str]], **kwargs: Any) -> str:
        """Send an OpenAI-style message list to Anthropic and return the reply text.

        Args:
            messages: Chat history as ``{"role", "content"}`` dicts. Any
                ``"system"`` role message is lifted out and passed via the
                separate ``system`` parameter, as the Messages API requires.
            **kwargs: Optional overrides — ``model`` and ``max_tokens``
                (default 2048) are forwarded to ``messages.create``.

        Returns:
            Concatenated text of all text content blocks in the response.
        """
        client = self._ensure_client()

        # Anthropic expects the system prompt separately, not in `messages`.
        system_msg = ""
        chat_messages = []
        for msg in messages:
            if msg.get("role") == "system":
                # If several system messages appear, the last one wins.
                system_msg = msg.get("content", "")
            else:
                chat_messages.append(msg)

        create_kwargs: dict[str, Any] = {
            "model": kwargs.get("model", self._model),
            "messages": chat_messages,
            "max_tokens": kwargs.get("max_tokens", 2048),
        }
        if system_msg:
            create_kwargs["system"] = system_msg

        response = client.messages.create(**create_kwargs)
        # The response carries a list of content blocks; join only text blocks.
        return "".join(block.text for block in response.content if hasattr(block, "text"))

    def is_available(self) -> bool:
        """Return True when an API key is configured (no network check is made)."""
        return bool(self._api_key)

    @property
    def name(self) -> str:
        """Human-readable provider name shown in provider listings."""
        return "Anthropic"
|
||||
|
||||
Reference in New Issue
Block a user