- Implement AIProvider base interface with chat() and is_available()
- Add LocalProvider connecting to bundled llama-server via OpenAI SDK
- Add OpenAIProvider for direct OpenAI API access
- Add AnthropicProvider for Anthropic Claude API
- Add LiteLLMProvider for multi-provider gateway
- Build AIProviderService with provider routing, auto-selection, and transcript context injection
- Add ai.chat IPC handler supporting chat, list_providers, set_provider, and configure actions
- Add ai_chat, ai_list_providers, ai_configure Tauri commands
- Build interactive AIChatPanel with message history, quick actions (Summarize, Action Items), and transcript context awareness
- Tests: 30 Python, 6 Rust, 0 Svelte errors

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
27 lines
645 B
Python
27 lines
645 B
Python
"""Abstract base class for AI providers."""
|
|
|
|
from __future__ import annotations
|
|
|
|
from abc import ABC, abstractmethod
|
|
from typing import Any
|
|
|
|
|
|
class AIProvider(ABC):
    """Base interface for all AI providers.

    Concrete providers (local llama-server, OpenAI, Anthropic, LiteLLM, ...)
    implement this interface so the rest of the application can route chat
    requests without knowing which backend is in use.
    """

    @abstractmethod
    def chat(self, messages: list[dict[str, str]], **kwargs: Any) -> str:
        """Send a chat completion request and return the full response text.

        Args:
            messages: Conversation history as a list of dicts; by convention
                each dict carries ``role``/``content`` keys (OpenAI-style) —
                the exact schema is defined by the concrete provider.
            **kwargs: Provider-specific options passed through unchanged
                (e.g. model name, temperature).

        Returns:
            The complete assistant response as a single string (no streaming).
        """
        ...

    @abstractmethod
    def is_available(self) -> bool:
        """Check if this provider is configured and available.

        Returns:
            True when the provider can accept ``chat`` calls (e.g. its API
            key is set or its local server is reachable), False otherwise.
        """
        ...

    @property
    @abstractmethod
    def name(self) -> str:
        """Provider display name (human-readable, e.g. for UI listings)."""
        ...