Files
voice-to-notes/python/voice_to_notes/providers/openai_provider.py
Josh Knapp d67625cd5a Phase 5: AI provider system with local and cloud support
- Implement AIProvider base interface with chat() and is_available()
- Add LocalProvider connecting to bundled llama-server via OpenAI SDK
- Add OpenAIProvider for direct OpenAI API access
- Add AnthropicProvider for Anthropic Claude API
- Add LiteLLMProvider for multi-provider gateway
- Build AIProviderService with provider routing, auto-selection,
  and transcript context injection
- Add ai.chat IPC handler supporting chat, list_providers, set_provider,
  and configure actions
- Add ai_chat, ai_list_providers, ai_configure Tauri commands
- Build interactive AIChatPanel with message history, quick actions
  (Summarize, Action Items), and transcript context awareness
- Tests: 30 Python, 6 Rust, 0 Svelte errors

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-26 16:25:10 -08:00

54 lines
1.5 KiB
Python

"""OpenAI provider — direct OpenAI SDK integration."""
from __future__ import annotations
import os
from typing import Any
from voice_to_notes.providers.base import AIProvider
class OpenAIProvider(AIProvider):
    """AI provider that connects directly to the OpenAI API.

    The SDK client is created lazily on first use, so constructing this
    provider never requires the ``openai`` package to be installed or an
    API key to be configured.
    """

    def __init__(
        self,
        api_key: str | None = None,
        model: str = "gpt-4o-mini",
    ) -> None:
        """Initialize the provider.

        Args:
            api_key: OpenAI API key; falls back to the ``OPENAI_API_KEY``
                environment variable when omitted or empty.
            model: Default chat model used when a call does not override it.
        """
        self._api_key = api_key or os.environ.get("OPENAI_API_KEY", "")
        self._model = model
        # Lazily-created openai.OpenAI instance; typed Any so the SDK
        # remains an optional dependency at import time.
        self._client: Any = None

    def _ensure_client(self) -> Any:
        """Create (once) and return the OpenAI SDK client.

        Raises:
            RuntimeError: If no API key is configured, or the ``openai``
                package is not installed.
        """
        if self._client is not None:
            return self._client
        if not self._api_key:
            raise RuntimeError("OpenAI API key not configured. Set OPENAI_API_KEY or provide it in settings.")
        # Keep the try body narrow: only the import should be caught here,
        # so an unrelated ImportError raised while constructing the client
        # is not mislabeled as a missing package.
        try:
            from openai import OpenAI
        except ImportError as exc:
            # Chain the original error so the root cause stays visible.
            raise RuntimeError("openai package is required. Install with: pip install openai") from exc
        self._client = OpenAI(api_key=self._api_key)
        return self._client

    def chat(self, messages: list[dict[str, str]], **kwargs: Any) -> str:
        """Send a chat completion request and return the assistant's reply.

        Args:
            messages: OpenAI-style message dicts with ``role``/``content``.
            **kwargs: Optional overrides — ``model`` (default: constructor
                model), ``temperature`` (default 0.7), ``max_tokens``
                (default 2048).

        Returns:
            The assistant message content, or ``""`` when the API returns
            no content.

        Raises:
            RuntimeError: If the client cannot be created (no key / no SDK).
        """
        client = self._ensure_client()
        response = client.chat.completions.create(
            model=kwargs.get("model", self._model),
            messages=messages,
            temperature=kwargs.get("temperature", 0.7),
            max_tokens=kwargs.get("max_tokens", 2048),
        )
        return response.choices[0].message.content or ""

    def is_available(self) -> bool:
        """Return True when an API key is configured (no network probe)."""
        return bool(self._api_key)

    @property
    def name(self) -> str:
        """Human-readable provider name."""
        return "OpenAI"