- Implement LlamaManager in Rust for llama-server lifecycle: spawn with port allocation, health check, clean shutdown on Drop, model listing (a rough sketch of this pattern follows the message below)
- Add llama_start/stop/status/list_models Tauri commands
- Add load_settings/save_settings commands with JSON persistence
- Build SettingsModal with tabs for Transcription, AI Provider, and Local AI settings (model size, device, language, API keys, provider selection)
- Wire settings into pipeline calls (model, device, language, skip diarization)
- Configure Tauri packaging: asset protocol for local audio files, CSP policy, bundle metadata, Linux .deb/.AppImage and Windows .msi config
- Add keyboard shortcuts: Space (play/pause), Ctrl+O (import), Ctrl+, (settings), Escape (close menus/modals)
- Close export dropdown on outside click
- Tests: 30 Python, 6 Rust, 0 Svelte errors

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
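The LlamaManager itself is not part of the file shown below. As a rough illustration of the lifecycle described in the first item (pick a free port at spawn, wait for readiness, kill the server when dropped), here is a minimal sketch; the type name, the TCP-based readiness probe, and the surrounding structure are assumptions for illustration, not the project's actual implementation. The `--model` and `--port` flags are standard llama-server options.

// Hedged sketch only: spawn llama-server on an OS-assigned port and kill it on Drop.
use std::net::{TcpListener, TcpStream};
use std::process::{Child, Command};
use std::thread::sleep;
use std::time::Duration;

struct LlamaManagerSketch {
    child: Child,
    port: u16,
}

impl LlamaManagerSketch {
    /// Ask the OS for a free port, then spawn llama-server bound to it.
    /// (The listener is dropped before spawning, so a small race window remains.)
    fn spawn(model_path: &str) -> std::io::Result<Self> {
        let port = TcpListener::bind("127.0.0.1:0")?.local_addr()?.port();
        let child = Command::new("llama-server")
            .args(["--model", model_path, "--port", &port.to_string()])
            .spawn()?;
        Ok(Self { child, port })
    }

    /// Simplified health check: poll until the server accepts a TCP connection.
    fn wait_until_ready(&self) -> bool {
        for _ in 0..50 {
            if TcpStream::connect(("127.0.0.1", self.port)).is_ok() {
                return true;
            }
            sleep(Duration::from_millis(200));
        }
        false
    }
}

impl Drop for LlamaManagerSketch {
    // Clean shutdown on Drop: terminate and reap the llama-server child.
    fn drop(&mut self) {
        let _ = self.child.kill();
        let _ = self.child.wait();
    }
}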
92 lines · 2.5 KiB · Rust
use serde_json::{json, Value};

use crate::sidecar::messages::IPCMessage;
use crate::sidecar::SidecarManager;

/// Start a Python sidecar for a single request, resolving the bundled
/// `../python` directory relative to the current working directory.
fn get_sidecar() -> Result<SidecarManager, String> {
    let python_path = std::env::current_dir()
        .map_err(|e| e.to_string())?
        .join("../python")
        .canonicalize()
        .map_err(|e| format!("Cannot find python directory: {e}"))?;

    let manager = SidecarManager::new();
    manager.start(&python_path.to_string_lossy())?;
    Ok(manager)
}

/// Send a chat message to the AI provider via the Python sidecar.
#[tauri::command]
pub fn ai_chat(
    messages: Value,
    transcript_context: Option<String>,
    provider: Option<String>,
) -> Result<Value, String> {
    let manager = get_sidecar()?;

    let request_id = uuid::Uuid::new_v4().to_string();
    let payload = json!({
        "action": "chat",
        "messages": messages,
        "transcript_context": transcript_context.unwrap_or_default(),
    });

    // If a specific provider is requested, set it first
    if let Some(p) = provider {
        let set_msg = IPCMessage::new(
            &uuid::Uuid::new_v4().to_string(),
            "ai.chat",
            json!({ "action": "set_provider", "provider": p }),
        );
        let _ = manager.send_and_receive(&set_msg)?;
    }

    let msg = IPCMessage::new(&request_id, "ai.chat", payload);
    let response = manager.send_and_receive(&msg)?;

    if response.msg_type == "error" {
        return Err(format!(
            "AI error: {}",
            response.payload.get("message").and_then(|v| v.as_str()).unwrap_or("unknown")
        ));
    }

    Ok(response.payload)
}

/// List available AI providers.
#[tauri::command]
pub fn ai_list_providers() -> Result<Value, String> {
    let manager = get_sidecar()?;

    let request_id = uuid::Uuid::new_v4().to_string();
    let msg = IPCMessage::new(
        &request_id,
        "ai.chat",
        json!({ "action": "list_providers" }),
    );

    let response = manager.send_and_receive(&msg)?;
    Ok(response.payload)
}

/// Configure an AI provider with API key/settings.
#[tauri::command]
pub fn ai_configure(provider: String, config: Value) -> Result<Value, String> {
    let manager = get_sidecar()?;

    let request_id = uuid::Uuid::new_v4().to_string();
    let msg = IPCMessage::new(
        &request_id,
        "ai.chat",
        json!({
            "action": "configure",
            "provider": provider,
            "config": config,
        }),
    );

    let response = manager.send_and_receive(&msg)?;
    Ok(response.payload)
}
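For context, commands like these are normally exposed to the frontend through the Tauri builder in main.rs. The snippet below is a hedged sketch of that wiring under the usual Tauri pattern; only the three command names come from this file, and the assumption that they are in scope (e.g. via a `use` of this module) is mine.

// Hypothetical registration sketch, not taken from the repository.
fn main() {
    tauri::Builder::default()
        .invoke_handler(tauri::generate_handler![ai_chat, ai_list_providers, ai_configure])
        .run(tauri::generate_context!())
        .expect("error while running tauri application");
}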