Cross-platform distribution, UI improvements, and performance optimizations

- PyInstaller frozen sidecar: spec file, build script, and ffmpeg path resolver
  for self-contained distribution without Python prerequisites
- Dual-mode sidecar launcher: frozen binary in production, with a dev-mode fallback
- Parallel transcription + diarization pipeline (~30-40% faster)
- GPU auto-detection for diarization (CUDA when available)
- Async run_pipeline command for real-time progress event delivery
- Web Audio API backend for instant playback and seeking
- OpenAI-compatible provider replacing LiteLLM client-side routing
- Cross-platform RAM detection (Linux/macOS/Windows)
- Settings: speaker count hint, token reveal toggles, dark dropdown styling
- Loading splash screen; flexbox layout fix for viewport overflow
- Gitea Actions CI/CD pipeline (Linux, Windows, macOS ARM)
- Updated README and CLAUDE.md documentation

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
Claude
2026-03-20 21:33:43 -07:00
parent 42ccd3e21d
commit 58faa83cb3
27 changed files with 1301 additions and 283 deletions

View File

@@ -73,7 +73,7 @@ pub fn download_diarize_model(hf_token: String) -> Result<Value, String> {
/// Run the full transcription + diarization pipeline via the Python sidecar.
#[tauri::command]
pub fn run_pipeline(
pub async fn run_pipeline(
app: AppHandle,
file_path: String,
model: Option<String>,
@@ -106,25 +106,34 @@ pub fn run_pipeline(
}),
);
let response = manager.send_and_receive_with_progress(&msg, |msg| {
let event_name = match msg.msg_type.as_str() {
"pipeline.segment" => "pipeline-segment",
"pipeline.speaker_update" => "pipeline-speaker-update",
_ => "pipeline-progress",
};
let _ = app.emit(event_name, &msg.payload);
})?;
// Run the blocking sidecar I/O on a separate thread so the async runtime
// can deliver emitted events to the webview while processing is ongoing.
let app_handle = app.clone();
tauri::async_runtime::spawn_blocking(move || {
let response = manager.send_and_receive_with_progress(&msg, |msg| {
let event_name = match msg.msg_type.as_str() {
"pipeline.segment" => "pipeline-segment",
"pipeline.speaker_update" => "pipeline-speaker-update",
_ => "pipeline-progress",
};
if let Err(e) = app_handle.emit(event_name, &msg.payload) {
eprintln!("[sidecar-rs] Failed to emit {event_name}: {e}");
}
})?;
if response.msg_type == "error" {
return Err(format!(
"Pipeline error: {}",
response
.payload
.get("message")
.and_then(|v| v.as_str())
.unwrap_or("unknown")
));
}
if response.msg_type == "error" {
return Err(format!(
"Pipeline error: {}",
response
.payload
.get("message")
.and_then(|v| v.as_str())
.unwrap_or("unknown")
));
}
Ok(response.payload)
Ok(response.payload)
})
.await
.map_err(|e| format!("Pipeline task failed: {e}"))?
}