Files
voice-to-notes/python/voice_to_notes/hardware/detect.py
Josh Knapp 48fe41b064 Phase 2: Core transcription pipeline and audio playback
- Implement faster-whisper TranscribeService with word-level timestamps,
  progress reporting, and hardware auto-detection
- Wire up Rust SidecarManager for Python process lifecycle (spawn, IPC, shutdown)
- Add transcribe_file Tauri command bridging frontend to Python sidecar
- Integrate wavesurfer.js WaveformPlayer with play/pause, skip, seek controls
- Build TranscriptEditor with word-level click-to-seek and active highlighting
- Connect file import flow: prompt → asset load → transcribe → display
- Add typed tauri-bridge service with TranscriptionResult interface
- Add Python tests for hardware detection and transcription result formatting

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-26 15:53:09 -08:00

76 lines
2.2 KiB
Python

"""GPU/CPU detection and VRAM estimation."""
from __future__ import annotations
import os
import sys
from dataclasses import dataclass
@dataclass
class HardwareInfo:
    """Detected hardware capabilities and recommended transcription config."""

    has_cuda: bool = False            # True when torch reports a usable CUDA device
    cuda_device_name: str = ""        # name of CUDA device 0; empty on CPU-only hosts
    vram_mb: int = 0                  # total VRAM of device 0 in MiB (0 when no CUDA)
    ram_mb: int = 0                   # total system RAM in MiB (0 if /proc/meminfo unreadable)
    cpu_cores: int = 0                # logical CPU count (at least 1 after detection)
    recommended_model: str = "base"   # faster-whisper model size to load
    recommended_device: str = "cpu"   # "cuda" or "cpu"
    recommended_compute_type: str = "int8"  # quantization passed to faster-whisper


def detect_hardware() -> HardwareInfo:
    """Detect available hardware and recommend model configuration.

    Returns:
        HardwareInfo populated with CPU core count, total RAM, CUDA
        availability/VRAM (when torch is importable), and a recommended
        faster-whisper model/device/compute-type sized to that hardware.
        Detection is best-effort: any probe that fails leaves the
        corresponding field at its conservative default.
    """
    info = HardwareInfo()

    # CPU info
    info.cpu_cores = os.cpu_count() or 1

    # RAM info -- Linux-specific; on other platforms ram_mb stays 0 and
    # the CPU recommendation falls through to the smallest model.
    try:
        with open("/proc/meminfo") as f:
            for line in f:
                if line.startswith("MemTotal:"):
                    # Value is reported in kB.
                    info.ram_mb = int(line.split()[1]) // 1024
                    break
    except (OSError, ValueError, IndexError):
        # Unreadable or malformed meminfo: keep the 0 default rather than crash.
        pass

    # CUDA detection -- torch is an optional dependency of the sidecar.
    try:
        import torch

        if torch.cuda.is_available():
            info.has_cuda = True
            info.cuda_device_name = torch.cuda.get_device_name(0)
            # Fix: the device-properties attribute is `total_memory`
            # (`total_mem` does not exist and raised AttributeError on
            # every CUDA machine, breaking GPU detection entirely).
            props = torch.cuda.get_device_properties(0)
            info.vram_mb = props.total_memory // (1024 * 1024)
    except ImportError:
        print("[sidecar] torch not available, GPU detection skipped", file=sys.stderr, flush=True)

    # Model recommendation: prefer GPU when there is enough VRAM,
    # otherwise scale the CPU model with available RAM.
    if info.has_cuda and info.vram_mb >= 8000:
        info.recommended_model = "large-v3-turbo"
        info.recommended_device = "cuda"
        info.recommended_compute_type = "int8"
    elif info.has_cuda and info.vram_mb >= 4000:
        info.recommended_model = "medium"
        info.recommended_device = "cuda"
        info.recommended_compute_type = "int8"
    elif info.ram_mb >= 16000:
        info.recommended_model = "medium"
        info.recommended_device = "cpu"
        info.recommended_compute_type = "int8"
    elif info.ram_mb >= 8000:
        info.recommended_model = "small"
        info.recommended_device = "cpu"
        info.recommended_compute_type = "int8"
    else:
        # Minimal fallback for low-spec machines.
        info.recommended_model = "base"
        info.recommended_device = "cpu"
        info.recommended_compute_type = "int8"
    return info