@echo off
REM Build script for Windows with CUDA support.
REM Produces a PyInstaller bundle that runs on systems with or without an
REM NVIDIA GPU (CUDA runtime is bundled; falls back to CPU at runtime).

echo Building Local Transcription with CUDA support...
echo ==================================================
echo.
echo This will create a build that supports both CPU and CUDA GPUs.
echo The executable will be larger (~2-3GB) but will work on any system.
echo.

set /p INSTALL_CUDA="Install PyTorch with CUDA support? (y/n) "
if /i "%INSTALL_CUDA%"=="y" (
    echo Installing PyTorch with CUDA 12.1 support...
    REM Uninstall any CPU-only builds first. torchvision/torchaudio must be
    REM removed along with torch, otherwise stale CPU-built wheels can remain
    REM and mismatch the CUDA-enabled torch installed below.
    REM Note: uv doesn't support -y flag, it uninstalls without confirmation
    uv pip uninstall torch torchvision torchaudio 2>nul
    REM Install CUDA-enabled PyTorch with the bundled CUDA 12.1 runtime.
    uv pip install torch torchvision torchaudio --index-url https://download.pytorch.org/whl/cu121
    echo CUDA-enabled PyTorch installed
    echo.
)

REM Clean previous builds so stale artifacts can't mask a failed build
echo Cleaning previous builds...
if exist build rmdir /s /q build
if exist dist rmdir /s /q dist

REM Remove enum34 if present (incompatible with PyInstaller)
echo Removing enum34 (if present)...
uv pip uninstall -q enum34 2>nul

REM Build with PyInstaller
echo Running PyInstaller...
uv run pyinstaller local-transcription.spec

REM Check for the executable itself, not just the output folder: a failed
REM PyInstaller run can leave a partial dist\LocalTranscription directory
REM behind, which would falsely report success.
if exist "dist\LocalTranscription\LocalTranscription.exe" (
    echo.
    echo Build successful!
    echo Executable location: dist\LocalTranscription\LocalTranscription.exe
    echo.
    echo CUDA Support: YES (falls back to CPU if CUDA not available^)
    echo.
    echo To run the application:
    echo   cd dist\LocalTranscription
    echo   LocalTranscription.exe
    echo.
    echo To create a distributable package:
    echo   - Compress the dist\LocalTranscription folder to a ZIP file
    echo   - Name it: LocalTranscription-Windows-CUDA.zip
    echo.
    echo Note: This build will work on systems with or without NVIDIA GPUs.
) else (
    echo.
    echo Build failed!
    exit /b 1
)