[project]
name = "local-transcription"
version = "0.1.0"
description = "A standalone desktop application for real-time speech-to-text transcription using Whisper models"
readme = "README.md"
requires-python = ">=3.9"
license = {text = "MIT"}
authors = [
    {name = "Your Name", email = "your.email@example.com"}
]
keywords = ["transcription", "speech-to-text", "whisper", "streaming", "obs"]

dependencies = [
    "numpy>=1.24.0",
    "pyyaml>=6.0",
    "sounddevice>=0.4.6",
    "scipy>=1.10.0",
    "torch>=2.0.0",
    "PySide6>=6.6.0",
    # RealtimeSTT for advanced VAD-based transcription
    "RealtimeSTT>=0.3.0",
    # Web server (always-running for OBS integration)
    "fastapi>=0.104.0",
    "uvicorn>=0.24.0",
    "websockets>=12.0",
    # Server sync client
    "requests>=2.31.0",
]

[project.optional-dependencies]
# Kept for backwards compatibility, but server deps are now in main dependencies
server = [
    "fastapi>=0.104.0",
    "uvicorn>=0.24.0",
    "websockets>=12.0",
    "requests>=2.31.0",
]
dev = [
    "pytest>=7.4.0",
    "black>=23.0.0",
    "ruff>=0.1.0",
]
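
# Usage note (optional): the extras above can be installed with
#   pip install ".[server]"   or   uv sync --extra server
# even though the server packages are already pulled in by the main dependency list.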

[project.scripts]
local-transcription = "main:main"
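
# Example: once the package is installed into the active environment (e.g. via
# `uv sync` or `pip install -e .`), the entry point above exposes a `local-transcription`
# command that imports the `main` module and calls its `main()` function.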

[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"

[tool.hatch.build.targets.wheel]
packages = ["client", "gui"]

[dependency-groups]
dev = [
    "pyinstaller>=6.17.0",
]
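
# Note: this is a PEP 735 dependency group. With uv, the `dev` group is installed by
# default on `uv sync` (use `uv sync --no-dev` to skip it), so PyInstaller is available
# for building executables without being a runtime dependency of the application.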

# Add PyTorch CUDA index as additional source
# CUDA builds work on both GPU and CPU systems (fallback to CPU if no GPU)
# Using 'explicit = true' means only packages we explicitly specify use this index
[[tool.uv.index]]
name = "pytorch-cu121"
url = "https://download.pytorch.org/whl/cu121"
explicit = true

# Tell uv to get torch, torchvision, and torchaudio from the PyTorch CUDA index.
# All other packages come from PyPI.
[tool.uv.sources]
torch = { index = "pytorch-cu121" }
torchvision = { index = "pytorch-cu121" }
torchaudio = { index = "pytorch-cu121" }
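
# Quick check (optional): after changing the index or sources above, re-resolve and
# verify which torch build was installed:
#   uv lock && uv sync
#   uv run python -c "import torch; print(torch.__version__, torch.cuda.is_available())"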

# Override enum34 dependency to only install on Python < 3.4
# (which effectively never happens since we require Python >= 3.9).
#
# Background: RealtimeSTT depends on pvporcupine==1.9.5 (the last fully open-source
# version before 2.0+ required an access key). pvporcupine 1.9.5 depends on enum34,
# which is an obsolete Python 2.7/3.3 backport that's incompatible with PyInstaller.
#
# Because enum has been part of the Python stdlib since 3.4, and we don't use wake word
# features from pvporcupine (it's just an indirect dependency), we can safely skip enum34.
[tool.uv]
override-dependencies = [
    "enum34; python_version < '3.4'"
]
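
# The override replaces every enum34 requirement in the resolved graph with the
# marker-gated form above; with requires-python ">=3.9" the marker never matches,
# so enum34 is never installed. To confirm after syncing (optional):
#   uv pip list | grep -i enum34   # should print nothing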

[tool.ruff]
line-length = 100
target-version = "py39"

[tool.black]
line-length = 100
target-version = ["py39"]
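
# Example invocations (with the dev extra installed); both tools pick up the settings
# above from this pyproject.toml:
#   uv run ruff check .
#   uv run black .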