Rewrite frontend to Tauri v2 + Svelte 5 for cross-platform support #4
414
.gitea/workflows/build-sidecar.yml
Normal file
414
.gitea/workflows/build-sidecar.yml
Normal file
@@ -0,0 +1,414 @@
|
|||||||
|
name: Build Sidecars
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches: [main]
|
||||||
|
paths:
|
||||||
|
- 'client/**'
|
||||||
|
- 'server/**'
|
||||||
|
- 'backend/**'
|
||||||
|
- 'pyproject.toml'
|
||||||
|
- 'local-transcription-headless.spec'
|
||||||
|
workflow_dispatch:
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
bump-sidecar-version:
|
||||||
|
name: Bump sidecar version and tag
|
||||||
|
if: "!contains(github.event.head_commit.message, '[skip ci]')"
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
outputs:
|
||||||
|
version: ${{ steps.bump.outputs.version }}
|
||||||
|
tag: ${{ steps.bump.outputs.tag }}
|
||||||
|
has_changes: ${{ steps.check_changes.outputs.has_changes }}
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
fetch-depth: 2
|
||||||
|
|
||||||
|
- name: Check for backend changes
|
||||||
|
id: check_changes
|
||||||
|
run: |
|
||||||
|
# If triggered by workflow_dispatch, always build
|
||||||
|
if [ "${{ github.event_name }}" = "workflow_dispatch" ]; then
|
||||||
|
echo "has_changes=true" >> $GITHUB_OUTPUT
|
||||||
|
exit 0
|
||||||
|
fi
|
||||||
|
# Check if relevant files changed in this commit
|
||||||
|
CHANGED=$(git diff --name-only HEAD~1 HEAD -- client/ server/ backend/ pyproject.toml local-transcription-headless.spec 2>/dev/null || echo "")
|
||||||
|
if [ -n "$CHANGED" ]; then
|
||||||
|
echo "has_changes=true" >> $GITHUB_OUTPUT
|
||||||
|
echo "Backend changes detected: $CHANGED"
|
||||||
|
else
|
||||||
|
echo "has_changes=false" >> $GITHUB_OUTPUT
|
||||||
|
echo "No backend changes detected, skipping sidecar build"
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Configure git
|
||||||
|
if: steps.check_changes.outputs.has_changes == 'true'
|
||||||
|
run: |
|
||||||
|
git config user.name "Gitea Actions"
|
||||||
|
git config user.email "actions@gitea.local"
|
||||||
|
|
||||||
|
- name: Bump sidecar patch version
|
||||||
|
if: steps.check_changes.outputs.has_changes == 'true'
|
||||||
|
id: bump
|
||||||
|
run: |
|
||||||
|
# Read current version from pyproject.toml
|
||||||
|
CURRENT=$(grep '^version = ' pyproject.toml | head -1 | sed 's/version = "\(.*\)"/\1/')
|
||||||
|
echo "Current sidecar version: ${CURRENT}"
|
||||||
|
|
||||||
|
# Increment patch number
|
||||||
|
MAJOR=$(echo "${CURRENT}" | cut -d. -f1)
|
||||||
|
MINOR=$(echo "${CURRENT}" | cut -d. -f2)
|
||||||
|
PATCH=$(echo "${CURRENT}" | cut -d. -f3)
|
||||||
|
NEW_PATCH=$((PATCH + 1))
|
||||||
|
NEW_VERSION="${MAJOR}.${MINOR}.${NEW_PATCH}"
|
||||||
|
echo "New sidecar version: ${NEW_VERSION}"
|
||||||
|
|
||||||
|
# Update pyproject.toml
|
||||||
|
sed -i "s/^version = \"${CURRENT}\"/version = \"${NEW_VERSION}\"/" pyproject.toml
|
||||||
|
|
||||||
|
# Update version.py
|
||||||
|
sed -i "s/__version__ = \"${CURRENT}\"/__version__ = \"${NEW_VERSION}\"/" version.py
|
||||||
|
sed -i "s/__version_info__ = .*/__version_info__ = (${MAJOR}, ${MINOR}, ${NEW_PATCH})/" version.py
|
||||||
|
|
||||||
|
echo "version=${NEW_VERSION}" >> $GITHUB_OUTPUT
|
||||||
|
echo "tag=sidecar-v${NEW_VERSION}" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
|
- name: Commit and tag
|
||||||
|
if: steps.check_changes.outputs.has_changes == 'true'
|
||||||
|
env:
|
||||||
|
BUILD_TOKEN: ${{ secrets.BUILD_TOKEN }}
|
||||||
|
run: |
|
||||||
|
NEW_VERSION="${{ steps.bump.outputs.version }}"
|
||||||
|
TAG="${{ steps.bump.outputs.tag }}"
|
||||||
|
git add pyproject.toml version.py
|
||||||
|
git commit -m "chore: bump sidecar version to ${NEW_VERSION} [skip ci]"
|
||||||
|
git tag "${TAG}"
|
||||||
|
|
||||||
|
REMOTE_URL=$(git remote get-url origin | sed "s|://|://gitea-actions:${BUILD_TOKEN}@|")
|
||||||
|
git pull --rebase "${REMOTE_URL}" main || true
|
||||||
|
git push "${REMOTE_URL}" HEAD:main
|
||||||
|
git push "${REMOTE_URL}" "${TAG}"
|
||||||
|
|
||||||
|
- name: Create Gitea release
|
||||||
|
if: steps.check_changes.outputs.has_changes == 'true'
|
||||||
|
env:
|
||||||
|
BUILD_TOKEN: ${{ secrets.BUILD_TOKEN }}
|
||||||
|
run: |
|
||||||
|
REPO_API="${GITHUB_SERVER_URL}/api/v1/repos/${GITHUB_REPOSITORY}"
|
||||||
|
TAG="${{ steps.bump.outputs.tag }}"
|
||||||
|
VERSION="${{ steps.bump.outputs.version }}"
|
||||||
|
RELEASE_NAME="Sidecar v${VERSION}"
|
||||||
|
|
||||||
|
curl -s -X POST \
|
||||||
|
-H "Authorization: token ${BUILD_TOKEN}" \
|
||||||
|
-H "Content-Type: application/json" \
|
||||||
|
-d "{\"tag_name\": \"${TAG}\", \"name\": \"${RELEASE_NAME}\", \"body\": \"Automated sidecar build.\", \"draft\": false, \"prerelease\": false}" \
|
||||||
|
"${REPO_API}/releases"
|
||||||
|
echo "Created release: ${RELEASE_NAME}"
|
||||||
|
|
||||||
|
# ── Linux sidecar (CUDA + CPU) ──
|
||||||
|
|
||||||
|
build-sidecar-linux:
|
||||||
|
name: Build Sidecar (Linux)
|
||||||
|
needs: bump-sidecar-version
|
||||||
|
if: needs.bump-sidecar-version.outputs.has_changes == 'true'
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
env:
|
||||||
|
PYTHON_VERSION: "3.11"
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
ref: ${{ needs.bump-sidecar-version.outputs.tag }}
|
||||||
|
|
||||||
|
- name: Install uv
|
||||||
|
run: |
|
||||||
|
if command -v uv &> /dev/null; then
|
||||||
|
echo "uv already installed: $(uv --version)"
|
||||||
|
else
|
||||||
|
curl -LsSf https://astral.sh/uv/install.sh | sh
|
||||||
|
echo "$HOME/.local/bin" >> $GITHUB_PATH
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Set up Python
|
||||||
|
run: uv python install ${{ env.PYTHON_VERSION }}
|
||||||
|
|
||||||
|
- name: Install system dependencies
|
||||||
|
run: |
|
||||||
|
sudo apt-get update
|
||||||
|
sudo apt-get install -y portaudio19-dev
|
||||||
|
|
||||||
|
- name: Build sidecar (CUDA)
|
||||||
|
run: |
|
||||||
|
uv sync
|
||||||
|
uv run pyinstaller local-transcription-headless.spec
|
||||||
|
|
||||||
|
- name: Package sidecar (CUDA)
|
||||||
|
run: |
|
||||||
|
cd dist/local-transcription-backend && zip -r ../../sidecar-linux-x86_64-cuda.zip .
|
||||||
|
|
||||||
|
- name: Build sidecar (CPU)
|
||||||
|
run: |
|
||||||
|
rm -rf dist/local-transcription-backend build/
|
||||||
|
# Install CPU-only PyTorch
|
||||||
|
uv pip install torch torchaudio --index-url https://download.pytorch.org/whl/cpu --force-reinstall
|
||||||
|
uv run pyinstaller local-transcription-headless.spec
|
||||||
|
|
||||||
|
- name: Package sidecar (CPU)
|
||||||
|
run: |
|
||||||
|
cd dist/local-transcription-backend && zip -r ../../sidecar-linux-x86_64-cpu.zip .
|
||||||
|
|
||||||
|
- name: Upload to sidecar release
|
||||||
|
env:
|
||||||
|
BUILD_TOKEN: ${{ secrets.BUILD_TOKEN }}
|
||||||
|
run: |
|
||||||
|
sudo apt-get install -y jq
|
||||||
|
REPO_API="${GITHUB_SERVER_URL}/api/v1/repos/${GITHUB_REPOSITORY}"
|
||||||
|
TAG="${{ needs.bump-sidecar-version.outputs.tag }}"
|
||||||
|
|
||||||
|
echo "Waiting for sidecar release ${TAG} to be available..."
|
||||||
|
for i in $(seq 1 30); do
|
||||||
|
RELEASE_JSON=$(curl -s -H "Authorization: token ${BUILD_TOKEN}" \
|
||||||
|
"${REPO_API}/releases/tags/${TAG}")
|
||||||
|
RELEASE_ID=$(echo "$RELEASE_JSON" | jq -r '.id // empty')
|
||||||
|
|
||||||
|
if [ -n "${RELEASE_ID}" ] && [ "${RELEASE_ID}" != "null" ]; then
|
||||||
|
echo "Found sidecar release: ${TAG} (ID: ${RELEASE_ID})"
|
||||||
|
break
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "Attempt ${i}/30: Release not ready yet, retrying in 10s..."
|
||||||
|
sleep 10
|
||||||
|
done
|
||||||
|
|
||||||
|
if [ -z "${RELEASE_ID}" ] || [ "${RELEASE_ID}" = "null" ]; then
|
||||||
|
echo "ERROR: Failed to find sidecar release for tag ${TAG} after 30 attempts."
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
for file in sidecar-*.zip; do
|
||||||
|
filename=$(basename "$file")
|
||||||
|
encoded_name=$(echo "$filename" | sed 's/ /%20/g')
|
||||||
|
echo "Uploading ${filename} ($(du -h "$file" | cut -f1))..."
|
||||||
|
|
||||||
|
ASSET_ID=$(curl -s -H "Authorization: token ${BUILD_TOKEN}" \
|
||||||
|
"${REPO_API}/releases/${RELEASE_ID}/assets" | jq -r ".[] | select(.name == \"${filename}\") | .id // empty")
|
||||||
|
if [ -n "${ASSET_ID}" ]; then
|
||||||
|
curl -s -X DELETE -H "Authorization: token ${BUILD_TOKEN}" \
|
||||||
|
"${REPO_API}/releases/${RELEASE_ID}/assets/${ASSET_ID}"
|
||||||
|
fi
|
||||||
|
|
||||||
|
HTTP_CODE=$(curl -s -o /dev/null -w "%{http_code}" -X POST \
|
||||||
|
-H "Authorization: token ${BUILD_TOKEN}" \
|
||||||
|
-H "Content-Type: application/octet-stream" \
|
||||||
|
-T "$file" \
|
||||||
|
"${REPO_API}/releases/${RELEASE_ID}/assets?name=${encoded_name}")
|
||||||
|
echo "Upload response: HTTP ${HTTP_CODE}"
|
||||||
|
done
|
||||||
|
|
||||||
|
# ── Windows sidecar (CUDA + CPU) ──
|
||||||
|
|
||||||
|
build-sidecar-windows:
|
||||||
|
name: Build Sidecar (Windows)
|
||||||
|
needs: bump-sidecar-version
|
||||||
|
if: needs.bump-sidecar-version.outputs.has_changes == 'true'
|
||||||
|
runs-on: windows-latest
|
||||||
|
env:
|
||||||
|
PYTHON_VERSION: "3.11"
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
ref: ${{ needs.bump-sidecar-version.outputs.tag }}
|
||||||
|
|
||||||
|
- name: Install uv
|
||||||
|
shell: powershell
|
||||||
|
run: |
|
||||||
|
if (Get-Command uv -ErrorAction SilentlyContinue) {
|
||||||
|
Write-Host "uv already installed: $(uv --version)"
|
||||||
|
} else {
|
||||||
|
irm https://astral.sh/uv/install.ps1 | iex
|
||||||
|
echo "$env:USERPROFILE\.local\bin" | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Append
|
||||||
|
}
|
||||||
|
|
||||||
|
- name: Set up Python
|
||||||
|
shell: powershell
|
||||||
|
run: uv python install ${{ env.PYTHON_VERSION }}
|
||||||
|
|
||||||
|
- name: Install 7-Zip
|
||||||
|
shell: powershell
|
||||||
|
run: |
|
||||||
|
if (-not (Get-Command 7z -ErrorAction SilentlyContinue)) {
|
||||||
|
choco install 7zip -y
|
||||||
|
}
|
||||||
|
|
||||||
|
- name: Build sidecar (CUDA)
|
||||||
|
shell: powershell
|
||||||
|
run: |
|
||||||
|
uv sync
|
||||||
|
uv run pyinstaller local-transcription-headless.spec
|
||||||
|
|
||||||
|
- name: Package sidecar (CUDA)
|
||||||
|
shell: powershell
|
||||||
|
run: |
|
||||||
|
7z a -tzip -mx=5 sidecar-windows-x86_64-cuda.zip .\dist\local-transcription-backend\*
|
||||||
|
|
||||||
|
- name: Build sidecar (CPU)
|
||||||
|
shell: powershell
|
||||||
|
run: |
|
||||||
|
Remove-Item -Recurse -Force dist\local-transcription-backend, build -ErrorAction SilentlyContinue
|
||||||
|
uv pip install torch torchaudio --index-url https://download.pytorch.org/whl/cpu --force-reinstall
|
||||||
|
uv run pyinstaller local-transcription-headless.spec
|
||||||
|
|
||||||
|
- name: Package sidecar (CPU)
|
||||||
|
shell: powershell
|
||||||
|
run: |
|
||||||
|
7z a -tzip -mx=5 sidecar-windows-x86_64-cpu.zip .\dist\local-transcription-backend\*
|
||||||
|
|
||||||
|
- name: Upload to sidecar release
|
||||||
|
shell: powershell
|
||||||
|
env:
|
||||||
|
BUILD_TOKEN: ${{ secrets.BUILD_TOKEN }}
|
||||||
|
run: |
|
||||||
|
$REPO_API = "${{ github.server_url }}/api/v1/repos/${{ github.repository }}"
|
||||||
|
$Headers = @{ "Authorization" = "token $env:BUILD_TOKEN" }
|
||||||
|
$TAG = "${{ needs.bump-sidecar-version.outputs.tag }}"
|
||||||
|
|
||||||
|
Write-Host "Waiting for sidecar release ${TAG} to be available..."
|
||||||
|
$RELEASE_ID = $null
|
||||||
|
|
||||||
|
for ($i = 1; $i -le 30; $i++) {
|
||||||
|
try {
|
||||||
|
$release = Invoke-RestMethod -Uri "${REPO_API}/releases/tags/${TAG}" -Headers $Headers -ErrorAction Stop
|
||||||
|
$RELEASE_ID = $release.id
|
||||||
|
|
||||||
|
if ($RELEASE_ID) {
|
||||||
|
Write-Host "Found sidecar release: ${TAG} (ID: ${RELEASE_ID})"
|
||||||
|
break
|
||||||
|
}
|
||||||
|
} catch {}
|
||||||
|
|
||||||
|
Write-Host "Attempt ${i}/30: Release not ready yet, retrying in 10s..."
|
||||||
|
Start-Sleep -Seconds 10
|
||||||
|
}
|
||||||
|
|
||||||
|
if (-not $RELEASE_ID) {
|
||||||
|
Write-Host "ERROR: Failed to find sidecar release for tag ${TAG} after 30 attempts."
|
||||||
|
exit 1
|
||||||
|
}
|
||||||
|
|
||||||
|
Get-ChildItem -Path . -Filter "sidecar-*.zip" | ForEach-Object {
|
||||||
|
$filename = $_.Name
|
||||||
|
$encodedName = [System.Uri]::EscapeDataString($filename)
|
||||||
|
$size = [math]::Round($_.Length / 1MB, 1)
|
||||||
|
Write-Host "Uploading ${filename} (${size} MB)..."
|
||||||
|
|
||||||
|
try {
|
||||||
|
$assets = Invoke-RestMethod -Uri "${REPO_API}/releases/${RELEASE_ID}/assets" -Headers $Headers
|
||||||
|
$existing = $assets | Where-Object { $_.name -eq $filename }
|
||||||
|
if ($existing) {
|
||||||
|
Invoke-RestMethod -Uri "${REPO_API}/releases/${RELEASE_ID}/assets/$($existing.id)" -Method Delete -Headers $Headers
|
||||||
|
}
|
||||||
|
} catch {}
|
||||||
|
|
||||||
|
$uploadUrl = "${REPO_API}/releases/${RELEASE_ID}/assets?name=${encodedName}"
|
||||||
|
$result = curl.exe --fail --silent --show-error `
|
||||||
|
-X POST `
|
||||||
|
-H "Authorization: token $env:BUILD_TOKEN" `
|
||||||
|
-H "Content-Type: application/octet-stream" `
|
||||||
|
-T "$($_.FullName)" `
|
||||||
|
"$uploadUrl" 2>&1
|
||||||
|
if ($LASTEXITCODE -eq 0) {
|
||||||
|
Write-Host "Upload successful: ${filename}"
|
||||||
|
} else {
|
||||||
|
Write-Host "WARNING: Upload failed for ${filename}: ${result}"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
# ── macOS sidecar (CPU only — no CUDA on macOS) ──
|
||||||
|
|
||||||
|
build-sidecar-macos:
|
||||||
|
name: Build Sidecar (macOS)
|
||||||
|
needs: bump-sidecar-version
|
||||||
|
if: needs.bump-sidecar-version.outputs.has_changes == 'true'
|
||||||
|
runs-on: macos-latest
|
||||||
|
env:
|
||||||
|
PYTHON_VERSION: "3.11"
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
ref: ${{ needs.bump-sidecar-version.outputs.tag }}
|
||||||
|
|
||||||
|
- name: Install uv
|
||||||
|
run: |
|
||||||
|
if command -v uv &> /dev/null; then
|
||||||
|
echo "uv already installed: $(uv --version)"
|
||||||
|
else
|
||||||
|
curl -LsSf https://astral.sh/uv/install.sh | sh
|
||||||
|
echo "$HOME/.local/bin" >> $GITHUB_PATH
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Set up Python
|
||||||
|
run: uv python install ${{ env.PYTHON_VERSION }}
|
||||||
|
|
||||||
|
- name: Install system dependencies
|
||||||
|
run: brew install portaudio
|
||||||
|
|
||||||
|
- name: Build sidecar (CPU)
|
||||||
|
run: |
|
||||||
|
# Install CPU-only PyTorch for macOS (MPS support included in default torch)
|
||||||
|
uv sync
|
||||||
|
uv pip install torch torchaudio --index-url https://download.pytorch.org/whl/cpu --force-reinstall
|
||||||
|
uv run pyinstaller local-transcription-headless.spec
|
||||||
|
|
||||||
|
- name: Package sidecar (CPU)
|
||||||
|
run: |
|
||||||
|
cd dist/local-transcription-backend && zip -r ../../sidecar-macos-aarch64-cpu.zip .
|
||||||
|
|
||||||
|
- name: Upload to sidecar release
|
||||||
|
env:
|
||||||
|
BUILD_TOKEN: ${{ secrets.BUILD_TOKEN }}
|
||||||
|
run: |
|
||||||
|
which jq || brew install jq
|
||||||
|
REPO_API="${GITHUB_SERVER_URL}/api/v1/repos/${GITHUB_REPOSITORY}"
|
||||||
|
TAG="${{ needs.bump-sidecar-version.outputs.tag }}"
|
||||||
|
|
||||||
|
echo "Waiting for sidecar release ${TAG} to be available..."
|
||||||
|
for i in $(seq 1 30); do
|
||||||
|
RELEASE_JSON=$(curl -s -H "Authorization: token ${BUILD_TOKEN}" \
|
||||||
|
"${REPO_API}/releases/tags/${TAG}")
|
||||||
|
RELEASE_ID=$(echo "$RELEASE_JSON" | jq -r '.id // empty')
|
||||||
|
|
||||||
|
if [ -n "${RELEASE_ID}" ] && [ "${RELEASE_ID}" != "null" ]; then
|
||||||
|
echo "Found sidecar release: ${TAG} (ID: ${RELEASE_ID})"
|
||||||
|
break
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "Attempt ${i}/30: Release not ready yet, retrying in 10s..."
|
||||||
|
sleep 10
|
||||||
|
done
|
||||||
|
|
||||||
|
if [ -z "${RELEASE_ID}" ] || [ "${RELEASE_ID}" = "null" ]; then
|
||||||
|
echo "ERROR: Failed to find sidecar release for tag ${TAG} after 30 attempts."
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
for file in sidecar-*.zip; do
|
||||||
|
filename=$(basename "$file")
|
||||||
|
encoded_name=$(echo "$filename" | sed 's/ /%20/g')
|
||||||
|
echo "Uploading ${filename} ($(du -h "$file" | cut -f1))..."
|
||||||
|
|
||||||
|
ASSET_ID=$(curl -s -H "Authorization: token ${BUILD_TOKEN}" \
|
||||||
|
"${REPO_API}/releases/${RELEASE_ID}/assets" | jq -r ".[] | select(.name == \"${filename}\") | .id // empty")
|
||||||
|
if [ -n "${ASSET_ID}" ]; then
|
||||||
|
curl -s -X DELETE -H "Authorization: token ${BUILD_TOKEN}" \
|
||||||
|
"${REPO_API}/releases/${RELEASE_ID}/assets/${ASSET_ID}"
|
||||||
|
fi
|
||||||
|
|
||||||
|
HTTP_CODE=$(curl -s -o /dev/null -w "%{http_code}" -X POST \
|
||||||
|
-H "Authorization: token ${BUILD_TOKEN}" \
|
||||||
|
-H "Content-Type: application/octet-stream" \
|
||||||
|
-T "$file" \
|
||||||
|
"${REPO_API}/releases/${RELEASE_ID}/assets?name=${encoded_name}")
|
||||||
|
echo "Upload response: HTTP ${HTTP_CODE}"
|
||||||
|
done
|
||||||
300
.gitea/workflows/release.yml
Normal file
300
.gitea/workflows/release.yml
Normal file
@@ -0,0 +1,300 @@
|
|||||||
|
name: Release
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches: [main]
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
bump-version:
|
||||||
|
name: Bump version and tag
|
||||||
|
if: "!contains(github.event.head_commit.message, '[skip ci]')"
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
outputs:
|
||||||
|
new_version: ${{ steps.bump.outputs.new_version }}
|
||||||
|
tag: ${{ steps.bump.outputs.tag }}
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
|
||||||
|
- name: Configure git
|
||||||
|
run: |
|
||||||
|
git config user.name "Gitea Actions"
|
||||||
|
git config user.email "actions@gitea.local"
|
||||||
|
|
||||||
|
- name: Bump patch version
|
||||||
|
id: bump
|
||||||
|
run: |
|
||||||
|
# Read current version from package.json
|
||||||
|
CURRENT=$(grep '"version"' package.json | head -1 | sed 's/.*"version": *"\([^"]*\)".*/\1/')
|
||||||
|
echo "Current version: ${CURRENT}"
|
||||||
|
|
||||||
|
# Increment patch number
|
||||||
|
MAJOR=$(echo "${CURRENT}" | cut -d. -f1)
|
||||||
|
MINOR=$(echo "${CURRENT}" | cut -d. -f2)
|
||||||
|
PATCH=$(echo "${CURRENT}" | cut -d. -f3)
|
||||||
|
NEW_PATCH=$((PATCH + 1))
|
||||||
|
NEW_VERSION="${MAJOR}.${MINOR}.${NEW_PATCH}"
|
||||||
|
echo "New version: ${NEW_VERSION}"
|
||||||
|
|
||||||
|
# Update package.json
|
||||||
|
sed -i "s/\"version\": \"${CURRENT}\"/\"version\": \"${NEW_VERSION}\"/" package.json
|
||||||
|
|
||||||
|
# Update src-tauri/tauri.conf.json
|
||||||
|
sed -i "s/\"version\": \"${CURRENT}\"/\"version\": \"${NEW_VERSION}\"/" src-tauri/tauri.conf.json
|
||||||
|
|
||||||
|
# Update src-tauri/Cargo.toml
|
||||||
|
sed -i "s/^version = \"${CURRENT}\"/version = \"${NEW_VERSION}\"/" src-tauri/Cargo.toml
|
||||||
|
|
||||||
|
# Update version.py
|
||||||
|
sed -i "s/__version__ = \"${CURRENT}\"/__version__ = \"${NEW_VERSION}\"/" version.py
|
||||||
|
sed -i "s/__version_info__ = .*/__version_info__ = (${MAJOR}, ${MINOR}, ${NEW_PATCH})/" version.py
|
||||||
|
|
||||||
|
echo "new_version=${NEW_VERSION}" >> $GITHUB_OUTPUT
|
||||||
|
echo "tag=v${NEW_VERSION}" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
|
- name: Commit and tag
|
||||||
|
env:
|
||||||
|
BUILD_TOKEN: ${{ secrets.BUILD_TOKEN }}
|
||||||
|
run: |
|
||||||
|
NEW_VERSION="${{ steps.bump.outputs.new_version }}"
|
||||||
|
git add package.json src-tauri/tauri.conf.json src-tauri/Cargo.toml version.py
|
||||||
|
git commit -m "chore: bump version to ${NEW_VERSION} [skip ci]"
|
||||||
|
git tag "v${NEW_VERSION}"
|
||||||
|
|
||||||
|
REMOTE_URL=$(git remote get-url origin | sed "s|://|://gitea-actions:${BUILD_TOKEN}@|")
|
||||||
|
git pull --rebase "${REMOTE_URL}" main || true
|
||||||
|
git push "${REMOTE_URL}" HEAD:main
|
||||||
|
git push "${REMOTE_URL}" "v${NEW_VERSION}"
|
||||||
|
|
||||||
|
- name: Create Gitea release
|
||||||
|
env:
|
||||||
|
BUILD_TOKEN: ${{ secrets.BUILD_TOKEN }}
|
||||||
|
run: |
|
||||||
|
REPO_API="${GITHUB_SERVER_URL}/api/v1/repos/${GITHUB_REPOSITORY}"
|
||||||
|
TAG="${{ steps.bump.outputs.tag }}"
|
||||||
|
RELEASE_NAME="Local Transcription ${TAG}"
|
||||||
|
|
||||||
|
curl -s -X POST \
|
||||||
|
-H "Authorization: token ${BUILD_TOKEN}" \
|
||||||
|
-H "Content-Type: application/json" \
|
||||||
|
-d "{\"tag_name\": \"${TAG}\", \"name\": \"${RELEASE_NAME}\", \"body\": \"Automated build.\", \"draft\": false, \"prerelease\": false}" \
|
||||||
|
"${REPO_API}/releases"
|
||||||
|
echo "Created release: ${RELEASE_NAME}"
|
||||||
|
|
||||||
|
# ── Platform builds (run after version bump) ──
|
||||||
|
|
||||||
|
build-linux:
|
||||||
|
name: Build App (Linux)
|
||||||
|
needs: bump-version
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
env:
|
||||||
|
NODE_VERSION: "20"
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
ref: ${{ needs.bump-version.outputs.tag }}
|
||||||
|
|
||||||
|
- name: Set up Node.js
|
||||||
|
uses: actions/setup-node@v4
|
||||||
|
with:
|
||||||
|
node-version: ${{ env.NODE_VERSION }}
|
||||||
|
|
||||||
|
- name: Install Rust stable
|
||||||
|
run: |
|
||||||
|
curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y --default-toolchain stable
|
||||||
|
echo "$HOME/.cargo/bin" >> $GITHUB_PATH
|
||||||
|
|
||||||
|
- name: Install system dependencies
|
||||||
|
run: |
|
||||||
|
sudo apt-get update
|
||||||
|
sudo apt-get install -y libgtk-3-dev libwebkit2gtk-4.1-dev libappindicator3-dev librsvg2-dev patchelf xdg-utils rpm
|
||||||
|
|
||||||
|
- name: Install npm dependencies
|
||||||
|
run: npm ci
|
||||||
|
|
||||||
|
- name: Build Tauri app
|
||||||
|
run: npm run tauri build
|
||||||
|
|
||||||
|
- name: Upload to release
|
||||||
|
env:
|
||||||
|
BUILD_TOKEN: ${{ secrets.BUILD_TOKEN }}
|
||||||
|
run: |
|
||||||
|
sudo apt-get install -y jq
|
||||||
|
REPO_API="${GITHUB_SERVER_URL}/api/v1/repos/${GITHUB_REPOSITORY}"
|
||||||
|
TAG="${{ needs.bump-version.outputs.tag }}"
|
||||||
|
echo "Release tag: ${TAG}"
|
||||||
|
|
||||||
|
RELEASE_ID=$(curl -s -H "Authorization: token ${BUILD_TOKEN}" \
|
||||||
|
"${REPO_API}/releases/tags/${TAG}" | jq -r '.id // empty')
|
||||||
|
|
||||||
|
if [ -z "${RELEASE_ID}" ] || [ "${RELEASE_ID}" = "null" ]; then
|
||||||
|
echo "ERROR: Failed to find release for tag ${TAG}."
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
echo "Release ID: ${RELEASE_ID}"
|
||||||
|
|
||||||
|
find src-tauri/target/release/bundle -type f \( -name "*.deb" -o -name "*.rpm" -o -name "*.AppImage" \) | while IFS= read -r file; do
|
||||||
|
filename=$(basename "$file")
|
||||||
|
encoded_name=$(echo "$filename" | sed 's/ /%20/g')
|
||||||
|
echo "Uploading ${filename} ($(du -h "$file" | cut -f1))..."
|
||||||
|
|
||||||
|
ASSET_ID=$(curl -s -H "Authorization: token ${BUILD_TOKEN}" \
|
||||||
|
"${REPO_API}/releases/${RELEASE_ID}/assets" | jq -r ".[] | select(.name == \"${filename}\") | .id // empty")
|
||||||
|
if [ -n "${ASSET_ID}" ]; then
|
||||||
|
curl -s -X DELETE -H "Authorization: token ${BUILD_TOKEN}" \
|
||||||
|
"${REPO_API}/releases/${RELEASE_ID}/assets/${ASSET_ID}"
|
||||||
|
fi
|
||||||
|
|
||||||
|
HTTP_CODE=$(curl -s -o /dev/null -w "%{http_code}" -X POST \
|
||||||
|
-H "Authorization: token ${BUILD_TOKEN}" \
|
||||||
|
-H "Content-Type: application/octet-stream" \
|
||||||
|
-T "$file" \
|
||||||
|
"${REPO_API}/releases/${RELEASE_ID}/assets?name=${encoded_name}")
|
||||||
|
echo "Upload response: HTTP ${HTTP_CODE}"
|
||||||
|
done
|
||||||
|
|
||||||
|
build-windows:
|
||||||
|
name: Build App (Windows)
|
||||||
|
needs: bump-version
|
||||||
|
runs-on: windows-latest
|
||||||
|
env:
|
||||||
|
NODE_VERSION: "20"
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
ref: ${{ needs.bump-version.outputs.tag }}
|
||||||
|
|
||||||
|
- name: Set up Node.js
|
||||||
|
uses: actions/setup-node@v4
|
||||||
|
with:
|
||||||
|
node-version: ${{ env.NODE_VERSION }}
|
||||||
|
|
||||||
|
- name: Install Rust stable
|
||||||
|
shell: powershell
|
||||||
|
run: |
|
||||||
|
if (Get-Command rustup -ErrorAction SilentlyContinue) {
|
||||||
|
rustup default stable
|
||||||
|
} else {
|
||||||
|
Invoke-WebRequest -Uri https://win.rustup.rs/x86_64 -OutFile rustup-init.exe
|
||||||
|
.\rustup-init.exe -y --default-toolchain stable
|
||||||
|
echo "$env:USERPROFILE\.cargo\bin" | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Append
|
||||||
|
}
|
||||||
|
|
||||||
|
- name: Install npm dependencies
|
||||||
|
shell: powershell
|
||||||
|
run: npm ci
|
||||||
|
|
||||||
|
- name: Build Tauri app
|
||||||
|
shell: powershell
|
||||||
|
run: npm run tauri build
|
||||||
|
|
||||||
|
- name: Upload to release
|
||||||
|
shell: powershell
|
||||||
|
env:
|
||||||
|
BUILD_TOKEN: ${{ secrets.BUILD_TOKEN }}
|
||||||
|
run: |
|
||||||
|
$REPO_API = "${{ github.server_url }}/api/v1/repos/${{ github.repository }}"
|
||||||
|
$Headers = @{ "Authorization" = "token $env:BUILD_TOKEN" }
|
||||||
|
$TAG = "${{ needs.bump-version.outputs.tag }}"
|
||||||
|
Write-Host "Release tag: ${TAG}"
|
||||||
|
|
||||||
|
$release = Invoke-RestMethod -Uri "${REPO_API}/releases/tags/${TAG}" -Headers $Headers -ErrorAction Stop
|
||||||
|
$RELEASE_ID = $release.id
|
||||||
|
Write-Host "Release ID: ${RELEASE_ID}"
|
||||||
|
|
||||||
|
Get-ChildItem -Path src-tauri\target\release\bundle -Recurse -Include *.msi,*-setup.exe | ForEach-Object {
|
||||||
|
$filename = $_.Name
|
||||||
|
$encodedName = [System.Uri]::EscapeDataString($filename)
|
||||||
|
$size = [math]::Round($_.Length / 1MB, 1)
|
||||||
|
Write-Host "Uploading ${filename} (${size} MB)..."
|
||||||
|
|
||||||
|
try {
|
||||||
|
$assets = Invoke-RestMethod -Uri "${REPO_API}/releases/${RELEASE_ID}/assets" -Headers $Headers
|
||||||
|
$existing = $assets | Where-Object { $_.name -eq $filename }
|
||||||
|
if ($existing) {
|
||||||
|
Invoke-RestMethod -Uri "${REPO_API}/releases/${RELEASE_ID}/assets/$($existing.id)" -Method Delete -Headers $Headers
|
||||||
|
}
|
||||||
|
} catch {}
|
||||||
|
|
||||||
|
$uploadUrl = "${REPO_API}/releases/${RELEASE_ID}/assets?name=${encodedName}"
|
||||||
|
$result = curl.exe --fail --silent --show-error `
|
||||||
|
-X POST `
|
||||||
|
-H "Authorization: token $env:BUILD_TOKEN" `
|
||||||
|
-H "Content-Type: application/octet-stream" `
|
||||||
|
-T "$($_.FullName)" `
|
||||||
|
"$uploadUrl" 2>&1
|
||||||
|
if ($LASTEXITCODE -eq 0) {
|
||||||
|
Write-Host "Upload successful: ${filename}"
|
||||||
|
} else {
|
||||||
|
Write-Host "WARNING: Upload failed for ${filename}: ${result}"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
build-macos:
|
||||||
|
name: Build App (macOS)
|
||||||
|
needs: bump-version
|
||||||
|
runs-on: macos-latest
|
||||||
|
env:
|
||||||
|
NODE_VERSION: "20"
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
ref: ${{ needs.bump-version.outputs.tag }}
|
||||||
|
|
||||||
|
- name: Set up Node.js
|
||||||
|
uses: actions/setup-node@v4
|
||||||
|
with:
|
||||||
|
node-version: ${{ env.NODE_VERSION }}
|
||||||
|
|
||||||
|
- name: Install Rust stable
|
||||||
|
run: |
|
||||||
|
curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y --default-toolchain stable
|
||||||
|
echo "$HOME/.cargo/bin" >> $GITHUB_PATH
|
||||||
|
|
||||||
|
- name: Install system dependencies
|
||||||
|
run: brew install --quiet create-dmg || true
|
||||||
|
|
||||||
|
- name: Install npm dependencies
|
||||||
|
run: npm ci
|
||||||
|
|
||||||
|
- name: Build Tauri app
|
||||||
|
run: npm run tauri build
|
||||||
|
|
||||||
|
- name: Upload to release
|
||||||
|
env:
|
||||||
|
BUILD_TOKEN: ${{ secrets.BUILD_TOKEN }}
|
||||||
|
run: |
|
||||||
|
which jq || brew install jq
|
||||||
|
REPO_API="${GITHUB_SERVER_URL}/api/v1/repos/${GITHUB_REPOSITORY}"
|
||||||
|
TAG="${{ needs.bump-version.outputs.tag }}"
|
||||||
|
echo "Release tag: ${TAG}"
|
||||||
|
|
||||||
|
RELEASE_ID=$(curl -s -H "Authorization: token ${BUILD_TOKEN}" \
|
||||||
|
"${REPO_API}/releases/tags/${TAG}" | jq -r '.id // empty')
|
||||||
|
|
||||||
|
if [ -z "${RELEASE_ID}" ] || [ "${RELEASE_ID}" = "null" ]; then
|
||||||
|
echo "ERROR: Failed to find release for tag ${TAG}."
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
echo "Release ID: ${RELEASE_ID}"
|
||||||
|
|
||||||
|
find src-tauri/target/release/bundle -type f -name "*.dmg" | while IFS= read -r file; do
|
||||||
|
filename=$(basename "$file")
|
||||||
|
encoded_name=$(echo "$filename" | sed 's/ /%20/g')
|
||||||
|
echo "Uploading ${filename} ($(du -h "$file" | cut -f1))..."
|
||||||
|
|
||||||
|
ASSET_ID=$(curl -s -H "Authorization: token ${BUILD_TOKEN}" \
|
||||||
|
"${REPO_API}/releases/${RELEASE_ID}/assets" | jq -r ".[] | select(.name == \"${filename}\") | .id // empty")
|
||||||
|
if [ -n "${ASSET_ID}" ]; then
|
||||||
|
curl -s -X DELETE -H "Authorization: token ${BUILD_TOKEN}" \
|
||||||
|
"${REPO_API}/releases/${RELEASE_ID}/assets/${ASSET_ID}"
|
||||||
|
fi
|
||||||
|
|
||||||
|
HTTP_CODE=$(curl -s -o /dev/null -w "%{http_code}" -X POST \
|
||||||
|
-H "Authorization: token ${BUILD_TOKEN}" \
|
||||||
|
-H "Content-Type: application/octet-stream" \
|
||||||
|
-T "$file" \
|
||||||
|
"${REPO_API}/releases/${RELEASE_ID}/assets?name=${encoded_name}")
|
||||||
|
echo "Upload response: HTTP ${HTTP_CODE}"
|
||||||
|
done
|
||||||
184
local-transcription-headless.spec
Normal file
184
local-transcription-headless.spec
Normal file
@@ -0,0 +1,184 @@
|
|||||||
|
# -*- mode: python ; coding: utf-8 -*-
|
||||||
|
"""PyInstaller spec file for headless Local Transcription backend (no PySide6/Qt).
|
||||||
|
|
||||||
|
This builds the Python sidecar for the Tauri frontend.
|
||||||
|
Much simpler than local-transcription.spec since all Qt dependencies are removed.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import sys
|
||||||
|
import os
|
||||||
|
|
||||||
|
block_cipher = None
|
||||||
|
is_windows = sys.platform == 'win32'
|
||||||
|
|
||||||
|
from PyInstaller.utils.hooks import collect_submodules, collect_data_files
|
||||||
|
|
||||||
|
# Find faster_whisper assets folder
|
||||||
|
import faster_whisper
|
||||||
|
faster_whisper_path = os.path.dirname(faster_whisper.__file__)
|
||||||
|
vad_assets_path = os.path.join(faster_whisper_path, 'assets')
|
||||||
|
|
||||||
|
# pvporcupine resources (indirect dependency from RealtimeSTT). The package
# carries native 'resources' and 'lib' folders that PyInstaller will not find
# on its own; bundle whichever of them exist. If the package is not installed
# at all, contribute nothing.
pvporcupine_data_files = []
try:
    import pvporcupine
except ImportError:
    pass
else:
    _pv_root = os.path.dirname(pvporcupine.__file__)
    for _src_dir, _dest_dir in (
        (os.path.join(_pv_root, 'resources'), 'pvporcupine/resources'),
        (os.path.join(_pv_root, 'lib'), 'pvporcupine/lib'),
    ):
        if os.path.exists(_src_dir):
            pvporcupine_data_files.append((_src_dir, _dest_dir))
# Data files bundled into the frozen app: the default config, the
# faster_whisper assets folder, plus any pvporcupine resources found above.
datas = [
    ('config/default_config.yaml', 'config'),
    (vad_assets_path, 'faster_whisper/assets'),
]
datas.extend(pvporcupine_data_files)
# Hidden imports -- NO PySide6/Qt needed for the headless backend.
# Grouped by subsystem; concatenated in the original (stable) order.

# Transcription engine and audio/DSP stack.
_transcription_imports = [
    'faster_whisper',
    'faster_whisper.transcribe',
    'faster_whisper.vad',
    'ctranslate2',
    'sounddevice',
    'scipy',
    'scipy.signal',
    'numpy',
]

# RealtimeSTT and its VAD backends.
_realtimestt_imports = [
    'RealtimeSTT',
    'RealtimeSTT.audio_recorder',
    'webrtcvad',
    'webrtcvad_wheels',
    'silero_vad',
]

# PyTorch / ONNX runtime plus console helpers pulled in alongside them.
_ml_runtime_imports = [
    'torch',
    'torch.nn',
    'torch.nn.functional',
    'torchaudio',
    'onnxruntime',
    'onnxruntime.capi',
    'onnxruntime.capi.onnxruntime_pybind11_state',
    'pyaudio',
    'halo',
    'colorama',
]

# FastAPI and its dependency chain (starlette, pydantic, anyio).
_web_framework_imports = [
    'fastapi',
    'fastapi.routing',
    'fastapi.responses',
    'starlette',
    'starlette.applications',
    'starlette.routing',
    'starlette.responses',
    'starlette.websockets',
    'starlette.middleware',
    'starlette.middleware.cors',
    'pydantic',
    'pydantic.fields',
    'pydantic.main',
    'anyio',
    'anyio._backends',
    'anyio._backends._asyncio',
    'sniffio',
]

# Uvicorn server internals and websocket protocol implementations.
_server_imports = [
    'uvicorn',
    'uvicorn.logging',
    'uvicorn.loops',
    'uvicorn.loops.auto',
    'uvicorn.protocols',
    'uvicorn.protocols.http',
    'uvicorn.protocols.http.auto',
    'uvicorn.protocols.http.h11_impl',
    'uvicorn.protocols.websockets',
    'uvicorn.protocols.websockets.auto',
    'uvicorn.protocols.websockets.wsproto_impl',
    'uvicorn.lifespan',
    'uvicorn.lifespan.on',
    'h11',
    'websockets',
    'websockets.legacy',
    'websockets.legacy.server',
]

# HTTP client stack.
_http_client_imports = [
    'requests',
    'urllib3',
    'certifi',
    'charset_normalizer',
]

hiddenimports = (
    _transcription_imports
    + _realtimestt_imports
    + _ml_runtime_imports
    + _web_framework_imports
    + _server_imports
    + _http_client_imports
)
# Collect every submodule of the key web-stack packages; a failure for any
# single package is reported as a warning rather than aborting the build.
print("Collecting submodules for backend packages...")
for _pkg in ('fastapi', 'starlette', 'pydantic', 'pydantic_core', 'anyio',
             'uvicorn', 'websockets', 'h11', 'httptools', 'uvloop'):
    try:
        _found = collect_submodules(_pkg)
        hiddenimports += _found
        print(f" + Collected {len(_found)} submodules from {_pkg}")
    except Exception as e:
        print(f" - Warning: Could not collect {_pkg}: {e}")
# Collect non-code data files shipped inside these packages. Best-effort:
# a package that fails to collect simply contributes nothing.
for _pkg in ('fastapi', 'starlette', 'pydantic', 'uvicorn', 'RealtimeSTT'):
    try:
        _files = collect_data_files(_pkg)
    except Exception:
        continue
    if _files:
        datas += _files
        print(f" + Collected {len(_files)} data files from {_pkg}")
# Pydantic critical deps: stdlib helpers plus typing_extensions, listed
# explicitly so the freezer cannot miss them.
hiddenimports.extend([
    'colorsys', 'decimal', 'json', 'ipaddress', 'pathlib', 'uuid',
    'email.message', 'typing_extensions',
])
# Analyse the headless entry point. hookspath points at the project-local
# 'hooks' directory; GUI toolkits are excluded to keep the bundle small.
a = Analysis(
    ['backend/main_headless.py'],
    pathex=[],
    binaries=[],
    datas=datas,
    hiddenimports=hiddenimports,
    hookspath=['hooks'],
    hooksconfig={},
    runtime_hooks=[],
    # No Qt/tkinter in the headless sidecar; enum34 conflicts with stdlib enum.
    excludes=['enum34', 'PySide6', 'PyQt5', 'PyQt6', 'tkinter'],
    win_no_prefer_redirects=False,
    win_private_assemblies=False,
    cipher=block_cipher,
    noarchive=False,
)

# Archive of the pure-Python modules found by the analysis above.
pyz = PYZ(a.pure, a.zipped_data, cipher=block_cipher)
# Build the executable stub. exclude_binaries=True pairs with the COLLECT
# step below (one-dir layout): shared libraries sit next to the exe rather
# than being packed inside it.
exe = EXE(
    pyz,
    a.scripts,
    [],
    exclude_binaries=True,
    name='local-transcription-backend',
    debug=False,
    bootloader_ignore_signals=False,
    strip=False,
    upx=True,
    console=True,  # Headless backend needs console for JSON output
    disable_windowed_traceback=False,
    argv_emulation=False,
    target_arch=None,
    codesign_identity=None,
    entitlements_file=None,
    # .ico is Windows-only; other platforms get no embedded icon.
    icon='LocalTranscription.ico' if is_windows else None,
)
# Assemble the final one-dir bundle (exe + binaries + zipped pure modules
# + data files) under dist/local-transcription-backend/.
coll = COLLECT(
    exe,
    a.binaries,
    a.zipfiles,
    a.datas,
    strip=False,
    upx=True,
    upx_exclude=[],
    name='local-transcription-backend',
)
|
||||||
Reference in New Issue
Block a user