From 3344ce1cbf24368f2b0c334ff0b13d7edeb3b5e6 Mon Sep 17 00:00:00 2001 From: Josh Knapp Date: Tue, 3 Mar 2026 14:22:16 -0800 Subject: [PATCH] fix: prevent spurious container recreation on every start The CLAUDE_INSTRUCTIONS env var was computed differently during container creation (with port mapping docs + scheduler instructions appended) vs the recreation check (bare merge only). This caused container_needs_recreation() to always return true, triggering a full recreate on every stop/start cycle. Extract build_claude_instructions() helper used by both code paths so the expected value always matches what was set at creation time. Also add TODO.md noting planned tauri-plugin-updater integration for seamless in-app updates on all platforms. Co-Authored-By: Claude Opus 4.6 --- TODO.md | 60 ++++++++++++++++++++++++ app/src-tauri/src/docker/container.rs | 66 +++++++++++++++++---------- 2 files changed, 101 insertions(+), 25 deletions(-) create mode 100644 TODO.md diff --git a/TODO.md b/TODO.md new file mode 100644 index 0000000..019ba4a --- /dev/null +++ b/TODO.md @@ -0,0 +1,60 @@ +# TODO / Future Improvements + +## In-App Auto-Update via `tauri-plugin-updater` + +**Priority:** High +**Status:** Planned + +Currently the app detects available updates via the Gitea API (`check_for_updates` command) but cannot apply them. Users must manually download and install the new version. On macOS and Linux this is a poor experience compared to Windows (where NSIS handles upgrades cleanly). + +### Recommended approach: `tauri-plugin-updater` + +Full in-app auto-update: detects, downloads, verifies, and applies updates seamlessly on all platforms. The user clicks "Update" and the app restarts with the new version. + +### Requirements + +1. 
**Generate a Tauri update signing key pair** (this is Tauri's own Ed25519 key, not OS code signing): + ```bash + npx @tauri-apps/cli signer generate -w ~/.tauri/triple-c.key + ``` + Set `TAURI_SIGNING_PRIVATE_KEY` and `TAURI_SIGNING_PRIVATE_KEY_PASSWORD` in CI. + +2. **Add `tauri-plugin-updater`** to Rust and JS dependencies. + +3. **Create an update endpoint** that returns Tauri's expected JSON format: + ```json + { + "version": "v0.1.100", + "notes": "Changelog here", + "pub_date": "2026-03-01T00:00:00Z", + "platforms": { + "darwin-x86_64": { "signature": "...", "url": "https://..." }, + "darwin-aarch64": { "signature": "...", "url": "https://..." }, + "linux-x86_64": { "signature": "...", "url": "https://..." }, + "windows-x86_64": { "signature": "...", "url": "https://..." } + } + } + ``` + This could be a static JSON file uploaded alongside release assets, or a small API that reads from Gitea releases and reformats. + +4. **Configure the updater** in `tauri.conf.json`: + ```json + "plugins": { + "updater": { + "endpoints": ["https://repo.anhonesthost.net/...update-endpoint..."], + "pubkey": "" + } + } + ``` + +5. **Add frontend UI** for the update prompt (replace or enhance the existing update check flow). + +6. 
**Update CI pipeline** to: - Sign bundles with the Tauri key during build - Upload `.sig` files alongside installers - Generate/upload the update endpoint JSON ### References - https://v2.tauri.app/plugin/updater/ - Existing update check code: `app/src-tauri/src/commands/update_commands.rs` - Existing models: `app/src-tauri/src/models/update_info.rs` diff --git a/app/src-tauri/src/docker/container.rs b/app/src-tauri/src/docker/container.rs index 6e6a188..02a850a 100644 --- a/app/src-tauri/src/docker/container.rs +++ b/app/src-tauri/src/docker/container.rs @@ -40,6 +40,42 @@ After tasks run, check notifications with `triple-c-scheduler notifications` and ### Timezone Scheduled times use the container's configured timezone (check with `date`). If no timezone is configured, UTC is used."#; +/// Build the full CLAUDE_INSTRUCTIONS value by merging global + project +/// instructions, appending port mapping docs, and appending scheduler docs. +/// Used by both create_container() and container_needs_recreation() to ensure +/// the same value is produced in both paths. +fn build_claude_instructions( + global_instructions: Option<&str>, + project_instructions: Option<&str>, + port_mappings: &[PortMapping], +) -> Option<String> { + let mut combined = merge_claude_instructions(global_instructions, project_instructions); + + if !port_mappings.is_empty() { + let mut port_lines: Vec<String> = Vec::new(); + port_lines.push("## Available Port Mappings".to_string()); + port_lines.push("The following ports are mapped from the host to this container. 
Use these container ports when starting services that need to be accessible from the host:".to_string()); + for pm in port_mappings { + port_lines.push(format!( + "- Host port {} -> Container port {} ({})", + pm.host_port, pm.container_port, pm.protocol + )); + } + let port_info = port_lines.join("\n"); + combined = Some(match combined { + Some(existing) => format!("{}\n\n{}", existing, port_info), + None => port_info, + }); + } + + combined = Some(match combined { + Some(existing) => format!("{}\n\n{}", existing, SCHEDULER_INSTRUCTIONS), + None => SCHEDULER_INSTRUCTIONS.to_string(), + }); + + combined +} + /// Compute a fingerprint string for the custom environment variables. /// Sorted alphabetically so order changes do not cause spurious recreation. fn compute_env_fingerprint(custom_env_vars: &[EnvVar]) -> String { @@ -307,33 +343,12 @@ pub async fn create_container( } } - // Claude instructions (global + per-project, plus port mapping info) - let mut combined_instructions = merge_claude_instructions( + // Claude instructions (global + per-project, plus port mapping info + scheduler docs) + let combined_instructions = build_claude_instructions( global_claude_instructions, project.claude_instructions.as_deref(), + &project.port_mappings, ); - if !project.port_mappings.is_empty() { - let mut port_lines: Vec<String> = Vec::new(); - port_lines.push("## Available Port Mappings".to_string()); - port_lines.push("The following ports are mapped from the host to this container. 
Use these container ports when starting services that need to be accessible from the host:".to_string()); - for pm in &project.port_mappings { - port_lines.push(format!( - "- Host port {} -> Container port {} ({})", - pm.host_port, pm.container_port, pm.protocol - )); - } - let port_info = port_lines.join("\n"); - combined_instructions = Some(match combined_instructions { - Some(existing) => format!("{}\n\n{}", existing, port_info), - None => port_info, - }); - } - // Scheduler instructions (always appended so all containers get scheduling docs) - let scheduler_docs = SCHEDULER_INSTRUCTIONS; - combined_instructions = Some(match combined_instructions { - Some(existing) => format!("{}\n\n{}", existing, scheduler_docs), - None => scheduler_docs.to_string(), - }); if let Some(ref instructions) = combined_instructions { env_vars.push(format!("CLAUDE_INSTRUCTIONS={}", instructions)); @@ -685,9 +700,10 @@ pub async fn container_needs_recreation( } // ── Claude instructions ─────────────────────────────────────────────── - let expected_instructions = merge_claude_instructions( + let expected_instructions = build_claude_instructions( global_claude_instructions, project.claude_instructions.as_deref(), + &project.port_mappings, ); let container_instructions = get_env("CLAUDE_INSTRUCTIONS"); if container_instructions.as_deref() != expected_instructions.as_deref() {