Compare commits
18 Commits
v0.2.5-mac
...
v0.2.23
| Author | SHA1 | Date | |
|---|---|---|---|
| 9ee0d34c19 | |||
| 922543cc04 | |||
| 13038989b8 | |||
| b55de8d75e | |||
| 8512ca615d | |||
| ebae39026f | |||
| d34e8e2c6d | |||
| 3935104cb5 | |||
| b17c759bd6 | |||
| bab1df1c57 | |||
| b952b8e8de | |||
| d7d7a83aec | |||
| 879322bc9a | |||
| ecaa42fa77 | |||
| 280358166a | |||
| 4732feb33e | |||
| 5977024953 | |||
| 27007b90e3 |
@@ -36,6 +36,13 @@ jobs:
|
||||
username: ${{ gitea.actor }}
|
||||
password: ${{ secrets.REGISTRY_TOKEN }}
|
||||
|
||||
- name: Login to GitHub Container Registry
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: shadowdao
|
||||
password: ${{ secrets.GH_PAT }}
|
||||
|
||||
- name: Build and push container image
|
||||
uses: docker/build-push-action@v5
|
||||
with:
|
||||
@@ -46,5 +53,7 @@ jobs:
|
||||
tags: |
|
||||
${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:latest
|
||||
${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ gitea.sha }}
|
||||
ghcr.io/shadowdao/triple-c-sandbox:latest
|
||||
ghcr.io/shadowdao/triple-c-sandbox:${{ gitea.sha }}
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
|
||||
@@ -72,7 +72,7 @@ docker exec stdout → tokio task → emit("terminal-output-{sessionId}") → li
|
||||
- `container.rs` — Container lifecycle (create, start, stop, remove, inspect)
|
||||
- `exec.rs` — PTY exec sessions with bidirectional stdin/stdout streaming
|
||||
- `image.rs` — Image build/pull with progress streaming
|
||||
- **`models/`** — Serde structs (`Project`, `Backend`, `BedrockConfig`, `OllamaConfig`, `LiteLlmConfig`, `ContainerInfo`, `AppSettings`). These define the IPC contract with the frontend.
|
||||
- **`models/`** — Serde structs (`Project`, `Backend`, `BedrockConfig`, `OllamaConfig`, `OpenAiCompatibleConfig`, `ContainerInfo`, `AppSettings`). These define the IPC contract with the frontend.
|
||||
- **`storage/`** — Persistence: `projects_store.rs` (JSON file with atomic writes), `secure.rs` (OS keychain via `keyring` crate), `settings_store.rs`
|
||||
|
||||
### Container (`container/`)
|
||||
@@ -91,7 +91,7 @@ Per-project, independently configured:
|
||||
- **Anthropic (OAuth)** — `claude login` in terminal, token persists in config volume
|
||||
- **AWS Bedrock** — Static keys, profile, or bearer token injected as env vars
|
||||
- **Ollama** — Connect to a local or remote Ollama server via `ANTHROPIC_BASE_URL` (e.g., `http://host.docker.internal:11434`)
|
||||
- **LiteLLM** — Connect through a LiteLLM proxy gateway via `ANTHROPIC_BASE_URL` + `ANTHROPIC_AUTH_TOKEN` to access 100+ model providers
|
||||
- **OpenAI Compatible** — Connect through any OpenAI API-compatible endpoint (LiteLLM, OpenRouter, vLLM, etc.) via `ANTHROPIC_BASE_URL` + `ANTHROPIC_AUTH_TOKEN`
|
||||
|
||||
## Styling
|
||||
|
||||
|
||||
@@ -4,6 +4,25 @@ Triple-C (Claude-Code-Container) is a desktop application that runs Claude Code
|
||||
|
||||
---
|
||||
|
||||
## Table of Contents
|
||||
|
||||
- [Prerequisites](#prerequisites)
|
||||
- [First Launch](#first-launch)
|
||||
- [The Interface](#the-interface)
|
||||
- [Project Management](#project-management)
|
||||
- [Project Configuration](#project-configuration)
|
||||
- [MCP Servers (Beta)](#mcp-servers-beta)
|
||||
- [AWS Bedrock Configuration](#aws-bedrock-configuration)
|
||||
- [Ollama Configuration](#ollama-configuration)
|
||||
- [OpenAI Compatible Configuration](#openai-compatible-configuration)
|
||||
- [Settings](#settings)
|
||||
- [Terminal Features](#terminal-features)
|
||||
- [Scheduled Tasks (Inside the Container)](#scheduled-tasks-inside-the-container)
|
||||
- [What's Inside the Container](#whats-inside-the-container)
|
||||
- [Troubleshooting](#troubleshooting)
|
||||
|
||||
---
|
||||
|
||||
## Prerequisites
|
||||
|
||||
### Docker
|
||||
@@ -34,7 +53,7 @@ You need access to Claude Code through one of:
|
||||
- **Anthropic account** — Sign up at https://claude.ai and use `claude login` (OAuth) inside the terminal
|
||||
- **AWS Bedrock** — An AWS account with Bedrock access and Claude models enabled
|
||||
- **Ollama** — A local or remote Ollama server running an Anthropic-compatible model (best-effort support)
|
||||
- **LiteLLM** — A LiteLLM proxy gateway providing access to 100+ model providers (best-effort support)
|
||||
- **OpenAI Compatible** — Any OpenAI API-compatible endpoint (LiteLLM, OpenRouter, vLLM, text-generation-inference, LocalAI, etc.) (best-effort support)
|
||||
|
||||
---
|
||||
|
||||
@@ -51,7 +70,7 @@ Choose an **Image Source**:
|
||||
|
||||
| Source | Description | When to Use |
|
||||
|--------|-------------|-------------|
|
||||
| **Registry** | Pulls the pre-built image from `repo.anhonesthost.net` | Fastest setup — recommended for most users |
|
||||
| **Registry** | Pulls the pre-built image from `ghcr.io` | Fastest setup — recommended for most users |
|
||||
| **Local Build** | Builds the image locally from the embedded Dockerfile | If you can't reach the registry, or want a custom build |
|
||||
| **Custom** | Use any Docker image you specify | Advanced — bring your own sandbox image |
|
||||
|
||||
@@ -73,7 +92,7 @@ Select your project in the sidebar and click **Start**. A progress modal appears
|
||||
|
||||
Click the **Terminal** button to open an interactive terminal session. A new tab appears in the top bar and an xterm.js terminal loads in the main area.
|
||||
|
||||
Claude Code launches automatically with `--dangerously-skip-permissions` inside the sandboxed container.
|
||||
Claude Code launches automatically. By default, it runs in standard permission mode and will ask for your approval before executing commands or editing files. To enable auto-approval of all actions within the sandbox, enable **Full Permissions** in the project configuration.
|
||||
|
||||
### 5. Authenticate
|
||||
|
||||
@@ -94,14 +113,15 @@ Claude Code launches automatically with `--dangerously-skip-permissions` inside
|
||||
|
||||
1. Stop the container first (settings can only be changed while stopped).
|
||||
2. In the project card, switch the backend to **Ollama**.
|
||||
3. Expand the **Config** panel and set the base URL of your Ollama server (defaults to `http://host.docker.internal:11434` for a local instance). Optionally set a model ID.
|
||||
4. Start the container again.
|
||||
3. Expand the **Config** panel and set the base URL of your Ollama server (defaults to `http://host.docker.internal:11434` for a local instance). Set the **Model ID** to the model you want to use (required).
|
||||
4. Make sure the model has been pulled in Ollama (e.g., `ollama pull qwen3.5:27b`) or used via Ollama cloud before starting.
|
||||
5. Start the container again.
|
||||
|
||||
**LiteLLM:**
|
||||
**OpenAI Compatible:**
|
||||
|
||||
1. Stop the container first (settings can only be changed while stopped).
|
||||
2. In the project card, switch the backend to **LiteLLM**.
|
||||
3. Expand the **Config** panel and set the base URL of your LiteLLM proxy (defaults to `http://host.docker.internal:4000`). Optionally set an API key and model ID.
|
||||
2. In the project card, switch the backend to **OpenAI Compatible**.
|
||||
3. Expand the **Config** panel and set the base URL of your OpenAI-compatible endpoint (defaults to `http://host.docker.internal:4000` as an example). Optionally set an API key and model ID.
|
||||
4. Start the container again.
|
||||
|
||||
---
|
||||
@@ -216,6 +236,18 @@ Available skills include `/mission`, `/flight`, `/leg`, `/agentic-workflow`, `/f
|
||||
|
||||
> This setting can only be changed when the container is stopped. Toggling it triggers a container recreation on the next start.
|
||||
|
||||
### Full Permissions
|
||||
|
||||
Toggle **Full Permissions** to allow Claude Code to run with `--dangerously-skip-permissions` inside the container. This is **off by default**.
|
||||
|
||||
When **enabled**, Claude auto-approves all tool calls (file edits, shell commands, etc.) without prompting you. This is the fastest workflow since you won't be interrupted for approvals, and the Docker container provides isolation.
|
||||
|
||||
When **disabled** (default), Claude prompts you for approval before executing each action, giving you fine-grained control over what it does.
|
||||
|
||||
> **CAUTION:** Enabling full permissions means Claude can execute any command inside the container without asking. While the container sandbox limits the blast radius, make sure you understand the implications — especially if the container has Docker socket access or network connectivity.
|
||||
|
||||
> This setting can only be changed when the container is stopped. It takes effect the next time you open a terminal session.
|
||||
|
||||
### Environment Variables
|
||||
|
||||
Click **Edit** to open the environment variables modal. Add key-value pairs that will be injected into the container. Per-project variables override global variables with the same key.
|
||||
@@ -395,7 +427,7 @@ To use Claude Code with a local or remote Ollama server, switch the backend to *
|
||||
### Settings
|
||||
|
||||
- **Base URL** — The URL of your Ollama server. Defaults to `http://host.docker.internal:11434`, which reaches a locally running Ollama instance from inside the container. For a remote server, use its IP or hostname (e.g., `http://192.168.1.100:11434`).
|
||||
- **Model ID** — Optional. Override the model to use (e.g., `qwen3.5:27b`).
|
||||
- **Model ID** — **Required.** The model to use (e.g., `qwen3.5:27b`). The model must be pulled in Ollama before use — run `ollama pull <model>` or use it via Ollama cloud so it is available when the container starts.
|
||||
|
||||
### How It Works
|
||||
|
||||
@@ -403,23 +435,25 @@ Triple-C sets `ANTHROPIC_BASE_URL` to point Claude Code at your Ollama server in
|
||||
|
||||
> **Note:** Ollama support is best-effort. Claude Code is designed for Anthropic models, so some features (tool use, extended thinking, prompt caching, etc.) may not work as expected with non-Anthropic models.
|
||||
|
||||
> **Important:** The model must already be available in Ollama before starting the container. If using a local Ollama instance, pull the model first with `ollama pull <model-name>`. If using Ollama's cloud service, ensure the model has been used at least once so it is cached.
|
||||
|
||||
---
|
||||
|
||||
## LiteLLM Configuration
|
||||
## OpenAI Compatible Configuration
|
||||
|
||||
To use Claude Code through a [LiteLLM](https://docs.litellm.ai/) proxy gateway, switch the backend to **LiteLLM** on the project card. LiteLLM supports 100+ model providers (OpenAI, Gemini, Anthropic, and more) through a single proxy.
|
||||
To use Claude Code through any OpenAI API-compatible endpoint, switch the backend to **OpenAI Compatible** on the project card. This works with any server that exposes an OpenAI-compatible API, including LiteLLM, OpenRouter, vLLM, text-generation-inference, LocalAI, and others.
|
||||
|
||||
### Settings
|
||||
|
||||
- **Base URL** — The URL of your LiteLLM proxy. Defaults to `http://host.docker.internal:4000` for a locally running proxy.
|
||||
- **API Key** — Optional. The API key for your LiteLLM proxy, if authentication is required. Stored securely in your OS keychain.
|
||||
- **Base URL** — The URL of your OpenAI-compatible endpoint. Defaults to `http://host.docker.internal:4000` as an example (adjust to match your server's address and port).
|
||||
- **API Key** — Optional. The API key for your endpoint, if authentication is required. Stored securely in your OS keychain.
|
||||
- **Model ID** — Optional. Override the model to use.
|
||||
|
||||
### How It Works
|
||||
|
||||
Triple-C sets `ANTHROPIC_BASE_URL` to point Claude Code at your LiteLLM proxy. If an API key is provided, it is set as `ANTHROPIC_AUTH_TOKEN`.
|
||||
Triple-C sets `ANTHROPIC_BASE_URL` to point Claude Code at your OpenAI-compatible endpoint. If an API key is provided, it is set as `ANTHROPIC_AUTH_TOKEN`.
|
||||
|
||||
> **Note:** LiteLLM support is best-effort. Claude Code is designed for Anthropic models, so some features (tool use, extended thinking, prompt caching, etc.) may not work as expected when routing to non-Anthropic models through the proxy.
|
||||
> **Note:** OpenAI Compatible support is best-effort. Claude Code is designed for Anthropic models, so some features (tool use, extended thinking, prompt caching, etc.) may not work as expected when routing to non-Anthropic models through the endpoint.
|
||||
|
||||
---
|
||||
|
||||
@@ -472,6 +506,10 @@ When Claude Code prints a long URL (e.g., during `claude login`), Triple-C detec
|
||||
|
||||
Shorter URLs in terminal output are also clickable directly.
|
||||
|
||||
### Copying and Pasting
|
||||
|
||||
Use **Ctrl+Shift+C** (or **Cmd+C** on macOS) to copy selected text from the terminal, and **Ctrl+Shift+V** (or **Cmd+V** on macOS) to paste. This follows standard terminal emulator conventions since Ctrl+C is reserved for sending SIGINT.
|
||||
|
||||
### Clipboard Support (OSC 52)
|
||||
|
||||
Programs inside the container can copy text to your host clipboard. When a container program uses `xclip`, `xsel`, or `pbcopy`, the text is transparently forwarded to your host clipboard via OSC 52 escape sequences. No additional configuration is required — this works out of the box.
|
||||
@@ -622,3 +660,13 @@ You can install additional tools at runtime with `sudo apt install`, `pip instal
|
||||
- Ensure the Docker image for the MCP server exists (pull it first if needed).
|
||||
- Check that Docker socket access is available (stdio + Docker MCP servers auto-enable this).
|
||||
- Try resetting the project container to force a clean recreation.
|
||||
|
||||
### "Failed to install Anthropic marketplace" Error
|
||||
|
||||
If Claude Code shows **"Failed to install Anthropic marketplace - Will retry on next startup"** repeatedly, the marketplace metadata in `~/.claude.json` may be corrupted. To fix this, open a **Shell** session in the project and run:
|
||||
|
||||
```bash
|
||||
cp ~/.claude.json ~/.claude.json.bak && jq 'with_entries(select(.key | startswith("officialMarketplace") | not))' ~/.claude.json.bak > ~/.claude.json
|
||||
```
|
||||
|
||||
This backs up your config and removes the corrupted marketplace entries. Claude Code will re-download them cleanly on the next startup.
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
# Triple-C (Claude-Code-Container)
|
||||
|
||||
Triple-C is a cross-platform desktop application that sandboxes Claude Code inside Docker containers. When running with `--dangerously-skip-permissions`, Claude only has access to the files and projects you explicitly provide to it.
|
||||
Triple-C is a cross-platform desktop application that sandboxes Claude Code inside Docker containers. Each project can optionally enable full permissions mode (`--dangerously-skip-permissions`), giving Claude unrestricted access within the sandbox.
|
||||
|
||||
## Architecture
|
||||
|
||||
@@ -49,10 +49,10 @@ Each project can independently use one of:
|
||||
|
||||
- **Anthropic** (OAuth): User runs `claude login` inside the terminal on first use. Token persisted in the config volume across restarts and resets.
|
||||
- **AWS Bedrock**: Per-project AWS credentials (static keys, profile, or bearer token). SSO sessions are validated before launching Claude for Profile auth.
|
||||
- **Ollama**: Connect to a local or remote Ollama server via `ANTHROPIC_BASE_URL` (e.g., `http://host.docker.internal:11434`). Optional model override.
|
||||
- **LiteLLM**: Connect through a LiteLLM proxy gateway via `ANTHROPIC_BASE_URL` + `ANTHROPIC_AUTH_TOKEN` to access 100+ model providers. API key stored securely in OS keychain.
|
||||
- **Ollama**: Connect to a local or remote Ollama server via `ANTHROPIC_BASE_URL` (e.g., `http://host.docker.internal:11434`). Requires a model ID, and the model must be pulled (or used via Ollama cloud) before starting the container.
|
||||
- **OpenAI Compatible**: Connect through any OpenAI API-compatible endpoint (LiteLLM, OpenRouter, vLLM, text-generation-inference, LocalAI, etc.) via `ANTHROPIC_BASE_URL` + `ANTHROPIC_AUTH_TOKEN`. API key stored securely in OS keychain.
|
||||
|
||||
> **Note:** Ollama and LiteLLM support is best-effort. Claude Code is designed for Anthropic models, so some features (tool use, extended thinking, prompt caching, etc.) may not work as expected with non-Anthropic models behind these backends.
|
||||
> **Note:** Ollama and OpenAI Compatible support is best-effort. Claude Code is designed for Anthropic models, so some features (tool use, extended thinking, prompt caching, etc.) may not work as expected with non-Anthropic models behind these backends.
|
||||
|
||||
### Container Spawning (Sibling Containers)
|
||||
|
||||
|
||||
279
app/src-tauri/Cargo.lock
generated
279
app/src-tauri/Cargo.lock
generated
@@ -213,6 +213,61 @@ version = "1.5.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8"
|
||||
|
||||
[[package]]
|
||||
name = "axum"
|
||||
version = "0.8.8"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8b52af3cb4058c895d37317bb27508dccc8e5f2d39454016b297bf4a400597b8"
|
||||
dependencies = [
|
||||
"axum-core",
|
||||
"base64 0.22.1",
|
||||
"bytes",
|
||||
"form_urlencoded",
|
||||
"futures-util",
|
||||
"http",
|
||||
"http-body",
|
||||
"http-body-util",
|
||||
"hyper",
|
||||
"hyper-util",
|
||||
"itoa",
|
||||
"matchit",
|
||||
"memchr",
|
||||
"mime",
|
||||
"percent-encoding",
|
||||
"pin-project-lite",
|
||||
"serde_core",
|
||||
"serde_json",
|
||||
"serde_path_to_error",
|
||||
"serde_urlencoded",
|
||||
"sha1",
|
||||
"sync_wrapper",
|
||||
"tokio",
|
||||
"tokio-tungstenite",
|
||||
"tower",
|
||||
"tower-layer",
|
||||
"tower-service",
|
||||
"tracing",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "axum-core"
|
||||
version = "0.5.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "08c78f31d7b1291f7ee735c1c6780ccde7785daae9a9206026862dab7d8792d1"
|
||||
dependencies = [
|
||||
"bytes",
|
||||
"futures-core",
|
||||
"http",
|
||||
"http-body",
|
||||
"http-body-util",
|
||||
"mime",
|
||||
"pin-project-lite",
|
||||
"sync_wrapper",
|
||||
"tower-layer",
|
||||
"tower-service",
|
||||
"tracing",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "base64"
|
||||
version = "0.21.7"
|
||||
@@ -664,14 +719,38 @@ dependencies = [
|
||||
"syn 2.0.117",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "darling"
|
||||
version = "0.20.11"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "fc7f46116c46ff9ab3eb1597a45688b6715c6e628b5c133e288e709a29bcb4ee"
|
||||
dependencies = [
|
||||
"darling_core 0.20.11",
|
||||
"darling_macro 0.20.11",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "darling"
|
||||
version = "0.21.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9cdf337090841a411e2a7f3deb9187445851f91b309c0c0a29e05f74a00a48c0"
|
||||
dependencies = [
|
||||
"darling_core",
|
||||
"darling_macro",
|
||||
"darling_core 0.21.3",
|
||||
"darling_macro 0.21.3",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "darling_core"
|
||||
version = "0.20.11"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0d00b9596d185e565c2207a0b01f8bd1a135483d02d9b7b0a54b11da8d53412e"
|
||||
dependencies = [
|
||||
"fnv",
|
||||
"ident_case",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"strsim",
|
||||
"syn 2.0.117",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -688,17 +767,34 @@ dependencies = [
|
||||
"syn 2.0.117",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "darling_macro"
|
||||
version = "0.20.11"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "fc34b93ccb385b40dc71c6fceac4b2ad23662c7eeb248cf10d529b7e055b6ead"
|
||||
dependencies = [
|
||||
"darling_core 0.20.11",
|
||||
"quote",
|
||||
"syn 2.0.117",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "darling_macro"
|
||||
version = "0.21.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d38308df82d1080de0afee5d069fa14b0326a88c14f15c5ccda35b4a6c414c81"
|
||||
dependencies = [
|
||||
"darling_core",
|
||||
"darling_core 0.21.3",
|
||||
"quote",
|
||||
"syn 2.0.117",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "data-encoding"
|
||||
version = "2.10.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d7a1e2f27636f116493b8b860f5546edb47c8d8f8ea73e1d2a20be88e28d1fea"
|
||||
|
||||
[[package]]
|
||||
name = "deranged"
|
||||
version = "0.5.8"
|
||||
@@ -709,6 +805,37 @@ dependencies = [
|
||||
"serde_core",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "derive_builder"
|
||||
version = "0.20.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "507dfb09ea8b7fa618fcf76e953f4f5e192547945816d5358edffe39f6f94947"
|
||||
dependencies = [
|
||||
"derive_builder_macro",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "derive_builder_core"
|
||||
version = "0.20.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2d5bcf7b024d6835cfb3d473887cd966994907effbe9227e8c8219824d06c4e8"
|
||||
dependencies = [
|
||||
"darling 0.20.11",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.117",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "derive_builder_macro"
|
||||
version = "0.20.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ab63b0e2bf4d5928aff72e83a7dace85d7bba5fe12dcc3c5a572d78caffd3f3c"
|
||||
dependencies = [
|
||||
"derive_builder_core",
|
||||
"syn 2.0.117",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "derive_more"
|
||||
version = "0.99.20"
|
||||
@@ -841,6 +968,12 @@ version = "1.0.20"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d0881ea181b1df73ff77ffaaf9c7544ecc11e82fba9b5f27b262a3c73a332555"
|
||||
|
||||
[[package]]
|
||||
name = "either"
|
||||
version = "1.15.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719"
|
||||
|
||||
[[package]]
|
||||
name = "embed-resource"
|
||||
version = "3.0.6"
|
||||
@@ -1309,6 +1442,18 @@ dependencies = [
|
||||
"wasip3",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "getset"
|
||||
version = "0.1.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9cf0fc11e47561d47397154977bc219f4cf809b2974facc3ccb3b89e2436f912"
|
||||
dependencies = [
|
||||
"proc-macro-error2",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.117",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "gio"
|
||||
version = "0.18.4"
|
||||
@@ -2085,6 +2230,17 @@ version = "0.8.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6373607a59f0be73a39b6fe456b8192fcc3585f602af20751600e974dd455e77"
|
||||
|
||||
[[package]]
|
||||
name = "local-ip-address"
|
||||
version = "0.6.10"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "79ef8c257c92ade496781a32a581d43e3d512cf8ce714ecf04ea80f93ed0ff4a"
|
||||
dependencies = [
|
||||
"libc",
|
||||
"neli",
|
||||
"windows-sys 0.61.2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "lock_api"
|
||||
version = "0.4.14"
|
||||
@@ -2143,6 +2299,12 @@ version = "0.1.10"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2532096657941c2fea9c289d370a250971c689d4f143798ff67113ec042024a5"
|
||||
|
||||
[[package]]
|
||||
name = "matchit"
|
||||
version = "0.8.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "47e1ffaa40ddd1f3ed91f717a33c8c0ee23fff369e3aa8772b9605cc1d22f4c3"
|
||||
|
||||
[[package]]
|
||||
name = "memchr"
|
||||
version = "2.8.0"
|
||||
@@ -2246,6 +2408,35 @@ dependencies = [
|
||||
"jni-sys",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "neli"
|
||||
version = "0.7.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "22f9786d56d972959e1408b6a93be6af13b9c1392036c5c1fafa08a1b0c6ee87"
|
||||
dependencies = [
|
||||
"bitflags 2.11.0",
|
||||
"byteorder",
|
||||
"derive_builder",
|
||||
"getset",
|
||||
"libc",
|
||||
"log",
|
||||
"neli-proc-macros",
|
||||
"parking_lot",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "neli-proc-macros"
|
||||
version = "0.2.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "05d8d08c6e98f20a62417478ebf7be8e1425ec9acecc6f63e22da633f6b71609"
|
||||
dependencies = [
|
||||
"either",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"serde",
|
||||
"syn 2.0.117",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "new_debug_unreachable"
|
||||
version = "1.0.6"
|
||||
@@ -2916,6 +3107,28 @@ dependencies = [
|
||||
"version_check",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "proc-macro-error-attr2"
|
||||
version = "2.0.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "96de42df36bb9bba5542fe9f1a054b8cc87e172759a1868aa05c1f3acc89dfc5"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "proc-macro-error2"
|
||||
version = "2.0.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "11ec05c52be0a07b08061f7dd003e7d7092e0472bc731b4af7bb1ef876109802"
|
||||
dependencies = [
|
||||
"proc-macro-error-attr2",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.117",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "proc-macro-hack"
|
||||
version = "0.5.20+deprecated"
|
||||
@@ -3594,6 +3807,17 @@ dependencies = [
|
||||
"zmij",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "serde_path_to_error"
|
||||
version = "0.1.20"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "10a9ff822e371bb5403e391ecd83e182e0e77ba7f6fe0160b795797109d1b457"
|
||||
dependencies = [
|
||||
"itoa",
|
||||
"serde",
|
||||
"serde_core",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "serde_repr"
|
||||
version = "0.1.20"
|
||||
@@ -3660,7 +3884,7 @@ version = "3.17.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a6d4e30573c8cb306ed6ab1dca8423eec9a463ea0e155f45399455e0368b27e0"
|
||||
dependencies = [
|
||||
"darling",
|
||||
"darling 0.21.3",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.117",
|
||||
@@ -3698,6 +3922,17 @@ dependencies = [
|
||||
"stable_deref_trait",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "sha1"
|
||||
version = "0.10.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"cpufeatures",
|
||||
"digest",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "sha2"
|
||||
version = "0.10.9"
|
||||
@@ -4459,6 +4694,18 @@ dependencies = [
|
||||
"tokio",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tokio-tungstenite"
|
||||
version = "0.28.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d25a406cddcc431a75d3d9afc6a7c0f7428d4891dd973e4d54c56b46127bf857"
|
||||
dependencies = [
|
||||
"futures-util",
|
||||
"log",
|
||||
"tokio",
|
||||
"tungstenite",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tokio-util"
|
||||
version = "0.7.18"
|
||||
@@ -4581,6 +4828,7 @@ dependencies = [
|
||||
"tokio",
|
||||
"tower-layer",
|
||||
"tower-service",
|
||||
"tracing",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -4619,6 +4867,7 @@ version = "0.1.44"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "63e71662fa4b2a2c3a26f570f037eb95bb1f85397f3cd8076caed2f026a6d100"
|
||||
dependencies = [
|
||||
"log",
|
||||
"pin-project-lite",
|
||||
"tracing-attributes",
|
||||
"tracing-core",
|
||||
@@ -4670,6 +4919,8 @@ dependencies = [
|
||||
name = "triple-c"
|
||||
version = "0.2.0"
|
||||
dependencies = [
|
||||
"axum",
|
||||
"base64 0.22.1",
|
||||
"bollard",
|
||||
"chrono",
|
||||
"dirs",
|
||||
@@ -4677,7 +4928,9 @@ dependencies = [
|
||||
"futures-util",
|
||||
"iana-time-zone",
|
||||
"keyring",
|
||||
"local-ip-address",
|
||||
"log",
|
||||
"rand 0.9.2",
|
||||
"reqwest 0.12.28",
|
||||
"serde",
|
||||
"serde_json",
|
||||
@@ -4689,6 +4942,7 @@ dependencies = [
|
||||
"tauri-plugin-opener",
|
||||
"tauri-plugin-store",
|
||||
"tokio",
|
||||
"tower-http",
|
||||
"uuid",
|
||||
]
|
||||
|
||||
@@ -4698,6 +4952,23 @@ version = "0.2.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b"
|
||||
|
||||
[[package]]
|
||||
name = "tungstenite"
|
||||
version = "0.28.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8628dcc84e5a09eb3d8423d6cb682965dea9133204e8fb3efee74c2a0c259442"
|
||||
dependencies = [
|
||||
"bytes",
|
||||
"data-encoding",
|
||||
"http",
|
||||
"httparse",
|
||||
"log",
|
||||
"rand 0.9.2",
|
||||
"sha1",
|
||||
"thiserror 2.0.18",
|
||||
"utf-8",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "typeid"
|
||||
version = "1.0.3"
|
||||
|
||||
@@ -31,6 +31,11 @@ tar = "0.4"
|
||||
reqwest = { version = "0.12", default-features = false, features = ["json", "rustls-tls"] }
|
||||
iana-time-zone = "0.1"
|
||||
sha2 = "0.10"
|
||||
axum = { version = "0.8", features = ["ws"] }
|
||||
tower-http = { version = "0.6", features = ["cors"] }
|
||||
base64 = "0.22"
|
||||
rand = "0.9"
|
||||
local-ip-address = "0.6"
|
||||
|
||||
[build-dependencies]
|
||||
tauri-build = { version = "2", features = [] }
|
||||
|
||||
60
app/src-tauri/src/commands/help_commands.rs
Normal file
60
app/src-tauri/src/commands/help_commands.rs
Normal file
@@ -0,0 +1,60 @@
|
||||
use std::sync::OnceLock;
|
||||
use tokio::sync::Mutex;
|
||||
|
||||
const HELP_URL: &str =
|
||||
"https://raw.githubusercontent.com/shadowdao/triple-c/main/HOW-TO-USE.md";
|
||||
|
||||
const EMBEDDED_HELP: &str = include_str!("../../../../HOW-TO-USE.md");
|
||||
|
||||
/// Cached help content fetched from the remote repo (or `None` if not yet fetched).
|
||||
static CACHED_HELP: OnceLock<Mutex<Option<String>>> = OnceLock::new();
|
||||
|
||||
/// Return the help markdown content.
|
||||
///
|
||||
/// On the first call, tries to fetch the latest version from the gitea repo.
|
||||
/// If that fails (network error, timeout, etc.), falls back to the version
|
||||
/// embedded at compile time. The result is cached for the rest of the session.
|
||||
#[tauri::command]
|
||||
pub async fn get_help_content() -> Result<String, String> {
|
||||
let mutex = CACHED_HELP.get_or_init(|| Mutex::new(None));
|
||||
let mut guard = mutex.lock().await;
|
||||
|
||||
if let Some(ref cached) = *guard {
|
||||
return Ok(cached.clone());
|
||||
}
|
||||
|
||||
let content = match fetch_remote_help().await {
|
||||
Ok(md) => {
|
||||
log::info!("Loaded help content from remote repo");
|
||||
md
|
||||
}
|
||||
Err(e) => {
|
||||
log::info!("Using embedded help content (remote fetch failed: {})", e);
|
||||
EMBEDDED_HELP.to_string()
|
||||
}
|
||||
};
|
||||
|
||||
*guard = Some(content.clone());
|
||||
Ok(content)
|
||||
}
|
||||
|
||||
async fn fetch_remote_help() -> Result<String, String> {
|
||||
let client = reqwest::Client::builder()
|
||||
.timeout(std::time::Duration::from_secs(10))
|
||||
.build()
|
||||
.map_err(|e| format!("Failed to create HTTP client: {}", e))?;
|
||||
|
||||
let resp = client
|
||||
.get(HELP_URL)
|
||||
.send()
|
||||
.await
|
||||
.map_err(|e| format!("Failed to fetch help content: {}", e))?;
|
||||
|
||||
if !resp.status().is_success() {
|
||||
return Err(format!("Remote returned status {}", resp.status()));
|
||||
}
|
||||
|
||||
resp.text()
|
||||
.await
|
||||
.map_err(|e| format!("Failed to read response body: {}", e))
|
||||
}
|
||||
@@ -1,8 +1,10 @@
|
||||
pub mod aws_commands;
|
||||
pub mod docker_commands;
|
||||
pub mod file_commands;
|
||||
pub mod help_commands;
|
||||
pub mod mcp_commands;
|
||||
pub mod project_commands;
|
||||
pub mod settings_commands;
|
||||
pub mod terminal_commands;
|
||||
pub mod update_commands;
|
||||
pub mod web_terminal_commands;
|
||||
|
||||
@@ -34,9 +34,9 @@ fn store_secrets_for_project(project: &Project) -> Result<(), String> {
|
||||
secure::store_project_secret(&project.id, "aws-bearer-token", v)?;
|
||||
}
|
||||
}
|
||||
if let Some(ref litellm) = project.litellm_config {
|
||||
if let Some(ref v) = litellm.api_key {
|
||||
secure::store_project_secret(&project.id, "litellm-api-key", v)?;
|
||||
if let Some(ref oai_config) = project.openai_compatible_config {
|
||||
if let Some(ref v) = oai_config.api_key {
|
||||
secure::store_project_secret(&project.id, "openai-compatible-api-key", v)?;
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
@@ -56,8 +56,8 @@ fn load_secrets_for_project(project: &mut Project) {
|
||||
bedrock.aws_bearer_token = secure::get_project_secret(&project.id, "aws-bearer-token")
|
||||
.unwrap_or(None);
|
||||
}
|
||||
if let Some(ref mut litellm) = project.litellm_config {
|
||||
litellm.api_key = secure::get_project_secret(&project.id, "litellm-api-key")
|
||||
if let Some(ref mut oai_config) = project.openai_compatible_config {
|
||||
oai_config.api_key = secure::get_project_secret(&project.id, "openai-compatible-api-key")
|
||||
.unwrap_or(None);
|
||||
}
|
||||
}
|
||||
@@ -197,11 +197,11 @@ pub async fn start_project_container(
|
||||
}
|
||||
}
|
||||
|
||||
if project.backend == Backend::LiteLlm {
|
||||
let litellm = project.litellm_config.as_ref()
|
||||
.ok_or_else(|| "LiteLLM backend selected but no LiteLLM configuration found.".to_string())?;
|
||||
if litellm.base_url.is_empty() {
|
||||
return Err("LiteLLM base URL is required.".to_string());
|
||||
if project.backend == Backend::OpenAiCompatible {
|
||||
let oai_config = project.openai_compatible_config.as_ref()
|
||||
.ok_or_else(|| "OpenAI Compatible backend selected but no configuration found.".to_string())?;
|
||||
if oai_config.base_url.is_empty() {
|
||||
return Err("OpenAI Compatible base URL is required.".to_string());
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -17,10 +17,11 @@ fn build_terminal_cmd(project: &Project, state: &AppState) -> Vec<String> {
|
||||
.unwrap_or(false);
|
||||
|
||||
if !is_bedrock_profile {
|
||||
return vec![
|
||||
"claude".to_string(),
|
||||
"--dangerously-skip-permissions".to_string(),
|
||||
];
|
||||
let mut cmd = vec!["claude".to_string()];
|
||||
if project.full_permissions {
|
||||
cmd.push("--dangerously-skip-permissions".to_string());
|
||||
}
|
||||
return cmd;
|
||||
}
|
||||
|
||||
// Resolve AWS profile: project-level → global settings → "default"
|
||||
@@ -33,6 +34,12 @@ fn build_terminal_cmd(project: &Project, state: &AppState) -> Vec<String> {
|
||||
|
||||
// Build a bash wrapper that validates credentials, re-auths if needed,
|
||||
// then exec's into claude.
|
||||
let claude_cmd = if project.full_permissions {
|
||||
"exec claude --dangerously-skip-permissions"
|
||||
} else {
|
||||
"exec claude"
|
||||
};
|
||||
|
||||
let script = format!(
|
||||
r#"
|
||||
echo "Validating AWS session for profile '{profile}'..."
|
||||
@@ -58,9 +65,10 @@ else
|
||||
echo ""
|
||||
fi
|
||||
fi
|
||||
exec claude --dangerously-skip-permissions
|
||||
{claude_cmd}
|
||||
"#,
|
||||
profile = profile
|
||||
profile = profile,
|
||||
claude_cmd = claude_cmd
|
||||
);
|
||||
|
||||
vec![
|
||||
|
||||
@@ -1,15 +1,20 @@
|
||||
use serde::Deserialize;
|
||||
use tauri::State;
|
||||
|
||||
use crate::docker;
|
||||
use crate::models::{container_config, GiteaRelease, ImageUpdateInfo, ReleaseAsset, UpdateInfo};
|
||||
use crate::models::{container_config, GitHubRelease, ImageUpdateInfo, ReleaseAsset, UpdateInfo};
|
||||
use crate::AppState;
|
||||
|
||||
const RELEASES_URL: &str =
|
||||
"https://repo.anhonesthost.net/api/v1/repos/cybercovellc/triple-c/releases";
|
||||
"https://api.github.com/repos/shadowdao/triple-c/releases";
|
||||
|
||||
/// Gitea container-registry tag object (v2 manifest).
|
||||
/// GHCR container-registry API base (OCI distribution spec).
|
||||
const REGISTRY_API_BASE: &str =
|
||||
"https://repo.anhonesthost.net/v2/cybercovellc/triple-c/triple-c-sandbox";
|
||||
"https://ghcr.io/v2/shadowdao/triple-c-sandbox";
|
||||
|
||||
/// GHCR token endpoint for anonymous pull access.
|
||||
const GHCR_TOKEN_URL: &str =
|
||||
"https://ghcr.io/token?scope=repository:shadowdao/triple-c-sandbox:pull";
|
||||
|
||||
#[tauri::command]
|
||||
pub fn get_app_version() -> String {
|
||||
@@ -23,9 +28,10 @@ pub async fn check_for_updates() -> Result<Option<UpdateInfo>, String> {
|
||||
.build()
|
||||
.map_err(|e| format!("Failed to create HTTP client: {}", e))?;
|
||||
|
||||
let releases: Vec<GiteaRelease> = client
|
||||
let releases: Vec<GitHubRelease> = client
|
||||
.get(RELEASES_URL)
|
||||
.header("Accept", "application/json")
|
||||
.header("User-Agent", "triple-c-updater")
|
||||
.send()
|
||||
.await
|
||||
.map_err(|e| format!("Failed to fetch releases: {}", e))?
|
||||
@@ -36,30 +42,27 @@ pub async fn check_for_updates() -> Result<Option<UpdateInfo>, String> {
|
||||
let current_version = env!("CARGO_PKG_VERSION");
|
||||
let current_semver = parse_semver(current_version).unwrap_or((0, 0, 0));
|
||||
|
||||
// Determine platform suffix for tag filtering
|
||||
let platform_suffix: &str = if cfg!(target_os = "windows") {
|
||||
"-win"
|
||||
// Determine platform-specific asset extensions
|
||||
let platform_extensions: &[&str] = if cfg!(target_os = "windows") {
|
||||
&[".msi", ".exe"]
|
||||
} else if cfg!(target_os = "macos") {
|
||||
"-mac"
|
||||
&[".dmg", ".app.tar.gz"]
|
||||
} else {
|
||||
"" // Linux uses bare tags (no suffix)
|
||||
&[".AppImage", ".deb", ".rpm"]
|
||||
};
|
||||
|
||||
// Filter releases by platform tag suffix
|
||||
let platform_releases: Vec<&GiteaRelease> = releases
|
||||
// Filter releases that have at least one asset matching the current platform
|
||||
let platform_releases: Vec<&GitHubRelease> = releases
|
||||
.iter()
|
||||
.filter(|r| {
|
||||
if platform_suffix.is_empty() {
|
||||
// Linux: bare tag only (no -win, no -mac)
|
||||
!r.tag_name.ends_with("-win") && !r.tag_name.ends_with("-mac")
|
||||
} else {
|
||||
r.tag_name.ends_with(platform_suffix)
|
||||
}
|
||||
r.assets.iter().any(|a| {
|
||||
platform_extensions.iter().any(|ext| a.name.ends_with(ext))
|
||||
})
|
||||
})
|
||||
.collect();
|
||||
|
||||
// Find the latest release with a higher semver version
|
||||
let mut best: Option<(&GiteaRelease, (u32, u32, u32))> = None;
|
||||
let mut best: Option<(&GitHubRelease, (u32, u32, u32))> = None;
|
||||
for release in &platform_releases {
|
||||
if let Some(ver) = parse_semver_from_tag(&release.tag_name) {
|
||||
if ver > current_semver {
|
||||
@@ -72,9 +75,13 @@ pub async fn check_for_updates() -> Result<Option<UpdateInfo>, String> {
|
||||
|
||||
match best {
|
||||
Some((release, _)) => {
|
||||
// Only include assets matching the current platform
|
||||
let assets = release
|
||||
.assets
|
||||
.iter()
|
||||
.filter(|a| {
|
||||
platform_extensions.iter().any(|ext| a.name.ends_with(ext))
|
||||
})
|
||||
.map(|a| ReleaseAsset {
|
||||
name: a.name.clone(),
|
||||
browser_download_url: a.browser_download_url.clone(),
|
||||
@@ -82,7 +89,6 @@ pub async fn check_for_updates() -> Result<Option<UpdateInfo>, String> {
|
||||
})
|
||||
.collect();
|
||||
|
||||
// Reconstruct version string from tag
|
||||
let version = extract_version_from_tag(&release.tag_name)
|
||||
.unwrap_or_else(|| release.tag_name.clone());
|
||||
|
||||
@@ -113,17 +119,13 @@ fn parse_semver(version: &str) -> Option<(u32, u32, u32)> {
|
||||
}
|
||||
}
|
||||
|
||||
/// Parse semver from a tag like "v0.2.5", "v0.2.5-win", "v0.2.5-mac" -> (0, 2, 5)
|
||||
/// Parse semver from a tag like "v0.2.5" -> (0, 2, 5)
|
||||
fn parse_semver_from_tag(tag: &str) -> Option<(u32, u32, u32)> {
|
||||
let clean = tag.trim_start_matches('v');
|
||||
// Remove platform suffix
|
||||
let clean = clean.strip_suffix("-win")
|
||||
.or_else(|| clean.strip_suffix("-mac"))
|
||||
.unwrap_or(clean);
|
||||
parse_semver(clean)
|
||||
}
|
||||
|
||||
/// Extract a clean version string from a tag like "v0.2.5-win" -> "0.2.5"
|
||||
/// Extract a clean version string from a tag like "v0.2.5" -> "0.2.5"
|
||||
fn extract_version_from_tag(tag: &str) -> Option<String> {
|
||||
let (major, minor, patch) = parse_semver_from_tag(tag)?;
|
||||
Some(format!("{}.{}.{}", major, minor, patch))
|
||||
@@ -152,7 +154,7 @@ pub async fn check_image_update(
|
||||
// 1. Get local image digest via Docker
|
||||
let local_digest = docker::get_local_image_digest(&image_name).await.ok().flatten();
|
||||
|
||||
// 2. Get remote digest from the Gitea container registry (OCI distribution spec)
|
||||
// 2. Get remote digest from the GHCR container registry (OCI distribution spec)
|
||||
let remote_digest = fetch_remote_digest("latest").await?;
|
||||
|
||||
// No remote digest available — nothing to compare
|
||||
@@ -176,25 +178,36 @@ pub async fn check_image_update(
|
||||
}))
|
||||
}
|
||||
|
||||
/// Fetch the digest of a tag from the Gitea container registry using the
|
||||
/// OCI / Docker Registry HTTP API v2.
|
||||
/// Fetch the digest of a tag from GHCR using the OCI / Docker Registry HTTP API v2.
|
||||
///
|
||||
/// We issue a HEAD request to /v2/<repo>/manifests/<tag> and read the
|
||||
/// `Docker-Content-Digest` header that the registry returns.
|
||||
/// GHCR requires authentication even for public images, so we first obtain an
|
||||
/// anonymous token, then issue a HEAD request to /v2/<repo>/manifests/<tag>
|
||||
/// and read the `Docker-Content-Digest` header.
|
||||
async fn fetch_remote_digest(tag: &str) -> Result<Option<String>, String> {
|
||||
let url = format!("{}/manifests/{}", REGISTRY_API_BASE, tag);
|
||||
|
||||
let client = reqwest::Client::builder()
|
||||
.timeout(std::time::Duration::from_secs(15))
|
||||
.build()
|
||||
.map_err(|e| format!("Failed to create HTTP client: {}", e))?;
|
||||
|
||||
// 1. Obtain anonymous bearer token from GHCR
|
||||
let token = match fetch_ghcr_token(&client).await {
|
||||
Ok(t) => t,
|
||||
Err(e) => {
|
||||
log::warn!("Failed to obtain GHCR token: {}", e);
|
||||
return Ok(None);
|
||||
}
|
||||
};
|
||||
|
||||
// 2. HEAD the manifest with the token
|
||||
let url = format!("{}/manifests/{}", REGISTRY_API_BASE, tag);
|
||||
|
||||
let response = client
|
||||
.head(&url)
|
||||
.header(
|
||||
"Accept",
|
||||
"application/vnd.docker.distribution.manifest.v2+json, application/vnd.oci.image.index.v1+json",
|
||||
)
|
||||
.header("Authorization", format!("Bearer {}", token))
|
||||
.send()
|
||||
.await;
|
||||
|
||||
@@ -221,3 +234,23 @@ async fn fetch_remote_digest(tag: &str) -> Result<Option<String>, String> {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Fetch an anonymous bearer token from GHCR for pulling public images.
|
||||
async fn fetch_ghcr_token(client: &reqwest::Client) -> Result<String, String> {
|
||||
#[derive(Deserialize)]
|
||||
struct TokenResponse {
|
||||
token: String,
|
||||
}
|
||||
|
||||
let resp: TokenResponse = client
|
||||
.get(GHCR_TOKEN_URL)
|
||||
.header("User-Agent", "triple-c-updater")
|
||||
.send()
|
||||
.await
|
||||
.map_err(|e| format!("GHCR token request failed: {}", e))?
|
||||
.json()
|
||||
.await
|
||||
.map_err(|e| format!("Failed to parse GHCR token response: {}", e))?;
|
||||
|
||||
Ok(resp.token)
|
||||
}
|
||||
|
||||
143
app/src-tauri/src/commands/web_terminal_commands.rs
Normal file
143
app/src-tauri/src/commands/web_terminal_commands.rs
Normal file
@@ -0,0 +1,143 @@
|
||||
use serde::Serialize;
|
||||
use tauri::State;
|
||||
|
||||
use crate::web_terminal::WebTerminalServer;
|
||||
use crate::AppState;
|
||||
|
||||
#[derive(Serialize)]
|
||||
pub struct WebTerminalInfo {
|
||||
pub running: bool,
|
||||
pub port: u16,
|
||||
pub access_token: String,
|
||||
pub local_ip: Option<String>,
|
||||
pub url: Option<String>,
|
||||
}
|
||||
|
||||
fn generate_token() -> String {
|
||||
use rand::Rng;
|
||||
let mut rng = rand::rng();
|
||||
let bytes: Vec<u8> = (0..32).map(|_| rng.random::<u8>()).collect();
|
||||
use base64::Engine;
|
||||
base64::engine::general_purpose::URL_SAFE_NO_PAD.encode(&bytes)
|
||||
}
|
||||
|
||||
fn get_local_ip() -> Option<String> {
|
||||
local_ip_address::local_ip().ok().map(|ip| ip.to_string())
|
||||
}
|
||||
|
||||
fn build_info(running: bool, port: u16, token: &str) -> WebTerminalInfo {
|
||||
let local_ip = get_local_ip();
|
||||
let url = if running {
|
||||
local_ip
|
||||
.as_ref()
|
||||
.map(|ip| format!("http://{}:{}?token={}", ip, port, token))
|
||||
} else {
|
||||
None
|
||||
};
|
||||
WebTerminalInfo {
|
||||
running,
|
||||
port,
|
||||
access_token: token.to_string(),
|
||||
local_ip,
|
||||
url,
|
||||
}
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn start_web_terminal(state: State<'_, AppState>) -> Result<WebTerminalInfo, String> {
|
||||
let mut server_guard = state.web_terminal_server.lock().await;
|
||||
if server_guard.is_some() {
|
||||
return Err("Web terminal server is already running".to_string());
|
||||
}
|
||||
|
||||
let mut settings = state.settings_store.get();
|
||||
|
||||
// Auto-generate token if not set
|
||||
if settings.web_terminal.access_token.is_none() {
|
||||
settings.web_terminal.access_token = Some(generate_token());
|
||||
settings.web_terminal.enabled = true;
|
||||
state.settings_store.update(settings.clone()).map_err(|e| format!("Failed to save settings: {}", e))?;
|
||||
}
|
||||
|
||||
let token = settings.web_terminal.access_token.clone().unwrap_or_default();
|
||||
let port = settings.web_terminal.port;
|
||||
|
||||
let server = WebTerminalServer::start(
|
||||
port,
|
||||
token.clone(),
|
||||
state.exec_manager.clone(),
|
||||
state.projects_store.clone(),
|
||||
state.settings_store.clone(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
*server_guard = Some(server);
|
||||
|
||||
// Mark as enabled in settings
|
||||
if !settings.web_terminal.enabled {
|
||||
settings.web_terminal.enabled = true;
|
||||
let _ = state.settings_store.update(settings);
|
||||
}
|
||||
|
||||
Ok(build_info(true, port, &token))
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn stop_web_terminal(state: State<'_, AppState>) -> Result<(), String> {
|
||||
let mut server_guard = state.web_terminal_server.lock().await;
|
||||
if let Some(server) = server_guard.take() {
|
||||
server.stop();
|
||||
}
|
||||
|
||||
// Mark as disabled in settings
|
||||
let mut settings = state.settings_store.get();
|
||||
if settings.web_terminal.enabled {
|
||||
settings.web_terminal.enabled = false;
|
||||
let _ = state.settings_store.update(settings);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn get_web_terminal_status(state: State<'_, AppState>) -> Result<WebTerminalInfo, String> {
|
||||
let server_guard = state.web_terminal_server.lock().await;
|
||||
let settings = state.settings_store.get();
|
||||
let token = settings.web_terminal.access_token.clone().unwrap_or_default();
|
||||
let running = server_guard.is_some();
|
||||
Ok(build_info(running, settings.web_terminal.port, &token))
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn regenerate_web_terminal_token(state: State<'_, AppState>) -> Result<WebTerminalInfo, String> {
|
||||
// Stop current server if running
|
||||
{
|
||||
let mut server_guard = state.web_terminal_server.lock().await;
|
||||
if let Some(server) = server_guard.take() {
|
||||
server.stop();
|
||||
}
|
||||
}
|
||||
|
||||
// Generate new token and save
|
||||
let new_token = generate_token();
|
||||
let mut settings = state.settings_store.get();
|
||||
settings.web_terminal.access_token = Some(new_token.clone());
|
||||
state.settings_store.update(settings.clone()).map_err(|e| format!("Failed to save settings: {}", e))?;
|
||||
|
||||
// Restart if was enabled
|
||||
if settings.web_terminal.enabled {
|
||||
let server = WebTerminalServer::start(
|
||||
settings.web_terminal.port,
|
||||
new_token.clone(),
|
||||
state.exec_manager.clone(),
|
||||
state.projects_store.clone(),
|
||||
state.settings_store.clone(),
|
||||
)
|
||||
.await?;
|
||||
let mut server_guard = state.web_terminal_server.lock().await;
|
||||
*server_guard = Some(server);
|
||||
return Ok(build_info(true, settings.web_terminal.port, &new_token));
|
||||
}
|
||||
|
||||
Ok(build_info(false, settings.web_terminal.port, &new_token))
|
||||
}
|
||||
@@ -244,13 +244,13 @@ fn compute_ollama_fingerprint(project: &Project) -> String {
|
||||
}
|
||||
}
|
||||
|
||||
/// Compute a fingerprint for the LiteLLM configuration so we can detect changes.
|
||||
fn compute_litellm_fingerprint(project: &Project) -> String {
|
||||
if let Some(ref litellm) = project.litellm_config {
|
||||
/// Compute a fingerprint for the OpenAI Compatible configuration so we can detect changes.
|
||||
fn compute_openai_compatible_fingerprint(project: &Project) -> String {
|
||||
if let Some(ref config) = project.openai_compatible_config {
|
||||
let parts = vec![
|
||||
litellm.base_url.clone(),
|
||||
litellm.api_key.as_deref().unwrap_or("").to_string(),
|
||||
litellm.model_id.as_deref().unwrap_or("").to_string(),
|
||||
config.base_url.clone(),
|
||||
config.api_key.as_deref().unwrap_or("").to_string(),
|
||||
config.model_id.as_deref().unwrap_or("").to_string(),
|
||||
];
|
||||
sha256_hex(&parts.join("|"))
|
||||
} else {
|
||||
@@ -516,14 +516,14 @@ pub async fn create_container(
|
||||
}
|
||||
}
|
||||
|
||||
// LiteLLM configuration
|
||||
if project.backend == Backend::LiteLlm {
|
||||
if let Some(ref litellm) = project.litellm_config {
|
||||
env_vars.push(format!("ANTHROPIC_BASE_URL={}", litellm.base_url));
|
||||
if let Some(ref key) = litellm.api_key {
|
||||
// OpenAI Compatible configuration
|
||||
if project.backend == Backend::OpenAiCompatible {
|
||||
if let Some(ref config) = project.openai_compatible_config {
|
||||
env_vars.push(format!("ANTHROPIC_BASE_URL={}", config.base_url));
|
||||
if let Some(ref key) = config.api_key {
|
||||
env_vars.push(format!("ANTHROPIC_AUTH_TOKEN={}", key));
|
||||
}
|
||||
if let Some(ref model) = litellm.model_id {
|
||||
if let Some(ref model) = config.model_id {
|
||||
env_vars.push(format!("ANTHROPIC_MODEL={}", model));
|
||||
}
|
||||
}
|
||||
@@ -698,12 +698,19 @@ pub async fn create_container(
|
||||
labels.insert("triple-c.paths-fingerprint".to_string(), compute_paths_fingerprint(&project.paths));
|
||||
labels.insert("triple-c.bedrock-fingerprint".to_string(), compute_bedrock_fingerprint(project));
|
||||
labels.insert("triple-c.ollama-fingerprint".to_string(), compute_ollama_fingerprint(project));
|
||||
labels.insert("triple-c.litellm-fingerprint".to_string(), compute_litellm_fingerprint(project));
|
||||
labels.insert("triple-c.openai-compatible-fingerprint".to_string(), compute_openai_compatible_fingerprint(project));
|
||||
labels.insert("triple-c.ports-fingerprint".to_string(), compute_ports_fingerprint(&project.port_mappings));
|
||||
labels.insert("triple-c.image".to_string(), image_name.to_string());
|
||||
labels.insert("triple-c.timezone".to_string(), timezone.unwrap_or("").to_string());
|
||||
labels.insert("triple-c.mcp-fingerprint".to_string(), compute_mcp_fingerprint(mcp_servers));
|
||||
labels.insert("triple-c.mission-control".to_string(), project.mission_control_enabled.to_string());
|
||||
labels.insert("triple-c.custom-env-fingerprint".to_string(), custom_env_fingerprint.clone());
|
||||
labels.insert("triple-c.instructions-fingerprint".to_string(),
|
||||
combined_instructions.as_ref().map(|s| sha256_hex(s)).unwrap_or_default());
|
||||
labels.insert("triple-c.git-user-name".to_string(), project.git_user_name.clone().unwrap_or_default());
|
||||
labels.insert("triple-c.git-user-email".to_string(), project.git_user_email.clone().unwrap_or_default());
|
||||
labels.insert("triple-c.git-token-hash".to_string(),
|
||||
project.git_token.as_ref().map(|t| sha256_hex(t)).unwrap_or_default());
|
||||
|
||||
let host_config = HostConfig {
|
||||
mounts: Some(mounts),
|
||||
@@ -948,11 +955,11 @@ pub async fn container_needs_recreation(
|
||||
return Ok(true);
|
||||
}
|
||||
|
||||
// ── LiteLLM config fingerprint ───────────────────────────────────────
|
||||
let expected_litellm_fp = compute_litellm_fingerprint(project);
|
||||
let container_litellm_fp = get_label("triple-c.litellm-fingerprint").unwrap_or_default();
|
||||
if container_litellm_fp != expected_litellm_fp {
|
||||
log::info!("LiteLLM config mismatch");
|
||||
// ── OpenAI Compatible config fingerprint ────────────────────────────
|
||||
let expected_oai_fp = compute_openai_compatible_fingerprint(project);
|
||||
let container_oai_fp = get_label("triple-c.openai-compatible-fingerprint").unwrap_or_default();
|
||||
if container_oai_fp != expected_oai_fp {
|
||||
log::info!("OpenAI Compatible config mismatch");
|
||||
return Ok(true);
|
||||
}
|
||||
|
||||
@@ -1000,41 +1007,32 @@ pub async fn container_needs_recreation(
|
||||
return Ok(true);
|
||||
}
|
||||
|
||||
// ── Git environment variables ────────────────────────────────────────
|
||||
let env_vars = info
|
||||
.config
|
||||
.as_ref()
|
||||
.and_then(|c| c.env.as_ref());
|
||||
|
||||
let get_env = |name: &str| -> Option<String> {
|
||||
env_vars.and_then(|vars| {
|
||||
vars.iter()
|
||||
.find(|v| v.starts_with(&format!("{}=", name)))
|
||||
.map(|v| v[name.len() + 1..].to_string())
|
||||
})
|
||||
};
|
||||
|
||||
let container_git_name = get_env("GIT_USER_NAME");
|
||||
let container_git_email = get_env("GIT_USER_EMAIL");
|
||||
let container_git_token = get_env("GIT_TOKEN");
|
||||
|
||||
if container_git_name.as_deref() != project.git_user_name.as_deref() {
|
||||
log::info!("GIT_USER_NAME mismatch (container={:?}, project={:?})", container_git_name, project.git_user_name);
|
||||
// ── Git settings (label-based to avoid stale snapshot env vars) ─────
|
||||
let expected_git_name = project.git_user_name.clone().unwrap_or_default();
|
||||
let container_git_name = get_label("triple-c.git-user-name").unwrap_or_default();
|
||||
if container_git_name != expected_git_name {
|
||||
log::info!("GIT_USER_NAME mismatch (container={:?}, project={:?})", container_git_name, expected_git_name);
|
||||
return Ok(true);
|
||||
}
|
||||
if container_git_email.as_deref() != project.git_user_email.as_deref() {
|
||||
log::info!("GIT_USER_EMAIL mismatch (container={:?}, project={:?})", container_git_email, project.git_user_email);
|
||||
|
||||
let expected_git_email = project.git_user_email.clone().unwrap_or_default();
|
||||
let container_git_email = get_label("triple-c.git-user-email").unwrap_or_default();
|
||||
if container_git_email != expected_git_email {
|
||||
log::info!("GIT_USER_EMAIL mismatch (container={:?}, project={:?})", container_git_email, expected_git_email);
|
||||
return Ok(true);
|
||||
}
|
||||
if container_git_token.as_deref() != project.git_token.as_deref() {
|
||||
|
||||
let expected_git_token_hash = project.git_token.as_ref().map(|t| sha256_hex(t)).unwrap_or_default();
|
||||
let container_git_token_hash = get_label("triple-c.git-token-hash").unwrap_or_default();
|
||||
if container_git_token_hash != expected_git_token_hash {
|
||||
log::info!("GIT_TOKEN mismatch");
|
||||
return Ok(true);
|
||||
}
|
||||
|
||||
// ── Custom environment variables ──────────────────────────────────────
|
||||
// ── Custom environment variables (label-based fingerprint) ──────────
|
||||
let merged_env = merge_custom_env_vars(global_custom_env_vars, &project.custom_env_vars);
|
||||
let expected_fingerprint = compute_env_fingerprint(&merged_env);
|
||||
let container_fingerprint = get_env("TRIPLE_C_CUSTOM_ENV").unwrap_or_default();
|
||||
let container_fingerprint = get_label("triple-c.custom-env-fingerprint").unwrap_or_default();
|
||||
if container_fingerprint != expected_fingerprint {
|
||||
log::info!("Custom env vars mismatch (container={:?}, expected={:?})", container_fingerprint, expected_fingerprint);
|
||||
return Ok(true);
|
||||
@@ -1048,15 +1046,16 @@ pub async fn container_needs_recreation(
|
||||
return Ok(true);
|
||||
}
|
||||
|
||||
// ── Claude instructions ───────────────────────────────────────────────
|
||||
// ── Claude instructions (label-based fingerprint) ─────────────────────
|
||||
let expected_instructions = build_claude_instructions(
|
||||
global_claude_instructions,
|
||||
project.claude_instructions.as_deref(),
|
||||
&project.port_mappings,
|
||||
project.mission_control_enabled,
|
||||
);
|
||||
let container_instructions = get_env("CLAUDE_INSTRUCTIONS");
|
||||
if container_instructions.as_deref() != expected_instructions.as_deref() {
|
||||
let expected_instructions_fp = expected_instructions.as_ref().map(|s| sha256_hex(s)).unwrap_or_default();
|
||||
let container_instructions_fp = get_label("triple-c.instructions-fingerprint").unwrap_or_default();
|
||||
if container_instructions_fp != expected_instructions_fp {
|
||||
log::info!("CLAUDE_INSTRUCTIONS mismatch");
|
||||
return Ok(true);
|
||||
}
|
||||
|
||||
@@ -3,44 +3,55 @@ mod docker;
|
||||
mod logging;
|
||||
mod models;
|
||||
mod storage;
|
||||
pub mod web_terminal;
|
||||
|
||||
use std::sync::Arc;
|
||||
|
||||
use docker::exec::ExecSessionManager;
|
||||
use storage::projects_store::ProjectsStore;
|
||||
use storage::settings_store::SettingsStore;
|
||||
use storage::mcp_store::McpStore;
|
||||
use tauri::Manager;
|
||||
use web_terminal::WebTerminalServer;
|
||||
|
||||
pub struct AppState {
|
||||
pub projects_store: ProjectsStore,
|
||||
pub settings_store: SettingsStore,
|
||||
pub mcp_store: McpStore,
|
||||
pub exec_manager: ExecSessionManager,
|
||||
pub projects_store: Arc<ProjectsStore>,
|
||||
pub settings_store: Arc<SettingsStore>,
|
||||
pub mcp_store: Arc<McpStore>,
|
||||
pub exec_manager: Arc<ExecSessionManager>,
|
||||
pub web_terminal_server: Arc<tokio::sync::Mutex<Option<WebTerminalServer>>>,
|
||||
}
|
||||
|
||||
pub fn run() {
|
||||
logging::init();
|
||||
|
||||
let projects_store = match ProjectsStore::new() {
|
||||
let projects_store = Arc::new(match ProjectsStore::new() {
|
||||
Ok(s) => s,
|
||||
Err(e) => {
|
||||
log::error!("Failed to initialize projects store: {}", e);
|
||||
panic!("Failed to initialize projects store: {}", e);
|
||||
}
|
||||
};
|
||||
let settings_store = match SettingsStore::new() {
|
||||
});
|
||||
let settings_store = Arc::new(match SettingsStore::new() {
|
||||
Ok(s) => s,
|
||||
Err(e) => {
|
||||
log::error!("Failed to initialize settings store: {}", e);
|
||||
panic!("Failed to initialize settings store: {}", e);
|
||||
}
|
||||
};
|
||||
let mcp_store = match McpStore::new() {
|
||||
});
|
||||
let mcp_store = Arc::new(match McpStore::new() {
|
||||
Ok(s) => s,
|
||||
Err(e) => {
|
||||
log::error!("Failed to initialize MCP store: {}", e);
|
||||
panic!("Failed to initialize MCP store: {}", e);
|
||||
}
|
||||
};
|
||||
});
|
||||
let exec_manager = Arc::new(ExecSessionManager::new());
|
||||
|
||||
// Clone Arcs for the setup closure (web terminal auto-start)
|
||||
let projects_store_setup = projects_store.clone();
|
||||
let settings_store_setup = settings_store.clone();
|
||||
let exec_manager_setup = exec_manager.clone();
|
||||
|
||||
tauri::Builder::default()
|
||||
.plugin(tauri_plugin_store::Builder::default().build())
|
||||
@@ -50,9 +61,10 @@ pub fn run() {
|
||||
projects_store,
|
||||
settings_store,
|
||||
mcp_store,
|
||||
exec_manager: ExecSessionManager::new(),
|
||||
exec_manager,
|
||||
web_terminal_server: Arc::new(tokio::sync::Mutex::new(None)),
|
||||
})
|
||||
.setup(|app| {
|
||||
.setup(move |app| {
|
||||
match tauri::image::Image::from_bytes(include_bytes!("../icons/icon.png")) {
|
||||
Ok(icon) => {
|
||||
if let Some(window) = app.get_webview_window("main") {
|
||||
@@ -63,12 +75,54 @@ pub fn run() {
|
||||
log::error!("Failed to load window icon: {}", e);
|
||||
}
|
||||
}
|
||||
|
||||
// Auto-start web terminal server if enabled in settings
|
||||
let settings = settings_store_setup.get();
|
||||
if settings.web_terminal.enabled {
|
||||
if let Some(token) = &settings.web_terminal.access_token {
|
||||
let token = token.clone();
|
||||
let port = settings.web_terminal.port;
|
||||
let exec_mgr = exec_manager_setup.clone();
|
||||
let proj_store = projects_store_setup.clone();
|
||||
let set_store = settings_store_setup.clone();
|
||||
let state = app.state::<AppState>();
|
||||
let web_server_mutex = state.web_terminal_server.clone();
|
||||
|
||||
tauri::async_runtime::spawn(async move {
|
||||
match WebTerminalServer::start(
|
||||
port,
|
||||
token,
|
||||
exec_mgr,
|
||||
proj_store,
|
||||
set_store,
|
||||
)
|
||||
.await
|
||||
{
|
||||
Ok(server) => {
|
||||
let mut guard = web_server_mutex.lock().await;
|
||||
*guard = Some(server);
|
||||
log::info!("Web terminal auto-started on port {}", port);
|
||||
}
|
||||
Err(e) => {
|
||||
log::error!("Failed to auto-start web terminal: {}", e);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
})
|
||||
.on_window_event(|window, event| {
|
||||
if let tauri::WindowEvent::CloseRequested { .. } = event {
|
||||
let state = window.state::<AppState>();
|
||||
tauri::async_runtime::block_on(async {
|
||||
// Stop web terminal server
|
||||
let mut server_guard = state.web_terminal_server.lock().await;
|
||||
if let Some(server) = server_guard.take() {
|
||||
server.stop();
|
||||
}
|
||||
// Close all exec sessions
|
||||
state.exec_manager.close_all_sessions().await;
|
||||
});
|
||||
}
|
||||
@@ -120,6 +174,13 @@ pub fn run() {
|
||||
commands::update_commands::get_app_version,
|
||||
commands::update_commands::check_for_updates,
|
||||
commands::update_commands::check_image_update,
|
||||
// Help
|
||||
commands::help_commands::get_help_content,
|
||||
// Web Terminal
|
||||
commands::web_terminal_commands::start_web_terminal,
|
||||
commands::web_terminal_commands::stop_web_terminal,
|
||||
commands::web_terminal_commands::get_web_terminal_status,
|
||||
commands::web_terminal_commands::regenerate_web_terminal_token,
|
||||
])
|
||||
.run(tauri::generate_context!())
|
||||
.expect("error while running tauri application");
|
||||
|
||||
@@ -74,6 +74,32 @@ pub struct AppSettings {
|
||||
pub default_microphone: Option<String>,
|
||||
#[serde(default)]
|
||||
pub dismissed_image_digest: Option<String>,
|
||||
#[serde(default)]
|
||||
pub web_terminal: WebTerminalSettings,
|
||||
}
|
||||
|
||||
fn default_web_terminal_port() -> u16 {
|
||||
7681
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct WebTerminalSettings {
|
||||
#[serde(default)]
|
||||
pub enabled: bool,
|
||||
#[serde(default = "default_web_terminal_port")]
|
||||
pub port: u16,
|
||||
#[serde(default)]
|
||||
pub access_token: Option<String>,
|
||||
}
|
||||
|
||||
impl Default for WebTerminalSettings {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
enabled: false,
|
||||
port: 7681,
|
||||
access_token: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for AppSettings {
|
||||
@@ -93,6 +119,7 @@ impl Default for AppSettings {
|
||||
timezone: None,
|
||||
default_microphone: None,
|
||||
dismissed_image_digest: None,
|
||||
web_terminal: WebTerminalSettings::default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -12,7 +12,7 @@ pub struct ContainerInfo {
|
||||
|
||||
pub const LOCAL_IMAGE_NAME: &str = "triple-c";
|
||||
pub const IMAGE_TAG: &str = "latest";
|
||||
pub const REGISTRY_IMAGE: &str = "repo.anhonesthost.net/cybercovellc/triple-c/triple-c-sandbox:latest";
|
||||
pub const REGISTRY_IMAGE: &str = "ghcr.io/shadowdao/triple-c-sandbox:latest";
|
||||
|
||||
pub fn local_build_image_name() -> String {
|
||||
format!("{LOCAL_IMAGE_NAME}:{IMAGE_TAG}")
|
||||
|
||||
@@ -24,6 +24,10 @@ fn default_protocol() -> String {
|
||||
"tcp".to_string()
|
||||
}
|
||||
|
||||
fn default_full_permissions() -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct Project {
|
||||
pub id: String,
|
||||
@@ -35,10 +39,13 @@ pub struct Project {
|
||||
pub backend: Backend,
|
||||
pub bedrock_config: Option<BedrockConfig>,
|
||||
pub ollama_config: Option<OllamaConfig>,
|
||||
pub litellm_config: Option<LiteLlmConfig>,
|
||||
#[serde(alias = "litellm_config")]
|
||||
pub openai_compatible_config: Option<OpenAiCompatibleConfig>,
|
||||
pub allow_docker_access: bool,
|
||||
#[serde(default)]
|
||||
pub mission_control_enabled: bool,
|
||||
#[serde(default = "default_full_permissions")]
|
||||
pub full_permissions: bool,
|
||||
pub ssh_key_path: Option<String>,
|
||||
#[serde(skip_serializing, default)]
|
||||
pub git_token: Option<String>,
|
||||
@@ -70,7 +77,7 @@ pub enum ProjectStatus {
|
||||
/// - `Anthropic`: Direct Anthropic API (user runs `claude login` inside the container)
|
||||
/// - `Bedrock`: AWS Bedrock with per-project AWS credentials
|
||||
/// - `Ollama`: Local or remote Ollama server
|
||||
/// - `LiteLlm`: LiteLLM proxy gateway for 100+ model providers
|
||||
/// - `OpenAiCompatible`: Any OpenAI API-compatible endpoint (e.g., LiteLLM, vLLM, etc.)
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
|
||||
#[serde(rename_all = "snake_case")]
|
||||
pub enum Backend {
|
||||
@@ -79,8 +86,8 @@ pub enum Backend {
|
||||
Anthropic,
|
||||
Bedrock,
|
||||
Ollama,
|
||||
#[serde(alias = "litellm")]
|
||||
LiteLlm,
|
||||
#[serde(alias = "lite_llm", alias = "litellm")]
|
||||
OpenAiCompatible,
|
||||
}
|
||||
|
||||
impl Default for Backend {
|
||||
@@ -132,13 +139,14 @@ pub struct OllamaConfig {
|
||||
pub model_id: Option<String>,
|
||||
}
|
||||
|
||||
/// LiteLLM gateway configuration for a project.
|
||||
/// LiteLLM translates Anthropic API calls to 100+ model providers.
|
||||
/// OpenAI Compatible endpoint configuration for a project.
|
||||
/// Routes Anthropic API calls through any OpenAI API-compatible endpoint
|
||||
/// (e.g., LiteLLM, vLLM, or other compatible gateways).
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct LiteLlmConfig {
|
||||
/// The base URL of the LiteLLM proxy (e.g., "http://host.docker.internal:4000" or "https://litellm.example.com")
|
||||
pub struct OpenAiCompatibleConfig {
|
||||
/// The base URL of the OpenAI-compatible endpoint (e.g., "http://host.docker.internal:4000" or "https://api.example.com")
|
||||
pub base_url: String,
|
||||
/// API key for the LiteLLM proxy
|
||||
/// API key for the OpenAI-compatible endpoint
|
||||
#[serde(skip_serializing, default)]
|
||||
pub api_key: Option<String>,
|
||||
/// Optional model override
|
||||
@@ -157,9 +165,10 @@ impl Project {
|
||||
backend: Backend::default(),
|
||||
bedrock_config: None,
|
||||
ollama_config: None,
|
||||
litellm_config: None,
|
||||
openai_compatible_config: None,
|
||||
allow_docker_access: false,
|
||||
mission_control_enabled: false,
|
||||
full_permissions: false,
|
||||
ssh_key_path: None,
|
||||
git_token: None,
|
||||
git_user_name: None,
|
||||
|
||||
@@ -18,19 +18,19 @@ pub struct ReleaseAsset {
|
||||
pub size: u64,
|
||||
}
|
||||
|
||||
/// Gitea API release response (internal).
|
||||
/// GitHub API release response (internal).
|
||||
#[derive(Debug, Clone, Deserialize)]
|
||||
pub struct GiteaRelease {
|
||||
pub struct GitHubRelease {
|
||||
pub tag_name: String,
|
||||
pub html_url: String,
|
||||
pub body: String,
|
||||
pub assets: Vec<GiteaAsset>,
|
||||
pub assets: Vec<GitHubAsset>,
|
||||
pub published_at: String,
|
||||
}
|
||||
|
||||
/// Gitea API asset response (internal).
|
||||
/// GitHub API asset response (internal).
|
||||
#[derive(Debug, Clone, Deserialize)]
|
||||
pub struct GiteaAsset {
|
||||
pub struct GitHubAsset {
|
||||
pub name: String,
|
||||
pub browser_download_url: String,
|
||||
pub size: u64,
|
||||
|
||||
4
app/src-tauri/src/web_terminal/mod.rs
Normal file
4
app/src-tauri/src/web_terminal/mod.rs
Normal file
@@ -0,0 +1,4 @@
|
||||
pub mod server;
|
||||
mod ws_handler;
|
||||
|
||||
pub use server::WebTerminalServer;
|
||||
155
app/src-tauri/src/web_terminal/server.rs
Normal file
155
app/src-tauri/src/web_terminal/server.rs
Normal file
@@ -0,0 +1,155 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use axum::extract::{Query, State as AxumState, WebSocketUpgrade};
|
||||
use axum::response::{Html, IntoResponse};
|
||||
use axum::routing::get;
|
||||
use axum::Router;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use tokio::sync::watch;
|
||||
use tower_http::cors::CorsLayer;
|
||||
|
||||
use crate::docker::exec::ExecSessionManager;
|
||||
use crate::storage::projects_store::ProjectsStore;
|
||||
use crate::storage::settings_store::SettingsStore;
|
||||
|
||||
use super::ws_handler;
|
||||
|
||||
/// Shared state passed to all axum handlers.
pub struct WebTerminalState {
    /// Manages PTY exec sessions inside project containers.
    pub exec_manager: Arc<ExecSessionManager>,
    /// Read access to the persisted project list.
    pub projects_store: Arc<ProjectsStore>,
    /// Read access to persisted application settings.
    pub settings_store: Arc<SettingsStore>,
    /// Shared-secret token every HTTP/WS request must present.
    pub access_token: String,
}

/// Manages the lifecycle of the axum HTTP+WS server.
pub struct WebTerminalServer {
    /// Sending on this channel triggers graceful shutdown of the
    /// spawned server task.
    shutdown_tx: watch::Sender<()>,
    /// Port the server was bound to at startup.
    port: u16,
}

/// Query-string parameters carrying the access token (`?token=...`).
#[derive(Deserialize)]
pub struct TokenQuery {
    pub token: Option<String>,
}

/// Minimal project summary returned by the `/api/projects` endpoint.
#[derive(Serialize)]
struct ProjectInfo {
    id: String,
    name: String,
    // Stringified project status (e.g. "running").
    status: String,
}
|
||||
|
||||
impl WebTerminalServer {
    /// Start the web terminal server on the given port.
    ///
    /// Binds synchronously (so bind failures surface to the caller as
    /// `Err`), then serves requests on a detached tokio task until
    /// [`stop`](Self::stop) is called or the sender is dropped.
    ///
    /// NOTE(review): binds to 0.0.0.0, so the terminal is reachable on
    /// every network interface; access control relies entirely on the
    /// token check in each handler.
    pub async fn start(
        port: u16,
        access_token: String,
        exec_manager: Arc<ExecSessionManager>,
        projects_store: Arc<ProjectsStore>,
        settings_store: Arc<SettingsStore>,
    ) -> Result<Self, String> {
        // Watch channel used purely as a shutdown signal.
        let (shutdown_tx, shutdown_rx) = watch::channel(());

        let shared_state = Arc::new(WebTerminalState {
            exec_manager,
            projects_store,
            settings_store,
            access_token,
        });

        // CORS is fully permissive; authentication happens per-request
        // via the token query parameter, not via origin checks.
        let app = Router::new()
            .route("/", get(serve_html))
            .route("/ws", get(ws_upgrade))
            .route("/api/projects", get(list_projects))
            .layer(CorsLayer::permissive())
            .with_state(shared_state);

        let addr = format!("0.0.0.0:{}", port);
        let listener = tokio::net::TcpListener::bind(&addr)
            .await
            .map_err(|e| format!("Failed to bind web terminal to {}: {}", addr, e))?;

        log::info!("Web terminal server listening on {}", addr);

        let mut shutdown_rx_clone = shutdown_rx.clone();
        tokio::spawn(async move {
            axum::serve(listener, app)
                .with_graceful_shutdown(async move {
                    // Resolves when `stop` sends on the watch channel
                    // (or when the sender side is dropped).
                    let _ = shutdown_rx_clone.changed().await;
                })
                .await
                .unwrap_or_else(|e| {
                    log::error!("Web terminal server error: {}", e);
                });
            log::info!("Web terminal server shut down");
        });

        Ok(Self { shutdown_tx, port })
    }

    /// Stop the server gracefully.
    ///
    /// A send error is ignored: it only occurs when the server task has
    /// already exited.
    pub fn stop(&self) {
        log::info!("Stopping web terminal server on port {}", self.port);
        let _ = self.shutdown_tx.send(());
    }

    /// Port the server was started on.
    pub fn port(&self) -> u16 {
        self.port
    }
}
|
||||
|
||||
/// Serve the embedded single-page terminal UI.
///
/// The HTML is compiled into the binary via `include_str!`, so no
/// asset files need to ship next to the executable.
async fn serve_html() -> Html<&'static str> {
    Html(include_str!("terminal.html"))
}
|
||||
|
||||
/// Validate token from query params.
|
||||
fn validate_token(state: &WebTerminalState, token: &Option<String>) -> bool {
|
||||
match token {
|
||||
Some(t) => t == &state.access_token,
|
||||
None => false,
|
||||
}
|
||||
}
|
||||
|
||||
/// WebSocket upgrade handler for `/ws`.
///
/// Rejects the upgrade with 401 when the `token` query parameter is
/// missing or wrong; otherwise hands the upgraded socket to the
/// ws_handler connection loop.
async fn ws_upgrade(
    ws: WebSocketUpgrade,
    AxumState(state): AxumState<Arc<WebTerminalState>>,
    Query(query): Query<TokenQuery>,
) -> impl IntoResponse {
    if !validate_token(&state, &query.token) {
        return (axum::http::StatusCode::UNAUTHORIZED, "Invalid token").into_response();
    }
    ws.on_upgrade(move |socket| ws_handler::handle_connection(socket, state))
        .into_response()
}
|
||||
|
||||
/// List running projects (REST endpoint).
|
||||
async fn list_projects(
|
||||
AxumState(state): AxumState<Arc<WebTerminalState>>,
|
||||
Query(query): Query<TokenQuery>,
|
||||
) -> impl IntoResponse {
|
||||
if !validate_token(&state, &query.token) {
|
||||
return (
|
||||
axum::http::StatusCode::UNAUTHORIZED,
|
||||
axum::Json(serde_json::json!({"error": "Invalid token"})),
|
||||
)
|
||||
.into_response();
|
||||
}
|
||||
|
||||
let projects = state.projects_store.list();
|
||||
let infos: Vec<ProjectInfo> = projects
|
||||
.into_iter()
|
||||
.map(|p| ProjectInfo {
|
||||
id: p.id,
|
||||
name: p.name,
|
||||
status: serde_json::to_value(&p.status)
|
||||
.ok()
|
||||
.and_then(|v| v.as_str().map(|s| s.to_string()))
|
||||
.unwrap_or_else(|| "unknown".to_string()),
|
||||
})
|
||||
.collect();
|
||||
|
||||
axum::Json(infos).into_response()
|
||||
}
|
||||
662
app/src-tauri/src/web_terminal/terminal.html
Normal file
662
app/src-tauri/src/web_terminal/terminal.html
Normal file
@@ -0,0 +1,662 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0, maximum-scale=1.0, user-scalable=no">
|
||||
<title>Triple-C Web Terminal</title>
|
||||
<link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/@xterm/xterm@5.5.0/css/xterm.min.css">
|
||||
<script src="https://cdn.jsdelivr.net/npm/@xterm/xterm@5.5.0/lib/xterm.min.js"></script>
|
||||
<script src="https://cdn.jsdelivr.net/npm/@xterm/addon-fit@0.10.0/lib/addon-fit.min.js"></script>
|
||||
<script src="https://cdn.jsdelivr.net/npm/@xterm/addon-web-links@0.11.0/lib/addon-web-links.min.js"></script>
|
||||
<style>
|
||||
:root {
|
||||
--bg-primary: #1a1b26;
|
||||
--bg-secondary: #24283b;
|
||||
--bg-tertiary: #2f3347;
|
||||
--text-primary: #c0caf5;
|
||||
--text-secondary: #565f89;
|
||||
--accent: #7aa2f7;
|
||||
--accent-hover: #89b4fa;
|
||||
--border: #3b3f57;
|
||||
--success: #9ece6a;
|
||||
--warning: #e0af68;
|
||||
--error: #f7768e;
|
||||
}
|
||||
|
||||
* { margin: 0; padding: 0; box-sizing: border-box; }
|
||||
|
||||
body {
|
||||
background: var(--bg-primary);
|
||||
color: var(--text-primary);
|
||||
font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, sans-serif;
|
||||
height: 100vh;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
overflow: hidden;
|
||||
-webkit-tap-highlight-color: transparent;
|
||||
}
|
||||
|
||||
/* ── Top Bar ─────────────────────────────── */
|
||||
.topbar {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 8px;
|
||||
padding: 6px 12px;
|
||||
background: var(--bg-secondary);
|
||||
border-bottom: 1px solid var(--border);
|
||||
flex-shrink: 0;
|
||||
min-height: 42px;
|
||||
}
|
||||
|
||||
.topbar-title {
|
||||
font-size: 13px;
|
||||
font-weight: 600;
|
||||
color: var(--accent);
|
||||
white-space: nowrap;
|
||||
margin-right: 8px;
|
||||
}
|
||||
|
||||
.status-dot {
|
||||
width: 8px;
|
||||
height: 8px;
|
||||
border-radius: 50%;
|
||||
background: var(--error);
|
||||
flex-shrink: 0;
|
||||
}
|
||||
.status-dot.connected { background: var(--success); }
|
||||
.status-dot.reconnecting { background: var(--warning); animation: pulse 1s infinite; }
|
||||
|
||||
@keyframes pulse { 0%,100% { opacity: 1; } 50% { opacity: 0.4; } }
|
||||
|
||||
select, button {
|
||||
font-size: 12px;
|
||||
padding: 4px 8px;
|
||||
background: var(--bg-tertiary);
|
||||
color: var(--text-primary);
|
||||
border: 1px solid var(--border);
|
||||
border-radius: 4px;
|
||||
cursor: pointer;
|
||||
touch-action: manipulation;
|
||||
}
|
||||
|
||||
select:focus, button:focus { outline: none; border-color: var(--accent); }
|
||||
button:hover { background: var(--border); }
|
||||
button:active { background: var(--accent); color: var(--bg-primary); }
|
||||
|
||||
.btn-new {
|
||||
font-weight: 600;
|
||||
min-width: 44px;
|
||||
min-height: 32px;
|
||||
}
|
||||
|
||||
/* ── Tab Bar ─────────────────────────────── */
|
||||
.tabbar {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 1px;
|
||||
padding: 0 8px;
|
||||
background: var(--bg-secondary);
|
||||
border-bottom: 1px solid var(--border);
|
||||
flex-shrink: 0;
|
||||
overflow-x: auto;
|
||||
-webkit-overflow-scrolling: touch;
|
||||
min-height: 32px;
|
||||
}
|
||||
|
||||
.tab {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 6px;
|
||||
padding: 6px 12px;
|
||||
font-size: 11px;
|
||||
color: var(--text-secondary);
|
||||
cursor: pointer;
|
||||
white-space: nowrap;
|
||||
border-bottom: 2px solid transparent;
|
||||
transition: all 0.15s;
|
||||
min-height: 32px;
|
||||
}
|
||||
|
||||
.tab:hover { color: var(--text-primary); }
|
||||
.tab.active {
|
||||
color: var(--text-primary);
|
||||
border-bottom-color: var(--accent);
|
||||
}
|
||||
|
||||
.tab-close {
|
||||
display: inline-flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
width: 16px;
|
||||
height: 16px;
|
||||
border-radius: 3px;
|
||||
font-size: 12px;
|
||||
line-height: 1;
|
||||
color: var(--text-secondary);
|
||||
background: none;
|
||||
border: none;
|
||||
padding: 0;
|
||||
min-width: unset;
|
||||
min-height: unset;
|
||||
}
|
||||
.tab-close:hover { background: var(--error); color: white; }
|
||||
|
||||
/* ── Terminal Area ───────────────────────── */
|
||||
.terminal-area {
|
||||
flex: 1;
|
||||
position: relative;
|
||||
overflow: hidden;
|
||||
}
|
||||
|
||||
.terminal-container {
|
||||
position: absolute;
|
||||
inset: 0;
|
||||
display: none;
|
||||
padding: 4px;
|
||||
}
|
||||
.terminal-container.active { display: block; }
|
||||
|
||||
/* ── Input Bar (mobile/tablet) ──────────── */
|
||||
.input-bar {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 4px;
|
||||
padding: 6px 8px;
|
||||
background: var(--bg-secondary);
|
||||
border-top: 1px solid var(--border);
|
||||
flex-shrink: 0;
|
||||
}
|
||||
|
||||
.input-bar input {
|
||||
flex: 1;
|
||||
min-width: 0;
|
||||
padding: 8px 10px;
|
||||
font-size: 16px; /* prevents iOS zoom on focus */
|
||||
font-family: 'Cascadia Code', 'Fira Code', 'JetBrains Mono', 'Menlo', monospace;
|
||||
background: var(--bg-primary);
|
||||
color: var(--text-primary);
|
||||
border: 1px solid var(--border);
|
||||
border-radius: 6px;
|
||||
outline: none;
|
||||
-webkit-appearance: none;
|
||||
}
|
||||
.input-bar input:focus { border-color: var(--accent); }
|
||||
|
||||
.input-bar .key-btn {
|
||||
padding: 8px 10px;
|
||||
font-size: 11px;
|
||||
font-weight: 600;
|
||||
min-width: 40px;
|
||||
min-height: 36px;
|
||||
border-radius: 6px;
|
||||
white-space: nowrap;
|
||||
}
|
||||
|
||||
/* ── Scroll-to-bottom FAB ──────────────── */
|
||||
.scroll-bottom-btn {
|
||||
position: absolute;
|
||||
bottom: 12px;
|
||||
right: 16px;
|
||||
width: 36px;
|
||||
height: 36px;
|
||||
border-radius: 50%;
|
||||
background: var(--accent);
|
||||
color: var(--bg-primary);
|
||||
border: none;
|
||||
font-size: 18px;
|
||||
font-weight: bold;
|
||||
display: none;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
cursor: pointer;
|
||||
box-shadow: 0 2px 8px rgba(0,0,0,0.4);
|
||||
z-index: 10;
|
||||
padding: 0;
|
||||
min-width: unset;
|
||||
min-height: unset;
|
||||
line-height: 1;
|
||||
}
|
||||
.scroll-bottom-btn:hover { background: var(--accent-hover); }
|
||||
.scroll-bottom-btn.visible { display: flex; }
|
||||
|
||||
/* ── Empty State ─────────────────────────── */
|
||||
.empty-state {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
height: 100%;
|
||||
color: var(--text-secondary);
|
||||
font-size: 14px;
|
||||
gap: 12px;
|
||||
}
|
||||
|
||||
.empty-state .hint {
|
||||
font-size: 12px;
|
||||
opacity: 0.7;
|
||||
}
|
||||
|
||||
/* ── Scrollbar ───────────────────────────── */
|
||||
::-webkit-scrollbar { width: 6px; height: 6px; }
|
||||
::-webkit-scrollbar-track { background: transparent; }
|
||||
::-webkit-scrollbar-thumb { background: var(--border); border-radius: 3px; }
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
|
||||
<!-- Top Bar -->
|
||||
<div class="topbar">
|
||||
<span class="topbar-title">Triple-C</span>
|
||||
<span class="status-dot" id="statusDot"></span>
|
||||
<select id="projectSelect" style="flex:1; max-width:240px;">
|
||||
<option value="">Select project...</option>
|
||||
</select>
|
||||
<button class="btn-new" id="btnClaude" title="New Claude session">Claude</button>
|
||||
<button class="btn-new" id="btnBash" title="New Bash session">Bash</button>
|
||||
</div>
|
||||
|
||||
<!-- Tab Bar -->
|
||||
<div class="tabbar" id="tabbar"></div>
|
||||
|
||||
<!-- Terminal Area -->
|
||||
<div class="terminal-area" id="terminalArea">
|
||||
<div class="empty-state" id="emptyState">
|
||||
<div>Select a project and open a terminal session</div>
|
||||
<div class="hint">Use the buttons above to start a Claude or Bash session</div>
|
||||
</div>
|
||||
<button class="scroll-bottom-btn" id="scrollBottomBtn" title="Scroll to bottom">↓</button>
|
||||
</div>
|
||||
|
||||
<!-- Input Bar for mobile/tablet -->
|
||||
<div class="input-bar" id="inputBar">
|
||||
<input type="text" id="mobileInput" placeholder="Type here..."
|
||||
autocomplete="off" autocorrect="off" autocapitalize="off" spellcheck="false"
|
||||
enterkeyhint="send" inputmode="text">
|
||||
<button class="key-btn" id="btnEnter">Enter</button>
|
||||
<button class="key-btn" id="btnTab">Tab</button>
|
||||
<button class="key-btn" id="btnCtrlC">^C</button>
|
||||
</div>
|
||||
|
||||
<script>
|
||||
(function() {
|
||||
'use strict';
|
||||
|
||||
// ── State ──────────────────────────────────
|
||||
const params = new URLSearchParams(window.location.search);
|
||||
const TOKEN = params.get('token') || '';
|
||||
let ws = null;
|
||||
let reconnectTimer = null;
|
||||
let sessions = {}; // { sessionId: { term, fitAddon, projectName, type, containerId } }
|
||||
let activeSessionId = null;
|
||||
|
||||
// ── DOM refs ───────────────────────────────
|
||||
const statusDot = document.getElementById('statusDot');
|
||||
const projectSelect = document.getElementById('projectSelect');
|
||||
const btnClaude = document.getElementById('btnClaude');
|
||||
const btnBash = document.getElementById('btnBash');
|
||||
const tabbar = document.getElementById('tabbar');
|
||||
const terminalArea = document.getElementById('terminalArea');
|
||||
const emptyState = document.getElementById('emptyState');
|
||||
const mobileInput = document.getElementById('mobileInput');
|
||||
const btnEnter = document.getElementById('btnEnter');
|
||||
const btnTab = document.getElementById('btnTab');
|
||||
const btnCtrlC = document.getElementById('btnCtrlC');
|
||||
const scrollBottomBtn = document.getElementById('scrollBottomBtn');
|
||||
|
||||
// ── WebSocket ──────────────────────────────
|
||||
// Open (or re-open) the WebSocket to the server and wire up handlers.
// On close it schedules a reconnect attempt every 2 s until it succeeds.
function connect() {
  // Match WS scheme to the page scheme (wss behind TLS).
  const proto = location.protocol === 'https:' ? 'wss:' : 'ws:';
  const url = `${proto}//${location.host}/ws?token=${encodeURIComponent(TOKEN)}`;
  ws = new WebSocket(url);

  ws.onopen = () => {
    statusDot.className = 'status-dot connected';
    clearTimeout(reconnectTimer);
    send({ type: 'list_projects' });
    // Start keepalive (ping every 30 s, tied to this socket instance)
    ws._pingInterval = setInterval(() => send({ type: 'ping' }), 30000);
  };

  ws.onmessage = (evt) => {
    try {
      const msg = JSON.parse(evt.data);
      handleMessage(msg);
    } catch (e) {
      console.error('Parse error:', e);
    }
  };

  ws.onclose = () => {
    statusDot.className = 'status-dot reconnecting';
    // Stop the keepalive attached to the now-dead socket.
    if (ws && ws._pingInterval) clearInterval(ws._pingInterval);
    reconnectTimer = setTimeout(connect, 2000);
  };

  ws.onerror = () => {
    // Force onclose (and thus the reconnect path) on any socket error.
    ws.close();
  };
}
|
||||
|
||||
// Serialize and transmit a message; silently dropped when the socket
// is not currently open (e.g. mid-reconnect).
function send(msg) {
  if (!ws || ws.readyState !== WebSocket.OPEN) return;
  ws.send(JSON.stringify(msg));
}
|
||||
|
||||
// ── Message handling ───────────────────────
|
||||
// Dispatch a parsed server message to the matching handler.
function handleMessage(msg) {
  switch (msg.type) {
    case 'projects':
      updateProjectList(msg.projects);
      break;
    case 'opened':
      onSessionOpened(msg.session_id, msg.project_name);
      break;
    case 'output':
      onSessionOutput(msg.session_id, msg.data);
      break;
    case 'exit':
      onSessionExit(msg.session_id);
      break;
    case 'error':
      console.error('Server error:', msg.message);
      // Show in active terminal if available (red ANSI text)
      if (activeSessionId && sessions[activeSessionId]) {
        sessions[activeSessionId].term.writeln(`\r\n\x1b[31mError: ${msg.message}\x1b[0m`);
      }
      break;
    case 'pong':
      // Keepalive reply; nothing to do.
      break;
  }
}
|
||||
|
||||
// Rebuild the project <select> from a fresh server-sent list, keeping
// the user's current selection when it is still present.
function updateProjectList(projects) {
  const previousSelection = projectSelect.value;
  projectSelect.innerHTML = '<option value="">Select project...</option>';
  for (const project of projects) {
    const option = document.createElement('option');
    option.value = project.id;
    option.textContent = `${project.name} (${project.status})`;
    // Only running projects can host a terminal session.
    option.disabled = project.status !== 'running';
    projectSelect.appendChild(option);
  }
  // Restore selection if still valid
  if (previousSelection) projectSelect.value = previousSelection;
}
|
||||
|
||||
// ── Session management ─────────────────────
|
||||
let pendingSessionType = null;
|
||||
|
||||
// Ask the server to open a new exec session of the given type
// ('claude' or 'bash') in the currently selected project.
function openSession(type) {
  const projectId = projectSelect.value;
  if (!projectId) {
    alert('Please select a running project first.');
    return;
  }
  // Remembered so onSessionOpened can label the session correctly.
  pendingSessionType = type;
  send({ type: 'open', project_id: projectId, session_type: type });
}
|
||||
|
||||
// Build an xterm.js terminal for a session the server just opened,
// wire its I/O through the WebSocket, and surface it as the active tab.
function onSessionOpened(sessionId, projectName) {
  // NOTE(review): assumes at most one 'open' request is in flight;
  // concurrent opens could attribute the wrong type to a session.
  const sessionType = pendingSessionType || 'claude';
  pendingSessionType = null;

  // Create terminal (palette mirrors the page's CSS variables)
  const term = new Terminal({
    theme: {
      background: '#1a1b26',
      foreground: '#c0caf5',
      cursor: '#c0caf5',
      selectionBackground: '#33467c',
      black: '#15161e',
      red: '#f7768e',
      green: '#9ece6a',
      yellow: '#e0af68',
      blue: '#7aa2f7',
      magenta: '#bb9af7',
      cyan: '#7dcfff',
      white: '#a9b1d6',
      brightBlack: '#414868',
      brightRed: '#f7768e',
      brightGreen: '#9ece6a',
      brightYellow: '#e0af68',
      brightBlue: '#7aa2f7',
      brightMagenta: '#bb9af7',
      brightCyan: '#7dcfff',
      brightWhite: '#c0caf5',
    },
    fontSize: 14,
    fontFamily: "'Cascadia Code', 'Fira Code', 'JetBrains Mono', 'Menlo', monospace",
    cursorBlink: true,
    allowProposedApi: true,
  });

  const fitAddon = new FitAddon.FitAddon();
  term.loadAddon(fitAddon);

  const webLinksAddon = new WebLinksAddon.WebLinksAddon();
  term.loadAddon(webLinksAddon);

  // Create container div for this session inside the terminal area
  const container = document.createElement('div');
  container.className = 'terminal-container';
  container.id = `term-${sessionId}`;
  terminalArea.appendChild(container);

  term.open(container);
  fitAddon.fit();

  // Send initial resize so the server PTY matches the rendered size
  send({
    type: 'resize',
    session_id: sessionId,
    cols: term.cols,
    rows: term.rows,
  });

  // Handle user input: UTF-8 encode, then base64 for the JSON frame
  term.onData(data => {
    const bytes = new TextEncoder().encode(data);
    const b64 = btoa(String.fromCharCode(...bytes));
    send({
      type: 'input',
      session_id: sessionId,
      data: b64,
    });
  });

  // Track scroll position for scroll-to-bottom button
  term.onScroll(() => updateScrollButton());

  // Store session
  sessions[sessionId] = { term, fitAddon, projectName, type: sessionType, container };

  // Add tab and switch to it
  addTab(sessionId, projectName, sessionType);
  switchToSession(sessionId);

  emptyState.style.display = 'none';
}
|
||||
|
||||
// Decode a base64 output chunk and write it into the session's
// terminal; unknown session IDs are ignored.
function onSessionOutput(sessionId, b64data) {
  const session = sessions[sessionId];
  if (!session) return;
  const decoded = atob(b64data);
  const bytes = new Uint8Array(decoded.length);
  for (let i = 0; i < decoded.length; i++) {
    bytes[i] = decoded.charCodeAt(i);
  }
  session.term.write(bytes);
  // New output may move the viewport; refresh the scroll FAB state.
  if (sessionId === activeSessionId) updateScrollButton();
}
|
||||
|
||||
// Mark a session's terminal as ended (dim gray note); the tab stays
// open until the user closes it.
function onSessionExit(sessionId) {
  const session = sessions[sessionId];
  if (session) {
    session.term.writeln('\r\n\x1b[90m[Session ended]\x1b[0m');
  }
}
|
||||
|
||||
// Tell the server to terminate the session, then tear down local state.
function closeSession(sessionId) {
  send({ type: 'close', session_id: sessionId });
  removeSession(sessionId);
}
|
||||
|
||||
// Dispose a session's terminal, DOM container, and tab, then activate
// the most recently opened remaining session (or show the empty state).
function removeSession(sessionId) {
  const session = sessions[sessionId];
  if (!session) return;

  session.term.dispose();
  session.container.remove();
  delete sessions[sessionId];

  // Remove tab
  const tab = document.getElementById(`tab-${sessionId}`);
  if (tab) tab.remove();

  // Switch to another session or show empty state
  const remaining = Object.keys(sessions);
  if (remaining.length > 0) {
    switchToSession(remaining[remaining.length - 1]);
  } else {
    activeSessionId = null;
    emptyState.style.display = '';
  }
}
|
||||
|
||||
// ── Tab bar ────────────────────────────────
|
||||
// Append a clickable tab for the session to the tab bar.
function addTab(sessionId, projectName, sessionType) {
  const tabEl = document.createElement('div');
  tabEl.className = 'tab';
  tabEl.id = `tab-${sessionId}`;

  const labelEl = document.createElement('span');
  labelEl.textContent = `${projectName} (${sessionType})`;

  const closeBtn = document.createElement('button');
  closeBtn.className = 'tab-close';
  closeBtn.textContent = '\u00d7';
  // Stop propagation so closing a tab does not also activate it.
  closeBtn.onclick = (e) => { e.stopPropagation(); closeSession(sessionId); };

  tabEl.appendChild(labelEl);
  tabEl.appendChild(closeBtn);
  tabEl.onclick = () => switchToSession(sessionId);
  tabbar.appendChild(tabEl);
}
|
||||
|
||||
// Make the given session active: highlight its tab, reveal its
// terminal container, refit the terminal, and focus it.
function switchToSession(sessionId) {
  activeSessionId = sessionId;

  // Update tab styles
  document.querySelectorAll('.tab').forEach(t => t.classList.remove('active'));
  const tab = document.getElementById(`tab-${sessionId}`);
  if (tab) tab.classList.add('active');

  // Show/hide terminal containers
  document.querySelectorAll('.terminal-container').forEach(c => c.classList.remove('active'));
  const container = document.getElementById(`term-${sessionId}`);
  if (container) {
    container.classList.add('active');
    const session = sessions[sessionId];
    if (session) {
      // Fit after making visible — a hidden element measures as
      // zero-sized, so defer to the next animation frame.
      requestAnimationFrame(() => {
        session.fitAddon.fit();
        session.term.focus();
        updateScrollButton();
      });
    }
  }
}
|
||||
|
||||
// ── Resize handling ────────────────────────
|
||||
// Refit the active terminal to its container and report the new PTY
// dimensions to the server. No-op when no session is active.
function handleResize() {
  const session = activeSessionId ? sessions[activeSessionId] : null;
  if (!session) return;
  session.fitAddon.fit();
  send({
    type: 'resize',
    session_id: activeSessionId,
    cols: session.term.cols,
    rows: session.term.rows,
  });
}
|
||||
|
||||
// Debounce window resizes: refit at most once per 100 ms burst.
let resizeTimeout;
window.addEventListener('resize', () => {
  clearTimeout(resizeTimeout);
  resizeTimeout = setTimeout(handleResize, 100);
});
|
||||
|
||||
// ── Send helper ─────────────────────────────
// Base64-encode UTF-8 text and forward it to the active session.
// No-op when no session is active.
function sendTerminalInput(str) {
  if (!activeSessionId) return;
  const bytes = new TextEncoder().encode(str);
  // Build the binary string in bounded chunks: spreading a large byte
  // array into String.fromCharCode(...) passes one argument per byte
  // and can throw RangeError (call-stack / argument limit) on big
  // pastes. 32 KiB chunks stay well under every engine's limit.
  const CHUNK = 0x8000;
  let bin = '';
  for (let i = 0; i < bytes.length; i += CHUNK) {
    bin += String.fromCharCode.apply(null, bytes.subarray(i, i + CHUNK));
  }
  send({
    type: 'input',
    session_id: activeSessionId,
    data: btoa(bin),
  });
}
|
||||
|
||||
// ── Input bar (mobile/tablet) ──────────────
// Send characters immediately, bypassing IME composition buffering.
// Clearing value on each input event cancels any active composition.
mobileInput.addEventListener('input', () => {
  const val = mobileInput.value;
  if (val) {
    sendTerminalInput(val);
    mobileInput.value = '';
  }
});

// Catch Enter in the input field itself
mobileInput.addEventListener('keydown', (e) => {
  if (e.key === 'Enter') {
    e.preventDefault();
    // Flush any text still in the field before sending the newline.
    const val = mobileInput.value;
    if (val) {
      sendTerminalInput(val);
      mobileInput.value = '';
    }
    sendTerminalInput('\r');
  } else if (e.key === 'Tab') {
    e.preventDefault();
    sendTerminalInput('\t');
  }
});

// Helper keys; refocus the field so the on-screen keyboard stays open.
btnEnter.onclick = () => { sendTerminalInput('\r'); mobileInput.focus(); };
btnTab.onclick = () => { sendTerminalInput('\t'); mobileInput.focus(); };
btnCtrlC.onclick = () => { sendTerminalInput('\x03'); mobileInput.focus(); };
|
||||
|
||||
// ── Scroll to bottom ──────────────────────
|
||||
// Show the floating "scroll to bottom" button only when the active
// terminal's viewport is scrolled up from the bottom of its buffer.
function updateScrollButton() {
  if (!activeSessionId || !sessions[activeSessionId]) {
    scrollBottomBtn.classList.remove('visible');
    return;
  }
  const term = sessions[activeSessionId].term;
  // viewportY >= baseY means the viewport is pinned to live output.
  const isAtBottom = term.buffer.active.viewportY >= term.buffer.active.baseY;
  scrollBottomBtn.classList.toggle('visible', !isAtBottom);
}
|
||||
|
||||
// Jump the active terminal back to the live output and hide the FAB.
scrollBottomBtn.onclick = () => {
  if (activeSessionId && sessions[activeSessionId]) {
    sessions[activeSessionId].term.scrollToBottom();
    scrollBottomBtn.classList.remove('visible');
  }
};
|
||||
|
||||
// ── Event listeners ────────────────────────
btnClaude.onclick = () => openSession('claude');
btnBash.onclick = () => openSession('bash');

// ── Init ───────────────────────────────────
// Establish the WebSocket connection on page load.
connect();
|
||||
})();
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
||||
324
app/src-tauri/src/web_terminal/ws_handler.rs
Normal file
324
app/src-tauri/src/web_terminal/ws_handler.rs
Normal file
@@ -0,0 +1,324 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use axum::extract::ws::{Message, WebSocket};
|
||||
use base64::engine::general_purpose::STANDARD as BASE64;
|
||||
use base64::Engine;
|
||||
use futures_util::{SinkExt, StreamExt};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use tokio::sync::mpsc;
|
||||
|
||||
use crate::models::{Backend, BedrockAuthMethod, Project, ProjectStatus};
|
||||
|
||||
use super::server::WebTerminalState;
|
||||
|
||||
// ── Wire protocol types ──────────────────────────────────────────────
|
||||
|
||||
/// Messages received from the browser. Wire format is JSON tagged by a
/// snake_case `type` field, e.g. `{"type":"open","project_id":...}` —
/// this must stay in sync with the JS client in terminal.html.
#[derive(Deserialize)]
#[serde(tag = "type", rename_all = "snake_case")]
enum ClientMessage {
    /// Request the current project list.
    ListProjects,
    /// Open a new exec session in a project's container.
    Open {
        project_id: String,
        // "claude" or "bash" in the shipped client; optional on the
        // wire — presumably defaults in the handler (not visible here).
        session_type: Option<String>,
    },
    /// Terminal keystrokes for an open session.
    Input {
        session_id: String,
        data: String, // base64
    },
    /// PTY resize notification.
    Resize {
        session_id: String,
        cols: u16,
        rows: u16,
    },
    /// Close an open session.
    Close {
        session_id: String,
    },
    /// Keepalive; answered with `Pong`.
    Ping,
}
|
||||
|
||||
/// Messages sent from the server back to the browser client.
///
/// Serialized as JSON, internally tagged with a snake_case `"type"` field.
#[derive(Serialize)]
#[serde(tag = "type", rename_all = "snake_case")]
enum ServerMessage {
    /// Response to `ListProjects`.
    Projects {
        projects: Vec<ProjectEntry>,
    },
    /// A session was created successfully.
    Opened {
        session_id: String,
        project_name: String,
    },
    /// Terminal output from a running session.
    Output {
        session_id: String,
        data: String, // base64
    },
    /// The session's process exited.
    Exit {
        session_id: String,
    },
    /// A request failed; human-readable description.
    Error {
        message: String,
    },
    /// Response to `Ping`.
    Pong,
}
|
||||
|
||||
/// Minimal project summary sent to the web client in `ServerMessage::Projects`.
#[derive(Serialize)]
struct ProjectEntry {
    id: String,
    name: String,
    /// Project status serialized to its plain string form; `"unknown"` when
    /// the status does not serialize to a JSON string (see `ListProjects`
    /// handling in `handle_connection`).
    status: String,
}
|
||||
|
||||
// ── Connection handler ───────────────────────────────────────────────
|
||||
|
||||
/// Drive one web-terminal WebSocket connection for its entire lifetime.
///
/// Splits the socket into reader/writer halves. A spawned writer task drains
/// an mpsc channel of `ServerMessage`s and sends them as JSON text frames,
/// so session-output callbacks can emit concurrently with the read loop.
/// The read loop parses `ClientMessage`s and dispatches them. When the
/// socket closes (or errors), every session opened by this connection is
/// torn down and the writer task is aborted.
pub async fn handle_connection(socket: WebSocket, state: Arc<WebTerminalState>) {
    let (mut ws_tx, mut ws_rx) = socket.split();

    // Channel for sending messages from session output tasks → WS writer
    let (out_tx, mut out_rx) = mpsc::unbounded_channel::<ServerMessage>();

    // Track session IDs owned by this connection for cleanup
    let owned_sessions: Arc<tokio::sync::Mutex<Vec<String>>> =
        Arc::new(tokio::sync::Mutex::new(Vec::new()));

    // Writer task: serializes ServerMessages and sends as WS text frames
    let writer_handle = tokio::spawn(async move {
        while let Some(msg) = out_rx.recv().await {
            if let Ok(json) = serde_json::to_string(&msg) {
                // A send error means the peer is gone; stop draining.
                if ws_tx.send(Message::Text(json.into())).await.is_err() {
                    break;
                }
            }
        }
    });

    // Reader loop: parse incoming messages and dispatch.
    // `Some(Ok(..))` means the loop also ends on a transport error.
    while let Some(Ok(msg)) = ws_rx.next().await {
        let text = match &msg {
            Message::Text(t) => t.to_string(),
            Message::Close(_) => break,
            // Binary/ping/pong frames are ignored.
            _ => continue,
        };

        let client_msg: ClientMessage = match serde_json::from_str(&text) {
            Ok(m) => m,
            Err(e) => {
                // Malformed frame: report it but keep the connection alive.
                let _ = out_tx.send(ServerMessage::Error {
                    message: format!("Invalid message: {}", e),
                });
                continue;
            }
        };

        match client_msg {
            ClientMessage::Ping => {
                let _ = out_tx.send(ServerMessage::Pong);
            }

            ClientMessage::ListProjects => {
                let projects = state.projects_store.list();
                let entries: Vec<ProjectEntry> = projects
                    .into_iter()
                    .map(|p| ProjectEntry {
                        id: p.id,
                        name: p.name,
                        // Status is an enum; round-trip through serde_json to
                        // get its wire string, falling back to "unknown".
                        status: serde_json::to_value(&p.status)
                            .ok()
                            .and_then(|v| v.as_str().map(|s| s.to_string()))
                            .unwrap_or_else(|| "unknown".to_string()),
                    })
                    .collect();
                let _ = out_tx.send(ServerMessage::Projects { projects: entries });
            }

            ClientMessage::Open {
                project_id,
                session_type,
            } => {
                let result = handle_open(
                    &state,
                    &project_id,
                    session_type.as_deref(),
                    &out_tx,
                    &owned_sessions,
                )
                .await;
                if let Err(e) = result {
                    let _ = out_tx.send(ServerMessage::Error { message: e });
                }
            }

            ClientMessage::Input { session_id, data } => {
                // Input arrives base64-encoded; decode before forwarding.
                match BASE64.decode(&data) {
                    Ok(bytes) => {
                        if let Err(e) = state.exec_manager.send_input(&session_id, bytes).await {
                            let _ = out_tx.send(ServerMessage::Error {
                                message: format!("Input error: {}", e),
                            });
                        }
                    }
                    Err(e) => {
                        let _ = out_tx.send(ServerMessage::Error {
                            message: format!("Base64 decode error: {}", e),
                        });
                    }
                }
            }

            ClientMessage::Resize {
                session_id,
                cols,
                rows,
            } => {
                if let Err(e) = state.exec_manager.resize(&session_id, cols, rows).await {
                    let _ = out_tx.send(ServerMessage::Error {
                        message: format!("Resize error: {}", e),
                    });
                }
            }

            ClientMessage::Close { session_id } => {
                state.exec_manager.close_session(&session_id).await;
                // Remove from owned list so disconnect cleanup skips it.
                owned_sessions
                    .lock()
                    .await
                    .retain(|id| id != &session_id);
            }
        }
    }

    // Connection closed — clean up all owned sessions
    log::info!("Web terminal WebSocket disconnected, cleaning up sessions");
    let sessions = owned_sessions.lock().await.clone();
    for session_id in sessions {
        state.exec_manager.close_session(&session_id).await;
    }

    writer_handle.abort();
}
|
||||
|
||||
/// Build the command for a terminal session, mirroring terminal_commands.rs logic.
|
||||
fn build_terminal_cmd(project: &Project, settings_store: &crate::storage::settings_store::SettingsStore) -> Vec<String> {
|
||||
let is_bedrock_profile = project.backend == Backend::Bedrock
|
||||
&& project
|
||||
.bedrock_config
|
||||
.as_ref()
|
||||
.map(|b| b.auth_method == BedrockAuthMethod::Profile)
|
||||
.unwrap_or(false);
|
||||
|
||||
if !is_bedrock_profile {
|
||||
let mut cmd = vec!["claude".to_string()];
|
||||
if project.full_permissions {
|
||||
cmd.push("--dangerously-skip-permissions".to_string());
|
||||
}
|
||||
return cmd;
|
||||
}
|
||||
|
||||
let profile = project
|
||||
.bedrock_config
|
||||
.as_ref()
|
||||
.and_then(|b| b.aws_profile.clone())
|
||||
.or_else(|| settings_store.get().global_aws.aws_profile.clone())
|
||||
.unwrap_or_else(|| "default".to_string());
|
||||
|
||||
let claude_cmd = if project.full_permissions {
|
||||
"exec claude --dangerously-skip-permissions"
|
||||
} else {
|
||||
"exec claude"
|
||||
};
|
||||
|
||||
let script = format!(
|
||||
r#"
|
||||
echo "Validating AWS session for profile '{profile}'..."
|
||||
if aws sts get-caller-identity --profile '{profile}' >/dev/null 2>&1; then
|
||||
echo "AWS session valid."
|
||||
else
|
||||
echo "AWS session expired or invalid."
|
||||
if aws configure get sso_start_url --profile '{profile}' >/dev/null 2>&1 || \
|
||||
aws configure get sso_session --profile '{profile}' >/dev/null 2>&1; then
|
||||
echo "Starting SSO login..."
|
||||
echo ""
|
||||
triple-c-sso-refresh
|
||||
if [ $? -ne 0 ]; then
|
||||
echo ""
|
||||
echo "SSO login failed or was cancelled. Starting Claude anyway..."
|
||||
echo "You may see authentication errors."
|
||||
echo ""
|
||||
fi
|
||||
else
|
||||
echo "Profile '{profile}' does not use SSO. Check your AWS credentials."
|
||||
echo "Starting Claude anyway..."
|
||||
echo ""
|
||||
fi
|
||||
fi
|
||||
{claude_cmd}
|
||||
"#,
|
||||
profile = profile,
|
||||
claude_cmd = claude_cmd
|
||||
);
|
||||
|
||||
vec!["bash".to_string(), "-c".to_string(), script]
|
||||
}
|
||||
|
||||
/// Open a new terminal session for a project.
|
||||
async fn handle_open(
|
||||
state: &WebTerminalState,
|
||||
project_id: &str,
|
||||
session_type: Option<&str>,
|
||||
out_tx: &mpsc::UnboundedSender<ServerMessage>,
|
||||
owned_sessions: &Arc<tokio::sync::Mutex<Vec<String>>>,
|
||||
) -> Result<(), String> {
|
||||
let project = state
|
||||
.projects_store
|
||||
.get(project_id)
|
||||
.ok_or_else(|| format!("Project {} not found", project_id))?;
|
||||
|
||||
if project.status != ProjectStatus::Running {
|
||||
return Err(format!("Project '{}' is not running", project.name));
|
||||
}
|
||||
|
||||
let container_id = project
|
||||
.container_id
|
||||
.as_ref()
|
||||
.ok_or_else(|| "Container not running".to_string())?;
|
||||
|
||||
let cmd = match session_type {
|
||||
Some("bash") => vec!["bash".to_string(), "-l".to_string()],
|
||||
_ => build_terminal_cmd(&project, &state.settings_store),
|
||||
};
|
||||
|
||||
let session_id = uuid::Uuid::new_v4().to_string();
|
||||
let project_name = project.name.clone();
|
||||
|
||||
// Set up output routing through the WS channel
|
||||
let out_tx_output = out_tx.clone();
|
||||
let session_id_output = session_id.clone();
|
||||
let on_output = move |data: Vec<u8>| {
|
||||
let encoded = BASE64.encode(&data);
|
||||
let _ = out_tx_output.send(ServerMessage::Output {
|
||||
session_id: session_id_output.clone(),
|
||||
data: encoded,
|
||||
});
|
||||
};
|
||||
|
||||
let out_tx_exit = out_tx.clone();
|
||||
let session_id_exit = session_id.clone();
|
||||
let on_exit = Box::new(move || {
|
||||
let _ = out_tx_exit.send(ServerMessage::Exit {
|
||||
session_id: session_id_exit,
|
||||
});
|
||||
});
|
||||
|
||||
state
|
||||
.exec_manager
|
||||
.create_session(container_id, &session_id, cmd, on_output, on_exit)
|
||||
.await?;
|
||||
|
||||
// Track this session for cleanup on disconnect
|
||||
owned_sessions.lock().await.push(session_id.clone());
|
||||
|
||||
let _ = out_tx.send(ServerMessage::Opened {
|
||||
session_id,
|
||||
project_name,
|
||||
});
|
||||
|
||||
Ok(())
|
||||
}
|
||||
@@ -1,619 +1,20 @@
|
||||
import { useEffect, useRef, useCallback } from "react";
|
||||
import { useEffect, useRef, useCallback, useState } from "react";
|
||||
import { getHelpContent } from "../../lib/tauri-commands";
|
||||
|
||||
interface Props {
|
||||
onClose: () => void;
|
||||
}
|
||||
|
||||
const HELP_MARKDOWN = `# How to Use Triple-C
|
||||
|
||||
Triple-C (Claude-Code-Container) is a desktop application that runs Claude Code inside isolated Docker containers. Each project gets its own sandboxed environment with bind-mounted directories, so Claude only has access to the files you explicitly provide.
|
||||
|
||||
---
|
||||
|
||||
## Prerequisites
|
||||
|
||||
### Docker
|
||||
|
||||
Triple-C requires a running Docker daemon. Install one of the following:
|
||||
|
||||
| Platform | Option | Link |
|
||||
|----------|--------|------|
|
||||
| **Windows** | Docker Desktop | https://docs.docker.com/desktop/install/windows-install/ |
|
||||
| **macOS** | Docker Desktop | https://docs.docker.com/desktop/install/mac-install/ |
|
||||
| **Linux** | Docker Engine | https://docs.docker.com/engine/install/ |
|
||||
| **Linux** | Docker Desktop (alternative) | https://docs.docker.com/desktop/install/linux/ |
|
||||
|
||||
After installation, verify Docker is running:
|
||||
|
||||
\`\`\`bash
|
||||
docker info
|
||||
\`\`\`
|
||||
|
||||
> **Windows note:** Docker Desktop must be running before launching Triple-C. The app communicates with Docker through the named pipe at \`//./pipe/docker_engine\`.
|
||||
|
||||
> **Linux note:** Your user must have permission to access the Docker socket (\`/var/run/docker.sock\`). Either add your user to the \`docker\` group (\`sudo usermod -aG docker $USER\`, then log out and back in) or run Docker in rootless mode.
|
||||
|
||||
### Claude Code Account
|
||||
|
||||
You need access to Claude Code through one of:
|
||||
|
||||
- **Anthropic account** — Sign up at https://claude.ai and use \`claude login\` (OAuth) inside the terminal
|
||||
- **AWS Bedrock** — An AWS account with Bedrock access and Claude models enabled
|
||||
- **Ollama** — A local or remote Ollama server running an Anthropic-compatible model (best-effort support)
|
||||
- **LiteLLM** — A LiteLLM proxy gateway providing access to 100+ model providers (best-effort support)
|
||||
|
||||
---
|
||||
|
||||
## First Launch
|
||||
|
||||
### 1. Get the Container Image
|
||||
|
||||
When you first open Triple-C, go to the **Settings** tab in the sidebar. Under **Docker**, you'll see:
|
||||
|
||||
- **Docker Status** — Should show "Connected" (green). If it shows "Not Available", make sure Docker is running.
|
||||
- **Image Status** — Will show "Not Found" on first launch.
|
||||
|
||||
Choose an **Image Source**:
|
||||
|
||||
| Source | Description | When to Use |
|
||||
|--------|-------------|-------------|
|
||||
| **Registry** | Pulls the pre-built image from \`repo.anhonesthost.net\` | Fastest setup — recommended for most users |
|
||||
| **Local Build** | Builds the image locally from the embedded Dockerfile | If you can't reach the registry, or want a custom build |
|
||||
| **Custom** | Use any Docker image you specify | Advanced — bring your own sandbox image |
|
||||
|
||||
Click **Pull Image** (for Registry/Custom) or **Build Image** (for Local Build). A progress log will stream below the button. When complete, the status changes to "Ready" (green).
|
||||
|
||||
### 2. Create Your First Project
|
||||
|
||||
Switch to the **Projects** tab in the sidebar and click the **+** button.
|
||||
|
||||
1. **Project Name** — Give it a meaningful name (e.g., "my-web-app").
|
||||
2. **Folders** — Click **Browse** to select a directory on your host machine. This directory will be mounted into the container at \`/workspace/<folder-name>\`. You can add multiple folders with the **+** button at the bottom of the folder list.
|
||||
3. Click **Add Project**.
|
||||
|
||||
### 3. Start the Container
|
||||
|
||||
Select your project in the sidebar and click **Start**. A progress modal appears showing real-time status as the container starts. The status dot changes from gray (stopped) to orange (starting) to green (running). The modal auto-closes on success.
|
||||
|
||||
### 4. Open a Terminal
|
||||
|
||||
Click the **Terminal** button to open an interactive terminal session. A new tab appears in the top bar and an xterm.js terminal loads in the main area.
|
||||
|
||||
Claude Code launches automatically with \`--dangerously-skip-permissions\` inside the sandboxed container.
|
||||
|
||||
### 5. Authenticate
|
||||
|
||||
**Anthropic (OAuth) — default:**
|
||||
|
||||
1. Type \`claude login\` or \`/login\` in the terminal.
|
||||
2. Claude prints an OAuth URL. Triple-C detects long URLs and shows a clickable toast at the top of the terminal — click **Open** to open it in your browser.
|
||||
3. Complete the login in your browser. The token is saved and persists across container stops and resets.
|
||||
|
||||
**AWS Bedrock:**
|
||||
|
||||
1. Stop the container first (settings can only be changed while stopped).
|
||||
2. In the project card, switch the backend to **Bedrock**.
|
||||
3. Expand the **Config** panel and fill in your AWS credentials (see AWS Bedrock Configuration below).
|
||||
4. Start the container again.
|
||||
|
||||
**Ollama:**
|
||||
|
||||
1. Stop the container first (settings can only be changed while stopped).
|
||||
2. In the project card, switch the backend to **Ollama**.
|
||||
3. Expand the **Config** panel and set the base URL of your Ollama server (defaults to \`http://host.docker.internal:11434\` for a local instance). Optionally set a model ID.
|
||||
4. Start the container again.
|
||||
|
||||
**LiteLLM:**
|
||||
|
||||
1. Stop the container first (settings can only be changed while stopped).
|
||||
2. In the project card, switch the backend to **LiteLLM**.
|
||||
3. Expand the **Config** panel and set the base URL of your LiteLLM proxy (defaults to \`http://host.docker.internal:4000\`). Optionally set an API key and model ID.
|
||||
4. Start the container again.
|
||||
|
||||
---
|
||||
|
||||
## The Interface
|
||||
|
||||
\`\`\`
|
||||
┌─────────────────────────────────────────────────────┐
|
||||
│ TopBar [ Terminal Tabs ] Docker ● Image ●│
|
||||
├────────────┬────────────────────────────────────────┤
|
||||
│ Sidebar │ │
|
||||
│ │ Terminal View │
|
||||
│ Projects │ (xterm.js) │
|
||||
│ MCP │ │
|
||||
│ Settings │ │
|
||||
├────────────┴────────────────────────────────────────┤
|
||||
│ StatusBar X projects · X running · X terminals │
|
||||
└─────────────────────────────────────────────────────┘
|
||||
\`\`\`
|
||||
|
||||
- **TopBar** — Terminal tabs for switching between sessions. Bash shell tabs show a "(bash)" suffix. Status dots on the right show Docker connection (green = connected) and image availability (green = ready).
|
||||
- **Sidebar** — Toggle between the **Projects** list, **MCP** server configuration, and **Settings** panel.
|
||||
- **Terminal View** — Interactive terminal powered by xterm.js with WebGL rendering. Includes a **Jump to Current** button that appears when you scroll up, so you can quickly return to the latest output.
|
||||
- **StatusBar** — Counts of total projects, running containers, and open terminal sessions.
|
||||
|
||||
---
|
||||
|
||||
## Project Management
|
||||
|
||||
### Project Status
|
||||
|
||||
Each project shows a colored status dot:
|
||||
|
||||
| Color | Status | Meaning |
|
||||
|-------|--------|---------|
|
||||
| Gray | Stopped | Container is not running |
|
||||
| Orange | Starting / Stopping | Container is transitioning |
|
||||
| Green | Running | Container is active, ready for terminals |
|
||||
| Red | Error | Something went wrong (check error message) |
|
||||
|
||||
### Project Actions
|
||||
|
||||
Select a project in the sidebar to see its action buttons:
|
||||
|
||||
| Button | When Available | What It Does |
|
||||
|--------|---------------|--------------|
|
||||
| **Start** | Stopped | Creates (if needed) and starts the container |
|
||||
| **Stop** | Running | Stops the container but preserves its state |
|
||||
| **Terminal** | Running | Opens a new Claude Code terminal session |
|
||||
| **Shell** | Running | Opens a bash login shell in the container (no Claude Code) |
|
||||
| **Files** | Running | Opens the file manager to browse, download, and upload files |
|
||||
| **Reset** | Stopped | Destroys and recreates the container from scratch |
|
||||
| **Config** | Always | Toggles the configuration panel |
|
||||
| **Remove** | Stopped | Deletes the project and its container (with confirmation) |
|
||||
|
||||
### Renaming a Project
|
||||
|
||||
Double-click the project name in the sidebar to rename it inline. Press **Enter** to confirm or **Escape** to cancel.
|
||||
|
||||
### Container Lifecycle
|
||||
|
||||
Containers use a **stop/start** model. When you stop a container, everything inside it is preserved — installed packages, modified files, downloaded tools. Starting it again resumes where you left off.
|
||||
|
||||
**Reset** removes the container and creates a fresh one. However, your Claude Code configuration (including OAuth tokens from \`claude login\`) is stored in a separate Docker volume and survives resets.
|
||||
|
||||
Only **Remove** deletes everything, including the config volume and any stored credentials.
|
||||
|
||||
### Container Progress Feedback
|
||||
|
||||
When starting, stopping, or resetting a container, a progress modal shows real-time status messages (e.g., "Setting up MCP network...", "Starting MCP containers...", "Creating container..."). If an error occurs, the modal displays the error with a **Close** button. A **Force Stop** option is available if the operation stalls. The modal auto-closes on success.
|
||||
|
||||
---
|
||||
|
||||
## Project Configuration
|
||||
|
||||
Click **Config** on a selected project to expand the configuration panel. Settings can only be changed when the container is **stopped** (an orange warning box appears if the container is running).
|
||||
|
||||
### Mounted Folders
|
||||
|
||||
Each project mounts one or more host directories into the container. The mount appears at \`/workspace/<mount-name>\` inside the container.
|
||||
|
||||
- Click **Browse** ("...") to change the host path
|
||||
- Edit the mount name to control where it appears inside \`/workspace/\`
|
||||
- Click **+** to add more folders, or **x** to remove one
|
||||
- Mount names must be unique and use only letters, numbers, dashes, underscores, and dots
|
||||
|
||||
### SSH Keys
|
||||
|
||||
Specify the path to your SSH key directory (typically \`~/.ssh\`). Keys are mounted read-only and copied into the container with correct permissions. This enables \`git clone\` via SSH inside the container.
|
||||
|
||||
### Git Configuration
|
||||
|
||||
- **Git Name / Email** — Sets \`git config user.name\` and \`user.email\` inside the container.
|
||||
- **Git HTTPS Token** — A personal access token (e.g., from GitHub) for HTTPS git operations. Stored securely in your OS keychain — never written to disk in plaintext.
|
||||
|
||||
### Allow Container Spawning
|
||||
|
||||
When enabled, the host Docker socket is mounted into the container so Claude Code can create sibling containers (e.g., for running databases, test environments). This is **off by default** for security.
|
||||
|
||||
> Toggling this requires stopping and restarting the container to take effect.
|
||||
|
||||
### Mission Control
|
||||
|
||||
Toggle **Mission Control** to integrate Flight Control — an AI-first development methodology — into the project. When enabled:
|
||||
|
||||
- The Flight Control repository is automatically cloned into the container
|
||||
- Flight Control skills are installed to Claude Code's skill directory (\`~/.claude/skills/\`)
|
||||
- Project instructions are appended with Flight Control workflow guidance
|
||||
- The repository is symlinked at \`/workspace/mission-control\`
|
||||
|
||||
Available skills include \`/mission\`, \`/flight\`, \`/leg\`, \`/agentic-workflow\`, \`/flight-debrief\`, \`/mission-debrief\`, \`/daily-briefing\`, and \`/init-project\`.
|
||||
|
||||
> This setting can only be changed when the container is stopped. Toggling it triggers a container recreation on the next start.
|
||||
|
||||
### Environment Variables
|
||||
|
||||
Click **Edit** to open the environment variables modal. Add key-value pairs that will be injected into the container. Per-project variables override global variables with the same key.
|
||||
|
||||
> Reserved prefixes (\`ANTHROPIC_\`, \`AWS_\`, \`GIT_\`, \`HOST_\`, \`CLAUDE_\`, \`TRIPLE_C_\`) are filtered out to prevent conflicts with internal variables.
|
||||
|
||||
### Port Mappings
|
||||
|
||||
Click **Edit** to map host ports to container ports. This is useful when Claude Code starts a web server or other service inside the container and you want to access it from your host browser.
|
||||
|
||||
Each mapping specifies:
|
||||
- **Host Port** — The port on your machine (1-65535)
|
||||
- **Container Port** — The port inside the container (1-65535)
|
||||
- **Protocol** — TCP (default) or UDP
|
||||
|
||||
### Claude Instructions
|
||||
|
||||
Click **Edit** to write per-project instructions for Claude Code. These are written to \`~/.claude/CLAUDE.md\` inside the container and provide project-specific context. If you also have global instructions (in Settings), the global instructions come first, followed by the per-project instructions.
|
||||
|
||||
---
|
||||
|
||||
## MCP Servers (Beta)
|
||||
|
||||
Triple-C supports Model Context Protocol (MCP) servers, which extend Claude Code with access to external tools and data sources. MCP servers are configured in a **global library** and **enabled per-project**.
|
||||
|
||||
### How It Works
|
||||
|
||||
There are two dimensions to MCP server configuration:
|
||||
|
||||
| | **Manual** (no Docker image) | **Docker** (Docker image specified) |
|
||||
|---|---|---|
|
||||
| **Stdio** | Command runs inside the project container | Command runs in a separate MCP container via \`docker exec\` |
|
||||
| **HTTP** | Connects to a URL you provide | Runs in a separate container, reached by hostname on a shared Docker network |
|
||||
|
||||
**Docker images are pulled automatically** if not already present when the project starts.
|
||||
|
||||
### Accessing MCP Configuration
|
||||
|
||||
Click the **MCP** tab in the sidebar to open the MCP server library. This is where you define all available MCP servers.
|
||||
|
||||
### Adding an MCP Server
|
||||
|
||||
1. Type a name in the input field and click **Add**.
|
||||
2. Expand the server card and configure it.
|
||||
|
||||
The key decision is whether to set a **Docker Image**:
|
||||
- **With Docker image** — The MCP server runs in its own isolated container. Best for servers that need specific dependencies or system-level packages.
|
||||
- **Without Docker image** (manual) — The command runs directly inside your project container. Best for lightweight npx-based servers that just need Node.js.
|
||||
|
||||
Then choose the **Transport Type**:
|
||||
- **Stdio** — The MCP server communicates over stdin/stdout. This is the most common type.
|
||||
- **HTTP** — The MCP server exposes an HTTP endpoint (streamable HTTP transport).
|
||||
|
||||
### Configuration Examples
|
||||
|
||||
#### Example 1: Filesystem Server (Stdio, Manual)
|
||||
|
||||
A simple npx-based server that runs inside the project container. No Docker image needed since Node.js is already installed.
|
||||
|
||||
| Field | Value |
|
||||
|-------|-------|
|
||||
| **Docker Image** | *(empty)* |
|
||||
| **Transport** | Stdio |
|
||||
| **Command** | \`npx\` |
|
||||
| **Arguments** | \`-y @modelcontextprotocol/server-filesystem /workspace\` |
|
||||
|
||||
#### Example 2: GitHub Server (Stdio, Manual)
|
||||
|
||||
Another npx-based server, with an environment variable for authentication.
|
||||
|
||||
| Field | Value |
|
||||
|-------|-------|
|
||||
| **Docker Image** | *(empty)* |
|
||||
| **Transport** | Stdio |
|
||||
| **Command** | \`npx\` |
|
||||
| **Arguments** | \`-y @modelcontextprotocol/server-github\` |
|
||||
| **Environment Variables** | \`GITHUB_PERSONAL_ACCESS_TOKEN\` = \`ghp_your_token\` |
|
||||
|
||||
#### Example 3: Custom MCP Server (HTTP, Docker)
|
||||
|
||||
An MCP server packaged as a Docker image that exposes an HTTP endpoint.
|
||||
|
||||
| Field | Value |
|
||||
|-------|-------|
|
||||
| **Docker Image** | \`myregistry/my-mcp-server:latest\` |
|
||||
| **Transport** | HTTP |
|
||||
| **Container Port** | \`8080\` |
|
||||
| **Environment Variables** | \`API_KEY\` = \`your_key\` |
|
||||
|
||||
#### Example 4: Database Server (Stdio, Docker)
|
||||
|
||||
An MCP server that needs its own runtime environment, communicating over stdio.
|
||||
|
||||
| Field | Value |
|
||||
|-------|-------|
|
||||
| **Docker Image** | \`mcp/postgres-server:latest\` |
|
||||
| **Transport** | Stdio |
|
||||
| **Command** | \`node\` |
|
||||
| **Arguments** | \`dist/index.js\` |
|
||||
| **Environment Variables** | \`DATABASE_URL\` = \`postgresql://user:pass@host:5432/db\` |
|
||||
|
||||
### Enabling MCP Servers Per-Project
|
||||
|
||||
In a project's configuration panel (click **Config**), the **MCP Servers** section shows checkboxes for all globally defined servers. Toggle each server on or off for that project. Changes take effect on the next container start.
|
||||
|
||||
### How Docker-Based MCP Works
|
||||
|
||||
When a project with Docker-based MCP servers starts:
|
||||
|
||||
1. Missing Docker images are **automatically pulled** (progress shown in the progress modal)
|
||||
2. A dedicated **bridge network** is created for the project (\`triple-c-net-{projectId}\`)
|
||||
3. Each enabled Docker MCP server gets its own container on that network
|
||||
4. The main project container is connected to the same network
|
||||
5. MCP server configuration is written to \`~/.claude.json\` inside the container
|
||||
|
||||
**Networking**: Docker-based MCP containers are reached by their container name as a hostname (e.g., \`triple-c-mcp-{serverId}\`), not by \`localhost\`. Docker DNS resolves these names automatically on the shared bridge network.
|
||||
|
||||
**Stdio + Docker**: The project container uses \`docker exec\` to communicate with the MCP container over stdin/stdout. This automatically enables Docker socket access on the project container.
|
||||
|
||||
**HTTP + Docker**: The project container connects to the MCP container's HTTP endpoint using the container hostname and port (e.g., \`http://triple-c-mcp-{serverId}:3000/mcp\`).
|
||||
|
||||
**Manual (no Docker image)**: Stdio commands run directly inside the project container. HTTP URLs connect to wherever you point them (could be an external service or something running on the host).
|
||||
|
||||
### Configuration Change Detection
|
||||
|
||||
MCP server configuration is tracked via SHA-256 fingerprints stored as Docker labels. If you add, remove, or modify MCP servers for a project, the container is automatically recreated on the next start to apply the new configuration. The container filesystem is snapshotted first, so installed packages are preserved.
|
||||
|
||||
---
|
||||
|
||||
## AWS Bedrock Configuration
|
||||
|
||||
To use Claude via AWS Bedrock instead of Anthropic's API, switch the backend to **Bedrock** on the project card.
|
||||
|
||||
### Authentication Methods
|
||||
|
||||
| Method | Fields | Use Case |
|
||||
|--------|--------|----------|
|
||||
| **Keys** | Access Key ID, Secret Access Key, Session Token (optional) | Direct credentials — simplest setup |
|
||||
| **Profile** | AWS Profile name | Uses \`~/.aws/config\` and \`~/.aws/credentials\` on the host |
|
||||
| **Token** | Bearer Token | Temporary bearer token authentication |
|
||||
|
||||
### Additional Bedrock Settings
|
||||
|
||||
- **AWS Region** — Required. The region where your Bedrock models are deployed (e.g., \`us-east-1\`).
|
||||
- **Model ID** — Optional. Override the default Claude model (e.g., \`anthropic.claude-sonnet-4-20250514-v1:0\`).
|
||||
|
||||
### Global AWS Defaults
|
||||
|
||||
In **Settings > AWS Configuration**, you can set defaults that apply to all Bedrock projects:
|
||||
|
||||
- **AWS Config Path** — Path to your \`~/.aws\` directory. Click **Detect** to auto-find it.
|
||||
- **Default Profile** — Select from profiles found in your AWS config.
|
||||
- **Default Region** — Fallback region for projects that don't specify one.
|
||||
|
||||
Per-project settings always override these global defaults.
|
||||
|
||||
---
|
||||
|
||||
## Ollama Configuration
|
||||
|
||||
To use Claude Code with a local or remote Ollama server, switch the backend to **Ollama** on the project card.
|
||||
|
||||
### Settings
|
||||
|
||||
- **Base URL** — The URL of your Ollama server. Defaults to \`http://host.docker.internal:11434\`, which reaches a locally running Ollama instance from inside the container. For a remote server, use its IP or hostname (e.g., \`http://192.168.1.100:11434\`).
|
||||
- **Model ID** — Optional. Override the model to use (e.g., \`qwen3.5:27b\`).
|
||||
|
||||
### How It Works
|
||||
|
||||
Triple-C sets \`ANTHROPIC_BASE_URL\` to point Claude Code at your Ollama server instead of Anthropic's API. The \`ANTHROPIC_AUTH_TOKEN\` is set to \`ollama\` (required by Claude Code but not used for actual authentication).
|
||||
|
||||
> **Note:** Ollama support is best-effort. Claude Code is designed for Anthropic models, so some features (tool use, extended thinking, prompt caching, etc.) may not work as expected with non-Anthropic models.
|
||||
|
||||
---
|
||||
|
||||
## LiteLLM Configuration
|
||||
|
||||
To use Claude Code through a LiteLLM proxy gateway, switch the backend to **LiteLLM** on the project card. LiteLLM supports 100+ model providers (OpenAI, Gemini, Anthropic, and more) through a single proxy.
|
||||
|
||||
### Settings
|
||||
|
||||
- **Base URL** — The URL of your LiteLLM proxy. Defaults to \`http://host.docker.internal:4000\` for a locally running proxy.
|
||||
- **API Key** — Optional. The API key for your LiteLLM proxy, if authentication is required. Stored securely in your OS keychain.
|
||||
- **Model ID** — Optional. Override the model to use.
|
||||
|
||||
### How It Works
|
||||
|
||||
Triple-C sets \`ANTHROPIC_BASE_URL\` to point Claude Code at your LiteLLM proxy. If an API key is provided, it is set as \`ANTHROPIC_AUTH_TOKEN\`.
|
||||
|
||||
> **Note:** LiteLLM support is best-effort. Claude Code is designed for Anthropic models, so some features (tool use, extended thinking, prompt caching, etc.) may not work as expected when routing to non-Anthropic models through the proxy.
|
||||
|
||||
---
|
||||
|
||||
## Settings
|
||||
|
||||
Access global settings via the **Settings** tab in the sidebar.
|
||||
|
||||
### Docker Settings
|
||||
|
||||
- **Docker Status** — Connection status to the Docker daemon.
|
||||
- **Image Source** — Where to get the sandbox container image (Registry, Local Build, or Custom).
|
||||
- **Pull / Build Image** — Download or build the image. Progress streams in real time.
|
||||
- **Refresh** — Re-check Docker and image status.
|
||||
|
||||
### Container Timezone
|
||||
|
||||
Set the timezone for all containers (IANA format, e.g., \`America/New_York\`, \`Europe/London\`, \`UTC\`). Auto-detected from your host on first launch. This affects scheduled task timing inside containers.
|
||||
|
||||
### Global Claude Instructions
|
||||
|
||||
Instructions applied to **all** projects. Written to \`~/.claude/CLAUDE.md\` in every container, before any per-project instructions.
|
||||
|
||||
### Global Environment Variables
|
||||
|
||||
Environment variables applied to **all** project containers. Per-project variables with the same key take precedence.
|
||||
|
||||
### Updates
|
||||
|
||||
- **Current Version** — The installed version of Triple-C.
|
||||
- **Auto-check** — Toggle automatic update checks (every 24 hours).
|
||||
- **Check now** — Manually check for updates.
|
||||
|
||||
When an update is available, a pulsing **Update** button appears in the top bar. Click it to see release notes and download links.
|
||||
|
||||
---
|
||||
|
||||
## Terminal Features
|
||||
|
||||
### Multiple Sessions
|
||||
|
||||
You can open multiple terminal sessions (even for the same project). Each session gets its own tab in the top bar. Click a tab to switch, or click the **x** on a tab to close it. Tabs show the project name, with a "(bash)" suffix for shell sessions.
|
||||
|
||||
### Bash Shell Sessions
|
||||
|
||||
In addition to Claude Code terminals, you can open a plain **bash login shell** in any running container by clicking the **Shell** button. This is useful for manual inspection, package installation, debugging, or running commands that don't need Claude Code.
|
||||
|
||||
### URL Detection
|
||||
|
||||
When Claude Code prints a long URL (e.g., during \`claude login\`), Triple-C detects it and shows a toast notification at the top of the terminal with an **Open** button. Clicking it opens the URL in your default browser. The toast auto-dismisses after 30 seconds.
|
||||
|
||||
Shorter URLs in terminal output are also clickable directly.
|
||||
|
||||
### Clipboard Support (OSC 52)
|
||||
|
||||
Programs inside the container can copy text to your host clipboard. When a container program uses \`xclip\`, \`xsel\`, or \`pbcopy\`, the text is transparently forwarded to your host clipboard via OSC 52 escape sequences. No additional configuration is required — this works out of the box.
|
||||
|
||||
### Image Paste
|
||||
|
||||
You can paste images from your clipboard into the terminal (Ctrl+V / Cmd+V). The image is uploaded to the container as \`/tmp/clipboard_<timestamp>.png\` and the file path is injected into the terminal input so Claude Code can reference it. A toast notification confirms the upload.
|
||||
|
||||
### Jump to Current
|
||||
|
||||
When you scroll up in the terminal to review previous output, a **Jump to Current** button appears in the bottom-right corner. Click it to scroll back to the latest output.
|
||||
|
||||
### File Manager
|
||||
|
||||
Click the **Files** button on a running project to open the file manager modal. You can:
|
||||
|
||||
- **Browse** the container filesystem starting from \`/workspace\`, with breadcrumb navigation
|
||||
- **Download** any file to your host machine via the download button on each file entry
|
||||
- **Upload** files from your host into the current container directory
|
||||
- **Refresh** the directory listing at any time
|
||||
|
||||
The file manager shows file names, sizes, and modification dates.
|
||||
|
||||
### Terminal Rendering
|
||||
|
||||
The terminal uses WebGL for hardware-accelerated rendering of the active tab. Inactive tabs fall back to canvas rendering to conserve GPU resources. The terminal automatically resizes when you resize the window.
|
||||
|
||||
---
|
||||
|
||||
## Scheduled Tasks (Inside the Container)
|
||||
|
||||
Once inside a running container terminal, you can set up recurring or one-time tasks using \`triple-c-scheduler\`. Tasks run as separate Claude Code sessions.
|
||||
|
||||
### Create a Recurring Task
|
||||
|
||||
\`\`\`bash
|
||||
triple-c-scheduler add --name "daily-review" --schedule "0 9 * * *" --prompt "Review open issues and summarize"
|
||||
\`\`\`
|
||||
|
||||
### Create a One-Time Task
|
||||
|
||||
\`\`\`bash
|
||||
triple-c-scheduler add --name "migrate-db" --at "2026-03-05 14:00" --prompt "Run database migrations"
|
||||
\`\`\`
|
||||
|
||||
One-time tasks automatically remove themselves after execution.
|
||||
|
||||
### Manage Tasks
|
||||
|
||||
\`\`\`bash
|
||||
triple-c-scheduler list # List all tasks
|
||||
triple-c-scheduler enable --id abc123 # Enable a task
|
||||
triple-c-scheduler disable --id abc123 # Disable a task
|
||||
triple-c-scheduler remove --id abc123 # Delete a task
|
||||
triple-c-scheduler run --id abc123 # Trigger a task immediately
|
||||
triple-c-scheduler logs --id abc123 # View logs for a task
|
||||
triple-c-scheduler logs --tail 20 # View last 20 log entries (all tasks)
|
||||
triple-c-scheduler notifications # View completion notifications
|
||||
triple-c-scheduler notifications --clear # Clear notifications
|
||||
\`\`\`
|
||||
|
||||
### Cron Schedule Format
|
||||
|
||||
Standard 5-field cron: \`minute hour day-of-month month day-of-week\`
|
||||
|
||||
| Example | Meaning |
|
||||
|---------|---------|
|
||||
| \`*/30 * * * *\` | Every 30 minutes |
|
||||
| \`0 9 * * 1-5\` | 9:00 AM on weekdays |
|
||||
| \`0 */2 * * *\` | Every 2 hours |
|
||||
| \`0 0 1 * *\` | Midnight on the 1st of each month |
|
||||
|
||||
### Working Directory
|
||||
|
||||
By default, tasks run in \`/workspace\`. Use \`--working-dir\` to specify a different directory:
|
||||
|
||||
\`\`\`bash
|
||||
triple-c-scheduler add --name "test" --schedule "0 */6 * * *" --prompt "Run tests" --working-dir /workspace/my-project
|
||||
\`\`\`
|
||||
|
||||
---
|
||||
|
||||
## What's Inside the Container
|
||||
|
||||
The sandbox container (Ubuntu 24.04) comes pre-installed with:
|
||||
|
||||
| Tool | Version | Purpose |
|
||||
|------|---------|---------|
|
||||
| Claude Code | Latest | AI coding assistant (the tool being sandboxed) |
|
||||
| Node.js | 22 LTS | JavaScript/TypeScript development |
|
||||
| pnpm | Latest | Fast Node.js package manager |
|
||||
| Python | 3.12 | Python development |
|
||||
| uv | Latest | Fast Python package manager |
|
||||
| ruff | Latest | Python linter/formatter |
|
||||
| Rust | Stable | Rust development (via rustup) |
|
||||
| Docker CLI | Latest | Container management (when spawning is enabled) |
|
||||
| git | Latest | Version control |
|
||||
| GitHub CLI (gh) | Latest | GitHub integration |
|
||||
| AWS CLI | v2 | AWS services and Bedrock |
|
||||
| ripgrep | Latest | Fast code search |
|
||||
| build-essential | — | C/C++ compiler toolchain |
|
||||
| openssh-client | — | SSH for git and remote access |
|
||||
|
||||
The container also includes **clipboard shims** (\`xclip\`, \`xsel\`, \`pbcopy\`) that forward copy operations to the host via OSC 52, and an **audio shim** (\`rec\`, \`arecord\`) for future voice mode support.
|
||||
|
||||
You can install additional tools at runtime with \`sudo apt install\`, \`pip install\`, \`npm install -g\`, etc. Installed packages persist across container stops (but not across resets).
|
||||
|
||||
---
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### Docker is "Not Available"
|
||||
|
||||
- **Is Docker running?** Start Docker Desktop or the Docker daemon (\`sudo systemctl start docker\`).
|
||||
- **Permissions?** On Linux, ensure your user is in the \`docker\` group or the socket is accessible.
|
||||
- **Custom socket path?** If your Docker socket is not at the default location, set it in Settings. The app expects \`/var/run/docker.sock\` on Linux/macOS or \`//./pipe/docker_engine\` on Windows.
|
||||
|
||||
### Image is "Not Found"
|
||||
|
||||
- Click **Pull Image** or **Build Image** in Settings > Docker.
|
||||
- If pulling fails, check your network connection and whether you can reach the registry.
|
||||
- Try switching to **Local Build** as an alternative.
|
||||
|
||||
### Container Won't Start
|
||||
|
||||
- Check that the Docker image is "Ready" in Settings.
|
||||
- Verify that the mounted folder paths exist on your host.
|
||||
- Look at the error message displayed in the progress modal.
|
||||
|
||||
### OAuth Login URL Not Opening
|
||||
|
||||
- Triple-C detects long URLs printed by \`claude login\` and shows a toast with an **Open** button.
|
||||
- If the toast doesn't appear, try scrolling up in the terminal — the URL may have already been printed.
|
||||
- You can also manually copy the URL from the terminal output and paste it into your browser.
|
||||
|
||||
### File Permission Issues
|
||||
|
||||
- Triple-C automatically remaps the container user's UID/GID to match your host user, so files created inside the container should have the correct ownership on your host.
|
||||
- If you see permission errors, try resetting the container (stop, then click **Reset**).
|
||||
|
||||
### Settings Won't Save
|
||||
|
||||
- Most project settings can only be changed when the container is **stopped**. Stop the container first, make your changes, then start it again.
|
||||
- Some changes (like toggling Docker access, Mission Control, or changing mounted folders) trigger an automatic container recreation on the next start.
|
||||
|
||||
### MCP Containers Not Starting
|
||||
|
||||
- Ensure the Docker image for the MCP server exists (pull it first if needed).
|
||||
- Check that Docker socket access is available (stdio + Docker MCP servers auto-enable this).
|
||||
- Try resetting the project container to force a clean recreation.`;
|
||||
/** Convert header text to a URL-friendly slug for anchor links. */
|
||||
function slugify(text: string): string {
|
||||
return text
|
||||
.toLowerCase()
|
||||
.replace(/<[^>]+>/g, "") // strip HTML tags (e.g. from inline code)
|
||||
.replace(/[^\w\s-]/g, "") // remove non-word chars except spaces/dashes
|
||||
.replace(/\s+/g, "-") // spaces to dashes
|
||||
.replace(/-+/g, "-") // collapse consecutive dashes
|
||||
.replace(/^-|-$/g, ""); // trim leading/trailing dashes
|
||||
}
|
||||
|
||||
/** Simple markdown-to-HTML converter for the help content. */
|
||||
function renderMarkdown(md: string): string {
|
||||
@@ -666,11 +67,11 @@ function renderMarkdown(md: string): string {
|
||||
// Horizontal rules
|
||||
html = html.replace(/\n---\n/g, '<hr class="help-hr"/>');
|
||||
|
||||
// Headers (process from h4 down to h1)
|
||||
html = html.replace(/^#### (.+)$/gm, '<h4 class="help-h4">$1</h4>');
|
||||
html = html.replace(/^### (.+)$/gm, '<h3 class="help-h3">$1</h3>');
|
||||
html = html.replace(/^## (.+)$/gm, '<h2 class="help-h2">$1</h2>');
|
||||
html = html.replace(/^# (.+)$/gm, '<h1 class="help-h1">$1</h1>');
|
||||
// Headers with id attributes for anchor navigation (process from h4 down to h1)
|
||||
html = html.replace(/^#### (.+)$/gm, (_m, title) => `<h4 class="help-h4" id="${slugify(title)}">${title}</h4>`);
|
||||
html = html.replace(/^### (.+)$/gm, (_m, title) => `<h3 class="help-h3" id="${slugify(title)}">${title}</h3>`);
|
||||
html = html.replace(/^## (.+)$/gm, (_m, title) => `<h2 class="help-h2" id="${slugify(title)}">${title}</h2>`);
|
||||
html = html.replace(/^# (.+)$/gm, (_m, title) => `<h1 class="help-h1" id="${slugify(title)}">${title}</h1>`);
|
||||
|
||||
// Bold (**...**)
|
||||
html = html.replace(/\*\*([^*]+)\*\*/g, "<strong>$1</strong>");
|
||||
@@ -678,6 +79,18 @@ function renderMarkdown(md: string): string {
|
||||
// Italic (*...*)
|
||||
html = html.replace(/\*([^*]+)\*/g, "<em>$1</em>");
|
||||
|
||||
// Markdown-style anchor links [text](#anchor)
|
||||
html = html.replace(
|
||||
/\[([^\]]+)\]\(#([^)]+)\)/g,
|
||||
'<a class="help-link" href="#$2">$1</a>',
|
||||
);
|
||||
|
||||
// Markdown-style external links [text](url)
|
||||
html = html.replace(
|
||||
/\[([^\]]+)\]\((https?:\/\/[^)]+)\)/g,
|
||||
'<a class="help-link" href="$2" target="_blank" rel="noopener noreferrer">$1</a>',
|
||||
);
|
||||
|
||||
// Unordered list items (- ...)
|
||||
// Group consecutive list items
|
||||
html = html.replace(/((?:^|\n)- .+(?:\n- .+)*)/g, (block) => {
|
||||
@@ -699,7 +112,7 @@ function renderMarkdown(md: string): string {
|
||||
return `<ol class="help-ol">${items}</ol>`;
|
||||
});
|
||||
|
||||
// Links - convert URLs to clickable links
|
||||
// Links - convert bare URLs to clickable links (skip already-wrapped URLs)
|
||||
html = html.replace(
|
||||
/(?<!="|'>)(https?:\/\/[^\s<)]+)/g,
|
||||
'<a class="help-link" href="$1" target="_blank" rel="noopener noreferrer">$1</a>',
|
||||
@@ -728,6 +141,9 @@ function renderMarkdown(md: string): string {
|
||||
|
||||
export default function HelpDialog({ onClose }: Props) {
|
||||
const overlayRef = useRef<HTMLDivElement>(null);
|
||||
const contentRef = useRef<HTMLDivElement>(null);
|
||||
const [markdown, setMarkdown] = useState<string | null>(null);
|
||||
const [error, setError] = useState<string | null>(null);
|
||||
|
||||
useEffect(() => {
|
||||
const handleKeyDown = (e: KeyboardEvent) => {
|
||||
@@ -737,6 +153,12 @@ export default function HelpDialog({ onClose }: Props) {
|
||||
return () => document.removeEventListener("keydown", handleKeyDown);
|
||||
}, [onClose]);
|
||||
|
||||
useEffect(() => {
|
||||
getHelpContent()
|
||||
.then(setMarkdown)
|
||||
.catch((e) => setError(String(e)));
|
||||
}, []);
|
||||
|
||||
const handleOverlayClick = useCallback(
|
||||
(e: React.MouseEvent<HTMLDivElement>) => {
|
||||
if (e.target === overlayRef.current) onClose();
|
||||
@@ -744,7 +166,17 @@ export default function HelpDialog({ onClose }: Props) {
|
||||
[onClose],
|
||||
);
|
||||
|
||||
const renderedHtml = renderMarkdown(HELP_MARKDOWN);
|
||||
// Handle anchor link clicks to scroll within the dialog
|
||||
const handleContentClick = useCallback((e: React.MouseEvent<HTMLDivElement>) => {
|
||||
const target = e.target as HTMLElement;
|
||||
const anchor = target.closest("a");
|
||||
if (!anchor) return;
|
||||
const href = anchor.getAttribute("href");
|
||||
if (!href || !href.startsWith("#")) return;
|
||||
e.preventDefault();
|
||||
const el = contentRef.current?.querySelector(href);
|
||||
if (el) el.scrollIntoView({ behavior: "smooth" });
|
||||
}, []);
|
||||
|
||||
return (
|
||||
<div
|
||||
@@ -766,9 +198,20 @@ export default function HelpDialog({ onClose }: Props) {
|
||||
|
||||
{/* Scrollable content */}
|
||||
<div
|
||||
ref={contentRef}
|
||||
onClick={handleContentClick}
|
||||
className="flex-1 overflow-y-auto px-6 py-4 help-content"
|
||||
dangerouslySetInnerHTML={{ __html: renderedHtml }}
|
||||
/>
|
||||
>
|
||||
{error && (
|
||||
<p className="text-[var(--error)] text-sm">Failed to load help content: {error}</p>
|
||||
)}
|
||||
{!markdown && !error && (
|
||||
<p className="text-[var(--text-secondary)] text-sm">Loading...</p>
|
||||
)}
|
||||
{markdown && (
|
||||
<div dangerouslySetInnerHTML={{ __html: renderMarkdown(markdown) }} />
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
|
||||
@@ -2,8 +2,8 @@ import { useShallow } from "zustand/react/shallow";
|
||||
import { useAppState } from "../../store/appState";
|
||||
|
||||
export default function StatusBar() {
|
||||
const { projects, sessions } = useAppState(
|
||||
useShallow(s => ({ projects: s.projects, sessions: s.sessions }))
|
||||
const { projects, sessions, terminalHasSelection } = useAppState(
|
||||
useShallow(s => ({ projects: s.projects, sessions: s.sessions, terminalHasSelection: s.terminalHasSelection }))
|
||||
);
|
||||
const running = projects.filter((p) => p.status === "running").length;
|
||||
|
||||
@@ -20,6 +20,12 @@ export default function StatusBar() {
|
||||
<span>
|
||||
{sessions.length} terminal{sessions.length !== 1 ? "s" : ""}
|
||||
</span>
|
||||
{terminalHasSelection && (
|
||||
<>
|
||||
<span className="mx-2">|</span>
|
||||
<span className="text-[var(--accent)]">Ctrl+Shift+C to copy</span>
|
||||
</>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import { useState, useEffect } from "react";
|
||||
import { open } from "@tauri-apps/plugin-dialog";
|
||||
import { listen } from "@tauri-apps/api/event";
|
||||
import type { Project, ProjectPath, Backend, BedrockConfig, BedrockAuthMethod, OllamaConfig, LiteLlmConfig } from "../../lib/types";
|
||||
import type { Project, ProjectPath, Backend, BedrockConfig, BedrockAuthMethod, OllamaConfig, OpenAiCompatibleConfig } from "../../lib/types";
|
||||
import { useProjects } from "../../hooks/useProjects";
|
||||
import { useMcpServers } from "../../hooks/useMcpServers";
|
||||
import { useTerminal } from "../../hooks/useTerminal";
|
||||
@@ -63,10 +63,10 @@ export default function ProjectCard({ project }: Props) {
|
||||
const [ollamaBaseUrl, setOllamaBaseUrl] = useState(project.ollama_config?.base_url ?? "http://host.docker.internal:11434");
|
||||
const [ollamaModelId, setOllamaModelId] = useState(project.ollama_config?.model_id ?? "");
|
||||
|
||||
// LiteLLM local state
|
||||
const [litellmBaseUrl, setLitellmBaseUrl] = useState(project.litellm_config?.base_url ?? "http://host.docker.internal:4000");
|
||||
const [litellmApiKey, setLitellmApiKey] = useState(project.litellm_config?.api_key ?? "");
|
||||
const [litellmModelId, setLitellmModelId] = useState(project.litellm_config?.model_id ?? "");
|
||||
// OpenAI Compatible local state
|
||||
const [openaiCompatibleBaseUrl, setOpenaiCompatibleBaseUrl] = useState(project.openai_compatible_config?.base_url ?? "http://host.docker.internal:4000");
|
||||
const [openaiCompatibleApiKey, setOpenaiCompatibleApiKey] = useState(project.openai_compatible_config?.api_key ?? "");
|
||||
const [openaiCompatibleModelId, setOpenaiCompatibleModelId] = useState(project.openai_compatible_config?.model_id ?? "");
|
||||
|
||||
// Sync local state when project prop changes (e.g., after save or external update)
|
||||
useEffect(() => {
|
||||
@@ -88,9 +88,9 @@ export default function ProjectCard({ project }: Props) {
|
||||
setBedrockModelId(project.bedrock_config?.model_id ?? "");
|
||||
setOllamaBaseUrl(project.ollama_config?.base_url ?? "http://host.docker.internal:11434");
|
||||
setOllamaModelId(project.ollama_config?.model_id ?? "");
|
||||
setLitellmBaseUrl(project.litellm_config?.base_url ?? "http://host.docker.internal:4000");
|
||||
setLitellmApiKey(project.litellm_config?.api_key ?? "");
|
||||
setLitellmModelId(project.litellm_config?.model_id ?? "");
|
||||
setOpenaiCompatibleBaseUrl(project.openai_compatible_config?.base_url ?? "http://host.docker.internal:4000");
|
||||
setOpenaiCompatibleApiKey(project.openai_compatible_config?.api_key ?? "");
|
||||
setOpenaiCompatibleModelId(project.openai_compatible_config?.model_id ?? "");
|
||||
}, [project]);
|
||||
|
||||
// Listen for container progress events
|
||||
@@ -197,7 +197,7 @@ export default function ProjectCard({ project }: Props) {
|
||||
model_id: null,
|
||||
};
|
||||
|
||||
const defaultLiteLlmConfig: LiteLlmConfig = {
|
||||
const defaultOpenAiCompatibleConfig: OpenAiCompatibleConfig = {
|
||||
base_url: "http://host.docker.internal:4000",
|
||||
api_key: null,
|
||||
model_id: null,
|
||||
@@ -212,8 +212,8 @@ export default function ProjectCard({ project }: Props) {
|
||||
if (mode === "ollama" && !project.ollama_config) {
|
||||
updates.ollama_config = defaultOllamaConfig;
|
||||
}
|
||||
if (mode === "lite_llm" && !project.litellm_config) {
|
||||
updates.litellm_config = defaultLiteLlmConfig;
|
||||
if (mode === "open_ai_compatible" && !project.openai_compatible_config) {
|
||||
updates.openai_compatible_config = defaultOpenAiCompatibleConfig;
|
||||
}
|
||||
await update({ ...project, ...updates });
|
||||
} catch (e) {
|
||||
@@ -355,30 +355,30 @@ export default function ProjectCard({ project }: Props) {
|
||||
}
|
||||
};
|
||||
|
||||
const handleLitellmBaseUrlBlur = async () => {
|
||||
const handleOpenaiCompatibleBaseUrlBlur = async () => {
|
||||
try {
|
||||
const current = project.litellm_config ?? defaultLiteLlmConfig;
|
||||
await update({ ...project, litellm_config: { ...current, base_url: litellmBaseUrl } });
|
||||
const current = project.openai_compatible_config ?? defaultOpenAiCompatibleConfig;
|
||||
await update({ ...project, openai_compatible_config: { ...current, base_url: openaiCompatibleBaseUrl } });
|
||||
} catch (err) {
|
||||
console.error("Failed to update LiteLLM base URL:", err);
|
||||
console.error("Failed to update OpenAI Compatible base URL:", err);
|
||||
}
|
||||
};
|
||||
|
||||
const handleLitellmApiKeyBlur = async () => {
|
||||
const handleOpenaiCompatibleApiKeyBlur = async () => {
|
||||
try {
|
||||
const current = project.litellm_config ?? defaultLiteLlmConfig;
|
||||
await update({ ...project, litellm_config: { ...current, api_key: litellmApiKey || null } });
|
||||
const current = project.openai_compatible_config ?? defaultOpenAiCompatibleConfig;
|
||||
await update({ ...project, openai_compatible_config: { ...current, api_key: openaiCompatibleApiKey || null } });
|
||||
} catch (err) {
|
||||
console.error("Failed to update LiteLLM API key:", err);
|
||||
console.error("Failed to update OpenAI Compatible API key:", err);
|
||||
}
|
||||
};
|
||||
|
||||
const handleLitellmModelIdBlur = async () => {
|
||||
const handleOpenaiCompatibleModelIdBlur = async () => {
|
||||
try {
|
||||
const current = project.litellm_config ?? defaultLiteLlmConfig;
|
||||
await update({ ...project, litellm_config: { ...current, model_id: litellmModelId || null } });
|
||||
const current = project.openai_compatible_config ?? defaultOpenAiCompatibleConfig;
|
||||
await update({ ...project, openai_compatible_config: { ...current, model_id: openaiCompatibleModelId || null } });
|
||||
} catch (err) {
|
||||
console.error("Failed to update LiteLLM model ID:", err);
|
||||
console.error("Failed to update OpenAI Compatible model ID:", err);
|
||||
}
|
||||
};
|
||||
|
||||
@@ -449,7 +449,7 @@ export default function ProjectCard({ project }: Props) {
|
||||
<div className="mt-2 ml-4 space-y-2 min-w-0 overflow-hidden">
|
||||
{/* Backend selector */}
|
||||
<div className="flex items-center gap-1 text-xs">
|
||||
<span className="text-[var(--text-secondary)] mr-1">Backend:<Tooltip text="Choose the AI model provider for this project. Anthropic: Connect directly to Claude via OAuth login (run 'claude login' in terminal). Bedrock: Route through AWS Bedrock using your AWS credentials. Ollama: Use locally-hosted open-source models (Llama, Mistral, etc.) via an Ollama server. LiteLLM: Connect through a LiteLLM proxy gateway to access 100+ model providers (OpenAI, Azure, Gemini, etc.)." /></span>
|
||||
<span className="text-[var(--text-secondary)] mr-1">Backend:<Tooltip text="Choose the AI model provider for this project. Anthropic: Connect directly to Claude via OAuth login (run 'claude login' in terminal). Bedrock: Route through AWS Bedrock using your AWS credentials. Ollama: Use locally-hosted open-source models (Llama, Mistral, etc.) via an Ollama server. OpenAI Compatible: Connect through any OpenAI API-compatible endpoint (LiteLLM, OpenRouter, vLLM, etc.) to access 100+ model providers." /></span>
|
||||
<select
|
||||
value={project.backend}
|
||||
onChange={(e) => { e.stopPropagation(); handleBackendChange(e.target.value as Backend); }}
|
||||
@@ -460,7 +460,7 @@ export default function ProjectCard({ project }: Props) {
|
||||
<option value="anthropic">Anthropic</option>
|
||||
<option value="bedrock">Bedrock</option>
|
||||
<option value="ollama">Ollama</option>
|
||||
<option value="lite_llm">LiteLLM</option>
|
||||
<option value="open_ai_compatible">OpenAI Compatible</option>
|
||||
</select>
|
||||
</div>
|
||||
|
||||
@@ -712,6 +712,32 @@ export default function ProjectCard({ project }: Props) {
|
||||
</button>
|
||||
</div>
|
||||
|
||||
{/* Full Permissions toggle */}
|
||||
<div className="flex items-center gap-2">
|
||||
<label className="text-xs text-[var(--text-secondary)]">
|
||||
Full Permissions
|
||||
<span className="text-[var(--error)] font-semibold ml-1">(CAUTION)</span>
|
||||
<Tooltip text="When enabled, Claude runs with --dangerously-skip-permissions and auto-approves all tool calls without prompting. Only enable this if you trust the sandboxed environment to contain all actions. When disabled, Claude will ask for your approval before running commands, editing files, etc." />
|
||||
</label>
|
||||
<button
|
||||
onClick={async () => {
|
||||
try {
|
||||
await update({ ...project, full_permissions: !project.full_permissions });
|
||||
} catch (err) {
|
||||
console.error("Failed to update full permissions setting:", err);
|
||||
}
|
||||
}}
|
||||
disabled={!isStopped}
|
||||
className={`px-2 py-0.5 text-xs rounded transition-colors disabled:opacity-50 ${
|
||||
project.full_permissions
|
||||
? "bg-[var(--error)] text-white"
|
||||
: "bg-[var(--bg-primary)] border border-[var(--border-color)] text-[var(--text-secondary)]"
|
||||
}`}
|
||||
>
|
||||
{project.full_permissions ? "ON" : "OFF"}
|
||||
</button>
|
||||
</div>
|
||||
|
||||
{/* Environment Variables */}
|
||||
<div className="flex items-center justify-between">
|
||||
<label className="text-xs text-[var(--text-secondary)]">
|
||||
@@ -942,7 +968,7 @@ export default function ProjectCard({ project }: Props) {
|
||||
</div>
|
||||
|
||||
<div>
|
||||
<label className="block text-xs text-[var(--text-secondary)] mb-0.5">Model (optional)<Tooltip text="Ollama model name to use (e.g. qwen3.5:27b). Leave blank for the server default." /></label>
|
||||
<label className="block text-xs text-[var(--text-secondary)] mb-0.5">Model (required)<Tooltip text="Ollama model name to use (e.g. qwen3.5:27b). The model must be pulled in Ollama before starting the container." /></label>
|
||||
<input
|
||||
value={ollamaModelId}
|
||||
onChange={(e) => setOllamaModelId(e.target.value)}
|
||||
@@ -956,38 +982,38 @@ export default function ProjectCard({ project }: Props) {
|
||||
);
|
||||
})()}
|
||||
|
||||
{/* LiteLLM config */}
|
||||
{project.backend === "lite_llm" && (() => {
|
||||
{/* OpenAI Compatible config */}
|
||||
{project.backend === "open_ai_compatible" && (() => {
|
||||
const inputCls = "w-full px-2 py-1 bg-[var(--bg-primary)] border border-[var(--border-color)] rounded text-xs text-[var(--text-primary)] focus:outline-none focus:border-[var(--accent)] disabled:opacity-50";
|
||||
return (
|
||||
<div className="space-y-2 pt-1 border-t border-[var(--border-color)]">
|
||||
<label className="block text-xs font-medium text-[var(--text-primary)]">LiteLLM Gateway</label>
|
||||
<label className="block text-xs font-medium text-[var(--text-primary)]">OpenAI Compatible Endpoint</label>
|
||||
<p className="text-xs text-[var(--text-secondary)]">
|
||||
Connect through a LiteLLM proxy to use 100+ model providers.
|
||||
Connect through any OpenAI API-compatible endpoint (LiteLLM, OpenRouter, vLLM, etc.).
|
||||
</p>
|
||||
|
||||
<div>
|
||||
<label className="block text-xs text-[var(--text-secondary)] mb-0.5">Base URL<Tooltip text="URL of your LiteLLM proxy server. Use host.docker.internal for a locally running proxy." /></label>
|
||||
<label className="block text-xs text-[var(--text-secondary)] mb-0.5">Base URL<Tooltip text="URL of your OpenAI API-compatible server. Use host.docker.internal for a locally running service." /></label>
|
||||
<input
|
||||
value={litellmBaseUrl}
|
||||
onChange={(e) => setLitellmBaseUrl(e.target.value)}
|
||||
onBlur={handleLitellmBaseUrlBlur}
|
||||
value={openaiCompatibleBaseUrl}
|
||||
onChange={(e) => setOpenaiCompatibleBaseUrl(e.target.value)}
|
||||
onBlur={handleOpenaiCompatibleBaseUrlBlur}
|
||||
placeholder="http://host.docker.internal:4000"
|
||||
disabled={!isStopped}
|
||||
className={inputCls}
|
||||
/>
|
||||
<p className="text-xs text-[var(--text-secondary)] mt-0.5 opacity-70">
|
||||
Use host.docker.internal for local, or a URL for remote/containerized LiteLLM.
|
||||
Use host.docker.internal for local, or a URL for a remote OpenAI-compatible service.
|
||||
</p>
|
||||
</div>
|
||||
|
||||
<div>
|
||||
<label className="block text-xs text-[var(--text-secondary)] mb-0.5">API Key<Tooltip text="Authentication key for your LiteLLM proxy, if required." /></label>
|
||||
<label className="block text-xs text-[var(--text-secondary)] mb-0.5">API Key<Tooltip text="Authentication key for your OpenAI-compatible endpoint, if required." /></label>
|
||||
<input
|
||||
type="password"
|
||||
value={litellmApiKey}
|
||||
onChange={(e) => setLitellmApiKey(e.target.value)}
|
||||
onBlur={handleLitellmApiKeyBlur}
|
||||
value={openaiCompatibleApiKey}
|
||||
onChange={(e) => setOpenaiCompatibleApiKey(e.target.value)}
|
||||
onBlur={handleOpenaiCompatibleApiKeyBlur}
|
||||
placeholder="sk-..."
|
||||
disabled={!isStopped}
|
||||
className={inputCls}
|
||||
@@ -995,11 +1021,11 @@ export default function ProjectCard({ project }: Props) {
|
||||
</div>
|
||||
|
||||
<div>
|
||||
<label className="block text-xs text-[var(--text-secondary)] mb-0.5">Model (optional)<Tooltip text="Model identifier as configured in your LiteLLM proxy (e.g. gpt-4o, gemini-pro)." /></label>
|
||||
<label className="block text-xs text-[var(--text-secondary)] mb-0.5">Model (optional)<Tooltip text="Model identifier as configured in your provider (e.g. gpt-4o, gemini-pro)." /></label>
|
||||
<input
|
||||
value={litellmModelId}
|
||||
onChange={(e) => setLitellmModelId(e.target.value)}
|
||||
onBlur={handleLitellmModelIdBlur}
|
||||
value={openaiCompatibleModelId}
|
||||
onChange={(e) => setOpenaiCompatibleModelId(e.target.value)}
|
||||
onBlur={handleOpenaiCompatibleModelIdBlur}
|
||||
placeholder="gpt-4o / gemini-pro / etc."
|
||||
disabled={!isStopped}
|
||||
className={inputCls}
|
||||
|
||||
@@ -4,7 +4,7 @@ import { useSettings } from "../../hooks/useSettings";
|
||||
import type { ImageSource } from "../../lib/types";
|
||||
import Tooltip from "../ui/Tooltip";
|
||||
|
||||
const REGISTRY_IMAGE = "repo.anhonesthost.net/cybercovellc/triple-c/triple-c-sandbox:latest";
|
||||
const REGISTRY_IMAGE = "ghcr.io/shadowdao/triple-c-sandbox:latest";
|
||||
|
||||
const IMAGE_SOURCE_OPTIONS: { value: ImageSource; label: string; description: string }[] = [
|
||||
{ value: "registry", label: "Registry", description: "Pull from container registry" },
|
||||
|
||||
@@ -8,6 +8,7 @@ import EnvVarsModal from "../projects/EnvVarsModal";
|
||||
import { detectHostTimezone } from "../../lib/tauri-commands";
|
||||
import type { EnvVar } from "../../lib/types";
|
||||
import Tooltip from "../ui/Tooltip";
|
||||
import WebTerminalSettings from "./WebTerminalSettings";
|
||||
|
||||
export default function SettingsPanel() {
|
||||
const { appSettings, saveSettings } = useSettings();
|
||||
@@ -116,6 +117,9 @@ export default function SettingsPanel() {
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Web Terminal */}
|
||||
<WebTerminalSettings />
|
||||
|
||||
{/* Updates section */}
|
||||
<div>
|
||||
<label className="block text-sm font-medium mb-2">Updates<Tooltip text="Check for new versions of the Triple-C app and container image." /></label>
|
||||
|
||||
128
app/src/components/settings/WebTerminalSettings.tsx
Normal file
128
app/src/components/settings/WebTerminalSettings.tsx
Normal file
@@ -0,0 +1,128 @@
|
||||
import { useState, useEffect } from "react";
|
||||
import { startWebTerminal, stopWebTerminal, getWebTerminalStatus, regenerateWebTerminalToken } from "../../lib/tauri-commands";
|
||||
import type { WebTerminalInfo } from "../../lib/types";
|
||||
import Tooltip from "../ui/Tooltip";
|
||||
|
||||
export default function WebTerminalSettings() {
|
||||
const [info, setInfo] = useState<WebTerminalInfo | null>(null);
|
||||
const [loading, setLoading] = useState(false);
|
||||
const [copied, setCopied] = useState(false);
|
||||
|
||||
useEffect(() => {
|
||||
getWebTerminalStatus().then(setInfo).catch(console.error);
|
||||
}, []);
|
||||
|
||||
const handleToggle = async () => {
|
||||
setLoading(true);
|
||||
try {
|
||||
if (info?.running) {
|
||||
await stopWebTerminal();
|
||||
const updated = await getWebTerminalStatus();
|
||||
setInfo(updated);
|
||||
} else {
|
||||
const updated = await startWebTerminal();
|
||||
setInfo(updated);
|
||||
}
|
||||
} catch (e) {
|
||||
console.error("Web terminal toggle failed:", e);
|
||||
} finally {
|
||||
setLoading(false);
|
||||
}
|
||||
};
|
||||
|
||||
const handleRegenerate = async () => {
|
||||
try {
|
||||
const updated = await regenerateWebTerminalToken();
|
||||
setInfo(updated);
|
||||
} catch (e) {
|
||||
console.error("Token regeneration failed:", e);
|
||||
}
|
||||
};
|
||||
|
||||
const handleCopyUrl = async () => {
|
||||
if (info?.url) {
|
||||
await navigator.clipboard.writeText(info.url);
|
||||
setCopied(true);
|
||||
setTimeout(() => setCopied(false), 2000);
|
||||
}
|
||||
};
|
||||
|
||||
const handleCopyToken = async () => {
|
||||
if (info?.access_token) {
|
||||
await navigator.clipboard.writeText(info.access_token);
|
||||
setCopied(true);
|
||||
setTimeout(() => setCopied(false), 2000);
|
||||
}
|
||||
};
|
||||
|
||||
return (
|
||||
<div>
|
||||
<label className="block text-sm font-medium mb-1">
|
||||
Web Terminal
|
||||
<Tooltip text="Access your terminals from a tablet or phone on the local network via a web browser." />
|
||||
</label>
|
||||
<p className="text-xs text-[var(--text-secondary)] mb-2">
|
||||
Serves a browser-based terminal UI on your local network for remote access to running projects.
|
||||
</p>
|
||||
|
||||
<div className="space-y-2">
|
||||
{/* Toggle */}
|
||||
<div className="flex items-center gap-2">
|
||||
<button
|
||||
onClick={handleToggle}
|
||||
disabled={loading}
|
||||
className={`px-2 py-0.5 text-xs rounded transition-colors ${
|
||||
info?.running
|
||||
? "bg-[var(--success)] text-white"
|
||||
: "bg-[var(--bg-primary)] border border-[var(--border-color)] text-[var(--text-secondary)]"
|
||||
}`}
|
||||
>
|
||||
{loading ? "..." : info?.running ? "ON" : "OFF"}
|
||||
</button>
|
||||
<span className="text-xs text-[var(--text-secondary)]">
|
||||
{info?.running
|
||||
? `Running on port ${info.port}`
|
||||
: "Stopped"}
|
||||
</span>
|
||||
</div>
|
||||
|
||||
{/* URL + Copy */}
|
||||
{info?.running && info.url && (
|
||||
<div className="flex items-center gap-2">
|
||||
<code className="text-xs text-[var(--accent)] bg-[var(--bg-primary)] px-2 py-1 rounded border border-[var(--border-color)] truncate flex-1">
|
||||
{info.url}
|
||||
</code>
|
||||
<button
|
||||
onClick={handleCopyUrl}
|
||||
className="text-xs px-2 py-0.5 text-[var(--accent)] hover:text-[var(--accent-hover)] hover:bg-[var(--bg-primary)] rounded transition-colors"
|
||||
>
|
||||
{copied ? "Copied!" : "Copy URL"}
|
||||
</button>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Token */}
|
||||
{info && (
|
||||
<div className="flex items-center gap-2">
|
||||
<span className="text-xs text-[var(--text-secondary)]">Token:</span>
|
||||
<code className="text-xs text-[var(--text-primary)] bg-[var(--bg-primary)] px-2 py-0.5 rounded border border-[var(--border-color)] truncate max-w-[160px]">
|
||||
{info.access_token ? `${info.access_token.slice(0, 12)}...` : "None"}
|
||||
</code>
|
||||
<button
|
||||
onClick={handleCopyToken}
|
||||
className="text-xs px-2 py-0.5 text-[var(--accent)] hover:text-[var(--accent-hover)] hover:bg-[var(--bg-primary)] rounded transition-colors"
|
||||
>
|
||||
Copy
|
||||
</button>
|
||||
<button
|
||||
onClick={handleRegenerate}
|
||||
className="text-xs px-2 py-0.5 text-[var(--warning,#f59e0b)] hover:bg-[var(--bg-primary)] rounded transition-colors"
|
||||
>
|
||||
Regenerate
|
||||
</button>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
@@ -24,6 +24,7 @@ export default function TerminalView({ sessionId, active }: Props) {
|
||||
const webglRef = useRef<WebglAddon | null>(null);
|
||||
const detectorRef = useRef<UrlDetector | null>(null);
|
||||
const { sendInput, pasteImage, resize, onOutput, onExit } = useTerminal();
|
||||
const setTerminalHasSelection = useAppState(s => s.setTerminalHasSelection);
|
||||
|
||||
const ssoBufferRef = useRef("");
|
||||
const ssoTriggeredRef = useRef(false);
|
||||
@@ -34,6 +35,12 @@ export default function TerminalView({ sessionId, active }: Props) {
|
||||
const [detectedUrl, setDetectedUrl] = useState<string | null>(null);
|
||||
const [imagePasteMsg, setImagePasteMsg] = useState<string | null>(null);
|
||||
const [isAtBottom, setIsAtBottom] = useState(true);
|
||||
const [isAutoFollow, setIsAutoFollow] = useState(true);
|
||||
const isAtBottomRef = useRef(true);
|
||||
// Tracks user intent to follow output — only set to false by explicit user
|
||||
// actions (mouse wheel up), not by xterm scroll events during writes.
|
||||
const autoFollowRef = useRef(true);
|
||||
const lastUserScrollTimeRef = useRef(0);
|
||||
|
||||
useEffect(() => {
|
||||
if (!containerRef.current) return;
|
||||
@@ -80,6 +87,22 @@ export default function TerminalView({ sessionId, active }: Props) {
|
||||
|
||||
term.open(containerRef.current);
|
||||
|
||||
// Ctrl+Shift+C copies selected terminal text to clipboard.
|
||||
// This prevents the keystroke from reaching the container (where
|
||||
// Ctrl+C would send SIGINT and cancel running work).
|
||||
term.attachCustomKeyEventHandler((event) => {
|
||||
if (event.type === "keydown" && event.ctrlKey && event.shiftKey && event.key === "C") {
|
||||
const sel = term.getSelection();
|
||||
if (sel) {
|
||||
navigator.clipboard.writeText(sel).catch((e) =>
|
||||
console.error("Ctrl+Shift+C clipboard write failed:", e),
|
||||
);
|
||||
}
|
||||
return false; // prevent xterm from processing this key
|
||||
}
|
||||
return true;
|
||||
});
|
||||
|
||||
// WebGL addon is loaded/disposed dynamically in the active effect
|
||||
// to avoid exhausting the browser's limited WebGL context pool.
|
||||
|
||||
@@ -114,10 +137,45 @@ export default function TerminalView({ sessionId, active }: Props) {
|
||||
sendInput(sessionId, data);
|
||||
});
|
||||
|
||||
// Track scroll position to show "Jump to Current" button
|
||||
// Detect user-initiated scroll-up (mouse wheel) to pause auto-follow.
|
||||
// Captured during capture phase so it fires before xterm's own handler.
|
||||
const handleWheel = (e: WheelEvent) => {
|
||||
lastUserScrollTimeRef.current = Date.now();
|
||||
if (e.deltaY < 0) {
|
||||
autoFollowRef.current = false;
|
||||
setIsAutoFollow(false);
|
||||
isAtBottomRef.current = false;
|
||||
setIsAtBottom(false);
|
||||
}
|
||||
};
|
||||
containerRef.current.addEventListener("wheel", handleWheel, { capture: true, passive: true });
|
||||
|
||||
// Track scroll position to show "Jump to Current" button.
|
||||
// Debounce state updates via rAF to avoid excessive re-renders during rapid output.
|
||||
let scrollStateRafId: number | null = null;
|
||||
const scrollDisposable = term.onScroll(() => {
|
||||
const buf = term.buffer.active;
|
||||
setIsAtBottom(buf.viewportY >= buf.baseY);
|
||||
const atBottom = buf.viewportY >= buf.baseY;
|
||||
isAtBottomRef.current = atBottom;
|
||||
|
||||
// Re-enable auto-follow only when USER scrolls to bottom (not write-triggered)
|
||||
const isUserScroll = (Date.now() - lastUserScrollTimeRef.current) < 300;
|
||||
if (atBottom && isUserScroll && !autoFollowRef.current) {
|
||||
autoFollowRef.current = true;
|
||||
setIsAutoFollow(true);
|
||||
}
|
||||
|
||||
if (scrollStateRafId === null) {
|
||||
scrollStateRafId = requestAnimationFrame(() => {
|
||||
scrollStateRafId = null;
|
||||
setIsAtBottom(isAtBottomRef.current);
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
// Track text selection to show copy hint in status bar
|
||||
const selectionDisposable = term.onSelectionChange(() => {
|
||||
setTerminalHasSelection(term.hasSelection());
|
||||
});
|
||||
|
||||
// Handle image paste: intercept paste events with image data,
|
||||
@@ -165,7 +223,15 @@ export default function TerminalView({ sessionId, active }: Props) {
|
||||
|
||||
const outputPromise = onOutput(sessionId, (data) => {
|
||||
if (aborted) return;
|
||||
term.write(data);
|
||||
term.write(data, () => {
|
||||
if (autoFollowRef.current) {
|
||||
term.scrollToBottom();
|
||||
if (!isAtBottomRef.current) {
|
||||
isAtBottomRef.current = true;
|
||||
setIsAtBottom(true);
|
||||
}
|
||||
}
|
||||
});
|
||||
detector.feed(data);
|
||||
|
||||
// Scan for SSO refresh marker in terminal output
|
||||
@@ -209,6 +275,9 @@ export default function TerminalView({ sessionId, active }: Props) {
|
||||
if (!containerRef.current || containerRef.current.offsetWidth === 0) return;
|
||||
fitAddon.fit();
|
||||
resize(sessionId, term.cols, term.rows);
|
||||
if (autoFollowRef.current) {
|
||||
term.scrollToBottom();
|
||||
}
|
||||
});
|
||||
});
|
||||
resizeObserver.observe(containerRef.current);
|
||||
@@ -222,9 +291,13 @@ export default function TerminalView({ sessionId, active }: Props) {
|
||||
osc52Disposable.dispose();
|
||||
inputDisposable.dispose();
|
||||
scrollDisposable.dispose();
|
||||
selectionDisposable.dispose();
|
||||
setTerminalHasSelection(false);
|
||||
containerRef.current?.removeEventListener("wheel", handleWheel, { capture: true });
|
||||
containerRef.current?.removeEventListener("paste", handlePaste, { capture: true });
|
||||
outputPromise.then((fn) => fn?.());
|
||||
exitPromise.then((fn) => fn?.());
|
||||
if (scrollStateRafId !== null) cancelAnimationFrame(scrollStateRafId);
|
||||
if (resizeRafId !== null) cancelAnimationFrame(resizeRafId);
|
||||
resizeObserver.disconnect();
|
||||
try { webglRef.current?.dispose(); } catch { /* may already be disposed */ }
|
||||
@@ -256,6 +329,9 @@ export default function TerminalView({ sessionId, active }: Props) {
|
||||
}
|
||||
}
|
||||
fitRef.current?.fit();
|
||||
if (autoFollowRef.current) {
|
||||
term.scrollToBottom();
|
||||
}
|
||||
term.focus();
|
||||
} else {
|
||||
// Release WebGL context for inactive terminals
|
||||
@@ -290,8 +366,30 @@ export default function TerminalView({ sessionId, active }: Props) {
|
||||
}, [detectedUrl]);
|
||||
|
||||
const handleScrollToBottom = useCallback(() => {
|
||||
termRef.current?.scrollToBottom();
|
||||
setIsAtBottom(true);
|
||||
const term = termRef.current;
|
||||
if (term) {
|
||||
autoFollowRef.current = true;
|
||||
setIsAutoFollow(true);
|
||||
fitRef.current?.fit();
|
||||
term.scrollToBottom();
|
||||
isAtBottomRef.current = true;
|
||||
setIsAtBottom(true);
|
||||
}
|
||||
}, []);
|
||||
|
||||
const handleToggleAutoFollow = useCallback(() => {
|
||||
const next = !autoFollowRef.current;
|
||||
autoFollowRef.current = next;
|
||||
setIsAutoFollow(next);
|
||||
if (next) {
|
||||
const term = termRef.current;
|
||||
if (term) {
|
||||
fitRef.current?.fit();
|
||||
term.scrollToBottom();
|
||||
isAtBottomRef.current = true;
|
||||
setIsAtBottom(true);
|
||||
}
|
||||
}
|
||||
}, []);
|
||||
|
||||
return (
|
||||
@@ -314,6 +412,19 @@ export default function TerminalView({ sessionId, active }: Props) {
|
||||
{imagePasteMsg}
|
||||
</div>
|
||||
)}
|
||||
{/* Auto-follow toggle - top right */}
|
||||
<button
|
||||
onClick={handleToggleAutoFollow}
|
||||
className={`absolute top-2 right-4 z-50 px-2 py-1 rounded text-[10px] font-medium border shadow-sm transition-colors cursor-pointer ${
|
||||
isAutoFollow
|
||||
? "bg-[#1a2332] text-[#3fb950] border-[#238636] hover:bg-[#1f2d3d]"
|
||||
: "bg-[#1f2937] text-[#8b949e] border-[#30363d] hover:bg-[#2d3748]"
|
||||
}`}
|
||||
title={isAutoFollow ? "Auto-scrolling to latest output (click to pause)" : "Auto-scroll paused (click to resume)"}
|
||||
>
|
||||
{isAutoFollow ? "▼ Following" : "▽ Paused"}
|
||||
</button>
|
||||
{/* Jump to Current - bottom right, when scrolled up */}
|
||||
{!isAtBottom && (
|
||||
<button
|
||||
onClick={handleScrollToBottom}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { invoke } from "@tauri-apps/api/core";
|
||||
import type { Project, ProjectPath, ContainerInfo, SiblingContainer, AppSettings, UpdateInfo, ImageUpdateInfo, McpServer, FileEntry } from "./types";
|
||||
import type { Project, ProjectPath, ContainerInfo, SiblingContainer, AppSettings, UpdateInfo, ImageUpdateInfo, McpServer, FileEntry, WebTerminalInfo } from "./types";
|
||||
|
||||
// Docker
|
||||
export const checkDocker = () => invoke<boolean>("check_docker");
|
||||
@@ -85,3 +85,16 @@ export const checkForUpdates = () =>
|
||||
invoke<UpdateInfo | null>("check_for_updates");
|
||||
export const checkImageUpdate = () =>
|
||||
invoke<ImageUpdateInfo | null>("check_image_update");
|
||||
|
||||
// Help
|
||||
export const getHelpContent = () => invoke<string>("get_help_content");
|
||||
|
||||
// Web Terminal
/** Start the web-terminal server and return its updated status. */
export const startWebTerminal = () =>
  invoke<WebTerminalInfo>("start_web_terminal");
/** Stop the web-terminal server. Returns nothing — call getWebTerminalStatus() afterwards for fresh state. */
export const stopWebTerminal = () =>
  invoke<void>("stop_web_terminal");
/** Fetch current web-terminal status without changing it. */
export const getWebTerminalStatus = () =>
  invoke<WebTerminalInfo>("get_web_terminal_status");
/** Generate a new access token (presumably invalidating the old one — confirm Rust-side behavior) and return updated status. */
export const regenerateWebTerminalToken = () =>
  invoke<WebTerminalInfo>("regenerate_web_terminal_token");
|
||||
|
||||
@@ -23,9 +23,10 @@ export interface Project {
|
||||
backend: Backend;
|
||||
bedrock_config: BedrockConfig | null;
|
||||
ollama_config: OllamaConfig | null;
|
||||
litellm_config: LiteLlmConfig | null;
|
||||
openai_compatible_config: OpenAiCompatibleConfig | null;
|
||||
allow_docker_access: boolean;
|
||||
mission_control_enabled: boolean;
|
||||
full_permissions: boolean;
|
||||
ssh_key_path: string | null;
|
||||
git_token: string | null;
|
||||
git_user_name: string | null;
|
||||
@@ -45,7 +46,7 @@ export type ProjectStatus =
|
||||
| "stopping"
|
||||
| "error";
|
||||
|
||||
export type Backend = "anthropic" | "bedrock" | "ollama" | "lite_llm";
|
||||
export type Backend = "anthropic" | "bedrock" | "ollama" | "open_ai_compatible";
|
||||
|
||||
export type BedrockAuthMethod = "static_credentials" | "profile" | "bearer_token";
|
||||
|
||||
@@ -66,7 +67,7 @@ export interface OllamaConfig {
|
||||
model_id: string | null;
|
||||
}
|
||||
|
||||
export interface LiteLlmConfig {
|
||||
export interface OpenAiCompatibleConfig {
|
||||
base_url: string;
|
||||
api_key: string | null;
|
||||
model_id: string | null;
|
||||
@@ -117,6 +118,21 @@ export interface AppSettings {
|
||||
timezone: string | null;
|
||||
default_microphone: string | null;
|
||||
dismissed_image_digest: string | null;
|
||||
web_terminal: WebTerminalSettings;
|
||||
}
|
||||
|
||||
/**
 * Persisted web-terminal configuration stored in AppSettings.
 * NOTE(review): presumably mirrors a Rust-side serde struct in the
 * settings store — confirm field names match the backend.
 */
export interface WebTerminalSettings {
  // Whether the web-terminal server should run.
  enabled: boolean;
  // TCP port the server listens on.
  port: number;
  // Access token clients must present; null until one has been generated.
  access_token: string | null;
}
|
||||
|
||||
/**
 * Runtime status of the web-terminal server, as returned by the
 * start/stop/status/regenerate Tauri commands.
 */
export interface WebTerminalInfo {
  // True while the server is running.
  running: boolean;
  // Port the server is (or will be) bound to.
  port: number;
  // Access token; non-null in this runtime view (unlike WebTerminalSettings).
  access_token: string;
  // Detected local network IP — null if detection failed (assumed; confirm backend).
  local_ip: string | null;
  // Full browser URL for the terminal UI; null when not running or IP unknown (assumed; confirm backend).
  url: string | null;
}
|
||||
|
||||
export interface UpdateInfo {
|
||||
|
||||
@@ -24,6 +24,8 @@ interface AppState {
|
||||
removeMcpServerFromList: (id: string) => void;
|
||||
|
||||
// UI state
|
||||
terminalHasSelection: boolean;
|
||||
setTerminalHasSelection: (has: boolean) => void;
|
||||
sidebarView: "projects" | "mcp" | "settings";
|
||||
setSidebarView: (view: "projects" | "mcp" | "settings") => void;
|
||||
dockerAvailable: boolean | null;
|
||||
@@ -100,6 +102,8 @@ export const useAppState = create<AppState>((set) => ({
|
||||
})),
|
||||
|
||||
// UI state
|
||||
terminalHasSelection: false,
|
||||
setTerminalHasSelection: (has) => set({ terminalHasSelection: has }),
|
||||
sidebarView: "projects",
|
||||
setSidebarView: (view) => set({ sidebarView: view }),
|
||||
dockerAvailable: null,
|
||||
|
||||
Reference in New Issue
Block a user