Compare commits
6 Commits
v0.1.96-wi ... v0.2.1
| Author | SHA1 | Date |
|---|---|---|
| | beae0942a1 | |
| | 6b49981b3a | |
| | b46b392a9a | |
| | 4889dd974f | |
| | b6fd8a557e | |
| | 93deab68a7 | |
@@ -5,11 +5,13 @@ on:
branches: [main]
paths:
- "app/**"
- "VERSION"
- ".gitea/workflows/build-app.yml"
pull_request:
branches: [main]
paths:
- "app/**"
- "VERSION"
- ".gitea/workflows/build-app.yml"
workflow_dispatch:

@@ -18,10 +20,43 @@ env:
REPO: ${{ gitea.repository }}

jobs:
build-linux:
compute-version:
runs-on: ubuntu-latest
outputs:
version: ${{ steps.version.outputs.VERSION }}
steps:
- name: Checkout
uses: actions/checkout@v4
with:
fetch-depth: 0

- name: Fetch all tags
run: git fetch --tags

- name: Compute version from VERSION file and tags
id: version
run: |
MAJOR_MINOR=$(cat VERSION | tr -d '[:space:]')
echo "Major.Minor: ${MAJOR_MINOR}"

# Find the latest tag matching v{MAJOR_MINOR}.N (exclude -mac, -win suffixes)
LATEST_TAG=$(git tag -l "v${MAJOR_MINOR}.*" --sort=-v:refname | grep -E "^v${MAJOR_MINOR}\.[0-9]+$" | head -1)

if [ -n "$LATEST_TAG" ]; then
echo "Latest matching tag: ${LATEST_TAG}"
PATCH=$(git rev-list --count "${LATEST_TAG}..HEAD")
else
echo "No matching tag found for v${MAJOR_MINOR}.*, using total commit count"
PATCH=$(git rev-list --count HEAD)
fi

VERSION="${MAJOR_MINOR}.${PATCH}"
echo "VERSION=${VERSION}" >> $GITHUB_OUTPUT
echo "Computed version: ${VERSION}"

build-linux:
runs-on: ubuntu-latest
needs: [compute-version]
steps:
- name: Install Node.js 22
run: |

@@ -54,17 +89,9 @@ jobs:
with:
fetch-depth: 0

- name: Compute version
id: version
run: |
COMMIT_COUNT=$(git rev-list --count HEAD)
VERSION="0.1.${COMMIT_COUNT}"
echo "VERSION=${VERSION}" >> $GITHUB_OUTPUT
echo "Computed version: ${VERSION}"

- name: Set app version
run: |
VERSION="${{ steps.version.outputs.VERSION }}"
VERSION="${{ needs.compute-version.outputs.version }}"
sed -i "s/\"version\": \".*\"/\"version\": \"${VERSION}\"/" app/src-tauri/tauri.conf.json
sed -i "s/\"version\": \".*\"/\"version\": \"${VERSION}\"/" app/package.json
sed -i "s/^version = \".*\"/version = \"${VERSION}\"/" app/src-tauri/Cargo.toml

@@ -133,7 +160,7 @@ jobs:
env:
TOKEN: ${{ secrets.REGISTRY_TOKEN }}
run: |
TAG="v${{ steps.version.outputs.VERSION }}"
TAG="v${{ needs.compute-version.outputs.version }}"
# Create release
curl -s -X POST \
-H "Authorization: token ${TOKEN}" \

@@ -156,6 +183,7 @@ jobs:

build-macos:
runs-on: macos-latest
needs: [compute-version]
steps:
- name: Install Node.js 22
run: |

@@ -183,17 +211,9 @@ jobs:
with:
fetch-depth: 0

- name: Compute version
id: version
run: |
COMMIT_COUNT=$(git rev-list --count HEAD)
VERSION="0.1.${COMMIT_COUNT}"
echo "VERSION=${VERSION}" >> $GITHUB_OUTPUT
echo "Computed version: ${VERSION}"

- name: Set app version
run: |
VERSION="${{ steps.version.outputs.VERSION }}"
VERSION="${{ needs.compute-version.outputs.version }}"
sed -i '' "s/\"version\": \".*\"/\"version\": \"${VERSION}\"/" app/src-tauri/tauri.conf.json
sed -i '' "s/\"version\": \".*\"/\"version\": \"${VERSION}\"/" app/package.json
sed -i '' "s/^version = \".*\"/version = \"${VERSION}\"/" app/src-tauri/Cargo.toml

@@ -243,12 +263,12 @@ jobs:
env:
TOKEN: ${{ secrets.REGISTRY_TOKEN }}
run: |
TAG="v${{ steps.version.outputs.VERSION }}-mac"
TAG="v${{ needs.compute-version.outputs.version }}-mac"
# Create release
curl -s -X POST \
-H "Authorization: token ${TOKEN}" \
-H "Content-Type: application/json" \
-d "{\"tag_name\": \"${TAG}\", \"name\": \"Triple-C v${{ steps.version.outputs.VERSION }} (macOS)\", \"body\": \"Automated build from commit ${{ gitea.sha }}\"}" \
-d "{\"tag_name\": \"${TAG}\", \"name\": \"Triple-C v${{ needs.compute-version.outputs.version }} (macOS)\", \"body\": \"Automated build from commit ${{ gitea.sha }}\"}" \
"${GITEA_URL}/api/v1/repos/${REPO}/releases" > release.json
RELEASE_ID=$(cat release.json | grep -o '"id":[0-9]*' | head -1 | grep -o '[0-9]*')
echo "Release ID: ${RELEASE_ID}"

@@ -266,6 +286,7 @@ jobs:

build-windows:
runs-on: windows-latest
needs: [compute-version]
defaults:
run:
shell: cmd

@@ -275,18 +296,10 @@ jobs:
with:
fetch-depth: 0

- name: Compute version
id: version
run: |
for /f %%i in ('git rev-list --count HEAD') do set "COMMIT_COUNT=%%i"
set "VERSION=0.1.%COMMIT_COUNT%"
echo VERSION=%VERSION%>> %GITHUB_OUTPUT%
echo Computed version: %VERSION%

- name: Set app version
shell: powershell
run: |
$version = "${{ steps.version.outputs.VERSION }}"
$version = "${{ needs.compute-version.outputs.version }}"
(Get-Content app/src-tauri/tauri.conf.json) -replace '"version": ".*?"', "`"version`": `"$version`"" | Set-Content app/src-tauri/tauri.conf.json
(Get-Content app/package.json) -replace '"version": ".*?"', "`"version`": `"$version`"" | Set-Content app/package.json
(Get-Content app/src-tauri/Cargo.toml) -replace '^version = ".*?"', "version = `"$version`"" | Set-Content app/src-tauri/Cargo.toml

@@ -367,9 +380,9 @@ jobs:
TOKEN: ${{ secrets.REGISTRY_TOKEN }}
COMMIT_SHA: ${{ gitea.sha }}
run: |
set "TAG=v${{ steps.version.outputs.VERSION }}-win"
set "TAG=v${{ needs.compute-version.outputs.version }}-win"
echo Creating release %TAG%...
curl -s -X POST -H "Authorization: token %TOKEN%" -H "Content-Type: application/json" -d "{\"tag_name\": \"%TAG%\", \"name\": \"Triple-C v${{ steps.version.outputs.VERSION }} (Windows)\", \"body\": \"Automated build from commit %COMMIT_SHA%\"}" "%GITEA_URL%/api/v1/repos/%REPO%/releases" > release.json
curl -s -X POST -H "Authorization: token %TOKEN%" -H "Content-Type: application/json" -d "{\"tag_name\": \"%TAG%\", \"name\": \"Triple-C v${{ needs.compute-version.outputs.version }} (Windows)\", \"body\": \"Automated build from commit %COMMIT_SHA%\"}" "%GITEA_URL%/api/v1/repos/%REPO%/releases" > release.json
for /f "tokens=2 delims=:," %%a in ('findstr /c:"\"id\"" release.json') do set "RELEASE_ID=%%a" & goto :found
:found
echo Release ID: %RELEASE_ID%

@@ -378,9 +391,36 @@ jobs:
curl -s -X POST -H "Authorization: token %TOKEN%" -H "Content-Type: application/octet-stream" --data-binary "@%%f" "%GITEA_URL%/api/v1/repos/%REPO%/releases/%RELEASE_ID%/assets?name=%%~nxf"
)

create-tag:
runs-on: ubuntu-latest
needs: [compute-version, build-linux, build-macos, build-windows]
if: gitea.event_name == 'push'
steps:
- name: Checkout
uses: actions/checkout@v4
with:
fetch-depth: 0

- name: Create version tag
env:
TOKEN: ${{ secrets.REGISTRY_TOKEN }}
run: |
VERSION="${{ needs.compute-version.outputs.version }}"
TAG="v${VERSION}"
echo "Creating tag ${TAG}..."

# Create annotated tag via Gitea API
curl -s -X POST \
-H "Authorization: token ${TOKEN}" \
-H "Content-Type: application/json" \
-d "{\"tag_name\": \"${TAG}\", \"target\": \"${{ gitea.sha }}\", \"message\": \"Release ${TAG}\"}" \
"${GITEA_URL}/api/v1/repos/${REPO}/tags" || echo "Tag may already exist (created by release)"

echo "Tag ${TAG} created successfully"

sync-to-github:
runs-on: ubuntu-latest
needs: [build-linux, build-macos, build-windows]
needs: [compute-version, build-linux, build-macos, build-windows]
if: gitea.event_name == 'push'
env:
GH_PAT: ${{ secrets.GH_PAT }}

@@ -389,7 +429,7 @@ jobs:
- name: Download artifacts from Gitea releases
env:
TOKEN: ${{ secrets.REGISTRY_TOKEN }}
VERSION: ${{ needs.build-linux.outputs.version }}
VERSION: ${{ needs.compute-version.outputs.version }}
run: |
set -e
mkdir -p artifacts

@@ -418,7 +458,7 @@ jobs:

- name: Create GitHub release and upload artifacts
env:
VERSION: ${{ needs.build-linux.outputs.version }}
VERSION: ${{ needs.compute-version.outputs.version }}
COMMIT_SHA: ${{ gitea.sha }}
run: |
set -e

@@ -72,7 +72,7 @@ docker exec stdout → tokio task → emit("terminal-output-{sessionId}") → li
- `container.rs` — Container lifecycle (create, start, stop, remove, inspect)
- `exec.rs` — PTY exec sessions with bidirectional stdin/stdout streaming
- `image.rs` — Image build/pull with progress streaming
- **`models/`** — Serde structs (`Project`, `AuthMode`, `BedrockConfig`, `ContainerInfo`, `AppSettings`). These define the IPC contract with the frontend.
- **`models/`** — Serde structs (`Project`, `AuthMode`, `BedrockConfig`, `OllamaConfig`, `LiteLlmConfig`, `ContainerInfo`, `AppSettings`). These define the IPC contract with the frontend.
- **`storage/`** — Persistence: `projects_store.rs` (JSON file with atomic writes), `secure.rs` (OS keychain via `keyring` crate), `settings_store.rs`

### Container (`container/`)

@@ -90,6 +90,8 @@ Containers use a **stop/start** model (not create/destroy). Installed packages p
Per-project, independently configured:
- **Anthropic (OAuth)** — `claude login` in terminal, token persists in config volume
- **AWS Bedrock** — Static keys, profile, or bearer token injected as env vars
- **Ollama** — Connect to a local or remote Ollama server via `ANTHROPIC_BASE_URL` (e.g., `http://host.docker.internal:11434`)
- **LiteLLM** — Connect through a LiteLLM proxy gateway via `ANTHROPIC_BASE_URL` + `ANTHROPIC_AUTH_TOKEN` to access 100+ model providers

## Styling

@@ -33,6 +33,8 @@ You need access to Claude Code through one of:

- **Anthropic account** — Sign up at https://claude.ai and use `claude login` (OAuth) inside the terminal
- **AWS Bedrock** — An AWS account with Bedrock access and Claude models enabled
- **Ollama** — A local or remote Ollama server running an Anthropic-compatible model (best-effort support)
- **LiteLLM** — A LiteLLM proxy gateway providing access to 100+ model providers (best-effort support)

---

@@ -88,6 +90,20 @@ Claude Code launches automatically with `--dangerously-skip-permissions` inside
3. Expand the **Config** panel and fill in your AWS credentials (see [AWS Bedrock Configuration](#aws-bedrock-configuration) below).
4. Start the container again.

**Ollama:**

1. Stop the container first (settings can only be changed while stopped).
2. In the project card, switch the auth mode to **Ollama**.
3. Expand the **Config** panel and set the base URL of your Ollama server (defaults to `http://host.docker.internal:11434` for a local instance). Optionally set a model ID.
4. Start the container again.
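Before starting the container it is worth confirming the Ollama server is actually reachable. A minimal check from the host, assuming a default local install listening on port 11434:

```bash
# Sketch: verify Ollama is up and list the models it has pulled.
curl -s http://localhost:11434/api/tags

# From inside the project container the same server is reached as
# http://host.docker.internal:11434 (the default Base URL above).
```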

**LiteLLM:**

1. Stop the container first (settings can only be changed while stopped).
2. In the project card, switch the auth mode to **LiteLLM**.
3. Expand the **Config** panel and set the base URL of your LiteLLM proxy (defaults to `http://host.docker.internal:4000`). Optionally set an API key and model ID.
4. Start the container again.
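The LiteLLM proxy can be checked the same way before starting the container. A sketch assuming the proxy listens on port 4000 and is protected by a bearer key (the `sk-...` value is a placeholder):

```bash
# Sketch: confirm the LiteLLM proxy answers and list the models it routes to.
curl -s http://localhost:4000/v1/models \
  -H "Authorization: Bearer sk-your-litellm-key"
```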

---

## The Interface

@@ -372,6 +388,41 @@ Per-project settings always override these global defaults.

---

## Ollama Configuration

To use Claude Code with a local or remote Ollama server, switch the auth mode to **Ollama** on the project card.

### Settings

- **Base URL** — The URL of your Ollama server. Defaults to `http://host.docker.internal:11434`, which reaches a locally running Ollama instance from inside the container. For a remote server, use its IP or hostname (e.g., `http://192.168.1.100:11434`).
- **Model ID** — Optional. Override the model to use (e.g., `qwen3.5:27b`).

### How It Works

Triple-C sets `ANTHROPIC_BASE_URL` to point Claude Code at your Ollama server instead of Anthropic's API. The `ANTHROPIC_AUTH_TOKEN` is set to `ollama` (required by Claude Code but not used for actual authentication).

> **Note:** Ollama support is best-effort. Claude Code is designed for Anthropic models, so some features (tool use, extended thinking, prompt caching, etc.) may not work as expected with non-Anthropic models.
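In practice this means the project container is started with an environment along these lines; a quick way to confirm it (the container name is illustrative, look it up with `docker ps`):

```bash
# Sketch: inspect the ANTHROPIC_* variables a running Ollama-mode container received.
docker exec my-project-container env | grep '^ANTHROPIC_'
# Typical output (values depend on your project config):
#   ANTHROPIC_BASE_URL=http://host.docker.internal:11434
#   ANTHROPIC_AUTH_TOKEN=ollama
#   ANTHROPIC_MODEL=qwen3.5:27b        <- only present if a Model ID was set
```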

---

## LiteLLM Configuration

To use Claude Code through a [LiteLLM](https://docs.litellm.ai/) proxy gateway, switch the auth mode to **LiteLLM** on the project card. LiteLLM supports 100+ model providers (OpenAI, Gemini, Anthropic, and more) through a single proxy.

### Settings

- **Base URL** — The URL of your LiteLLM proxy. Defaults to `http://host.docker.internal:4000` for a locally running proxy.
- **API Key** — Optional. The API key for your LiteLLM proxy, if authentication is required. Stored securely in your OS keychain.
- **Model ID** — Optional. Override the model to use.

### How It Works

Triple-C sets `ANTHROPIC_BASE_URL` to point Claude Code at your LiteLLM proxy. If an API key is provided, it is set as `ANTHROPIC_AUTH_TOKEN`.

> **Note:** LiteLLM support is best-effort. Claude Code is designed for Anthropic models, so some features (tool use, extended thinking, prompt caching, etc.) may not work as expected when routing to non-Anthropic models through the proxy.
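The same check applies to LiteLLM mode (again, the container name is illustrative):

```bash
# Sketch: inspect the ANTHROPIC_* variables a running LiteLLM-mode container received.
docker exec my-project-container env | grep '^ANTHROPIC_'
# Typical output (values depend on your project config):
#   ANTHROPIC_BASE_URL=http://host.docker.internal:4000
#   ANTHROPIC_AUTH_TOKEN=sk-...        <- only present if an API Key was set
#   ANTHROPIC_MODEL=gpt-4o             <- only present if a Model ID was set
```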

---

## Settings

Access global settings via the **Settings** tab in the sidebar.

@@ -49,6 +49,10 @@ Each project can independently use one of:

- **Anthropic** (OAuth): User runs `claude login` inside the terminal on first use. Token persisted in the config volume across restarts and resets.
- **AWS Bedrock**: Per-project AWS credentials (static keys, profile, or bearer token). SSO sessions are validated before launching Claude for Profile auth.
- **Ollama**: Connect to a local or remote Ollama server via `ANTHROPIC_BASE_URL` (e.g., `http://host.docker.internal:11434`). Optional model override.
- **LiteLLM**: Connect through a LiteLLM proxy gateway via `ANTHROPIC_BASE_URL` + `ANTHROPIC_AUTH_TOKEN` to access 100+ model providers. API key stored securely in OS keychain.

> **Note:** Ollama and LiteLLM support is best-effort. Claude Code is designed for Anthropic models, so some features (tool use, extended thinking, prompt caching, etc.) may not work as expected with non-Anthropic models behind these backends.
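These per-mode settings are also stamped onto the container as `triple-c.*` fingerprint labels at creation time (including the new `triple-c.ollama-fingerprint` and `triple-c.litellm-fingerprint`), which is how the backend decides a stopped container must be recreated after a config change. A sketch for inspecting them, with an illustrative container name:

```bash
# Sketch: dump the triple-c.* labels recorded on a project container.
docker inspect --format '{{json .Config.Labels}}' my-project-container \
  | tr ',' '\n' | grep 'triple-c\.'
```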

### Container Spawning (Sibling Containers)
app/package-lock.json (generated, 68 lines changed)
@@ -1,12 +1,12 @@
{
"name": "triple-c",
"version": "0.1.0",
"version": "0.2.0",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "triple-c",
"version": "0.1.0",
"version": "0.2.0",
"dependencies": {
"@tauri-apps/api": "^2",
"@tauri-apps/plugin-dialog": "^2",

@@ -1643,6 +1643,70 @@
"node": ">=14.0.0"
}
},
"node_modules/@tailwindcss/oxide-wasm32-wasi/node_modules/@emnapi/core": {
"version": "1.8.1",
"dev": true,
"inBundle": true,
"license": "MIT",
"optional": true,
"dependencies": {
"@emnapi/wasi-threads": "1.1.0",
"tslib": "^2.4.0"
}
},
"node_modules/@tailwindcss/oxide-wasm32-wasi/node_modules/@emnapi/runtime": {
"version": "1.8.1",
"dev": true,
"inBundle": true,
"license": "MIT",
"optional": true,
"dependencies": {
"tslib": "^2.4.0"
}
},
"node_modules/@tailwindcss/oxide-wasm32-wasi/node_modules/@emnapi/wasi-threads": {
"version": "1.1.0",
"dev": true,
"inBundle": true,
"license": "MIT",
"optional": true,
"dependencies": {
"tslib": "^2.4.0"
}
},
"node_modules/@tailwindcss/oxide-wasm32-wasi/node_modules/@napi-rs/wasm-runtime": {
"version": "1.1.1",
"dev": true,
"inBundle": true,
"license": "MIT",
"optional": true,
"dependencies": {
"@emnapi/core": "^1.7.1",
"@emnapi/runtime": "^1.7.1",
"@tybys/wasm-util": "^0.10.1"
},
"funding": {
"type": "github",
"url": "https://github.com/sponsors/Brooooooklyn"
}
},
"node_modules/@tailwindcss/oxide-wasm32-wasi/node_modules/@tybys/wasm-util": {
"version": "0.10.1",
"dev": true,
"inBundle": true,
"license": "MIT",
"optional": true,
"dependencies": {
"tslib": "^2.4.0"
}
},
"node_modules/@tailwindcss/oxide-wasm32-wasi/node_modules/tslib": {
"version": "2.8.1",
"dev": true,
"inBundle": true,
"license": "0BSD",
"optional": true
},
"node_modules/@tailwindcss/oxide-win32-arm64-msvc": {
"version": "4.2.1",
"resolved": "https://registry.npmjs.org/@tailwindcss/oxide-win32-arm64-msvc/-/oxide-win32-arm64-msvc-4.2.1.tgz",

@@ -1,7 +1,7 @@
{
"name": "triple-c",
"private": true,
"version": "0.1.0",
"version": "0.2.0",
"type": "module",
"scripts": {
"dev": "vite",

app/src-tauri/Cargo.lock (generated, 2 lines changed)
@@ -4668,7 +4668,7 @@ dependencies = [

[[package]]
name = "triple-c"
version = "0.1.0"
version = "0.2.0"
dependencies = [
"bollard",
"chrono",

@@ -1,6 +1,6 @@
[package]
name = "triple-c"
version = "0.1.0"
version = "0.2.0"
edition = "2021"

[lib]

@@ -34,6 +34,11 @@ fn store_secrets_for_project(project: &Project) -> Result<(), String> {
secure::store_project_secret(&project.id, "aws-bearer-token", v)?;
}
}
if let Some(ref litellm) = project.litellm_config {
if let Some(ref v) = litellm.api_key {
secure::store_project_secret(&project.id, "litellm-api-key", v)?;
}
}
Ok(())
}

@@ -51,6 +56,10 @@ fn load_secrets_for_project(project: &mut Project) {
bedrock.aws_bearer_token = secure::get_project_secret(&project.id, "aws-bearer-token")
.unwrap_or(None);
}
if let Some(ref mut litellm) = project.litellm_config {
litellm.api_key = secure::get_project_secret(&project.id, "litellm-api-key")
.unwrap_or(None);
}
}

/// Resolve enabled MCP servers and filter to Docker-only ones.

@@ -180,6 +189,22 @@ pub async fn start_project_container(
}
}

if project.auth_mode == AuthMode::Ollama {
let ollama = project.ollama_config.as_ref()
.ok_or_else(|| "Ollama auth mode selected but no Ollama configuration found.".to_string())?;
if ollama.base_url.is_empty() {
return Err("Ollama base URL is required.".to_string());
}
}

if project.auth_mode == AuthMode::LiteLlm {
let litellm = project.litellm_config.as_ref()
.ok_or_else(|| "LiteLLM auth mode selected but no LiteLLM configuration found.".to_string())?;
if litellm.base_url.is_empty() {
return Err("LiteLLM base URL is required.".to_string());
}
}

// Update status to starting
state.projects_store.update_status(&project_id, ProjectStatus::Starting)?;

@@ -231,6 +231,33 @@ fn compute_bedrock_fingerprint(project: &Project) -> String {
}
}

/// Compute a fingerprint for the Ollama configuration so we can detect changes.
fn compute_ollama_fingerprint(project: &Project) -> String {
if let Some(ref ollama) = project.ollama_config {
let parts = vec![
ollama.base_url.clone(),
ollama.model_id.as_deref().unwrap_or("").to_string(),
];
sha256_hex(&parts.join("|"))
} else {
String::new()
}
}

/// Compute a fingerprint for the LiteLLM configuration so we can detect changes.
fn compute_litellm_fingerprint(project: &Project) -> String {
if let Some(ref litellm) = project.litellm_config {
let parts = vec![
litellm.base_url.clone(),
litellm.api_key.as_deref().unwrap_or("").to_string(),
litellm.model_id.as_deref().unwrap_or("").to_string(),
];
sha256_hex(&parts.join("|"))
} else {
String::new()
}
}

/// Compute a fingerprint for the project paths so we can detect changes.
/// Sorted by mount_name so order changes don't cause spurious recreation.
fn compute_paths_fingerprint(paths: &[ProjectPath]) -> String {

@@ -478,6 +505,30 @@ pub async fn create_container(
}
}

// Ollama configuration
if project.auth_mode == AuthMode::Ollama {
if let Some(ref ollama) = project.ollama_config {
env_vars.push(format!("ANTHROPIC_BASE_URL={}", ollama.base_url));
env_vars.push("ANTHROPIC_AUTH_TOKEN=ollama".to_string());
if let Some(ref model) = ollama.model_id {
env_vars.push(format!("ANTHROPIC_MODEL={}", model));
}
}
}

// LiteLLM configuration
if project.auth_mode == AuthMode::LiteLlm {
if let Some(ref litellm) = project.litellm_config {
env_vars.push(format!("ANTHROPIC_BASE_URL={}", litellm.base_url));
if let Some(ref key) = litellm.api_key {
env_vars.push(format!("ANTHROPIC_AUTH_TOKEN={}", key));
}
if let Some(ref model) = litellm.model_id {
env_vars.push(format!("ANTHROPIC_MODEL={}", model));
}
}
}

// Custom environment variables (global + per-project, project overrides global for same key)
let merged_env = merge_custom_env_vars(global_custom_env_vars, &project.custom_env_vars);
let reserved_prefixes = ["ANTHROPIC_", "AWS_", "GIT_", "HOST_", "CLAUDE_", "TRIPLE_C_"];

@@ -646,6 +697,8 @@ pub async fn create_container(
labels.insert("triple-c.auth-mode".to_string(), format!("{:?}", project.auth_mode));
labels.insert("triple-c.paths-fingerprint".to_string(), compute_paths_fingerprint(&project.paths));
labels.insert("triple-c.bedrock-fingerprint".to_string(), compute_bedrock_fingerprint(project));
labels.insert("triple-c.ollama-fingerprint".to_string(), compute_ollama_fingerprint(project));
labels.insert("triple-c.litellm-fingerprint".to_string(), compute_litellm_fingerprint(project));
labels.insert("triple-c.ports-fingerprint".to_string(), compute_ports_fingerprint(&project.port_mappings));
labels.insert("triple-c.image".to_string(), image_name.to_string());
labels.insert("triple-c.timezone".to_string(), timezone.unwrap_or("").to_string());
@@ -885,6 +938,22 @@ pub async fn container_needs_recreation(
return Ok(true);
}

// ── Ollama config fingerprint ────────────────────────────────────────
let expected_ollama_fp = compute_ollama_fingerprint(project);
let container_ollama_fp = get_label("triple-c.ollama-fingerprint").unwrap_or_default();
if container_ollama_fp != expected_ollama_fp {
log::info!("Ollama config mismatch");
return Ok(true);
}

// ── LiteLLM config fingerprint ───────────────────────────────────────
let expected_litellm_fp = compute_litellm_fingerprint(project);
let container_litellm_fp = get_label("triple-c.litellm-fingerprint").unwrap_or_default();
if container_litellm_fp != expected_litellm_fp {
log::info!("LiteLLM config mismatch");
return Ok(true);
}

// ── Image ────────────────────────────────────────────────────────────
// The image label is set at creation time; if the user changed the
// configured image we need to recreate. We only compare when the

@@ -1074,11 +1143,6 @@ pub fn any_stdio_docker_mcp(servers: &[McpServer]) -> bool {
servers.iter().any(|s| s.is_docker() && s.transport_type == McpTransportType::Stdio)
}

/// Returns true if any MCP server uses Docker.
pub fn any_docker_mcp(servers: &[McpServer]) -> bool {
servers.iter().any(|s| s.is_docker())
}

/// Find an existing MCP container by its expected name.
pub async fn find_mcp_container(server: &McpServer) -> Result<Option<String>, String> {
let docker = get_docker()?;

@@ -22,6 +22,7 @@ impl ExecSession {
.map_err(|e| format!("Failed to send input: {}", e))
}

#[allow(dead_code)]
pub async fn resize(&self, cols: u16, rows: u16) -> Result<(), String> {
let docker = get_docker()?;
docker

@@ -4,8 +4,13 @@ pub mod image;
pub mod exec;
pub mod network;

#[allow(unused_imports)]
pub use client::*;
#[allow(unused_imports)]
pub use container::*;
#[allow(unused_imports)]
pub use image::*;
#[allow(unused_imports)]
pub use exec::*;
#[allow(unused_imports)]
pub use network::*;

@@ -48,6 +48,7 @@ pub async fn ensure_project_network(project_id: &str) -> Result<String, String>
}

/// Connect a container to the project network.
#[allow(dead_code)]
pub async fn connect_container_to_network(
container_id: &str,
network_name: &str,

@@ -33,6 +33,8 @@ pub struct Project {
pub status: ProjectStatus,
pub auth_mode: AuthMode,
pub bedrock_config: Option<BedrockConfig>,
pub ollama_config: Option<OllamaConfig>,
pub litellm_config: Option<LiteLlmConfig>,
pub allow_docker_access: bool,
#[serde(default)]
pub mission_control_enabled: bool,

@@ -74,6 +76,9 @@ pub enum AuthMode {
#[serde(alias = "login", alias = "api_key")]
Anthropic,
Bedrock,
Ollama,
#[serde(alias = "litellm")]
LiteLlm,
}

impl Default for AuthMode {

@@ -115,6 +120,29 @@ pub struct BedrockConfig {
pub disable_prompt_caching: bool,
}

/// Ollama configuration for a project.
/// Ollama exposes an Anthropic-compatible API endpoint.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct OllamaConfig {
/// The base URL of the Ollama server (e.g., "http://host.docker.internal:11434" or "http://192.168.1.100:11434")
pub base_url: String,
/// Optional model override (e.g., "qwen3.5:27b")
pub model_id: Option<String>,
}

/// LiteLLM gateway configuration for a project.
/// LiteLLM translates Anthropic API calls to 100+ model providers.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LiteLlmConfig {
/// The base URL of the LiteLLM proxy (e.g., "http://host.docker.internal:4000" or "https://litellm.example.com")
pub base_url: String,
/// API key for the LiteLLM proxy
#[serde(skip_serializing, default)]
pub api_key: Option<String>,
/// Optional model override
pub model_id: Option<String>,
}

impl Project {
pub fn new(name: String, paths: Vec<ProjectPath>) -> Self {
let now = chrono::Utc::now().to_rfc3339();

@@ -126,6 +154,8 @@ impl Project {
status: ProjectStatus::Stopped,
auth_mode: AuthMode::default(),
bedrock_config: None,
ollama_config: None,
litellm_config: None,
allow_docker_access: false,
mission_control_enabled: false,
ssh_key_path: None,

@@ -3,7 +3,11 @@ pub mod secure;
pub mod settings_store;
pub mod mcp_store;

#[allow(unused_imports)]
pub use projects_store::*;
#[allow(unused_imports)]
pub use secure::*;
#[allow(unused_imports)]
pub use settings_store::*;
#[allow(unused_imports)]
pub use mcp_store::*;

@@ -1,7 +1,7 @@
{
"$schema": "https://raw.githubusercontent.com/tauri-apps/tauri/dev/crates/tauri-cli/schema.json",
"productName": "Triple-C",
"version": "0.1.0",
"version": "0.2.0",
"identifier": "com.triple-c.desktop",
"build": {
"beforeDevCommand": "npm run dev",

@@ -1,7 +1,7 @@
import { useState, useEffect } from "react";
import { open } from "@tauri-apps/plugin-dialog";
import { listen } from "@tauri-apps/api/event";
import type { Project, ProjectPath, AuthMode, BedrockConfig, BedrockAuthMethod } from "../../lib/types";
import type { Project, ProjectPath, AuthMode, BedrockConfig, BedrockAuthMethod, OllamaConfig, LiteLlmConfig } from "../../lib/types";
import { useProjects } from "../../hooks/useProjects";
import { useMcpServers } from "../../hooks/useMcpServers";
import { useTerminal } from "../../hooks/useTerminal";

@@ -58,6 +58,15 @@ export default function ProjectCard({ project }: Props) {
const [bedrockBearerToken, setBedrockBearerToken] = useState(project.bedrock_config?.aws_bearer_token ?? "");
const [bedrockModelId, setBedrockModelId] = useState(project.bedrock_config?.model_id ?? "");

// Ollama local state
const [ollamaBaseUrl, setOllamaBaseUrl] = useState(project.ollama_config?.base_url ?? "http://host.docker.internal:11434");
const [ollamaModelId, setOllamaModelId] = useState(project.ollama_config?.model_id ?? "");

// LiteLLM local state
const [litellmBaseUrl, setLitellmBaseUrl] = useState(project.litellm_config?.base_url ?? "http://host.docker.internal:4000");
const [litellmApiKey, setLitellmApiKey] = useState(project.litellm_config?.api_key ?? "");
const [litellmModelId, setLitellmModelId] = useState(project.litellm_config?.model_id ?? "");

// Sync local state when project prop changes (e.g., after save or external update)
useEffect(() => {
setEditName(project.name);

@@ -76,6 +85,11 @@ export default function ProjectCard({ project }: Props) {
setBedrockProfile(project.bedrock_config?.aws_profile ?? "");
setBedrockBearerToken(project.bedrock_config?.aws_bearer_token ?? "");
setBedrockModelId(project.bedrock_config?.model_id ?? "");
setOllamaBaseUrl(project.ollama_config?.base_url ?? "http://host.docker.internal:11434");
setOllamaModelId(project.ollama_config?.model_id ?? "");
setLitellmBaseUrl(project.litellm_config?.base_url ?? "http://host.docker.internal:4000");
setLitellmApiKey(project.litellm_config?.api_key ?? "");
setLitellmModelId(project.litellm_config?.model_id ?? "");
}, [project]);

// Listen for container progress events

@@ -177,12 +191,29 @@ export default function ProjectCard({ project }: Props) {
disable_prompt_caching: false,
};

const defaultOllamaConfig: OllamaConfig = {
base_url: "http://host.docker.internal:11434",
model_id: null,
};

const defaultLiteLlmConfig: LiteLlmConfig = {
base_url: "http://host.docker.internal:4000",
api_key: null,
model_id: null,
};

const handleAuthModeChange = async (mode: AuthMode) => {
try {
const updates: Partial<Project> = { auth_mode: mode };
if (mode === "bedrock" && !project.bedrock_config) {
updates.bedrock_config = defaultBedrockConfig;
}
if (mode === "ollama" && !project.ollama_config) {
updates.ollama_config = defaultOllamaConfig;
}
if (mode === "lit_llm" && !project.litellm_config) {
updates.litellm_config = defaultLiteLlmConfig;
}
await update({ ...project, ...updates });
} catch (e) {
setError(String(e));

@@ -305,6 +336,51 @@ export default function ProjectCard({ project }: Props) {
}
};

const handleOllamaBaseUrlBlur = async () => {
try {
const current = project.ollama_config ?? defaultOllamaConfig;
await update({ ...project, ollama_config: { ...current, base_url: ollamaBaseUrl } });
} catch (err) {
console.error("Failed to update Ollama base URL:", err);
}
};

const handleOllamaModelIdBlur = async () => {
try {
const current = project.ollama_config ?? defaultOllamaConfig;
await update({ ...project, ollama_config: { ...current, model_id: ollamaModelId || null } });
} catch (err) {
console.error("Failed to update Ollama model ID:", err);
}
};

const handleLitellmBaseUrlBlur = async () => {
try {
const current = project.litellm_config ?? defaultLiteLlmConfig;
await update({ ...project, litellm_config: { ...current, base_url: litellmBaseUrl } });
} catch (err) {
console.error("Failed to update LiteLLM base URL:", err);
}
};

const handleLitellmApiKeyBlur = async () => {
try {
const current = project.litellm_config ?? defaultLiteLlmConfig;
await update({ ...project, litellm_config: { ...current, api_key: litellmApiKey || null } });
} catch (err) {
console.error("Failed to update LiteLLM API key:", err);
}
};

const handleLitellmModelIdBlur = async () => {
try {
const current = project.litellm_config ?? defaultLiteLlmConfig;
await update({ ...project, litellm_config: { ...current, model_id: litellmModelId || null } });
} catch (err) {
console.error("Failed to update LiteLLM model ID:", err);
}
};

const statusColor = {
stopped: "bg-[var(--text-secondary)]",
starting: "bg-[var(--warning)]",
@@ -373,28 +449,18 @@ export default function ProjectCard({ project }: Props) {
{/* Auth mode selector */}
<div className="flex items-center gap-1 text-xs">
<span className="text-[var(--text-secondary)] mr-1">Auth:</span>
<button
onClick={(e) => { e.stopPropagation(); handleAuthModeChange("anthropic"); }}
<select
value={project.auth_mode}
onChange={(e) => { e.stopPropagation(); handleAuthModeChange(e.target.value as AuthMode); }}
onClick={(e) => e.stopPropagation()}
disabled={!isStopped}
className={`px-2 py-0.5 rounded transition-colors ${
project.auth_mode === "anthropic"
? "bg-[var(--accent)] text-white"
: "text-[var(--text-secondary)] hover:text-[var(--text-primary)] hover:bg-[var(--bg-primary)]"
} disabled:opacity-50`}
className="px-2 py-0.5 rounded bg-[var(--bg-primary)] border border-[var(--border-color)] text-xs text-[var(--text-primary)] focus:outline-none focus:border-[var(--accent)] disabled:opacity-50"
>
Anthropic
</button>
<button
onClick={(e) => { e.stopPropagation(); handleAuthModeChange("bedrock"); }}
disabled={!isStopped}
className={`px-2 py-0.5 rounded transition-colors ${
project.auth_mode === "bedrock"
? "bg-[var(--accent)] text-white"
: "text-[var(--text-secondary)] hover:text-[var(--text-primary)] hover:bg-[var(--bg-primary)]"
} disabled:opacity-50`}
>
Bedrock
</button>
<option value="anthropic">Anthropic</option>
<option value="bedrock">Bedrock</option>
<option value="ollama">Ollama</option>
<option value="lit_llm">LiteLLM</option>
</select>
</div>

{/* Action buttons */}

@@ -738,20 +804,17 @@ export default function ProjectCard({ project }: Props) {
{/* Sub-method selector */}
<div className="flex items-center gap-1 text-xs">
<span className="text-[var(--text-secondary)] mr-1">Method:</span>
{(["static_credentials", "profile", "bearer_token"] as BedrockAuthMethod[]).map((m) => (
<button
key={m}
onClick={() => updateBedrockConfig({ auth_method: m })}
disabled={!isStopped}
className={`px-2 py-0.5 rounded transition-colors ${
bc.auth_method === m
? "bg-[var(--accent)] text-white"
: "text-[var(--text-secondary)] hover:text-[var(--text-primary)] hover:bg-[var(--bg-primary)]"
} disabled:opacity-50`}
>
{m === "static_credentials" ? "Keys" : m === "profile" ? "Profile" : "Token"}
</button>
))}
<select
value={bc.auth_method}
onChange={(e) => updateBedrockConfig({ auth_method: e.target.value as BedrockAuthMethod })}
onClick={(e) => e.stopPropagation()}
disabled={!isStopped}
className="px-2 py-0.5 rounded bg-[var(--bg-primary)] border border-[var(--border-color)] text-xs text-[var(--text-primary)] focus:outline-none focus:border-[var(--accent)] disabled:opacity-50"
>
<option value="static_credentials">Keys</option>
<option value="profile">Profile</option>
<option value="bearer_token">Token</option>
</select>
</div>

{/* AWS Region (always shown) */}
@@ -851,6 +914,99 @@ export default function ProjectCard({ project }: Props) {
</div>
);
})()}

{/* Ollama config */}
{project.auth_mode === "ollama" && (() => {
const inputCls = "w-full px-2 py-1 bg-[var(--bg-primary)] border border-[var(--border-color)] rounded text-xs text-[var(--text-primary)] focus:outline-none focus:border-[var(--accent)] disabled:opacity-50";
return (
<div className="space-y-2 pt-1 border-t border-[var(--border-color)]">
<label className="block text-xs font-medium text-[var(--text-primary)]">Ollama</label>
<p className="text-xs text-[var(--text-secondary)]">
Connect to an Ollama server running locally or on a remote host.
</p>

<div>
<label className="block text-xs text-[var(--text-secondary)] mb-0.5">Base URL</label>
<input
value={ollamaBaseUrl}
onChange={(e) => setOllamaBaseUrl(e.target.value)}
onBlur={handleOllamaBaseUrlBlur}
placeholder="http://host.docker.internal:11434"
disabled={!isStopped}
className={inputCls}
/>
<p className="text-xs text-[var(--text-secondary)] mt-0.5 opacity-70">
Use host.docker.internal for the host machine, or an IP/hostname for remote.
</p>
</div>

<div>
<label className="block text-xs text-[var(--text-secondary)] mb-0.5">Model (optional)</label>
<input
value={ollamaModelId}
onChange={(e) => setOllamaModelId(e.target.value)}
onBlur={handleOllamaModelIdBlur}
placeholder="qwen3.5:27b"
disabled={!isStopped}
className={inputCls}
/>
</div>
</div>
);
})()}

{/* LiteLLM config */}
{project.auth_mode === "lit_llm" && (() => {
const inputCls = "w-full px-2 py-1 bg-[var(--bg-primary)] border border-[var(--border-color)] rounded text-xs text-[var(--text-primary)] focus:outline-none focus:border-[var(--accent)] disabled:opacity-50";
return (
<div className="space-y-2 pt-1 border-t border-[var(--border-color)]">
<label className="block text-xs font-medium text-[var(--text-primary)]">LiteLLM Gateway</label>
<p className="text-xs text-[var(--text-secondary)]">
Connect through a LiteLLM proxy to use 100+ model providers.
</p>

<div>
<label className="block text-xs text-[var(--text-secondary)] mb-0.5">Base URL</label>
<input
value={litellmBaseUrl}
onChange={(e) => setLitellmBaseUrl(e.target.value)}
onBlur={handleLitellmBaseUrlBlur}
placeholder="http://host.docker.internal:4000"
disabled={!isStopped}
className={inputCls}
/>
<p className="text-xs text-[var(--text-secondary)] mt-0.5 opacity-70">
Use host.docker.internal for local, or a URL for remote/containerized LiteLLM.
</p>
</div>

<div>
<label className="block text-xs text-[var(--text-secondary)] mb-0.5">API Key</label>
<input
type="password"
value={litellmApiKey}
onChange={(e) => setLitellmApiKey(e.target.value)}
onBlur={handleLitellmApiKeyBlur}
placeholder="sk-..."
disabled={!isStopped}
className={inputCls}
/>
</div>

<div>
<label className="block text-xs text-[var(--text-secondary)] mb-0.5">Model (optional)</label>
<input
value={litellmModelId}
onChange={(e) => setLitellmModelId(e.target.value)}
onBlur={handleLitellmModelIdBlur}
placeholder="gpt-4o / gemini-pro / etc."
disabled={!isStopped}
className={inputCls}
/>
</div>
</div>
);
})()}
</div>
)}
</div>

@@ -22,6 +22,8 @@ export interface Project {
status: ProjectStatus;
auth_mode: AuthMode;
bedrock_config: BedrockConfig | null;
ollama_config: OllamaConfig | null;
litellm_config: LiteLlmConfig | null;
allow_docker_access: boolean;
mission_control_enabled: boolean;
ssh_key_path: string | null;

@@ -43,7 +45,7 @@ export type ProjectStatus =
| "stopping"
| "error";

export type AuthMode = "anthropic" | "bedrock";
export type AuthMode = "anthropic" | "bedrock" | "ollama" | "lit_llm";

export type BedrockAuthMethod = "static_credentials" | "profile" | "bearer_token";

@@ -59,6 +61,17 @@ export interface BedrockConfig {
disable_prompt_caching: boolean;
}

export interface OllamaConfig {
base_url: string;
model_id: string | null;
}

export interface LiteLlmConfig {
base_url: string;
api_key: string | null;
model_id: string | null;
}

export interface ContainerInfo {
container_id: string;
project_id: string;