diff --git a/CLAUDE.md b/CLAUDE.md index 1da9e4a..4c4281c 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -72,7 +72,7 @@ docker exec stdout → tokio task → emit("terminal-output-{sessionId}") → li - `container.rs` — Container lifecycle (create, start, stop, remove, inspect) - `exec.rs` — PTY exec sessions with bidirectional stdin/stdout streaming - `image.rs` — Image build/pull with progress streaming -- **`models/`** — Serde structs (`Project`, `AuthMode`, `BedrockConfig`, `ContainerInfo`, `AppSettings`). These define the IPC contract with the frontend. +- **`models/`** — Serde structs (`Project`, `AuthMode`, `BedrockConfig`, `OllamaConfig`, `LiteLlmConfig`, `ContainerInfo`, `AppSettings`). These define the IPC contract with the frontend. - **`storage/`** — Persistence: `projects_store.rs` (JSON file with atomic writes), `secure.rs` (OS keychain via `keyring` crate), `settings_store.rs` ### Container (`container/`) @@ -90,6 +90,8 @@ Containers use a **stop/start** model (not create/destroy). Installed packages p Per-project, independently configured: - **Anthropic (OAuth)** — `claude login` in terminal, token persists in config volume - **AWS Bedrock** — Static keys, profile, or bearer token injected as env vars +- **Ollama** — Connect to a local or remote Ollama server via `ANTHROPIC_BASE_URL` (e.g., `http://host.docker.internal:11434`) +- **LiteLLM** — Connect through a LiteLLM proxy gateway via `ANTHROPIC_BASE_URL` + `ANTHROPIC_AUTH_TOKEN` to access 100+ model providers ## Styling diff --git a/app/package.json b/app/package.json index c9ae2c6..6d320ff 100644 --- a/app/package.json +++ b/app/package.json @@ -1,7 +1,7 @@ { "name": "triple-c", "private": true, - "version": "0.1.0", + "version": "0.2.0", "type": "module", "scripts": { "dev": "vite", diff --git a/app/src-tauri/Cargo.lock b/app/src-tauri/Cargo.lock index 610dca3..03f97ba 100644 --- a/app/src-tauri/Cargo.lock +++ b/app/src-tauri/Cargo.lock @@ -4668,7 +4668,7 @@ dependencies = [ [[package]] name = "triple-c" 
-version = "0.1.0" +version = "0.2.0" dependencies = [ "bollard", "chrono", diff --git a/app/src-tauri/Cargo.toml b/app/src-tauri/Cargo.toml index 2bb596b..f13ae16 100644 --- a/app/src-tauri/Cargo.toml +++ b/app/src-tauri/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "triple-c" -version = "0.1.0" +version = "0.2.0" edition = "2021" [lib] diff --git a/app/src-tauri/src/commands/project_commands.rs b/app/src-tauri/src/commands/project_commands.rs index 326fd34..636a6d5 100644 --- a/app/src-tauri/src/commands/project_commands.rs +++ b/app/src-tauri/src/commands/project_commands.rs @@ -34,6 +34,11 @@ fn store_secrets_for_project(project: &Project) -> Result<(), String> { secure::store_project_secret(&project.id, "aws-bearer-token", v)?; } } + if let Some(ref litellm) = project.litellm_config { + if let Some(ref v) = litellm.api_key { + secure::store_project_secret(&project.id, "litellm-api-key", v)?; + } + } Ok(()) } @@ -51,6 +56,10 @@ fn load_secrets_for_project(project: &mut Project) { bedrock.aws_bearer_token = secure::get_project_secret(&project.id, "aws-bearer-token") .unwrap_or(None); } + if let Some(ref mut litellm) = project.litellm_config { + litellm.api_key = secure::get_project_secret(&project.id, "litellm-api-key") + .unwrap_or(None); + } } /// Resolve enabled MCP servers and filter to Docker-only ones. 
@@ -180,6 +189,22 @@ pub async fn start_project_container( } } + if project.auth_mode == AuthMode::Ollama { + let ollama = project.ollama_config.as_ref() + .ok_or_else(|| "Ollama auth mode selected but no Ollama configuration found.".to_string())?; + if ollama.base_url.is_empty() { + return Err("Ollama base URL is required.".to_string()); + } + } + + if project.auth_mode == AuthMode::LiteLlm { + let litellm = project.litellm_config.as_ref() + .ok_or_else(|| "LiteLLM auth mode selected but no LiteLLM configuration found.".to_string())?; + if litellm.base_url.is_empty() { + return Err("LiteLLM base URL is required.".to_string()); + } + } + // Update status to starting state.projects_store.update_status(&project_id, ProjectStatus::Starting)?; diff --git a/app/src-tauri/src/docker/container.rs b/app/src-tauri/src/docker/container.rs index 7d2a41d..44f35a4 100644 --- a/app/src-tauri/src/docker/container.rs +++ b/app/src-tauri/src/docker/container.rs @@ -231,6 +231,33 @@ fn compute_bedrock_fingerprint(project: &Project) -> String { } } +/// Compute a fingerprint for the Ollama configuration so we can detect changes. +fn compute_ollama_fingerprint(project: &Project) -> String { + if let Some(ref ollama) = project.ollama_config { + let parts = vec![ + ollama.base_url.clone(), + ollama.model_id.as_deref().unwrap_or("").to_string(), + ]; + sha256_hex(&parts.join("|")) + } else { + String::new() + } +} + +/// Compute a fingerprint for the LiteLLM configuration so we can detect changes. +fn compute_litellm_fingerprint(project: &Project) -> String { + if let Some(ref litellm) = project.litellm_config { + let parts = vec![ + litellm.base_url.clone(), + litellm.api_key.as_deref().unwrap_or("").to_string(), + litellm.model_id.as_deref().unwrap_or("").to_string(), + ]; + sha256_hex(&parts.join("|")) + } else { + String::new() + } +} + /// Compute a fingerprint for the project paths so we can detect changes. /// Sorted by mount_name so order changes don't cause spurious recreation. 
fn compute_paths_fingerprint(paths: &[ProjectPath]) -> String { @@ -478,6 +505,30 @@ pub async fn create_container( } } + // Ollama configuration + if project.auth_mode == AuthMode::Ollama { + if let Some(ref ollama) = project.ollama_config { + env_vars.push(format!("ANTHROPIC_BASE_URL={}", ollama.base_url)); + env_vars.push("ANTHROPIC_AUTH_TOKEN=ollama".to_string()); + if let Some(ref model) = ollama.model_id { + env_vars.push(format!("ANTHROPIC_MODEL={}", model)); + } + } + } + + // LiteLLM configuration + if project.auth_mode == AuthMode::LiteLlm { + if let Some(ref litellm) = project.litellm_config { + env_vars.push(format!("ANTHROPIC_BASE_URL={}", litellm.base_url)); + if let Some(ref key) = litellm.api_key { + env_vars.push(format!("ANTHROPIC_AUTH_TOKEN={}", key)); + } + if let Some(ref model) = litellm.model_id { + env_vars.push(format!("ANTHROPIC_MODEL={}", model)); + } + } + } + // Custom environment variables (global + per-project, project overrides global for same key) let merged_env = merge_custom_env_vars(global_custom_env_vars, &project.custom_env_vars); let reserved_prefixes = ["ANTHROPIC_", "AWS_", "GIT_", "HOST_", "CLAUDE_", "TRIPLE_C_"]; @@ -646,6 +697,8 @@ pub async fn create_container( labels.insert("triple-c.auth-mode".to_string(), format!("{:?}", project.auth_mode)); labels.insert("triple-c.paths-fingerprint".to_string(), compute_paths_fingerprint(&project.paths)); labels.insert("triple-c.bedrock-fingerprint".to_string(), compute_bedrock_fingerprint(project)); + labels.insert("triple-c.ollama-fingerprint".to_string(), compute_ollama_fingerprint(project)); + labels.insert("triple-c.litellm-fingerprint".to_string(), compute_litellm_fingerprint(project)); labels.insert("triple-c.ports-fingerprint".to_string(), compute_ports_fingerprint(&project.port_mappings)); labels.insert("triple-c.image".to_string(), image_name.to_string()); labels.insert("triple-c.timezone".to_string(), timezone.unwrap_or("").to_string()); @@ -885,6 +938,22 @@ pub async fn 
container_needs_recreation( return Ok(true); } + // ── Ollama config fingerprint ──────────────────────────────────────── + let expected_ollama_fp = compute_ollama_fingerprint(project); + let container_ollama_fp = get_label("triple-c.ollama-fingerprint").unwrap_or_default(); + if container_ollama_fp != expected_ollama_fp { + log::info!("Ollama config mismatch"); + return Ok(true); + } + + // ── LiteLLM config fingerprint ─────────────────────────────────────── + let expected_litellm_fp = compute_litellm_fingerprint(project); + let container_litellm_fp = get_label("triple-c.litellm-fingerprint").unwrap_or_default(); + if container_litellm_fp != expected_litellm_fp { + log::info!("LiteLLM config mismatch"); + return Ok(true); + } + // ── Image ──────────────────────────────────────────────────────────── // The image label is set at creation time; if the user changed the // configured image we need to recreate. We only compare when the diff --git a/app/src-tauri/src/models/project.rs b/app/src-tauri/src/models/project.rs index a32cbcd..27e09b3 100644 --- a/app/src-tauri/src/models/project.rs +++ b/app/src-tauri/src/models/project.rs @@ -33,6 +33,8 @@ pub struct Project { pub status: ProjectStatus, pub auth_mode: AuthMode, pub bedrock_config: Option<BedrockConfig>, + pub ollama_config: Option<OllamaConfig>, + pub litellm_config: Option<LiteLlmConfig>, pub allow_docker_access: bool, #[serde(default)] pub mission_control_enabled: bool, @@ -74,6 +76,9 @@ pub enum AuthMode { #[serde(alias = "login", alias = "api_key")] Anthropic, Bedrock, + Ollama, + #[serde(rename = "lit_llm", alias = "litellm")] + LiteLlm, } impl Default for AuthMode { @@ -115,6 +120,29 @@ pub struct BedrockConfig { pub disable_prompt_caching: bool, } +/// Ollama configuration for a project. +/// Ollama exposes an Anthropic-compatible API endpoint.
+#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct OllamaConfig { + /// The base URL of the Ollama server (e.g., "http://host.docker.internal:11434" or "http://192.168.1.100:11434") + pub base_url: String, + /// Optional model override (e.g., "qwen3.5:27b") + pub model_id: Option<String>, +} + +/// LiteLLM gateway configuration for a project. +/// LiteLLM translates Anthropic API calls to 100+ model providers. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct LiteLlmConfig { + /// The base URL of the LiteLLM proxy (e.g., "http://host.docker.internal:4000" or "https://litellm.example.com") + pub base_url: String, + /// API key for the LiteLLM proxy + #[serde(skip_serializing, default)] + pub api_key: Option<String>, + /// Optional model override + pub model_id: Option<String>, +} + impl Project { pub fn new(name: String, paths: Vec<ProjectPath>) -> Self { let now = chrono::Utc::now().to_rfc3339(); @@ -126,6 +154,8 @@ impl Project { status: ProjectStatus::Stopped, auth_mode: AuthMode::default(), bedrock_config: None, + ollama_config: None, + litellm_config: None, allow_docker_access: false, mission_control_enabled: false, ssh_key_path: None, diff --git a/app/src-tauri/tauri.conf.json b/app/src-tauri/tauri.conf.json index cfae16a..c5d064a 100644 --- a/app/src-tauri/tauri.conf.json +++ b/app/src-tauri/tauri.conf.json @@ -1,7 +1,7 @@ { "$schema": "https://raw.githubusercontent.com/tauri-apps/tauri/dev/crates/tauri-cli/schema.json", "productName": "Triple-C", - "version": "0.1.0", + "version": "0.2.0", "identifier": "com.triple-c.desktop", "build": { "beforeDevCommand": "npm run dev", diff --git a/app/src/components/projects/ProjectCard.tsx b/app/src/components/projects/ProjectCard.tsx index 2464986..632c364 100644 --- a/app/src/components/projects/ProjectCard.tsx +++ b/app/src/components/projects/ProjectCard.tsx @@ -1,7 +1,7 @@ import { useState, useEffect } from "react"; import { open } from "@tauri-apps/plugin-dialog"; import { listen } from "@tauri-apps/api/event"; -import type {
Project, ProjectPath, AuthMode, BedrockConfig, BedrockAuthMethod } from "../../lib/types"; +import type { Project, ProjectPath, AuthMode, BedrockConfig, BedrockAuthMethod, OllamaConfig, LiteLlmConfig } from "../../lib/types"; import { useProjects } from "../../hooks/useProjects"; import { useMcpServers } from "../../hooks/useMcpServers"; import { useTerminal } from "../../hooks/useTerminal"; @@ -58,6 +58,15 @@ export default function ProjectCard({ project }: Props) { const [bedrockBearerToken, setBedrockBearerToken] = useState(project.bedrock_config?.aws_bearer_token ?? ""); const [bedrockModelId, setBedrockModelId] = useState(project.bedrock_config?.model_id ?? ""); + // Ollama local state + const [ollamaBaseUrl, setOllamaBaseUrl] = useState(project.ollama_config?.base_url ?? "http://host.docker.internal:11434"); + const [ollamaModelId, setOllamaModelId] = useState(project.ollama_config?.model_id ?? ""); + + // LiteLLM local state + const [litellmBaseUrl, setLitellmBaseUrl] = useState(project.litellm_config?.base_url ?? "http://host.docker.internal:4000"); + const [litellmApiKey, setLitellmApiKey] = useState(project.litellm_config?.api_key ?? ""); + const [litellmModelId, setLitellmModelId] = useState(project.litellm_config?.model_id ?? ""); + // Sync local state when project prop changes (e.g., after save or external update) useEffect(() => { setEditName(project.name); @@ -76,6 +85,11 @@ export default function ProjectCard({ project }: Props) { setBedrockProfile(project.bedrock_config?.aws_profile ?? ""); setBedrockBearerToken(project.bedrock_config?.aws_bearer_token ?? ""); setBedrockModelId(project.bedrock_config?.model_id ?? ""); + setOllamaBaseUrl(project.ollama_config?.base_url ?? "http://host.docker.internal:11434"); + setOllamaModelId(project.ollama_config?.model_id ?? ""); + setLitellmBaseUrl(project.litellm_config?.base_url ?? "http://host.docker.internal:4000"); + setLitellmApiKey(project.litellm_config?.api_key ?? 
""); + setLitellmModelId(project.litellm_config?.model_id ?? ""); }, [project]); // Listen for container progress events @@ -177,12 +191,29 @@ export default function ProjectCard({ project }: Props) { disable_prompt_caching: false, }; + const defaultOllamaConfig: OllamaConfig = { + base_url: "http://host.docker.internal:11434", + model_id: null, + }; + + const defaultLiteLlmConfig: LiteLlmConfig = { + base_url: "http://host.docker.internal:4000", + api_key: null, + model_id: null, + }; + const handleAuthModeChange = async (mode: AuthMode) => { try { const updates: Partial<Project> = { auth_mode: mode }; if (mode === "bedrock" && !project.bedrock_config) { updates.bedrock_config = defaultBedrockConfig; } + if (mode === "ollama" && !project.ollama_config) { + updates.ollama_config = defaultOllamaConfig; + } + if (mode === "lit_llm" && !project.litellm_config) { + updates.litellm_config = defaultLiteLlmConfig; + } await update({ ...project, ...updates }); } catch (e) { setError(String(e)); @@ -305,6 +336,51 @@ export default function ProjectCard({ project }: Props) { } }; + const handleOllamaBaseUrlBlur = async () => { + try { + const current = project.ollama_config ?? defaultOllamaConfig; + await update({ ...project, ollama_config: { ...current, base_url: ollamaBaseUrl } }); + } catch (err) { + console.error("Failed to update Ollama base URL:", err); + } + }; + + const handleOllamaModelIdBlur = async () => { + try { + const current = project.ollama_config ?? defaultOllamaConfig; + await update({ ...project, ollama_config: { ...current, model_id: ollamaModelId || null } }); + } catch (err) { + console.error("Failed to update Ollama model ID:", err); + } + }; + + const handleLitellmBaseUrlBlur = async () => { + try { + const current = project.litellm_config ??
defaultLiteLlmConfig; + await update({ ...project, litellm_config: { ...current, base_url: litellmBaseUrl } }); + } catch (err) { + console.error("Failed to update LiteLLM base URL:", err); + } + }; + + const handleLitellmApiKeyBlur = async () => { + try { + const current = project.litellm_config ?? defaultLiteLlmConfig; + await update({ ...project, litellm_config: { ...current, api_key: litellmApiKey || null } }); + } catch (err) { + console.error("Failed to update LiteLLM API key:", err); + } + }; + + const handleLitellmModelIdBlur = async () => { + try { + const current = project.litellm_config ?? defaultLiteLlmConfig; + await update({ ...project, litellm_config: { ...current, model_id: litellmModelId || null } }); + } catch (err) { + console.error("Failed to update LiteLLM model ID:", err); + } + }; + const statusColor = { stopped: "bg-[var(--text-secondary)]", starting: "bg-[var(--warning)]", @@ -395,6 +471,28 @@ export default function ProjectCard({ project }: Props) { > Bedrock + + {/* Action buttons */} @@ -851,6 +949,99 @@ export default function ProjectCard({ project }: Props) { ); })()} + + {/* Ollama config */} + {project.auth_mode === "ollama" && (() => { + const inputCls = "w-full px-2 py-1 bg-[var(--bg-primary)] border border-[var(--border-color)] rounded text-xs text-[var(--text-primary)] focus:outline-none focus:border-[var(--accent)] disabled:opacity-50"; + return ( +
+ +

+ Connect to an Ollama server running locally or on a remote host. +

+ +
+ + setOllamaBaseUrl(e.target.value)} + onBlur={handleOllamaBaseUrlBlur} + placeholder="http://host.docker.internal:11434" + disabled={!isStopped} + className={inputCls} + /> +

+ Use host.docker.internal for the host machine, or an IP/hostname for remote. +

+
+ +
+ + setOllamaModelId(e.target.value)} + onBlur={handleOllamaModelIdBlur} + placeholder="qwen3.5:27b" + disabled={!isStopped} + className={inputCls} + /> +
+
+ ); + })()} + + {/* LiteLLM config */} + {project.auth_mode === "lit_llm" && (() => { + const inputCls = "w-full px-2 py-1 bg-[var(--bg-primary)] border border-[var(--border-color)] rounded text-xs text-[var(--text-primary)] focus:outline-none focus:border-[var(--accent)] disabled:opacity-50"; + return ( +
+ +

+ Connect through a LiteLLM proxy to use 100+ model providers. +

+ +
+ + setLitellmBaseUrl(e.target.value)} + onBlur={handleLitellmBaseUrlBlur} + placeholder="http://host.docker.internal:4000" + disabled={!isStopped} + className={inputCls} + /> +

+ Use host.docker.internal for local, or a URL for remote/containerized LiteLLM. +

+
+ +
+ + setLitellmApiKey(e.target.value)} + onBlur={handleLitellmApiKeyBlur} + placeholder="sk-..." + disabled={!isStopped} + className={inputCls} + /> +
+ +
+ + setLitellmModelId(e.target.value)} + onBlur={handleLitellmModelIdBlur} + placeholder="gpt-4o / gemini-pro / etc." + disabled={!isStopped} + className={inputCls} + /> +
+
+ ); + })()} )} diff --git a/app/src/lib/types.ts b/app/src/lib/types.ts index 70d84ee..2ec1621 100644 --- a/app/src/lib/types.ts +++ b/app/src/lib/types.ts @@ -22,6 +22,8 @@ export interface Project { status: ProjectStatus; auth_mode: AuthMode; bedrock_config: BedrockConfig | null; + ollama_config: OllamaConfig | null; + litellm_config: LiteLlmConfig | null; allow_docker_access: boolean; mission_control_enabled: boolean; ssh_key_path: string | null; @@ -43,7 +45,7 @@ export type ProjectStatus = | "stopping" | "error"; -export type AuthMode = "anthropic" | "bedrock"; +export type AuthMode = "anthropic" | "bedrock" | "ollama" | "lit_llm"; export type BedrockAuthMethod = "static_credentials" | "profile" | "bearer_token"; @@ -59,6 +61,17 @@ export interface BedrockConfig { disable_prompt_caching: boolean; } +export interface OllamaConfig { + base_url: string; + model_id: string | null; +} + +export interface LiteLlmConfig { + base_url: string; + api_key: string | null; + model_id: string | null; +} + export interface ContainerInfo { container_id: string; project_id: string;