Add Ollama and LiteLLM backend support (v0.2.0)
All checks were successful
Build App / build-macos (push) Successful in 2m25s
Build App / build-windows (push) Successful in 3m27s
Build App / build-linux (push) Successful in 4m31s
Build App / sync-to-github (push) Successful in 9s

Add two new auth modes for projects alongside Anthropic and Bedrock:
- Ollama: connect to local or remote Ollama servers via ANTHROPIC_BASE_URL
- LiteLLM: connect through a LiteLLM proxy gateway to 100+ model providers

Both modes inject ANTHROPIC_BASE_URL and ANTHROPIC_AUTH_TOKEN env vars into
the container, with optional model override via ANTHROPIC_MODEL. LiteLLM
API keys are stored securely in the OS keychain. Config changes trigger
automatic container recreation via fingerprinting.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
2026-03-11 13:05:52 -07:00
parent 2dce2993cc
commit 93deab68a7
10 changed files with 337 additions and 7 deletions

View File

@@ -4668,7 +4668,7 @@ dependencies = [
[[package]]
name = "triple-c"
version = "0.1.0"
version = "0.2.0"
dependencies = [
"bollard",
"chrono",

View File

@@ -1,6 +1,6 @@
[package]
name = "triple-c"
version = "0.1.0"
version = "0.2.0"
edition = "2021"
[lib]

View File

@@ -34,6 +34,11 @@ fn store_secrets_for_project(project: &Project) -> Result<(), String> {
secure::store_project_secret(&project.id, "aws-bearer-token", v)?;
}
}
if let Some(ref litellm) = project.litellm_config {
if let Some(ref v) = litellm.api_key {
secure::store_project_secret(&project.id, "litellm-api-key", v)?;
}
}
Ok(())
}
@@ -51,6 +56,10 @@ fn load_secrets_for_project(project: &mut Project) {
bedrock.aws_bearer_token = secure::get_project_secret(&project.id, "aws-bearer-token")
.unwrap_or(None);
}
if let Some(ref mut litellm) = project.litellm_config {
litellm.api_key = secure::get_project_secret(&project.id, "litellm-api-key")
.unwrap_or(None);
}
}
/// Resolve enabled MCP servers and filter to Docker-only ones.
@@ -180,6 +189,22 @@ pub async fn start_project_container(
}
}
if project.auth_mode == AuthMode::Ollama {
let ollama = project.ollama_config.as_ref()
.ok_or_else(|| "Ollama auth mode selected but no Ollama configuration found.".to_string())?;
if ollama.base_url.is_empty() {
return Err("Ollama base URL is required.".to_string());
}
}
if project.auth_mode == AuthMode::LiteLlm {
let litellm = project.litellm_config.as_ref()
.ok_or_else(|| "LiteLLM auth mode selected but no LiteLLM configuration found.".to_string())?;
if litellm.base_url.is_empty() {
return Err("LiteLLM base URL is required.".to_string());
}
}
// Update status to starting
state.projects_store.update_status(&project_id, ProjectStatus::Starting)?;

View File

@@ -231,6 +231,33 @@ fn compute_bedrock_fingerprint(project: &Project) -> String {
}
}
/// Compute a fingerprint for the Ollama configuration so we can detect changes.
fn compute_ollama_fingerprint(project: &Project) -> String {
if let Some(ref ollama) = project.ollama_config {
let parts = vec![
ollama.base_url.clone(),
ollama.model_id.as_deref().unwrap_or("").to_string(),
];
sha256_hex(&parts.join("|"))
} else {
String::new()
}
}
/// Compute a fingerprint for the LiteLLM configuration so we can detect changes.
fn compute_litellm_fingerprint(project: &Project) -> String {
if let Some(ref litellm) = project.litellm_config {
let parts = vec![
litellm.base_url.clone(),
litellm.api_key.as_deref().unwrap_or("").to_string(),
litellm.model_id.as_deref().unwrap_or("").to_string(),
];
sha256_hex(&parts.join("|"))
} else {
String::new()
}
}
/// Compute a fingerprint for the project paths so we can detect changes.
/// Sorted by mount_name so order changes don't cause spurious recreation.
fn compute_paths_fingerprint(paths: &[ProjectPath]) -> String {
@@ -478,6 +505,30 @@ pub async fn create_container(
}
}
// Ollama configuration
if project.auth_mode == AuthMode::Ollama {
if let Some(ref ollama) = project.ollama_config {
env_vars.push(format!("ANTHROPIC_BASE_URL={}", ollama.base_url));
env_vars.push("ANTHROPIC_AUTH_TOKEN=ollama".to_string());
if let Some(ref model) = ollama.model_id {
env_vars.push(format!("ANTHROPIC_MODEL={}", model));
}
}
}
// LiteLLM configuration
if project.auth_mode == AuthMode::LiteLlm {
if let Some(ref litellm) = project.litellm_config {
env_vars.push(format!("ANTHROPIC_BASE_URL={}", litellm.base_url));
if let Some(ref key) = litellm.api_key {
env_vars.push(format!("ANTHROPIC_AUTH_TOKEN={}", key));
}
if let Some(ref model) = litellm.model_id {
env_vars.push(format!("ANTHROPIC_MODEL={}", model));
}
}
}
// Custom environment variables (global + per-project, project overrides global for same key)
let merged_env = merge_custom_env_vars(global_custom_env_vars, &project.custom_env_vars);
let reserved_prefixes = ["ANTHROPIC_", "AWS_", "GIT_", "HOST_", "CLAUDE_", "TRIPLE_C_"];
@@ -646,6 +697,8 @@ pub async fn create_container(
labels.insert("triple-c.auth-mode".to_string(), format!("{:?}", project.auth_mode));
labels.insert("triple-c.paths-fingerprint".to_string(), compute_paths_fingerprint(&project.paths));
labels.insert("triple-c.bedrock-fingerprint".to_string(), compute_bedrock_fingerprint(project));
labels.insert("triple-c.ollama-fingerprint".to_string(), compute_ollama_fingerprint(project));
labels.insert("triple-c.litellm-fingerprint".to_string(), compute_litellm_fingerprint(project));
labels.insert("triple-c.ports-fingerprint".to_string(), compute_ports_fingerprint(&project.port_mappings));
labels.insert("triple-c.image".to_string(), image_name.to_string());
labels.insert("triple-c.timezone".to_string(), timezone.unwrap_or("").to_string());
@@ -885,6 +938,22 @@ pub async fn container_needs_recreation(
return Ok(true);
}
// ── Ollama config fingerprint ────────────────────────────────────────
let expected_ollama_fp = compute_ollama_fingerprint(project);
let container_ollama_fp = get_label("triple-c.ollama-fingerprint").unwrap_or_default();
if container_ollama_fp != expected_ollama_fp {
log::info!("Ollama config mismatch");
return Ok(true);
}
// ── LiteLLM config fingerprint ───────────────────────────────────────
let expected_litellm_fp = compute_litellm_fingerprint(project);
let container_litellm_fp = get_label("triple-c.litellm-fingerprint").unwrap_or_default();
if container_litellm_fp != expected_litellm_fp {
log::info!("LiteLLM config mismatch");
return Ok(true);
}
// ── Image ────────────────────────────────────────────────────────────
// The image label is set at creation time; if the user changed the
// configured image we need to recreate. We only compare when the

View File

@@ -33,6 +33,8 @@ pub struct Project {
pub status: ProjectStatus,
pub auth_mode: AuthMode,
pub bedrock_config: Option<BedrockConfig>,
pub ollama_config: Option<OllamaConfig>,
pub litellm_config: Option<LiteLlmConfig>,
pub allow_docker_access: bool,
#[serde(default)]
pub mission_control_enabled: bool,
@@ -74,6 +76,9 @@ pub enum AuthMode {
#[serde(alias = "login", alias = "api_key")]
Anthropic,
Bedrock,
Ollama,
#[serde(alias = "litellm")]
LiteLlm,
}
impl Default for AuthMode {
@@ -115,6 +120,29 @@ pub struct BedrockConfig {
pub disable_prompt_caching: bool,
}
/// Ollama configuration for a project.
/// Ollama exposes an Anthropic-compatible API endpoint.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct OllamaConfig {
    /// The base URL of the Ollama server (e.g., "http://host.docker.internal:11434" or "http://192.168.1.100:11434")
    pub base_url: String,
    /// Optional model override (e.g., "qwen3.5:27b"). When `None`, no
    /// ANTHROPIC_MODEL env var is injected into the container.
    pub model_id: Option<String>,
}
/// LiteLLM gateway configuration for a project.
/// LiteLLM translates Anthropic API calls to 100+ model providers.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct LiteLlmConfig {
    /// The base URL of the LiteLLM proxy (e.g., "http://host.docker.internal:4000" or "https://litellm.example.com")
    pub base_url: String,
    /// API key for the LiteLLM proxy.
    /// Never written to the on-disk project store (`skip_serializing`); it is
    /// loaded back from the OS keychain at runtime and defaults to `None`
    /// when absent from the input.
    #[serde(skip_serializing, default)]
    pub api_key: Option<String>,
    /// Optional model override, injected as ANTHROPIC_MODEL when set.
    pub model_id: Option<String>,
}
impl Project {
pub fn new(name: String, paths: Vec<ProjectPath>) -> Self {
let now = chrono::Utc::now().to_rfc3339();
@@ -126,6 +154,8 @@ impl Project {
status: ProjectStatus::Stopped,
auth_mode: AuthMode::default(),
bedrock_config: None,
ollama_config: None,
litellm_config: None,
allow_docker_access: false,
mission_control_enabled: false,
ssh_key_path: None,

View File

@@ -1,7 +1,7 @@
{
"$schema": "https://raw.githubusercontent.com/tauri-apps/tauri/dev/crates/tauri-cli/schema.json",
"productName": "Triple-C",
"version": "0.1.0",
"version": "0.2.0",
"identifier": "com.triple-c.desktop",
"build": {
"beforeDevCommand": "npm run dev",