Add Ollama and LiteLLM backend support (v0.2.0)
Add two new auth modes for projects alongside Anthropic and Bedrock:

- Ollama: connect to local or remote Ollama servers via ANTHROPIC_BASE_URL
- LiteLLM: connect through a LiteLLM proxy gateway to 100+ model providers

Both modes inject the ANTHROPIC_BASE_URL and ANTHROPIC_AUTH_TOKEN env vars
into the container, with an optional model override via ANTHROPIC_MODEL.
LiteLLM API keys are stored securely in the OS keychain. Config changes
trigger automatic container recreation via fingerprinting.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
@@ -22,6 +22,8 @@ export interface Project {
|
||||
status: ProjectStatus;
|
||||
auth_mode: AuthMode;
|
||||
bedrock_config: BedrockConfig | null;
|
||||
ollama_config: OllamaConfig | null;
|
||||
litellm_config: LiteLlmConfig | null;
|
||||
allow_docker_access: boolean;
|
||||
mission_control_enabled: boolean;
|
||||
ssh_key_path: string | null;
|
||||
@@ -43,7 +45,7 @@ export type ProjectStatus =
|
||||
| "stopping"
|
||||
| "error";
|
||||
|
||||
// Pre-change union (the "-" side of this diff), before Ollama/LiteLLM support.
export type AuthMode = "anthropic" | "bedrock";
|
||||
/**
 * Authentication backend a project uses.
 *
 * "ollama" and "lit_llm" route requests via ANTHROPIC_BASE_URL to an Ollama
 * server or a LiteLLM proxy gateway respectively (per this commit's
 * description above).
 */
// NOTE(review): the literal "lit_llm" does not match the "litellm" spelling
// used by `litellm_config` / `LiteLlmConfig`. This likely mirrors the
// backend's serialized enum variant — confirm against the backend before
// renaming either side.
export type AuthMode = "anthropic" | "bedrock" | "ollama" | "lit_llm";
|
||||
|
||||
/**
 * How AWS credentials are supplied when auth_mode is "bedrock":
 * static access keys, a named AWS profile, or a bearer token.
 */
export type BedrockAuthMethod = "static_credentials" | "profile" | "bearer_token";
|
||||
|
||||
@@ -59,6 +61,17 @@ export interface BedrockConfig {
|
||||
disable_prompt_caching: boolean;
|
||||
}
|
||||
|
||||
/**
 * Connection settings for the "ollama" auth mode.
 *
 * Per the commit description, these values are injected into the project
 * container as environment variables (ANTHROPIC_BASE_URL, and
 * ANTHROPIC_MODEL when a model override is set).
 */
export interface OllamaConfig {
  /** URL of the local or remote Ollama server (becomes ANTHROPIC_BASE_URL). */
  base_url: string;
  // Optional model override (becomes ANTHROPIC_MODEL); null presumably means
  // "no override, use the server's default" — TODO confirm against the backend.
  model_id: string | null;
}
|
||||
|
||||
/**
 * Connection settings for the LiteLLM auth mode (AuthMode "lit_llm"): a
 * LiteLLM proxy gateway fronting many model providers.
 *
 * Per the commit description, base_url and the API key are injected into the
 * container as ANTHROPIC_BASE_URL / ANTHROPIC_AUTH_TOKEN, and the key itself
 * is stored in the OS keychain.
 */
export interface LiteLlmConfig {
  /** URL of the LiteLLM proxy (becomes ANTHROPIC_BASE_URL). */
  base_url: string;
  // Proxy API key; persisted in the OS keychain per the commit description.
  // NOTE(review): null likely means "not provided / fetch from keychain" —
  // confirm which side is authoritative.
  api_key: string | null;
  // Optional model override (becomes ANTHROPIC_MODEL); null presumably means
  // "no override" — TODO confirm against the backend.
  model_id: string | null;
}
|
||||
|
||||
export interface ContainerInfo {
|
||||
container_id: string;
|
||||
project_id: string;
|
||||
|
||||
Reference in New Issue
Block a user