Rename LiteLLM backend to OpenAI Compatible

Reflects that this backend works with any OpenAI API-compatible endpoint
(LiteLLM, OpenRouter, vLLM, text-generation-inference, LocalAI, etc.),
not just LiteLLM. Includes serde aliases for backward compatibility with
existing projects.json files.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-13 06:16:05 -07:00
parent 879322bc9a
commit d7d7a83aec
8 changed files with 100 additions and 98 deletions


@@ -23,7 +23,7 @@ export interface Project {
   backend: Backend;
   bedrock_config: BedrockConfig | null;
   ollama_config: OllamaConfig | null;
-  litellm_config: LiteLlmConfig | null;
+  openai_compatible_config: OpenAiCompatibleConfig | null;
   allow_docker_access: boolean;
   mission_control_enabled: boolean;
   ssh_key_path: string | null;
@@ -45,7 +45,7 @@ export type ProjectStatus =
   | "stopping"
   | "error";
-export type Backend = "anthropic" | "bedrock" | "ollama" | "lite_llm";
+export type Backend = "anthropic" | "bedrock" | "ollama" | "open_ai_compatible";
 export type BedrockAuthMethod = "static_credentials" | "profile" | "bearer_token";
@@ -66,7 +66,7 @@ export interface OllamaConfig {
   model_id: string | null;
 }
-export interface LiteLlmConfig {
+export interface OpenAiCompatibleConfig {
   base_url: string;
   api_key: string | null;
   model_id: string | null;
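
The commit message mentions serde aliases for backward compatibility with existing projects.json files, but the diff above only shows the TypeScript types. As a rough sketch of what those aliases might look like on the Rust side (the derive setup and any fields not visible in this diff are assumptions, not taken from the commit), the old names could be accepted during deserialization like this:

use serde::{Deserialize, Serialize};

#[derive(Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum Backend {
    Anthropic,
    Bedrock,
    Ollama,
    // Serialized as "open_ai_compatible"; the alias keeps the old
    // "lite_llm" value in existing projects.json files loading.
    #[serde(alias = "lite_llm")]
    OpenAiCompatible,
}

#[derive(Serialize, Deserialize)]
pub struct OpenAiCompatibleConfig {
    pub base_url: String,
    pub api_key: Option<String>,
    pub model_id: Option<String>,
}

#[derive(Serialize, Deserialize)]
pub struct Project {
    pub backend: Backend,
    // The old key "litellm_config" is still accepted when reading;
    // the renamed key is written on save.
    #[serde(alias = "litellm_config")]
    pub openai_compatible_config: Option<OpenAiCompatibleConfig>,
    // remaining fields omitted from this sketch
}

Since serde's alias attribute only affects deserialization, newly saved files would use the renamed keys while projects.json files written before this change still load.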