Rename LiteLLM backend to OpenAI Compatible
All checks were successful
Build App / compute-version (push) Successful in 8s
Build App / build-macos (push) Successful in 2m25s
Build App / build-windows (push) Successful in 4m0s
Build App / build-linux (push) Successful in 4m47s
Build App / create-tag (push) Successful in 3s
Build App / sync-to-github (push) Successful in 12s

Reflects that this backend works with any OpenAI API-compatible endpoint
(LiteLLM, OpenRouter, vLLM, text-generation-inference, LocalAI, etc.),
not just LiteLLM itself. Serde aliases are included so that existing
projects.json files remain backward compatible.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
2026-03-13 06:16:05 -07:00
parent 879322bc9a
commit d7d7a83aec
8 changed files with 100 additions and 98 deletions

View File

@@ -34,9 +34,9 @@ fn store_secrets_for_project(project: &Project) -> Result<(), String> {
secure::store_project_secret(&project.id, "aws-bearer-token", v)?;
}
}
if let Some(ref litellm) = project.litellm_config {
if let Some(ref v) = litellm.api_key {
secure::store_project_secret(&project.id, "litellm-api-key", v)?;
if let Some(ref oai_config) = project.openai_compatible_config {
if let Some(ref v) = oai_config.api_key {
secure::store_project_secret(&project.id, "openai-compatible-api-key", v)?;
}
}
Ok(())
@@ -56,8 +56,8 @@ fn load_secrets_for_project(project: &mut Project) {
bedrock.aws_bearer_token = secure::get_project_secret(&project.id, "aws-bearer-token")
.unwrap_or(None);
}
if let Some(ref mut litellm) = project.litellm_config {
litellm.api_key = secure::get_project_secret(&project.id, "litellm-api-key")
if let Some(ref mut oai_config) = project.openai_compatible_config {
oai_config.api_key = secure::get_project_secret(&project.id, "openai-compatible-api-key")
.unwrap_or(None);
}
}
@@ -197,11 +197,11 @@ pub async fn start_project_container(
}
}
if project.backend == Backend::LiteLlm {
let litellm = project.litellm_config.as_ref()
.ok_or_else(|| "LiteLLM backend selected but no LiteLLM configuration found.".to_string())?;
if litellm.base_url.is_empty() {
return Err("LiteLLM base URL is required.".to_string());
if project.backend == Backend::OpenAiCompatible {
let oai_config = project.openai_compatible_config.as_ref()
.ok_or_else(|| "OpenAI Compatible backend selected but no configuration found.".to_string())?;
if oai_config.base_url.is_empty() {
return Err("OpenAI Compatible base URL is required.".to_string());
}
}