From 5977024953f3f7509f9c011cfc522a21dae90858 Mon Sep 17 00:00:00 2001 From: Josh Knapp Date: Thu, 12 Mar 2026 12:53:24 -0700 Subject: [PATCH] Update Ollama docs and UI to mark model as required The model field must be set and the model must be pre-pulled in Ollama before the container will work. Updated README, HOW-TO-USE, and the ProjectCard UI label/tooltip to reflect this. Co-Authored-By: Claude Opus 4.6 --- HOW-TO-USE.md | 9 ++++++--- README.md | 2 +- app/src/components/projects/ProjectCard.tsx | 2 +- 3 files changed, 8 insertions(+), 5 deletions(-) diff --git a/HOW-TO-USE.md b/HOW-TO-USE.md index 31e26e5..be0e021 100644 --- a/HOW-TO-USE.md +++ b/HOW-TO-USE.md @@ -113,8 +113,9 @@ Claude Code launches automatically with `--dangerously-skip-permissions` inside 1. Stop the container first (settings can only be changed while stopped). 2. In the project card, switch the backend to **Ollama**. -3. Expand the **Config** panel and set the base URL of your Ollama server (defaults to `http://host.docker.internal:11434` for a local instance). Optionally set a model ID. -4. Start the container again. +3. Expand the **Config** panel and set the base URL of your Ollama server (defaults to `http://host.docker.internal:11434` for a local instance). Set the **Model ID** to the model you want to use (required). +4. Make sure the model has been pulled in Ollama (e.g., `ollama pull qwen3.5:27b`) or used via Ollama cloud before starting. +5. Start the container again. **LiteLLM:** @@ -414,7 +415,7 @@ To use Claude Code with a local or remote Ollama server, switch the backend to * ### Settings - **Base URL** — The URL of your Ollama server. Defaults to `http://host.docker.internal:11434`, which reaches a locally running Ollama instance from inside the container. For a remote server, use its IP or hostname (e.g., `http://192.168.1.100:11434`). -- **Model ID** — Optional. Override the model to use (e.g., `qwen3.5:27b`). 
+- **Model ID** — **Required.** The model to use (e.g., `qwen3.5:27b`). The model must be pulled in Ollama before use — run `ollama pull <model>` or use it via Ollama cloud so it is available when the container starts. ### How It Works @@ -422,6 +423,8 @@ Triple-C sets `ANTHROPIC_BASE_URL` to point Claude Code at your Ollama server in > **Note:** Ollama support is best-effort. Claude Code is designed for Anthropic models, so some features (tool use, extended thinking, prompt caching, etc.) may not work as expected with non-Anthropic models. +> **Important:** The model must already be available in Ollama before starting the container. If using a local Ollama instance, pull the model first with `ollama pull <model>`. If using Ollama's cloud service, ensure the model has been used at least once so it is cached. + --- ## LiteLLM Configuration diff --git a/README.md b/README.md index 83a24c7..c992bbb 100644 --- a/README.md +++ b/README.md @@ -49,7 +49,7 @@ Each project can independently use one of: - **Anthropic** (OAuth): User runs `claude login` inside the terminal on first use. Token persisted in the config volume across restarts and resets. - **AWS Bedrock**: Per-project AWS credentials (static keys, profile, or bearer token). SSO sessions are validated before launching Claude for Profile auth. -- **Ollama**: Connect to a local or remote Ollama server via `ANTHROPIC_BASE_URL` (e.g., `http://host.docker.internal:11434`). Optional model override. +- **Ollama**: Connect to a local or remote Ollama server via `ANTHROPIC_BASE_URL` (e.g., `http://host.docker.internal:11434`). Requires a model ID, and the model must be pulled (or used via Ollama cloud) before starting the container. - **LiteLLM**: Connect through a LiteLLM proxy gateway via `ANTHROPIC_BASE_URL` + `ANTHROPIC_AUTH_TOKEN` to access 100+ model providers. API key stored securely in OS keychain. > **Note:** Ollama and LiteLLM support is best-effort. 
Claude Code is designed for Anthropic models, so some features (tool use, extended thinking, prompt caching, etc.) may not work as expected with non-Anthropic models behind these backends. diff --git a/app/src/components/projects/ProjectCard.tsx b/app/src/components/projects/ProjectCard.tsx index e19a24f..1a8efd3 100644 --- a/app/src/components/projects/ProjectCard.tsx +++ b/app/src/components/projects/ProjectCard.tsx @@ -942,7 +942,7 @@ export default function ProjectCard({ project }: Props) {
- + setOllamaModelId(e.target.value)}