Add app update detection and multi-folder project support
All checks were successful
Build App / build-linux (push) Successful in 2m54s
Build App / build-windows (push) Successful in 4m18s
Build Container / build-container (push) Successful in 1m30s

Feature 1 - Update Detection: Query Gitea releases API on startup (3s
delay) and every 24h, compare patch versions by platform, show pulsing
"Update" button in TopBar with dialog for release notes/downloads.
Settings: auto-check toggle, manual check, dismiss per-version.

Feature 2 - Multi-Folder Projects: Replace single `path` with
`paths: Vec<ProjectPath>` (host_path + mount_name). Each folder mounts
to `/workspace/{mount_name}`. Auto-migrate old single-path JSON on load.
Container recreation via paths-fingerprint label. AddProjectDialog and
ProjectCard support add/remove/edit of multiple folders.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
2026-02-28 21:18:33 +00:00
parent 854f59a95a
commit 7e1cc92aa4
23 changed files with 1163 additions and 98 deletions

View File

@@ -2,3 +2,4 @@ pub mod docker_commands;
pub mod project_commands;
pub mod settings_commands;
pub mod terminal_commands;
pub mod update_commands;

View File

@@ -1,7 +1,7 @@
use tauri::State;
use crate::docker;
use crate::models::{container_config, AuthMode, Project, ProjectStatus};
use crate::models::{container_config, AuthMode, Project, ProjectPath, ProjectStatus};
use crate::storage::secure;
use crate::AppState;
@@ -51,10 +51,26 @@ pub async fn list_projects(state: State<'_, AppState>) -> Result<Vec<Project>, S
#[tauri::command]
pub async fn add_project(
    name: String,
    paths: Vec<ProjectPath>,
    state: State<'_, AppState>,
) -> Result<Project, String> {
    // Validate the folder list before creating the project: mount names become
    // `/workspace/{mount_name}` bind targets in the container, so they must be
    // non-empty, filesystem-safe, and unique within the project.
    if paths.is_empty() {
        return Err("At least one folder path is required.".to_string());
    }
    let mut seen_names = std::collections::HashSet::new();
    for p in &paths {
        if p.mount_name.is_empty() {
            return Err("Mount name cannot be empty.".to_string());
        }
        if !p.mount_name.chars().all(|c| c.is_alphanumeric() || c == '-' || c == '_' || c == '.') {
            return Err(format!("Mount name '{}' contains invalid characters. Use alphanumeric, dash, underscore, or dot.", p.mount_name));
        }
        if !seen_names.insert(p.mount_name.clone()) {
            return Err(format!("Duplicate mount name '{}'.", p.mount_name));
        }
    }
    let project = Project::new(name, paths);
    // Persist any secrets (e.g. credentials) before storing the project record.
    store_secrets_for_project(&project)?;
    state.projects_store.add(project)
}

View File

@@ -0,0 +1,117 @@
use crate::models::{GiteaRelease, ReleaseAsset, UpdateInfo};
const RELEASES_URL: &str =
"https://repo.anhonesthost.net/api/v1/repos/cybercovellc/triple-c/releases";
#[tauri::command]
pub fn get_app_version() -> String {
    // Compile-time crate version taken from Cargo.toml.
    String::from(env!("CARGO_PKG_VERSION"))
}
/// Query the Gitea releases API and report whether a newer build is available
/// for the current platform.
///
/// Releases are matched by tag suffix: tags ending in `-win` are Windows
/// builds, everything else is treated as Linux. Only the patch component of
/// the version is compared. Returns `Ok(None)` when no newer release exists.
///
/// # Errors
/// Returns a human-readable `String` error when the HTTP client cannot be
/// built, the request fails, or the response body cannot be parsed.
#[tauri::command]
pub async fn check_for_updates() -> Result<Option<UpdateInfo>, String> {
    let client = reqwest::Client::builder()
        .timeout(std::time::Duration::from_secs(15))
        .build()
        .map_err(|e| format!("Failed to create HTTP client: {}", e))?;

    let releases: Vec<GiteaRelease> = client
        .get(RELEASES_URL)
        .header("Accept", "application/json")
        .send()
        .await
        .map_err(|e| format!("Failed to fetch releases: {}", e))?
        .json()
        .await
        .map_err(|e| format!("Failed to parse releases: {}", e))?;

    let current_version = env!("CARGO_PKG_VERSION");
    let is_windows = cfg!(target_os = "windows");
    let current_patch = parse_patch_version(current_version).unwrap_or(0);

    // Select the platform-matching release with the highest patch version
    // strictly greater than the running build. Iterate lazily instead of
    // collecting an intermediate Vec; ties keep the first release seen.
    let mut best: Option<(&GiteaRelease, u32)> = None;
    let candidates = releases.iter().filter(|r| {
        if is_windows {
            r.tag_name.ends_with("-win")
        } else {
            !r.tag_name.ends_with("-win")
        }
    });
    for release in candidates {
        if let Some(patch) = parse_patch_from_tag(&release.tag_name) {
            if patch > current_patch && best.map_or(true, |(_, b)| patch > b) {
                best = Some((release, patch));
            }
        }
    }

    Ok(best.map(|(release, _)| {
        let assets = release
            .assets
            .iter()
            .map(|a| ReleaseAsset {
                name: a.name.clone(),
                browser_download_url: a.browser_download_url.clone(),
                size: a.size,
            })
            .collect();
        // Prefer a clean "X.Y.Z" string; fall back to the raw tag when the
        // tag does not look like a version.
        let version = extract_version_from_tag(&release.tag_name)
            .unwrap_or_else(|| release.tag_name.clone());
        UpdateInfo {
            version,
            tag_name: release.tag_name.clone(),
            release_url: release.html_url.clone(),
            body: release.body.clone(),
            assets,
            published_at: release.published_at.clone(),
        }
    }))
}
/// Extract the patch component of a semver-like string: "0.1.5" or
/// "v0.1.5" -> 5. Returns `None` when there are fewer than three
/// dot-separated components or the third one is not a number.
fn parse_patch_version(version: &str) -> Option<u32> {
    version
        .trim_start_matches('v')
        .split('.')
        .nth(2)?
        .parse()
        .ok()
}
/// Patch component of a release tag: "v0.1.5", "v0.1.5-win", "0.1.5" -> 5.
fn parse_patch_from_tag(tag: &str) -> Option<u32> {
    let trimmed = tag.trim_start_matches('v');
    // Drop the platform suffix before parsing.
    let trimmed = trimmed.strip_suffix("-win").unwrap_or(trimmed);
    // Patch is the third dot-separated component, when numeric.
    trimmed.split('.').nth(2)?.parse().ok()
}
/// Clean version string from a release tag: "v0.1.5-win" -> "0.1.5".
/// Returns `None` unless the remainder is at least three dot-separated
/// components, all numeric.
fn extract_version_from_tag(tag: &str) -> Option<String> {
    let candidate = tag.trim_start_matches('v');
    let candidate = candidate.strip_suffix("-win").unwrap_or(candidate);
    // Count components while verifying each parses as an integer.
    let mut component_count = 0usize;
    for part in candidate.split('.') {
        if part.parse::<u32>().is_err() {
            return None;
        }
        component_count += 1;
    }
    if component_count >= 3 {
        Some(candidate.to_string())
    } else {
        None
    }
}

View File

@@ -8,7 +8,7 @@ use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};
use super::client::get_docker;
use crate::models::{AuthMode, BedrockAuthMethod, ContainerInfo, EnvVar, GlobalAwsSettings, Project};
use crate::models::{AuthMode, BedrockAuthMethod, ContainerInfo, EnvVar, GlobalAwsSettings, Project, ProjectPath};
/// Compute a fingerprint string for the custom environment variables.
/// Sorted alphabetically so order changes do not cause spurious recreation.
@@ -62,6 +62,20 @@ fn compute_bedrock_fingerprint(project: &Project) -> String {
}
}
/// Fingerprint of the project's mount configuration, compared against the
/// `triple-c.paths-fingerprint` container label to detect when the container
/// must be recreated. Entries are sorted so reordering folders alone does not
/// change the fingerprint.
fn compute_paths_fingerprint(paths: &[ProjectPath]) -> String {
    let mut entries: Vec<String> = paths
        .iter()
        .map(|p| format!("{}:{}", p.mount_name, p.host_path))
        .collect();
    entries.sort_unstable();
    let mut hasher = DefaultHasher::new();
    entries.join(",").hash(&mut hasher);
    format!("{:x}", hasher.finish())
}
pub async fn find_existing_container(project: &Project) -> Result<Option<String>, String> {
let docker = get_docker()?;
let container_name = project.container_name();
@@ -231,24 +245,27 @@ pub async fn create_container(
env_vars.push(format!("CLAUDE_INSTRUCTIONS={}", instructions));
}
let mut mounts = vec![
// Project directory -> /workspace
Mount {
target: Some("/workspace".to_string()),
source: Some(project.path.clone()),
let mut mounts: Vec<Mount> = Vec::new();
// Project directories -> /workspace/{mount_name}
for pp in &project.paths {
mounts.push(Mount {
target: Some(format!("/workspace/{}", pp.mount_name)),
source: Some(pp.host_path.clone()),
typ: Some(MountTypeEnum::BIND),
read_only: Some(false),
..Default::default()
},
// Named volume for claude config persistence
Mount {
target: Some("/home/claude/.claude".to_string()),
source: Some(format!("triple-c-claude-config-{}", project.id)),
typ: Some(MountTypeEnum::VOLUME),
read_only: Some(false),
..Default::default()
},
];
});
}
// Named volume for claude config persistence
mounts.push(Mount {
target: Some("/home/claude/.claude".to_string()),
source: Some(format!("triple-c-claude-config-{}", project.id)),
typ: Some(MountTypeEnum::VOLUME),
read_only: Some(false),
..Default::default()
});
// SSH keys mount (read-only staging; entrypoint copies to ~/.ssh with correct perms)
if let Some(ref ssh_path) = project.ssh_key_path {
@@ -315,7 +332,7 @@ pub async fn create_container(
labels.insert("triple-c.project-id".to_string(), project.id.clone());
labels.insert("triple-c.project-name".to_string(), project.name.clone());
labels.insert("triple-c.auth-mode".to_string(), format!("{:?}", project.auth_mode));
labels.insert("triple-c.project-path".to_string(), project.path.clone());
labels.insert("triple-c.paths-fingerprint".to_string(), compute_paths_fingerprint(&project.paths));
labels.insert("triple-c.bedrock-fingerprint".to_string(), compute_bedrock_fingerprint(project));
labels.insert("triple-c.image".to_string(), image_name.to_string());
@@ -324,12 +341,18 @@ pub async fn create_container(
..Default::default()
};
let working_dir = if project.paths.len() == 1 {
format!("/workspace/{}", project.paths[0].mount_name)
} else {
"/workspace".to_string()
};
let config = Config {
image: Some(image_name.to_string()),
hostname: Some("triple-c".to_string()),
env: Some(env_vars),
labels: Some(labels),
working_dir: Some("/workspace".to_string()),
working_dir: Some(working_dir),
host_config: Some(host_config),
tty: Some(true),
..Default::default()
@@ -425,10 +448,18 @@ pub async fn container_needs_recreation(
}
}
// ── Project path ─────────────────────────────────────────────────────
if let Some(container_path) = get_label("triple-c.project-path") {
if container_path != project.path {
log::info!("Project path mismatch (container={:?}, project={:?})", container_path, project.path);
// ── Project paths fingerprint ──────────────────────────────────────────
let expected_paths_fp = compute_paths_fingerprint(&project.paths);
match get_label("triple-c.paths-fingerprint") {
Some(container_fp) => {
if container_fp != expected_paths_fp {
log::info!("Paths fingerprint mismatch (container={:?}, expected={:?})", container_fp, expected_paths_fp);
return Ok(true);
}
}
None => {
// Old container without paths-fingerprint label -> force recreation for migration
log::info!("Container missing paths-fingerprint label, triggering recreation for migration");
return Ok(true);
}
}

View File

@@ -63,6 +63,9 @@ pub fn run() {
commands::terminal_commands::terminal_input,
commands::terminal_commands::terminal_resize,
commands::terminal_commands::close_terminal_session,
// Updates
commands::update_commands::get_app_version,
commands::update_commands::check_for_updates,
])
.run(tauri::generate_context!())
.expect("error while running tauri application");

View File

@@ -1,5 +1,9 @@
use serde::{Deserialize, Serialize};
/// Serde `default` helper: fields using this default to `true` when absent
/// from stored JSON (e.g. `auto_check_updates`).
fn default_true() -> bool {
    true
}
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
#[serde(rename_all = "snake_case")]
pub enum ImageSource {
@@ -52,6 +56,10 @@ pub struct AppSettings {
pub global_aws: GlobalAwsSettings,
#[serde(default)]
pub global_claude_instructions: Option<String>,
#[serde(default = "default_true")]
pub auto_check_updates: bool,
#[serde(default)]
pub dismissed_update_version: Option<String>,
}
impl Default for AppSettings {
@@ -65,6 +73,8 @@ impl Default for AppSettings {
custom_image_name: None,
global_aws: GlobalAwsSettings::default(),
global_claude_instructions: None,
auto_check_updates: true,
dismissed_update_version: None,
}
}
}

View File

@@ -1,7 +1,9 @@
pub mod project;
pub mod container_config;
pub mod app_settings;
pub mod update_info;
pub use project::*;
pub use container_config::*;
pub use app_settings::*;
pub use update_info::*;

View File

@@ -6,11 +6,17 @@ pub struct EnvVar {
pub value: String,
}
/// One host folder belonging to a project, mounted into the container as
/// `/workspace/{mount_name}`.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub struct ProjectPath {
    /// Host filesystem path of the folder to bind-mount.
    pub host_path: String,
    /// Directory name under `/workspace` inside the container; must be unique
    /// within a project (validated when the project is added).
    pub mount_name: String,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Project {
pub id: String,
pub name: String,
pub path: String,
pub paths: Vec<ProjectPath>,
pub container_id: Option<String>,
pub status: ProjectStatus,
pub auth_mode: AuthMode,
@@ -91,12 +97,12 @@ pub struct BedrockConfig {
}
impl Project {
pub fn new(name: String, path: String) -> Self {
pub fn new(name: String, paths: Vec<ProjectPath>) -> Self {
let now = chrono::Utc::now().to_rfc3339();
Self {
id: uuid::Uuid::new_v4().to_string(),
name,
path,
paths,
container_id: None,
status: ProjectStatus::Stopped,
auth_mode: AuthMode::default(),
@@ -116,4 +122,29 @@ impl Project {
/// Deterministic Docker container name for this project, derived from its id.
pub fn container_name(&self) -> String {
    let mut name = String::with_capacity("triple-c-".len() + self.id.len());
    name.push_str("triple-c-");
    name.push_str(&self.id);
    name
}
/// Migrate a stored project JSON object from the legacy single-`path` format
/// to the current `paths` array format. Values that already contain a `paths`
/// key (or are not JSON objects) are returned unchanged.
pub fn migrate_from_value(mut val: serde_json::Value) -> serde_json::Value {
    let Some(obj) = val.as_object_mut() else {
        return val;
    };
    if obj.contains_key("paths") {
        return val;
    }
    if let Some(old_path) = obj.remove("path") {
        let host_path = old_path.as_str().unwrap_or("").to_string();
        // Mount name is the last path component (either separator style);
        // fall back to "workspace" when nothing usable remains.
        let last_component = host_path
            .trim_end_matches(['/', '\\'])
            .rsplit(['/', '\\'])
            .next()
            .unwrap_or("workspace");
        let mount_name = if last_component.is_empty() {
            "workspace"
        } else {
            last_component
        };
        obj.insert(
            "paths".to_string(),
            serde_json::json!([{ "host_path": host_path, "mount_name": mount_name }]),
        );
    }
    val
}
}

View File

@@ -0,0 +1,37 @@
use serde::{Deserialize, Serialize};
/// Info returned to the frontend about an available update.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct UpdateInfo {
    /// Clean "X.Y.Z" version derived from the tag (falls back to the raw tag).
    pub version: String,
    /// Raw release tag, e.g. "v0.1.5" or "v0.1.5-win".
    pub tag_name: String,
    /// Web URL of the release page.
    pub release_url: String,
    /// Release notes text.
    pub body: String,
    /// Downloadable files attached to the release.
    pub assets: Vec<ReleaseAsset>,
    /// Publication timestamp as reported by the API.
    pub published_at: String,
}
/// A single downloadable file attached to a release, as exposed to the frontend.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ReleaseAsset {
    /// File name of the asset.
    pub name: String,
    /// Direct download URL for the asset.
    pub browser_download_url: String,
    /// Asset size as reported by the API (presumably bytes).
    pub size: u64,
}
/// Gitea API release response (internal).
#[derive(Debug, Clone, Deserialize)]
pub struct GiteaRelease {
    /// Release tag; platform builds carry a "-win" suffix.
    pub tag_name: String,
    /// Web URL of the release page.
    pub html_url: String,
    /// Release notes text.
    pub body: String,
    /// Files attached to the release.
    pub assets: Vec<GiteaAsset>,
    /// Publication timestamp string from the API.
    pub published_at: String,
}
/// Gitea API asset response (internal).
#[derive(Debug, Clone, Deserialize)]
pub struct GiteaAsset {
    /// File name of the asset.
    pub name: String,
    /// Direct download URL for the asset.
    pub browser_download_url: String,
    /// Asset size as reported by the API (presumably bytes).
    pub size: u64,
}

View File

@@ -19,33 +19,72 @@ impl ProjectsStore {
let file_path = data_dir.join("projects.json");
let projects = if file_path.exists() {
let (projects, needs_save) = if file_path.exists() {
match fs::read_to_string(&file_path) {
Ok(data) => match serde_json::from_str(&data) {
Ok(parsed) => parsed,
Err(e) => {
log::error!("Failed to parse projects.json: {}. Starting with empty list.", e);
// Back up the corrupted file
let backup = file_path.with_extension("json.bak");
if let Err(be) = fs::copy(&file_path, &backup) {
log::error!("Failed to back up corrupted projects.json: {}", be);
Ok(data) => {
// First try to parse as Vec<Value> to run migration
match serde_json::from_str::<Vec<serde_json::Value>>(&data) {
Ok(raw_values) => {
let mut migrated = false;
let migrated_values: Vec<serde_json::Value> = raw_values
.into_iter()
.map(|v| {
let has_path = v.as_object().map_or(false, |o| o.contains_key("path") && !o.contains_key("paths"));
if has_path {
migrated = true;
}
crate::models::Project::migrate_from_value(v)
})
.collect();
// Now deserialize the migrated values
let json_str = serde_json::to_string(&migrated_values).unwrap_or_default();
match serde_json::from_str::<Vec<crate::models::Project>>(&json_str) {
Ok(parsed) => (parsed, migrated),
Err(e) => {
log::error!("Failed to parse migrated projects.json: {}. Starting with empty list.", e);
let backup = file_path.with_extension("json.bak");
if let Err(be) = fs::copy(&file_path, &backup) {
log::error!("Failed to back up corrupted projects.json: {}", be);
}
(Vec::new(), false)
}
}
}
Err(e) => {
log::error!("Failed to parse projects.json: {}. Starting with empty list.", e);
let backup = file_path.with_extension("json.bak");
if let Err(be) = fs::copy(&file_path, &backup) {
log::error!("Failed to back up corrupted projects.json: {}", be);
}
(Vec::new(), false)
}
Vec::new()
}
},
}
Err(e) => {
log::error!("Failed to read projects.json: {}", e);
Vec::new()
(Vec::new(), false)
}
}
} else {
Vec::new()
(Vec::new(), false)
};
Ok(Self {
let store = Self {
projects: Mutex::new(projects),
file_path,
})
};
// Persist migrated format back to disk
if needs_save {
log::info!("Migrated projects.json from single-path to multi-path format");
let projects = store.lock();
if let Err(e) = store.save(&projects) {
log::error!("Failed to save migrated projects: {}", e);
}
}
Ok(store)
}
fn lock(&self) -> std::sync::MutexGuard<'_, Vec<Project>> {