diff --git a/Cargo.lock b/Cargo.lock index 96b3f3a7..d431978a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -417,15 +417,15 @@ dependencies = [ [[package]] name = "console" -version = "0.16.0" +version = "0.16.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2e09ced7ebbccb63b4c65413d821f2e00ce54c5ca4514ddc6b3c892fdbcbc69d" +checksum = "03e45a4a8926227e4197636ba97a9fc9b00477e9f4bd711395687c5f0734bec4" dependencies = [ "encode_unicode", "libc", "once_cell", "unicode-width 0.2.2", - "windows-sys 0.60.2", + "windows-sys 0.61.2", ] [[package]] @@ -1292,9 +1292,9 @@ checksum = "2c4a545a15244c7d945065b5d392b2d2d7f21526fba56ce51467b06ed445e8f7" [[package]] name = "libc" -version = "0.2.179" +version = "0.2.180" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c5a2d376baa530d1238d133232d15e239abad80d05838b4b59354e5268af431f" +checksum = "bcc35a38544a891a5f7c865aca548a982ccb3b8650a5b06d0fd33a10283c56fc" [[package]] name = "liblzma" @@ -1758,9 +1758,9 @@ dependencies = [ [[package]] name = "portable-atomic" -version = "1.11.1" +version = "1.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f84267b20a16ea918e43c6a88433c2d54fa145c92a811b5b047ccbe153674483" +checksum = "c33a9471896f1c69cecef8d20cbe2f7accd12527ce60845ff44c153bb2a21b49" [[package]] name = "positioned-io" @@ -2214,14 +2214,10 @@ name = "soar-cli" version = "0.11.0" dependencies = [ "clap", - "fast-glob", "indicatif", "miette", - "minisign-verify", "nix", "nu-ansi-term", - "once_cell", - "rayon", "regex", "semver", "serde", @@ -2230,8 +2226,9 @@ dependencies = [ "soar-core", "soar-db", "soar-dl", + "soar-events", + "soar-operations", "soar-package", - "soar-registry", "soar-utils", "tabled", "terminal_size", @@ -2323,6 +2320,26 @@ dependencies = [ name = "soar-events" version = "0.0.0" +[[package]] +name = "soar-operations" +version = "0.0.0" +dependencies = [ + "fast-glob", + "minisign-verify", + "once_cell", + "rayon", + 
"soar-config", + "soar-core", + "soar-db", + "soar-dl", + "soar-events", + "soar-package", + "soar-registry", + "soar-utils", + "tokio", + "tracing", +] + [[package]] name = "soar-package" version = "0.2.3" @@ -2813,9 +2830,9 @@ checksum = "b4ac048d71ede7ee76d585517add45da530660ef4390e49b098733c6e897f254" [[package]] name = "unit-prefix" -version = "0.5.1" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "323402cff2dd658f39ca17c789b502021b3f18707c91cdf22e3838e1b4023817" +checksum = "81e544489bf3d8ef66c953931f56617f423cd4b5494be343d9b9d3dda037b9a3" [[package]] name = "untrusted" diff --git a/Cargo.toml b/Cargo.toml index 4c9ce7c5..5686daa8 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -7,6 +7,7 @@ members = [ "crates/soar-db", "crates/soar-dl", "crates/soar-events", + "crates/soar-operations", "crates/soar-package", "crates/soar-registry", "crates/soar-utils", @@ -38,7 +39,9 @@ image = { version = "0.25.9", default-features = false, features = ["png"] } landlock = "0.4.4" libsqlite3-sys = { version = ">=0.30.1,<0.36.0", features = [ "bundled" ]} miette = { version = "7.6.0", features = ["fancy"] } +minisign-verify = "0.2.4" nix = { version = "0.30.1", features = ["fs", "ioctl", "term", "user"] } +once_cell = "1.21" percent-encoding = "2.3.2" rayon = "1.11.0" regex = { version = "1.12.2", default-features = false, features = [ @@ -46,6 +49,7 @@ regex = { version = "1.12.2", default-features = false, features = [ "unicode-case", "unicode-perl" ] } +semver = "1.0.27" serde = { version = "1.0.228", features = ["derive"] } serde_json = { version = "1.0.149", features = ["indexmap"] } serial_test = "3.3.1" @@ -54,12 +58,14 @@ soar-core = { version = "0.13.0", path = "crates/soar-core" } soar-db = { version = "0.4.0", path = "crates/soar-db" } soar-dl = { version = "0.8.0", path = "crates/soar-dl" } soar-events = { version = "0.0.0", path = "crates/soar-events" } +soar-operations = { version = "0.0.0", path = 
"crates/soar-operations" } soar-package = { version = "0.2.3", path = "crates/soar-package" } soar-registry = { version = "0.3.0", path = "crates/soar-registry" } soar-utils = { version = "0.3.0", path = "crates/soar-utils" } squishy = { version = "0.4.0", features = ["appimage", "dwarfs"] } tempfile = "3.24.0" thiserror = "2.0.17" +tokio = { version = "1.49.0", features = ["macros", "rt-multi-thread", "sync", "time"] } toml = "0.9.10" toml_edit = "0.23.10" tracing = { version = "0.1.44", default-features = false } diff --git a/crates/soar-operations/Cargo.toml b/crates/soar-operations/Cargo.toml new file mode 100644 index 00000000..c1512072 --- /dev/null +++ b/crates/soar-operations/Cargo.toml @@ -0,0 +1,27 @@ +[package] +name = "soar-operations" +version = "0.0.0" +description = "Business logic for soar package manager" +authors.workspace = true +license.workspace = true +edition.workspace = true +repository.workspace = true +keywords.workspace = true +readme.workspace = true +categories.workspace = true + +[dependencies] +fast-glob = { workspace = true } +minisign-verify = { workspace = true } +once_cell = { workspace = true } +rayon = { workspace = true } +soar-config = { workspace = true } +soar-core = { workspace = true } +soar-db = { workspace = true } +soar-dl = { workspace = true } +soar-events = { workspace = true } +soar-package = { workspace = true } +soar-registry = { workspace = true } +soar-utils = { workspace = true } +tokio = { workspace = true } +tracing = { workspace = true } diff --git a/crates/soar-operations/src/apply.rs b/crates/soar-operations/src/apply.rs new file mode 100644 index 00000000..5f05aadc --- /dev/null +++ b/crates/soar-operations/src/apply.rs @@ -0,0 +1,654 @@ +use std::collections::HashSet; + +use soar_config::packages::{PackagesConfig, ResolvedPackage}; +use soar_core::{ + database::{ + connection::DieselDatabase, + models::{InstalledPackage, Package}, + }, + package::{ + install::InstallTarget, + 
release_source::{run_version_command, ReleaseSource}, + remove::PackageRemover, + url::UrlPackage, + }, + utils::substitute_placeholders, + SoarResult, +}; +use soar_db::repository::{ + core::{CoreRepository, SortDirection}, + metadata::MetadataRepository, +}; +use soar_events::{RemoveStage, SoarEvent}; +use tracing::{debug, warn}; + +use crate::{ + install::perform_installation, progress::next_op_id, utils::get_package_hooks, ApplyDiff, + ApplyReport, InstallOptions, SoarContext, +}; + +/// Status of a URL package compared against installed packages. +enum UrlPackageStatus { + ToInstall(InstallTarget), + ToUpdate(InstallTarget), + InSync(String), +} + +/// Compute the difference between declared packages (from packages.toml) and +/// installed packages. +/// +/// If `prune` is true, packages installed but not declared will be listed for removal. +pub async fn compute_diff( + ctx: &SoarContext, + resolved: &[ResolvedPackage], + prune: bool, +) -> SoarResult { + debug!( + count = resolved.len(), + prune = prune, + "computing apply diff" + ); + let metadata_mgr = ctx.metadata_manager().await?; + let diesel_db = ctx.diesel_core_db()?.clone(); + + let mut diff = ApplyDiff::default(); + let mut declared_keys: HashSet<(String, Option, Option)> = HashSet::new(); + + for pkg in resolved { + declared_keys.insert((pkg.name.clone(), pkg.pkg_id.clone(), pkg.repo.clone())); + + let is_github_or_gitlab = pkg.github.is_some() || pkg.gitlab.is_some(); + if is_github_or_gitlab || pkg.url.is_some() { + handle_local_package(pkg, is_github_or_gitlab, &diesel_db, &mut diff)?; + continue; + } + + // Find package in metadata + let found_packages: Vec = if let Some(ref repo_name) = pkg.repo { + metadata_mgr + .query_repo(repo_name, |conn| { + MetadataRepository::find_filtered( + conn, + Some(&pkg.name), + pkg.pkg_id.as_deref(), + pkg.version.as_deref(), + None, + Some(SortDirection::Asc), + ) + })? 
+ .unwrap_or_default() + .into_iter() + .map(|p| { + let mut package: Package = p.into(); + package.repo_name = repo_name.clone(); + package + }) + .collect() + } else { + metadata_mgr.query_all_flat(|repo_name, conn| { + let pkgs = MetadataRepository::find_filtered( + conn, + Some(&pkg.name), + pkg.pkg_id.as_deref(), + pkg.version.as_deref(), + None, + Some(SortDirection::Asc), + )?; + Ok(pkgs + .into_iter() + .map(|p| { + let mut package: Package = p.into(); + package.repo_name = repo_name.to_string(); + package + }) + .collect()) + })? + }; + + if found_packages.is_empty() { + diff.not_found.push(pkg.name.clone()); + continue; + } + + let metadata_pkg = found_packages.into_iter().next().unwrap(); + + let installed_packages: Vec = diesel_db + .with_conn(|conn| { + CoreRepository::list_filtered( + conn, + Some(&metadata_pkg.repo_name), + Some(&metadata_pkg.pkg_name), + Some(&metadata_pkg.pkg_id), + None, + None, + None, + None, + Some(SortDirection::Asc), + ) + })? + .into_iter() + .map(Into::into) + .collect(); + + let existing_install = installed_packages.into_iter().find(|ip| ip.is_installed); + + if let Some(ref existing) = existing_install { + let version_matches = pkg.version.as_ref().is_none_or(|v| existing.version == *v); + + if version_matches && existing.version == metadata_pkg.version { + diff.in_sync.push(format!( + "{}#{}@{}", + existing.pkg_name, existing.pkg_id, existing.version + )); + } else if !existing.pinned || pkg.version.is_some() { + let target = create_install_target(pkg, metadata_pkg, Some(existing.clone())); + diff.to_update.push((pkg.clone(), target)); + } else { + diff.in_sync.push(format!( + "{}#{}@{} (pinned)", + existing.pkg_name, existing.pkg_id, existing.version + )); + } + } else { + let target = create_install_target(pkg, metadata_pkg, None); + diff.to_install.push((pkg.clone(), target)); + } + } + + if prune { + let all_installed: Vec = diesel_db + .with_conn(|conn| { + CoreRepository::list_filtered( + conn, + None, + None, + 
None, + None, + None, + None, + None, + Some(SortDirection::Asc), + ) + })? + .into_iter() + .filter(|p| p.is_installed) + .map(Into::into) + .collect(); + + for installed in all_installed { + let is_declared = declared_keys.iter().any(|(name, pkg_id, repo)| { + let name_matches = *name == installed.pkg_name; + let pkg_id_matches = pkg_id.as_ref().is_none_or(|id| *id == installed.pkg_id); + let repo_matches = repo.as_ref().is_none_or(|r| *r == installed.repo_name); + name_matches && pkg_id_matches && repo_matches + }); + + if !is_declared { + diff.to_remove.push(installed); + } + } + } + + Ok(diff) +} + +/// Execute an apply operation from a computed diff. +/// +/// Installs new packages, updates existing ones, removes pruned ones, +/// and updates packages.toml version entries. +pub async fn execute_apply( + ctx: &SoarContext, + diff: ApplyDiff, + no_verify: bool, +) -> SoarResult { + debug!("executing apply"); + let diesel_db = ctx.diesel_core_db()?.clone(); + + let mut installed_count = 0; + let mut updated_count = 0; + let mut removed_count = 0; + let mut failed_count = 0; + + // Apply pending version updates for in-sync packages + for (pkg_name, version) in &diff.pending_version_updates { + if let Err(e) = PackagesConfig::update_package(pkg_name, None, Some(version), None) { + warn!( + "Failed to update version for '{}' in packages.toml: {}", + pkg_name, e + ); + } + } + + // Install new packages + if !diff.to_install.is_empty() { + let mut version_updates: Vec<(String, String)> = Vec::new(); + for (pkg, target) in &diff.to_install { + let declared_version = pkg + .version + .as_ref() + .map(|v| v.strip_prefix('v').unwrap_or(v)); + if declared_version != Some(target.package.version.as_str()) { + version_updates.push((pkg.name.clone(), target.package.version.clone())); + } + } + + let targets: Vec = diff + .to_install + .into_iter() + .map(|(_, target)| target) + .collect(); + + let options = InstallOptions { + no_verify, + ..Default::default() + }; + + let 
report = perform_installation(ctx, targets, &options).await?; + installed_count = report.installed.len(); + failed_count += report.failed.len(); + + let succeeded: HashSet<&str> = report + .installed + .iter() + .map(|i| i.pkg_name.as_str()) + .collect(); + for (pkg_name, version) in &version_updates { + if succeeded.contains(pkg_name.as_str()) { + if let Err(e) = PackagesConfig::update_package(pkg_name, None, Some(version), None) + { + warn!( + "Failed to update version for '{}' in packages.toml: {}", + pkg_name, e + ); + } + } + } + } + + // Update packages + if !diff.to_update.is_empty() { + let mut update_version_updates: Vec<(String, String)> = Vec::new(); + for (pkg, target) in &diff.to_update { + let declared_version = pkg + .version + .as_ref() + .map(|v| v.strip_prefix('v').unwrap_or(v)); + if declared_version != Some(target.package.version.as_str()) { + update_version_updates.push((pkg.name.clone(), target.package.version.clone())); + } + } + + let targets: Vec = diff + .to_update + .into_iter() + .map(|(_, target)| target) + .collect(); + + let options = InstallOptions { + no_verify, + ..Default::default() + }; + + let report = perform_installation(ctx, targets, &options).await?; + updated_count = report.installed.len(); + failed_count += report.failed.len(); + + let succeeded: HashSet<&str> = report + .installed + .iter() + .map(|i| i.pkg_name.as_str()) + .collect(); + for (pkg_name, version) in &update_version_updates { + if succeeded.contains(pkg_name.as_str()) { + if let Err(e) = PackagesConfig::update_package(pkg_name, None, Some(version), None) + { + warn!( + "Failed to update version for '{}' in packages.toml: {}", + pkg_name, e + ); + } + } + } + } + + // Remove pruned packages + if !diff.to_remove.is_empty() { + for pkg in diff.to_remove { + let op_id = next_op_id(); + ctx.events().emit(SoarEvent::Removing { + op_id, + pkg_name: pkg.pkg_name.clone(), + pkg_id: pkg.pkg_id.clone(), + stage: RemoveStage::RunningHook("pre_remove".into()), + }); + + 
let (hooks, sandbox) = get_package_hooks(&pkg.pkg_name); + match PackageRemover::new(pkg.clone(), diesel_db.clone()) + .await + .with_hooks(hooks) + .with_sandbox(sandbox) + .remove() + .await + { + Ok(()) => { + ctx.events().emit(SoarEvent::Removing { + op_id, + pkg_name: pkg.pkg_name.clone(), + pkg_id: pkg.pkg_id.clone(), + stage: RemoveStage::Complete { + size_freed: None, + }, + }); + removed_count += 1; + } + Err(e) => { + ctx.events().emit(SoarEvent::OperationFailed { + op_id, + pkg_name: pkg.pkg_name.clone(), + pkg_id: pkg.pkg_id.clone(), + error: e.to_string(), + }); + failed_count += 1; + } + } + } + } + + Ok(ApplyReport { + installed_count, + updated_count, + removed_count, + failed_count, + }) +} + +/// Handle local (URL/github/gitlab) packages in apply diff. +fn handle_local_package( + pkg: &ResolvedPackage, + is_github_or_gitlab: bool, + diesel_db: &DieselDatabase, + diff: &mut ApplyDiff, +) -> SoarResult<()> { + let local_pkg_id = if is_github_or_gitlab { + pkg.pkg_id.clone().or_else(|| { + pkg.github + .as_ref() + .or(pkg.gitlab.as_ref()) + .map(|repo| repo.replace('/', ".")) + }) + } else { + pkg.pkg_id.clone() + }; + + let installed: Option = diesel_db + .with_conn(|conn| { + CoreRepository::list_filtered( + conn, + Some("local"), + Some(&pkg.name), + local_pkg_id.as_deref(), + None, + Some(true), + None, + Some(1), + None, + ) + })? 
+ .into_iter() + .next() + .map(Into::into); + + // Handle version_command packages + if let Some(ref cmd) = pkg.version_command { + if let Some(ref declared) = pkg.version { + let normalized = declared.strip_prefix('v').unwrap_or(declared); + if let Some(ref existing) = installed { + if existing.version == normalized { + diff.in_sync.push(format!("{} (local)", pkg.name)); + return Ok(()); + } + } + } + + let result = match run_version_command(cmd) { + Ok(r) => r, + Err(e) => { + warn!("Failed to run version_command for {}: {}", pkg.name, e); + diff.not_found + .push(format!("{} (version_command failed: {})", pkg.name, e)); + return Ok(()); + } + }; + + let version = result + .version + .strip_prefix('v') + .unwrap_or(&result.version) + .to_string(); + + if let Some(ref existing) = installed { + if existing.version == version { + let declared = pkg + .version + .as_ref() + .map(|s| s.strip_prefix('v').unwrap_or(s)); + if declared != Some(version.as_str()) { + diff.pending_version_updates + .push((pkg.name.clone(), version.clone())); + } + diff.in_sync.push(format!("{} (local)", pkg.name)); + return Ok(()); + } + } + + let download_url = match result.download_url { + Some(url) => url, + None => { + match &pkg.url { + Some(url) => substitute_placeholders(url, Some(&version)), + None => { + diff.not_found.push(format!( + "{} (version_command returned no URL and no url field configured)", + pkg.name + )); + return Ok(()); + } + } + } + }; + + let mut url_pkg = UrlPackage::from_remote( + &download_url, + Some(&pkg.name), + Some(&version), + pkg.pkg_type.as_deref(), + local_pkg_id.as_deref(), + )?; + url_pkg.size = result.size; + + match check_url_package_status(&url_pkg, pkg, "local", diesel_db)? 
{ + UrlPackageStatus::ToInstall(target) => diff.to_install.push((pkg.clone(), target)), + UrlPackageStatus::ToUpdate(target) => diff.to_update.push((pkg.clone(), target)), + UrlPackageStatus::InSync(label) => diff.in_sync.push(label), + } + return Ok(()); + } + + // Handle github/gitlab packages + if is_github_or_gitlab { + if let Some(ref declared) = pkg.version { + let normalized = declared.strip_prefix('v').unwrap_or(declared); + if let Some(ref existing) = installed { + if existing.version == normalized { + diff.in_sync.push(format!("{} (local)", pkg.name)); + return Ok(()); + } + } + } + + let source = match ReleaseSource::from_resolved(pkg) { + Some(s) => s, + None => { + diff.not_found.push(format!( + "{} (missing asset_pattern for github/gitlab source)", + pkg.name + )); + return Ok(()); + } + }; + let release = match source.resolve_version(pkg.version.as_deref()) { + Ok(r) => r, + Err(e) => { + warn!("Failed to resolve release for {}: {}", pkg.name, e); + diff.not_found.push(format!("{} ({})", pkg.name, e)); + return Ok(()); + } + }; + let version = release + .version + .strip_prefix('v') + .unwrap_or(&release.version) + .to_string(); + + let url_pkg = UrlPackage::from_remote( + &release.download_url, + Some(&pkg.name), + Some(&version), + pkg.pkg_type.as_deref(), + local_pkg_id.as_deref(), + )?; + + match check_url_package_status(&url_pkg, pkg, "local", diesel_db)? 
{ + UrlPackageStatus::ToInstall(target) => diff.to_install.push((pkg.clone(), target)), + UrlPackageStatus::ToUpdate(target) => diff.to_update.push((pkg.clone(), target)), + UrlPackageStatus::InSync(label) => diff.in_sync.push(label), + } + return Ok(()); + } + + // Handle plain URL packages + if let Some(ref url) = pkg.url { + if let Some(ref declared) = pkg.version { + let normalized = declared.strip_prefix('v').unwrap_or(declared); + if let Some(ref existing) = installed { + if existing.version == normalized { + diff.in_sync.push(format!("{} (local)", pkg.name)); + return Ok(()); + } + } + } + + let url = substitute_placeholders(url, pkg.version.as_deref()); + let url_pkg = UrlPackage::from_remote( + &url, + Some(&pkg.name), + pkg.version.as_deref(), + pkg.pkg_type.as_deref(), + pkg.pkg_id.as_deref(), + )?; + + match check_url_package_status(&url_pkg, pkg, "local", diesel_db)? { + UrlPackageStatus::ToInstall(target) => diff.to_install.push((pkg.clone(), target)), + UrlPackageStatus::ToUpdate(target) => diff.to_update.push((pkg.clone(), target)), + UrlPackageStatus::InSync(label) => diff.in_sync.push(label), + } + } + + Ok(()) +} + +fn check_url_package_status( + url_pkg: &UrlPackage, + pkg: &ResolvedPackage, + display_label: &str, + diesel_db: &DieselDatabase, +) -> SoarResult { + let installed_packages: Vec = diesel_db + .with_conn(|conn| { + CoreRepository::list_filtered( + conn, + Some("local"), + Some(&url_pkg.pkg_name), + Some(&url_pkg.pkg_id), + None, + None, + None, + None, + Some(SortDirection::Asc), + ) + })? 
+ .into_iter() + .map(Into::into) + .collect(); + + let installed = installed_packages + .iter() + .find(|ip| ip.is_installed) + .cloned(); + + if let Some(ref existing) = installed { + if url_pkg.version != existing.version { + let target = create_url_install_target(url_pkg, pkg, installed); + Ok(UrlPackageStatus::ToUpdate(target)) + } else { + Ok(UrlPackageStatus::InSync(format!( + "{} ({})", + pkg.name, display_label + ))) + } + } else { + let existing_install = installed_packages.into_iter().next(); + let target = create_url_install_target(url_pkg, pkg, existing_install); + Ok(UrlPackageStatus::ToInstall(target)) + } +} + +fn create_install_target( + resolved: &ResolvedPackage, + package: Package, + existing: Option, +) -> InstallTarget { + InstallTarget { + package, + existing_install: existing, + pinned: resolved.pinned, + profile: resolved.profile.clone(), + portable: resolved.portable.as_ref().and_then(|p| p.path.clone()), + portable_home: resolved.portable.as_ref().and_then(|p| p.home.clone()), + portable_config: resolved.portable.as_ref().and_then(|p| p.config.clone()), + portable_share: resolved.portable.as_ref().and_then(|p| p.share.clone()), + portable_cache: resolved.portable.as_ref().and_then(|p| p.cache.clone()), + entrypoint: resolved.entrypoint.clone(), + binaries: resolved.binaries.clone(), + nested_extract: resolved.nested_extract.clone(), + extract_root: resolved.extract_root.clone(), + hooks: resolved.hooks.clone(), + build: resolved.build.clone(), + sandbox: resolved.sandbox.clone(), + } +} + +fn create_url_install_target( + url_pkg: &UrlPackage, + resolved: &ResolvedPackage, + existing: Option, +) -> InstallTarget { + InstallTarget { + package: url_pkg.to_package(), + existing_install: existing, + pinned: resolved.pinned, + profile: resolved.profile.clone(), + portable: resolved.portable.as_ref().and_then(|p| p.path.clone()), + portable_home: resolved.portable.as_ref().and_then(|p| p.home.clone()), + portable_config: 
resolved.portable.as_ref().and_then(|p| p.config.clone()), + portable_share: resolved.portable.as_ref().and_then(|p| p.share.clone()), + portable_cache: resolved.portable.as_ref().and_then(|p| p.cache.clone()), + entrypoint: resolved.entrypoint.clone(), + binaries: resolved.binaries.clone(), + nested_extract: resolved.nested_extract.clone(), + extract_root: resolved.extract_root.clone(), + hooks: resolved.hooks.clone(), + build: resolved.build.clone(), + sandbox: resolved.sandbox.clone(), + } +} diff --git a/crates/soar-operations/src/context.rs b/crates/soar-operations/src/context.rs new file mode 100644 index 00000000..0e71f393 --- /dev/null +++ b/crates/soar-operations/src/context.rs @@ -0,0 +1,305 @@ +use std::{ + fs::{self, File}, + path::Path, + sync::Arc, +}; + +use once_cell::sync::OnceCell; +use soar_config::{config::Config, repository::Repository}; +use soar_core::{ + database::connection::{DieselDatabase, MetadataManager}, + error::{ErrorContext, SoarError}, + SoarResult, +}; +use soar_db::{ + connection::DbConnection, + migration::DbType, + repository::{core::CoreRepository, metadata::MetadataRepository}, +}; +use soar_events::{EventSinkHandle, LogLevel, SoarEvent, SyncStage}; +use soar_registry::{fetch_metadata, write_metadata_db, MetadataContent, RemotePackage}; +use tokio::sync::OnceCell as AsyncOnceCell; +use tracing::{debug, trace}; + +type SyncTaskResult = ( + soar_registry::Result>, + String, +); + +fn handle_json_metadata>( + metadata: &[RemotePackage], + metadata_db: P, + repo_name: &str, +) -> SoarResult<()> { + let metadata_db = metadata_db.as_ref(); + if metadata_db.exists() { + fs::remove_file(metadata_db) + .with_context(|| format!("removing metadata file {}", metadata_db.display()))?; + } + + let mut conn = DbConnection::open(metadata_db, DbType::Metadata) + .map_err(|e| SoarError::Custom(format!("opening metadata database: {}", e)))?; + + MetadataRepository::import_packages(conn.conn(), metadata, repo_name) + .map_err(|e| 
SoarError::Custom(format!("importing packages: {}", e)))?; + + Ok(()) +} + +#[derive(Clone)] +pub struct SoarContext { + inner: Arc, +} + +struct SoarContextInner { + config: Config, + events: EventSinkHandle, + diesel_core_db: OnceCell, + metadata_manager: AsyncOnceCell, +} + +impl SoarContext { + pub fn new(config: Config, events: EventSinkHandle) -> Self { + Self { + inner: Arc::new(SoarContextInner { + config, + events, + diesel_core_db: OnceCell::new(), + metadata_manager: AsyncOnceCell::new(), + }), + } + } + + #[inline] + pub fn config(&self) -> &Config { + &self.inner.config + } + + #[inline] + pub fn events(&self) -> &EventSinkHandle { + &self.inner.events + } + + pub fn diesel_core_db(&self) -> SoarResult<&DieselDatabase> { + self.inner + .diesel_core_db + .get_or_try_init(|| self.create_diesel_core_db()) + } + + pub async fn metadata_manager(&self) -> SoarResult<&MetadataManager> { + self.inner + .metadata_manager + .get_or_try_init(|| { + async { + self.init_repo_dbs(false).await?; + self.create_metadata_manager() + } + }) + .await + } + + pub async fn sync(&self) -> SoarResult<()> { + debug!("starting sync"); + self.init_repo_dbs(true).await?; + Ok(()) + } + + async fn init_repo_dbs(&self, force: bool) -> SoarResult<()> { + debug!( + force = force, + repos = self.inner.config.repositories.len(), + "initializing repository databases" + ); + let mut tasks = Vec::new(); + + for repo in &self.inner.config.repositories { + trace!(repo_name = repo.name, "scheduling repository sync"); + let repo_clone = repo.clone(); + let etag = self.read_repo_etag(&repo_clone); + let events = self.inner.events.clone(); + let repo_name = repo.name.clone(); + + let task: tokio::task::JoinHandle = tokio::task::spawn(async move { + if force { + events.emit(SoarEvent::SyncProgress { + repo_name: repo_name.clone(), + stage: SyncStage::Fetching, + }); + } + let result = fetch_metadata(&repo_clone, force, etag).await; + (result, repo_name) + }); + tasks.push((task, repo)); + } + + 
for (task, repo) in tasks { + let (result, repo_name) = task + .await + .map_err(|err| SoarError::Custom(format!("Join handle error: {err}")))?; + + match result { + Ok(Some((etag, content))) => { + let repo_path = repo.get_path()?; + let metadata_db_path = repo_path.join("metadata.db"); + + self.inner.events.emit(SoarEvent::SyncProgress { + repo_name: repo_name.clone(), + stage: SyncStage::Decompressing, + }); + + self.inner.events.emit(SoarEvent::SyncProgress { + repo_name: repo_name.clone(), + stage: SyncStage::WritingDatabase, + }); + + match content { + MetadataContent::SqliteDb(db_bytes) => { + write_metadata_db(&db_bytes, &metadata_db_path) + .map_err(|e| SoarError::Custom(e.to_string()))?; + } + MetadataContent::Json(packages) => { + handle_json_metadata(&packages, &metadata_db_path, &repo.name)?; + } + } + + self.inner.events.emit(SoarEvent::SyncProgress { + repo_name: repo_name.clone(), + stage: SyncStage::Validating, + }); + + self.validate_packages(repo, &etag).await?; + + self.inner.events.emit(SoarEvent::SyncProgress { + repo_name: repo_name.clone(), + stage: SyncStage::Complete { + package_count: None, + }, + }); + } + Ok(None) => { + if force { + self.inner.events.emit(SoarEvent::SyncProgress { + repo_name: repo_name.clone(), + stage: SyncStage::UpToDate, + }); + } + } + Err(err) => { + self.inner.events.emit(SoarEvent::Log { + level: LogLevel::Error, + message: format!("Failed to sync repository {}: {err}", repo.name), + }); + } + }; + } + + Ok(()) + } + + async fn validate_packages(&self, repo: &Repository, etag: &str) -> SoarResult<()> { + trace!( + repo_name = repo.name, + "validating installed packages against repository" + ); + let diesel_core_db = self.diesel_core_db()?; + let repo_name = repo.name.clone(); + + let repo_path = repo.get_path()?; + let metadata_db_path = repo_path.join("metadata.db"); + + let metadata_db = DieselDatabase::open_metadata(&metadata_db_path)?; + + let installed_packages = diesel_core_db.with_conn(|conn| { + 
CoreRepository::list_filtered( + conn, + Some(&repo_name), + None, + None, + None, + None, + None, + None, + None, + ) + })?; + + for pkg in installed_packages { + let exists = metadata_db + .with_conn(|conn| MetadataRepository::exists_by_pkg_id(conn, &pkg.pkg_id))?; + + if !exists { + let replacement = metadata_db.with_conn(|conn| { + MetadataRepository::find_replacement_pkg_id(conn, &pkg.pkg_id) + })?; + + if let Some(new_pkg_id) = replacement { + self.inner.events.emit(SoarEvent::Log { + level: LogLevel::Info, + message: format!( + "{} is replaced by {} in {}", + pkg.pkg_id, new_pkg_id, repo_name + ), + }); + + diesel_core_db.with_conn(|conn| { + CoreRepository::update_pkg_id(conn, &repo_name, &pkg.pkg_id, &new_pkg_id) + })?; + } + } + } + + metadata_db + .with_conn(|conn| MetadataRepository::update_repo_metadata(conn, &repo.name, etag))?; + + Ok(()) + } + + fn create_diesel_core_db(&self) -> SoarResult { + let core_db_file = self.config().get_db_path()?.join("soar.db"); + if !core_db_file.exists() { + if let Some(parent) = core_db_file.parent() { + std::fs::create_dir_all(parent) + .with_context(|| format!("creating database directory {}", parent.display()))?; + } + File::create(&core_db_file) + .with_context(|| format!("creating database file {}", core_db_file.display()))?; + } + + DieselDatabase::open_core(&core_db_file) + } + + fn create_metadata_manager(&self) -> SoarResult { + debug!("creating metadata manager"); + let mut manager = MetadataManager::new(); + + for repo in &self.inner.config.repositories { + if let Ok(repo_path) = repo.get_path() { + let metadata_db = repo_path.join("metadata.db"); + if metadata_db.is_file() { + trace!( + repo_name = repo.name, + "adding repository to metadata manager" + ); + manager.add_repo(&repo.name, metadata_db)?; + } + } + } + + debug!(repos = manager.repo_count(), "metadata manager created"); + Ok(manager) + } + + fn read_repo_etag(&self, repo: &Repository) -> Option { + let repo_path = repo.get_path().ok()?; + let 
metadata_db = repo_path.join("metadata.db"); + + if !metadata_db.exists() { + return None; + } + + let mut conn = DbConnection::open(&metadata_db, DbType::Metadata).ok()?; + MetadataRepository::get_repo_etag(conn.conn()) + .ok() + .flatten() + } +} diff --git a/crates/soar-operations/src/health.rs b/crates/soar-operations/src/health.rs new file mode 100644 index 00000000..019410b9 --- /dev/null +++ b/crates/soar-operations/src/health.rs @@ -0,0 +1,166 @@ +use std::path::PathBuf; + +use soar_config::config::is_system_mode; +use soar_core::{package::remove::PackageRemover, SoarResult}; +use soar_db::repository::core::CoreRepository; +use soar_events::{RemoveStage, SoarEvent}; +use soar_utils::{error::FileSystemResult, fs::walk_dir, path::icons_dir}; +use tracing::debug; + +use crate::{ + progress::next_op_id, utils::get_package_hooks, BrokenPackage, FailedInfo, HealthReport, + RemoveReport, RemovedInfo, SoarContext, +}; + +/// Check system health: PATH configuration, broken packages, and broken symlinks. +pub fn check_health(ctx: &SoarContext) -> SoarResult { + debug!("checking system health"); + let config = ctx.config(); + let bin_path = config.get_bin_path()?; + + let path_env = std::env::var("PATH").unwrap_or_default(); + let path_configured = path_env + .split(':') + .any(|p| std::path::Path::new(p) == bin_path); + + let broken_packages = get_broken_packages(ctx)?; + let broken_symlinks = get_broken_symlinks(ctx)?; + + Ok(HealthReport { + path_configured, + bin_path, + broken_packages, + broken_symlinks, + }) +} + +/// Remove all broken packages (those whose installed_path no longer exists). 
+pub async fn remove_broken_packages(ctx: &SoarContext) -> SoarResult { + debug!("removing broken packages"); + let diesel_db = ctx.diesel_core_db()?.clone(); + + let broken = diesel_db.with_conn(CoreRepository::list_broken)?; + + let mut removed = Vec::new(); + let mut failed = Vec::new(); + + for package in broken { + let op_id = next_op_id(); + let pkg_name = package.pkg_name.clone(); + let pkg_id = package.pkg_id.clone(); + let repo_name = package.repo_name.clone(); + let version = package.version.clone(); + + ctx.events().emit(SoarEvent::Removing { + op_id, + pkg_name: pkg_name.clone(), + pkg_id: pkg_id.clone(), + stage: RemoveStage::RemovingDirectory, + }); + + let (hooks, sandbox) = get_package_hooks(&pkg_name); + let installed_pkg = package.into(); + let remover = PackageRemover::new(installed_pkg, diesel_db.clone()) + .await + .with_hooks(hooks) + .with_sandbox(sandbox); + + match remover.remove().await { + Ok(()) => { + ctx.events().emit(SoarEvent::Removing { + op_id, + pkg_name: pkg_name.clone(), + pkg_id: pkg_id.clone(), + stage: RemoveStage::Complete { + size_freed: None, + }, + }); + removed.push(RemovedInfo { + pkg_name, + pkg_id, + repo_name, + version, + }); + } + Err(err) => { + ctx.events().emit(SoarEvent::OperationFailed { + op_id, + pkg_name: pkg_name.clone(), + pkg_id: pkg_id.clone(), + error: err.to_string(), + }); + failed.push(FailedInfo { + pkg_name, + pkg_id, + error: err.to_string(), + }); + } + } + } + + Ok(RemoveReport { + removed, + failed, + }) +} + +/// Remove broken symlinks in bin, desktop, and icons directories. 
+pub fn remove_broken_symlinks(ctx: &SoarContext) -> SoarResult<Vec<PathBuf>> {
+    let broken = get_broken_symlinks(ctx)?;
+
+    let mut removed = Vec::new();
+    for path in &broken {
+        if std::fs::remove_file(path).is_ok() {
+            removed.push(path.clone());
+        }
+    }
+
+    Ok(removed)
+}
+
+fn get_broken_packages(ctx: &SoarContext) -> SoarResult<Vec<BrokenPackage>> {
+    let diesel_db = ctx.diesel_core_db()?;
+    let broken = diesel_db.with_conn(CoreRepository::list_broken)?;
+
+    Ok(broken
+        .into_iter()
+        .map(|p| {
+            BrokenPackage {
+                pkg_name: p.pkg_name,
+                pkg_id: p.pkg_id,
+                installed_path: p.installed_path,
+            }
+        })
+        .collect())
+}
+
+fn get_broken_symlinks(ctx: &SoarContext) -> SoarResult<Vec<PathBuf>> {
+    let config = ctx.config();
+    let mut broken = Vec::new();
+
+    let bin_path = config.get_bin_path()?;
+    walk_dir(
+        &bin_path,
+        &mut |path: &std::path::Path| -> FileSystemResult<()> {
+            if !path.exists() {
+                broken.push(path.to_path_buf());
+            }
+            Ok(())
+        },
+    )?;
+
+    let desktop_path = config.get_desktop_path()?;
+    let mut soar_check = |path: &std::path::Path| -> FileSystemResult<()> {
+        if let Some(filename) = path.file_stem().and_then(|s| s.to_str()) {
+            if filename.ends_with("-soar") && !path.exists() {
+                broken.push(path.to_path_buf());
+            }
+        }
+        Ok(())
+    };
+
+    walk_dir(&desktop_path, &mut soar_check)?;
+    walk_dir(icons_dir(is_system_mode()), &mut soar_check)?;
+
+    Ok(broken)
+}
diff --git a/crates/soar-operations/src/install.rs b/crates/soar-operations/src/install.rs
new file mode 100644
index 00000000..19acd588
--- /dev/null
+++ b/crates/soar-operations/src/install.rs
@@ -0,0 +1,1099 @@
+use std::{
+    fs::{self, File},
+    io::{BufReader, Read},
+    path::{Path, PathBuf},
+    sync::{
+        atomic::{AtomicU32, Ordering},
+        Arc, Mutex,
+    },
+    time::Duration,
+};
+
+use minisign_verify::{PublicKey, Signature};
+use soar_config::utils::default_install_patterns;
+use soar_core::{
+    database::{
+        connection::{DieselDatabase, MetadataManager},
+        models::{InstalledPackage, Package},
+    },
+    error::{ErrorContext, SoarError},
+ package::{ + install::{InstallMarker, InstallTarget, PackageInstaller}, + query::PackageQuery, + update::remove_old_versions, + url::UrlPackage, + }, + SoarResult, +}; +use soar_db::repository::{ + core::{CoreRepository, SortDirection}, + metadata::MetadataRepository, +}; +use soar_events::{InstallStage, SoarEvent, VerifyStage}; +use soar_package::integrate_package; +use soar_utils::{ + hash::{calculate_checksum, hash_string}, + lock::FileLock, + pattern::apply_sig_variants, +}; +use tokio::sync::Semaphore; +use tracing::{debug, trace, warn}; + +use crate::{ + progress::{create_progress_bridge, next_op_id}, + utils::{has_desktop_integration, mangle_package_symlinks}, + FailedInfo, InstallOptions, InstallReport, InstalledInfo, ResolveResult, SoarContext, +}; + +/// Resolve package queries into install targets or ambiguity results. +/// +/// For each query string, returns a [`ResolveResult`] indicating whether the package +/// was resolved, is ambiguous (multiple candidates), not found, or already installed. 
+pub async fn resolve_packages( + ctx: &SoarContext, + packages: &[String], + options: &InstallOptions, +) -> SoarResult> { + debug!(count = packages.len(), "resolving packages for install"); + let metadata_mgr = ctx.metadata_manager().await?; + let diesel_db = ctx.diesel_core_db()?; + + let mut results = Vec::with_capacity(packages.len()); + + for package in packages { + if UrlPackage::is_remote(package) { + results.push(resolve_url_package(diesel_db, package, options)?); + continue; + } + + let query = PackageQuery::try_from(package.as_str())?; + + // Handle #all: install all packages with same pkg_id + if let Some(ref pkg_id) = query.pkg_id { + if pkg_id == "all" { + results.push(resolve_all_variants( + metadata_mgr, + diesel_db, + &query, + options, + )?); + continue; + } + } + + // Handle pkg_id-only queries (no name) + if query.name.is_none() && query.pkg_id.is_some() { + results.push(resolve_by_pkg_id(metadata_mgr, diesel_db, &query, options)?); + continue; + } + + // Normal resolution + results.push(resolve_normal( + metadata_mgr, + diesel_db, + package, + &query, + options, + )?); + } + + Ok(results) +} + +fn resolve_url_package( + diesel_db: &DieselDatabase, + package: &str, + options: &InstallOptions, +) -> SoarResult { + let url_pkg = UrlPackage::from_remote( + package, + options.name_override.as_deref(), + options.version_override.as_deref(), + options.pkg_type_override.as_deref(), + options.pkg_id_override.as_deref(), + )?; + + let installed_packages: Vec = diesel_db + .with_conn(|conn| { + CoreRepository::list_filtered( + conn, + Some("local"), + Some(&url_pkg.pkg_name), + Some(&url_pkg.pkg_id), + None, + None, + None, + None, + Some(SortDirection::Asc), + ) + })? 
+ .into_iter() + .map(Into::into) + .collect(); + + let installed_pkg = installed_packages.iter().find(|ip| ip.is_installed); + + if let Some(installed) = installed_pkg { + if !options.force { + return Ok(ResolveResult::AlreadyInstalled { + pkg_name: installed.pkg_name.clone(), + pkg_id: installed.pkg_id.clone(), + repo_name: installed.repo_name.clone(), + version: installed.version.clone(), + }); + } + } + + let existing_install = installed_pkg + .cloned() + .or_else(|| installed_packages.into_iter().next()); + + Ok(ResolveResult::Resolved(vec![InstallTarget { + package: url_pkg.to_package(), + existing_install, + pinned: false, + profile: None, + ..Default::default() + }])) +} + +fn resolve_all_variants( + metadata_mgr: &MetadataManager, + diesel_db: &DieselDatabase, + query: &PackageQuery, + options: &InstallOptions, +) -> SoarResult { + let variants: Vec = if let Some(ref repo_name) = query.repo_name { + metadata_mgr + .query_repo(repo_name, |conn| { + MetadataRepository::find_filtered( + conn, + query.name.as_deref(), + None, + None, + None, + Some(SortDirection::Asc), + ) + })? + .unwrap_or_default() + .into_iter() + .map(|p| { + let mut pkg: Package = p.into(); + pkg.repo_name = repo_name.clone(); + pkg + }) + .collect() + } else { + metadata_mgr.query_all_flat(|repo_name, conn| { + let pkgs = MetadataRepository::find_filtered( + conn, + query.name.as_deref(), + None, + None, + None, + Some(SortDirection::Asc), + )?; + Ok(pkgs + .into_iter() + .map(|p| { + let mut pkg: Package = p.into(); + pkg.repo_name = repo_name.to_string(); + pkg + }) + .collect()) + })? 
+ }; + + if variants.is_empty() { + return Ok(ResolveResult::NotFound( + query.name.clone().unwrap_or_default(), + )); + } + + // Multiple distinct pkg_ids -> ambiguous, caller must pick + if variants.len() > 1 { + let first_pkg_id = &variants[0].pkg_id; + let all_same_pkg_id = variants.iter().all(|v| v.pkg_id == *first_pkg_id); + if !all_same_pkg_id { + return Ok(ResolveResult::Ambiguous(crate::AmbiguousPackage { + query: query.name.clone().unwrap_or_default(), + candidates: variants, + })); + } + } + + let target_pkg_id = variants[0].pkg_id.clone(); + + // Find all packages with this pkg_id + let all_pkgs: Vec = if let Some(ref repo_name) = query.repo_name { + metadata_mgr + .query_repo(repo_name, |conn| { + MetadataRepository::find_filtered( + conn, + None, + Some(&target_pkg_id), + None, + None, + Some(SortDirection::Asc), + ) + })? + .unwrap_or_default() + .into_iter() + .map(|p| { + let mut pkg: Package = p.into(); + pkg.repo_name = repo_name.clone(); + pkg + }) + .collect() + } else { + metadata_mgr.query_all_flat(|repo_name, conn| { + let pkgs = MetadataRepository::find_filtered( + conn, + None, + Some(&target_pkg_id), + None, + None, + Some(SortDirection::Asc), + )?; + Ok(pkgs + .into_iter() + .map(|p| { + let mut pkg: Package = p.into(); + pkg.repo_name = repo_name.to_string(); + pkg + }) + .collect()) + })? + }; + + let installed_packages: Vec = diesel_db + .with_conn(|conn| { + CoreRepository::list_filtered( + conn, + query.repo_name.as_deref(), + None, + Some(&target_pkg_id), + None, + None, + None, + None, + Some(SortDirection::Asc), + ) + })? 
+ .into_iter() + .map(Into::into) + .collect(); + + let mut targets = Vec::new(); + for pkg in all_pkgs { + let existing_install = installed_packages + .iter() + .find(|ip| ip.pkg_name == pkg.pkg_name) + .cloned(); + + if let Some(ref existing) = existing_install { + if existing.is_installed && !options.force { + continue; + } + } + + let pkg = pkg.resolve(query.version.as_deref()); + + targets.push(InstallTarget { + package: pkg, + existing_install, + pinned: query.version.is_some(), + profile: None, + ..Default::default() + }); + } + + Ok(ResolveResult::Resolved(targets)) +} + +fn resolve_by_pkg_id( + metadata_mgr: &MetadataManager, + diesel_db: &DieselDatabase, + query: &PackageQuery, + options: &InstallOptions, +) -> SoarResult { + let installed_packages: Vec = diesel_db + .with_conn(|conn| { + CoreRepository::list_filtered( + conn, + query.repo_name.as_deref(), + query.name.as_deref(), + query.pkg_id.as_deref(), + None, + None, + None, + None, + Some(SortDirection::Asc), + ) + })? + .into_iter() + .map(Into::into) + .collect(); + + let repo_pkgs: Vec = if let Some(ref repo_name) = query.repo_name { + metadata_mgr + .query_repo(repo_name, |conn| { + MetadataRepository::find_filtered( + conn, + None, + query.pkg_id.as_deref(), + None, + None, + None, + ) + })? + .unwrap_or_default() + .into_iter() + .map(|p| { + let mut pkg: Package = p.into(); + pkg.repo_name = repo_name.clone(); + pkg + }) + .collect() + } else { + metadata_mgr.query_all_flat(|repo_name, conn| { + let pkgs = MetadataRepository::find_filtered( + conn, + None, + query.pkg_id.as_deref(), + None, + None, + None, + )?; + Ok(pkgs + .into_iter() + .map(|p| { + let mut pkg: Package = p.into(); + pkg.repo_name = repo_name.to_string(); + pkg + }) + .collect()) + })? 
+ }; + + let repo_pkgs: Vec = if let Some(ref version) = query.version { + repo_pkgs + .into_iter() + .filter(|p| p.has_version(version)) + .collect() + } else { + repo_pkgs + }; + + let mut targets = Vec::new(); + for pkg in repo_pkgs { + let pkg = pkg.resolve(query.version.as_deref()); + + let existing_install = installed_packages + .iter() + .find(|ip| ip.pkg_name == pkg.pkg_name) + .cloned(); + + if let Some(ref existing) = existing_install { + if existing.is_installed && !options.force { + continue; + } + } + + targets.push(InstallTarget { + package: pkg, + existing_install, + pinned: query.version.is_some(), + profile: None, + ..Default::default() + }); + } + + Ok(ResolveResult::Resolved(targets)) +} + +fn resolve_normal( + metadata_mgr: &MetadataManager, + diesel_db: &DieselDatabase, + package_name: &str, + query: &PackageQuery, + options: &InstallOptions, +) -> SoarResult { + let installed_packages: Vec = diesel_db + .with_conn(|conn| { + CoreRepository::list_filtered( + conn, + query.repo_name.as_deref(), + query.name.as_deref(), + query.pkg_id.as_deref(), + None, + None, + None, + None, + Some(SortDirection::Asc), + ) + })? 
+ .into_iter() + .map(Into::into) + .collect(); + + let maybe_existing = installed_packages.first().cloned(); + + let packages: Vec = find_packages(metadata_mgr, query, &maybe_existing)?; + + let packages: Vec = if let Some(ref version) = query.version { + packages + .into_iter() + .filter(|p| p.has_version(version)) + .collect() + } else { + packages + }; + + match packages.len() { + 0 => Ok(ResolveResult::NotFound(package_name.to_string())), + 1 => { + let pkg = packages.into_iter().next().unwrap(); + let installed_pkg = installed_packages.iter().find(|ip| ip.is_installed); + + if let Some(installed) = installed_pkg { + if !options.force { + return Ok(ResolveResult::AlreadyInstalled { + pkg_name: installed.pkg_name.clone(), + pkg_id: installed.pkg_id.clone(), + repo_name: installed.repo_name.clone(), + version: installed.version.clone(), + }); + } + } + + let existing_install = installed_packages + .iter() + .find(|ip| ip.version == pkg.version) + .cloned(); + + let pkg = pkg.resolve(query.version.as_deref()); + + Ok(ResolveResult::Resolved(vec![InstallTarget { + package: pkg, + existing_install, + pinned: query.version.is_some(), + profile: None, + ..Default::default() + }])) + } + _ => { + Ok(ResolveResult::Ambiguous(crate::AmbiguousPackage { + query: package_name.to_string(), + candidates: packages, + })) + } + } +} + +fn find_packages( + metadata_mgr: &MetadataManager, + query: &PackageQuery, + existing_install: &Option, +) -> SoarResult> { + // If we have an existing install, try to find it in its original repo first + if let Some(existing) = existing_install { + let existing_pkgs: Vec = metadata_mgr + .query_repo(&existing.repo_name, |conn| { + MetadataRepository::find_filtered( + conn, + Some(&existing.pkg_name), + Some(&existing.pkg_id), + None, + None, + None, + ) + })? 
+ .unwrap_or_default() + .into_iter() + .map(|p| { + let mut pkg: Package = p.into(); + pkg.repo_name = existing.repo_name.clone(); + pkg + }) + .collect(); + + if !existing_pkgs.is_empty() { + return Ok(existing_pkgs); + } + } + + if let Some(ref repo_name) = query.repo_name { + Ok(metadata_mgr + .query_repo(repo_name, |conn| { + MetadataRepository::find_filtered( + conn, + query.name.as_deref(), + query.pkg_id.as_deref(), + None, + None, + None, + ) + })? + .unwrap_or_default() + .into_iter() + .map(|p| { + let mut pkg: Package = p.into(); + pkg.repo_name = repo_name.clone(); + pkg + }) + .collect()) + } else { + metadata_mgr.query_all_flat(|repo_name, conn| { + let pkgs = MetadataRepository::find_filtered( + conn, + query.name.as_deref(), + query.pkg_id.as_deref(), + None, + None, + None, + )?; + Ok(pkgs + .into_iter() + .map(|p| { + let mut pkg: Package = p.into(); + pkg.repo_name = repo_name.to_string(); + pkg + }) + .collect()) + }) + } +} + +/// Install resolved targets. Emits events through the context's event sink. +/// +/// Handles concurrency control, download, verification, symlink creation, +/// desktop integration, and database recording. 
+pub async fn perform_installation( + ctx: &SoarContext, + targets: Vec, + options: &InstallOptions, +) -> SoarResult { + debug!(count = targets.len(), "performing installation"); + let diesel_db = ctx.diesel_core_db()?.clone(); + let parallel_limit = ctx.config().parallel_limit.unwrap_or(4); + let semaphore = Arc::new(Semaphore::new(parallel_limit as usize)); + + let installed = Arc::new(Mutex::new(Vec::new())); + let failed = Arc::new(Mutex::new(Vec::new())); + let warnings = Arc::new(Mutex::new(Vec::new())); + + let total = targets.len() as u32; + let completed = Arc::new(AtomicU32::new(0)); + let failed_count = Arc::new(AtomicU32::new(0)); + + let mut handles = Vec::new(); + + for target in targets { + let permit = semaphore.clone().acquire_owned().await.unwrap(); + let ctx = ctx.clone(); + let db = diesel_db.clone(); + let installed = installed.clone(); + let failed = failed.clone(); + let warnings = warnings.clone(); + let completed = completed.clone(); + let failed_count = failed_count.clone(); + let binary_only = options.binary_only; + let no_verify = options.no_verify; + let portable = options.portable.clone(); + let portable_home = options.portable_home.clone(); + let portable_config = options.portable_config.clone(); + let portable_share = options.portable_share.clone(); + let portable_cache = options.portable_cache.clone(); + + let handle = tokio::spawn(async move { + let result = install_single_package( + &ctx, + &target, + db.clone(), + binary_only, + no_verify, + portable.as_deref(), + portable_home.as_deref(), + portable_config.as_deref(), + portable_share.as_deref(), + portable_cache.as_deref(), + ) + .await; + + match result { + Ok((install_dir, symlinks)) => { + if !install_dir.as_os_str().is_empty() { + installed.lock().unwrap().push(InstalledInfo { + pkg_name: target.package.pkg_name.clone(), + pkg_id: target.package.pkg_id.clone(), + repo_name: target.package.repo_name.clone(), + version: target.package.version.clone(), + install_dir, + 
symlinks, + notes: target.package.notes.clone(), + }); + } + let _ = remove_old_versions(&target.package, &db, false); + } + Err(err) => { + match err { + SoarError::Warning(msg) => { + warnings.lock().unwrap().push(msg); + let _ = remove_old_versions(&target.package, &db, false); + } + _ => { + let op_id = next_op_id(); + ctx.events().emit(SoarEvent::OperationFailed { + op_id, + pkg_name: target.package.pkg_name.clone(), + pkg_id: target.package.pkg_id.clone(), + error: err.to_string(), + }); + failed.lock().unwrap().push(FailedInfo { + pkg_name: target.package.pkg_name.clone(), + pkg_id: target.package.pkg_id.clone(), + error: err.to_string(), + }); + failed_count.fetch_add(1, Ordering::Relaxed); + } + } + } + } + + let done = completed.fetch_add(1, Ordering::Relaxed) + 1; + ctx.events().emit(SoarEvent::BatchProgress { + completed: done, + total, + failed: failed_count.load(Ordering::Relaxed), + }); + + drop(permit); + }); + handles.push(handle); + } + + for handle in handles { + handle + .await + .map_err(|err| SoarError::Custom(format!("Join handle error: {err}")))?; + } + + let installed = Arc::try_unwrap(installed).unwrap().into_inner().unwrap(); + let failed = Arc::try_unwrap(failed).unwrap().into_inner().unwrap(); + let warnings = Arc::try_unwrap(warnings).unwrap().into_inner().unwrap(); + + Ok(InstallReport { + installed, + failed, + warnings, + }) +} + +#[allow(clippy::too_many_arguments)] +async fn install_single_package( + ctx: &SoarContext, + target: &InstallTarget, + core_db: DieselDatabase, + binary_only: bool, + no_verify: bool, + portable: Option<&str>, + portable_home: Option<&str>, + portable_config: Option<&str>, + portable_share: Option<&str>, + portable_cache: Option<&str>, +) -> SoarResult<(PathBuf, Vec<(PathBuf, PathBuf)>)> { + let op_id = next_op_id(); + let events = ctx.events().clone(); + let pkg = &target.package; + + debug!( + pkg_name = pkg.pkg_name, + pkg_id = pkg.pkg_id, + version = pkg.version, + "installing package" + ); + + // 
Acquire lock with a bounded retry count to avoid hanging on stale locks + const MAX_LOCK_ATTEMPTS: u32 = 120; // 60 seconds at 500ms intervals + let mut lock_attempts = 0u32; + let _package_lock = loop { + match FileLock::try_acquire(&pkg.pkg_name) { + Ok(Some(lock)) => break Ok(lock), + Ok(None) => { + lock_attempts += 1; + if lock_attempts == 1 { + debug!("waiting for lock on '{}'", pkg.pkg_name); + } + if lock_attempts >= MAX_LOCK_ATTEMPTS { + break Err(soar_utils::error::LockError::AcquireFailed(format!( + "timed out waiting for lock on '{}' after {}s", + pkg.pkg_name, + MAX_LOCK_ATTEMPTS / 2 + ))); + } + tokio::time::sleep(Duration::from_millis(500)).await; + } + Err(err) => break Err(err), + } + } + .map_err(|e| SoarError::Custom(format!("Failed to acquire package lock: {}", e)))?; + + // Re-check if package is already installed after acquiring lock + let freshly_installed = core_db + .with_conn(|conn| { + CoreRepository::list_filtered( + conn, + Some(&pkg.repo_name), + Some(&pkg.pkg_name), + Some(&pkg.pkg_id), + Some(&pkg.version), + Some(true), + None, + None, + Some(SortDirection::Asc), + ) + })? + .into_iter() + .find(|ip| ip.is_installed); + + if freshly_installed.is_some() { + return Ok((PathBuf::new(), Vec::new())); + } + + let config = ctx.config(); + let bin_dir = config.get_bin_path()?; + + let dir_suffix: String = pkg + .bsum + .as_ref() + .filter(|s| s.len() >= 12) + .map(|s| s[..12].to_string()) + .unwrap_or_else(|| { + let input = format!("{}:{}:{}", pkg.pkg_id, pkg.pkg_name, pkg.version); + hash_string(&input)[..12].to_string() + }); + + let install_dir = config + .get_packages_path(target.profile.clone())? 
+ .join(format!("{}-{}-{}", pkg.pkg_name, pkg.pkg_id, dir_suffix)); + let real_bin = install_dir.join(&pkg.pkg_name); + + let ( + unlinked, + eff_portable, + eff_portable_home, + eff_portable_config, + eff_portable_share, + eff_portable_cache, + excludes, + ) = if let Some(ref existing) = target.existing_install { + ( + existing.unlinked, + existing.portable_path.as_deref(), + existing.portable_home.as_deref(), + existing.portable_config.as_deref(), + existing.portable_share.as_deref(), + existing.portable_cache.as_deref(), + existing.install_patterns.as_deref(), + ) + } else { + ( + false, + portable, + portable_home, + portable_config, + portable_share, + portable_cache, + None, + ) + }; + + let should_cleanup = if let Some(ref existing) = target.existing_install { + if existing.is_installed { + true + } else { + match InstallMarker::read_from_dir(&install_dir) { + Some(marker) => !marker.matches_package(pkg), + None => true, + } + } + } else { + false + }; + + if should_cleanup && install_dir.exists() { + debug!(path = %install_dir.display(), "cleaning up existing installation directory"); + fs::remove_dir_all(&install_dir).map_err(|err| { + SoarError::Custom(format!( + "Failed to clean up install directory {}: {}", + install_dir.display(), + err + )) + })?; + } + + let install_patterns = excludes.map(|e| e.to_vec()).unwrap_or_else(|| { + if binary_only { + let mut patterns = default_install_patterns(); + patterns.extend( + ["!*.png", "!*.svg", "!*.desktop", "!LICENSE", "!CHECKSUM"] + .iter() + .map(ToString::to_string), + ); + patterns + } else { + config.install_patterns.clone().unwrap_or_default() + } + }); + let install_patterns = apply_sig_variants(install_patterns); + + // Create progress bridge for download events + let progress_callback = create_progress_bridge( + events.clone(), + op_id, + pkg.pkg_name.clone(), + pkg.pkg_id.clone(), + ); + + trace!(install_dir = %install_dir.display(), "creating package installer"); + let installer = 
PackageInstaller::new( + target, + &install_dir, + Some(progress_callback), + core_db.clone(), + install_patterns.to_vec(), + ) + .await?; + + // Download + let downloaded_checksum = installer.download_package().await?; + + // Signature verification + if let Some(repository) = config.get_repository(&pkg.repo_name) { + if repository.signature_verification() { + events.emit(SoarEvent::Verifying { + op_id, + pkg_name: pkg.pkg_name.clone(), + pkg_id: pkg.pkg_id.clone(), + stage: VerifyStage::Signature, + }); + + let repository_path = repository.get_path()?; + let pubkey_file = repository_path.join("minisign.pub"); + if pubkey_file.exists() { + verify_signatures(&pubkey_file, &install_dir)?; + } else { + warn!( + "{}#{} - Signature verification skipped as no pubkey was found.", + pkg.pkg_name, pkg.pkg_id + ); + } + } + } else { + // Clean up .sig files for packages without signature verification + cleanup_sig_files(&install_dir); + } + + // Checksum verification + if !no_verify { + events.emit(SoarEvent::Verifying { + op_id, + pkg_name: pkg.pkg_name.clone(), + pkg_id: pkg.pkg_id.clone(), + stage: VerifyStage::Checksum, + }); + + let final_checksum = if pkg.ghcr_pkg.is_some() { + if real_bin.exists() { + Some(calculate_checksum(&real_bin)?) 
+ } else { + None + } + } else { + downloaded_checksum + }; + + match (final_checksum, pkg.bsum.as_ref()) { + (Some(calculated), Some(expected)) if calculated != *expected => { + events.emit(SoarEvent::Verifying { + op_id, + pkg_name: pkg.pkg_name.clone(), + pkg_id: pkg.pkg_id.clone(), + stage: VerifyStage::Failed("checksum mismatch".into()), + }); + return Err(SoarError::Custom(format!( + "{}#{} - Invalid checksum, skipped installation.", + pkg.pkg_name, pkg.pkg_id + ))); + } + (Some(ref calculated), Some(expected)) if calculated == expected => { + events.emit(SoarEvent::Verifying { + op_id, + pkg_name: pkg.pkg_name.clone(), + pkg_id: pkg.pkg_id.clone(), + stage: VerifyStage::Passed, + }); + } + _ => {} + } + } + + // Create symlinks + events.emit(SoarEvent::Installing { + op_id, + pkg_name: pkg.pkg_name.clone(), + pkg_id: pkg.pkg_id.clone(), + stage: InstallStage::LinkingBinaries, + }); + + let symlinks = mangle_package_symlinks( + &install_dir, + &bin_dir, + pkg.provides.as_deref(), + &pkg.pkg_name, + &pkg.version, + target.entrypoint.as_deref(), + target.binaries.as_deref(), + ) + .await?; + + // Desktop integration + if !unlinked || has_desktop_integration(pkg, ctx.config()) { + events.emit(SoarEvent::Installing { + op_id, + pkg_name: pkg.pkg_name.clone(), + pkg_id: pkg.pkg_id.clone(), + stage: InstallStage::DesktopIntegration, + }); + + let actual_bin = symlinks.first().map(|(src, _)| src.as_path()); + integrate_package( + &install_dir, + pkg, + actual_bin, + eff_portable, + eff_portable_home, + eff_portable_config, + eff_portable_share, + eff_portable_cache, + ) + .await?; + } + + // Record to database + events.emit(SoarEvent::Installing { + op_id, + pkg_name: pkg.pkg_name.clone(), + pkg_id: pkg.pkg_id.clone(), + stage: InstallStage::RecordingDatabase, + }); + + installer + .record( + unlinked, + eff_portable, + eff_portable_home, + eff_portable_config, + eff_portable_share, + eff_portable_cache, + ) + .await?; + + installer.run_post_install_hook()?; + + 
events.emit(SoarEvent::OperationComplete { + op_id, + pkg_name: pkg.pkg_name.clone(), + pkg_id: pkg.pkg_id.clone(), + }); + + debug!( + pkg_name = pkg.pkg_name, + pkg_id = pkg.pkg_id, + version = pkg.version, + "installation complete" + ); + Ok((install_dir, symlinks)) +} + +fn verify_signatures(pubkey_file: &Path, install_dir: &Path) -> SoarResult<()> { + let pubkey = PublicKey::from_base64( + fs::read_to_string(pubkey_file) + .with_context(|| format!("reading minisign key from {}", pubkey_file.display()))? + .trim(), + ) + .map_err(|err| { + SoarError::Custom(format!( + "Failed to load public key from {}: {}", + pubkey_file.display(), + err + )) + })?; + + let entries = fs::read_dir(install_dir) + .with_context(|| format!("reading package directory {}", install_dir.display()))?; + + for entry in entries { + let path = entry + .with_context(|| format!("reading entry from directory {}", install_dir.display()))? + .path(); + let is_signature_file = path.extension().is_some_and(|ext| ext == "sig"); + let original_file = path.with_extension(""); + if is_signature_file && path.is_file() && original_file.is_file() { + let signature = Signature::from_file(&path).map_err(|err| { + SoarError::Custom(format!( + "Failed to load signature file from {}: {}", + path.display(), + err + )) + })?; + let mut stream_verifier = pubkey.verify_stream(&signature).map_err(|err| { + SoarError::Custom(format!("Failed to setup stream verifier: {err}")) + })?; + + let file = File::open(&original_file).with_context(|| { + format!( + "opening file {} for signature verification", + original_file.display() + ) + })?; + let mut buf_reader = BufReader::new(file); + + let mut buffer = [0u8; 8192]; + loop { + match buf_reader.read(&mut buffer).with_context(|| { + format!("reading to buffer from {}", original_file.display()) + })? 
{ + 0 => break, + n => { + stream_verifier.update(&buffer[..n]); + } + } + } + + stream_verifier.finalize().map_err(|_| { + SoarError::Custom(format!( + "Signature verification failed for {}", + original_file.display() + )) + })?; + + fs::remove_file(&path) + .with_context(|| format!("removing minisign file {}", path.display()))?; + } + } + + Ok(()) +} + +fn cleanup_sig_files(install_dir: &Path) { + if let Ok(entries) = fs::read_dir(install_dir) { + for entry in entries.filter_map(|e| e.ok()) { + let path = entry.path(); + if path.extension().is_some_and(|ext| ext == "sig") && path.is_file() { + fs::remove_file(&path).ok(); + } + } + } +} diff --git a/crates/soar-operations/src/lib.rs b/crates/soar-operations/src/lib.rs new file mode 100644 index 00000000..811e6d4d --- /dev/null +++ b/crates/soar-operations/src/lib.rs @@ -0,0 +1,18 @@ +pub mod context; +pub mod progress; +pub mod types; +pub mod utils; + +pub mod apply; +pub mod health; +pub mod install; +pub mod list; +pub mod remove; +pub mod run; +pub mod search; +pub mod switch; +pub mod sync; +pub mod update; + +pub use context::SoarContext; +pub use types::*; diff --git a/crates/soar-operations/src/list.rs b/crates/soar-operations/src/list.rs new file mode 100644 index 00000000..cd4d0408 --- /dev/null +++ b/crates/soar-operations/src/list.rs @@ -0,0 +1,150 @@ +use std::{collections::HashMap, path::PathBuf}; + +use rayon::iter::{IntoParallelIterator, ParallelIterator}; +use soar_core::{ + database::models::{InstalledPackage, Package}, + SoarResult, +}; +use soar_db::{ + models::metadata::PackageListing, + repository::{core::CoreRepository, metadata::MetadataRepository}, +}; +use soar_utils::fs::dir_size; +use tracing::{debug, trace}; + +use crate::{ + InstalledEntry, InstalledListResult, PackageListEntry, PackageListResult, SoarContext, +}; + +/// List all available packages, optionally filtered by repository. 
+pub async fn list_packages(
+    ctx: &SoarContext,
+    repo_name: Option<&str>,
+) -> SoarResult<PackageListResult> {
+    debug!(repo = ?repo_name, "listing packages");
+    let metadata_mgr = ctx.metadata_manager().await?;
+    let diesel_db = ctx.diesel_core_db()?;
+
+    struct ListingWithRepo {
+        repo_name: String,
+        pkg: PackageListing,
+    }
+
+    let packages: Vec<ListingWithRepo> = if let Some(repo_name) = repo_name {
+        metadata_mgr
+            .query_repo(repo_name, MetadataRepository::list_all_minimal)?
+            .unwrap_or_default()
+            .into_iter()
+            .map(|pkg| {
+                ListingWithRepo {
+                    repo_name: repo_name.to_string(),
+                    pkg,
+                }
+            })
+            .collect()
+    } else {
+        metadata_mgr.query_all_flat(|repo_name, conn| {
+            let pkgs = MetadataRepository::list_all_minimal(conn)?;
+            Ok(pkgs
+                .into_iter()
+                .map(|pkg| {
+                    ListingWithRepo {
+                        repo_name: repo_name.to_string(),
+                        pkg,
+                    }
+                })
+                .collect())
+        })?
+    };
+
+    let installed_pkgs: HashMap<(String, String, String), bool> = diesel_db
+        .with_conn(|conn| {
+            CoreRepository::list_filtered(conn, None, None, None, None, None, None, None, None)
+        })?
+        .into_par_iter()
+        .map(|pkg| ((pkg.repo_name, pkg.pkg_id, pkg.pkg_name), pkg.is_installed))
+        .collect();
+
+    let total = packages.len();
+
+    let entries: Vec<PackageListEntry> = packages
+        .into_iter()
+        .map(|entry| {
+            let key = (
+                entry.repo_name.clone(),
+                entry.pkg.pkg_id.clone(),
+                entry.pkg.pkg_name.clone(),
+            );
+            let installed = installed_pkgs.get(&key).copied().unwrap_or(false);
+
+            // Build a minimal Package for the entry
+            let package = Package {
+                repo_name: entry.repo_name,
+                pkg_id: entry.pkg.pkg_id,
+                pkg_name: entry.pkg.pkg_name,
+                pkg_type: entry.pkg.pkg_type,
+                version: entry.pkg.version,
+                ..Default::default()
+            };
+
+            PackageListEntry {
+                package,
+                installed,
+            }
+        })
+        .collect();
+
+    Ok(PackageListResult {
+        packages: entries,
+        total,
+    })
+}
+
+/// List installed packages, optionally filtered by repository.
+pub fn list_installed(
+    ctx: &SoarContext,
+    repo_name: Option<&str>,
+) -> SoarResult<InstalledListResult> {
+    debug!(repo = ?repo_name, "listing installed packages");
+    let diesel_db = ctx.diesel_core_db()?;
+
+    let packages: Vec<InstalledPackage> = diesel_db
+        .with_conn(|conn| {
+            CoreRepository::list_filtered(conn, repo_name, None, None, None, None, None, None, None)
+        })?
+        .into_iter()
+        .map(Into::into)
+        .collect();
+    trace!(count = packages.len(), "fetched installed packages");
+
+    let mut total_size = 0u64;
+    let total_count = packages.len();
+
+    let entries: Vec<InstalledEntry> = packages
+        .into_iter()
+        .map(|package| {
+            let installed_path = PathBuf::from(&package.installed_path);
+            let disk_size = dir_size(&installed_path).unwrap_or(0);
+            let is_healthy = package.is_installed && installed_path.exists();
+            total_size += disk_size;
+
+            InstalledEntry {
+                package,
+                disk_size,
+                is_healthy,
+            }
+        })
+        .collect();
+
+    Ok(InstalledListResult {
+        packages: entries,
+        total_count,
+        total_size,
+    })
+}
+
+/// Count distinct installed packages.
+pub fn count_installed(ctx: &SoarContext, repo_name: Option<&str>) -> SoarResult {
+    let diesel_db = ctx.diesel_core_db()?;
+    diesel_db.with_conn(|conn| CoreRepository::count_distinct_installed(conn, repo_name))
+}
diff --git a/crates/soar-operations/src/progress.rs b/crates/soar-operations/src/progress.rs
new file mode 100644
index 00000000..6e741b85
--- /dev/null
+++ b/crates/soar-operations/src/progress.rs
@@ -0,0 +1,174 @@
+use std::sync::{
+    atomic::{AtomicU64, Ordering},
+    Arc,
+};
+
+use soar_dl::types::Progress;
+use soar_events::{EventSinkHandle, OperationId, SoarEvent};
+
+/// Creates a soar-dl progress callback that bridges to SoarEvent emissions.
+///
+/// The returned closure can be passed to `PackageInstaller::new()` as the
+/// `progress_callback` parameter.
+pub fn create_progress_bridge( + events: EventSinkHandle, + op_id: OperationId, + pkg_name: String, + pkg_id: String, +) -> Arc { + Arc::new(move |progress| { + let event = match progress { + Progress::Starting { + total, + } => { + SoarEvent::DownloadStarting { + op_id, + pkg_name: pkg_name.clone(), + pkg_id: pkg_id.clone(), + total, + } + } + Progress::Resuming { + current, + total, + } => { + SoarEvent::DownloadResuming { + op_id, + pkg_name: pkg_name.clone(), + pkg_id: pkg_id.clone(), + current, + total, + } + } + Progress::Chunk { + current, + total, + } => { + SoarEvent::DownloadProgress { + op_id, + pkg_name: pkg_name.clone(), + pkg_id: pkg_id.clone(), + current, + total, + } + } + Progress::Complete { + total, + } => { + SoarEvent::DownloadComplete { + op_id, + pkg_name: pkg_name.clone(), + pkg_id: pkg_id.clone(), + total, + } + } + Progress::Error => { + SoarEvent::DownloadRetry { + op_id, + pkg_name: pkg_name.clone(), + pkg_id: pkg_id.clone(), + } + } + Progress::Aborted => { + SoarEvent::DownloadAborted { + op_id, + pkg_name: pkg_name.clone(), + pkg_id: pkg_id.clone(), + } + } + Progress::Recovered => { + SoarEvent::DownloadRecovered { + op_id, + pkg_name: pkg_name.clone(), + pkg_id: pkg_id.clone(), + } + } + }; + events.emit(event); + }) +} + +/// Generates a unique operation ID. 
+pub fn next_op_id() -> OperationId { + static COUNTER: AtomicU64 = AtomicU64::new(1); + COUNTER.fetch_add(1, Ordering::Relaxed) +} + +#[cfg(test)] +mod tests { + use soar_events::{CollectorSink, SoarEvent}; + + use super::*; + + #[test] + fn test_next_op_id_is_unique() { + let id1 = next_op_id(); + let id2 = next_op_id(); + let id3 = next_op_id(); + assert_ne!(id1, id2); + assert_ne!(id2, id3); + } + + #[test] + fn test_progress_bridge_maps_all_variants() { + let collector = Arc::new(CollectorSink::default()); + let events: EventSinkHandle = collector.clone(); + + let bridge = create_progress_bridge(events, 1, "pkg".into(), "pkg-id".into()); + + bridge(Progress::Starting { + total: 1000, + }); + bridge(Progress::Resuming { + current: 500, + total: 1000, + }); + bridge(Progress::Chunk { + current: 750, + total: 1000, + }); + bridge(Progress::Complete { + total: 1000, + }); + bridge(Progress::Error); + bridge(Progress::Aborted); + bridge(Progress::Recovered); + + let events = collector.events(); + assert_eq!(events.len(), 7); + + assert!(matches!( + &events[0], + SoarEvent::DownloadStarting { + total: 1000, + .. + } + )); + assert!(matches!( + &events[1], + SoarEvent::DownloadResuming { + current: 500, + total: 1000, + .. + } + )); + assert!(matches!( + &events[2], + SoarEvent::DownloadProgress { + current: 750, + total: 1000, + .. + } + )); + assert!(matches!( + &events[3], + SoarEvent::DownloadComplete { + total: 1000, + .. + } + )); + assert!(matches!(&events[4], SoarEvent::DownloadRetry { .. })); + assert!(matches!(&events[5], SoarEvent::DownloadAborted { .. })); + assert!(matches!(&events[6], SoarEvent::DownloadRecovered { .. 
})); + } +} diff --git a/crates/soar-operations/src/remove.rs b/crates/soar-operations/src/remove.rs new file mode 100644 index 00000000..cf282633 --- /dev/null +++ b/crates/soar-operations/src/remove.rs @@ -0,0 +1,233 @@ +use soar_core::{ + database::models::InstalledPackage, + package::{query::PackageQuery, remove::PackageRemover}, + SoarResult, +}; +use soar_db::repository::core::{CoreRepository, SortDirection}; +use soar_events::{RemoveStage, SoarEvent}; +use tracing::{debug, trace}; + +use crate::{ + progress::next_op_id, utils::get_package_hooks, FailedInfo, RemoveReport, RemoveResolveResult, + RemovedInfo, SoarContext, +}; + +/// Resolve package queries into packages to remove. +/// +/// For each query, returns a [`RemoveResolveResult`] indicating whether the +/// package was found, is ambiguous, or not installed. +pub fn resolve_removals( + ctx: &SoarContext, + packages: &[String], + all: bool, +) -> SoarResult> { + debug!( + count = packages.len(), + all = all, + "resolving packages for removal" + ); + let diesel_db = ctx.diesel_core_db()?; + + let mut results = Vec::with_capacity(packages.len()); + + for package in packages { + let query = PackageQuery::try_from(package.as_str())?; + + // --all flag: remove all installed variants matching the name + if let (true, None, Some(ref name)) = (all, &query.pkg_id, &query.name) { + let installed: Vec = diesel_db + .with_conn(|conn| { + CoreRepository::list_filtered( + conn, + query.repo_name.as_deref(), + query.name.as_deref(), + None, + query.version.as_deref(), + None, + None, + None, + Some(SortDirection::Asc), + ) + })? 
+ .into_iter() + .map(Into::into) + .collect(); + + if installed.is_empty() { + results.push(RemoveResolveResult::NotInstalled(name.clone())); + } else { + results.push(RemoveResolveResult::Resolved(installed)); + } + continue; + } + + // Handle #all: remove all packages with the selected pkg_id + if let Some(ref pkg_id) = query.pkg_id { + if pkg_id == "all" { + let installed: Vec = diesel_db + .with_conn(|conn| { + CoreRepository::list_filtered( + conn, + query.repo_name.as_deref(), + query.name.as_deref(), + None, + None, + None, + None, + None, + Some(SortDirection::Asc), + ) + })? + .into_iter() + .map(Into::into) + .collect(); + + if installed.is_empty() { + results.push(RemoveResolveResult::NotInstalled( + query.name.clone().unwrap_or_default(), + )); + } else if installed.len() > 1 { + // Multiple pkg_ids → ambiguous, caller picks which pkg_id + results.push(RemoveResolveResult::Ambiguous { + query: query.name.clone().unwrap_or_default(), + candidates: installed, + }); + } else { + let target_pkg_id = installed[0].pkg_id.clone(); + // Find all packages with this pkg_id + let all_installed: Vec = diesel_db + .with_conn(|conn| { + CoreRepository::list_filtered( + conn, + query.repo_name.as_deref(), + None, + Some(&target_pkg_id), + None, + None, + None, + None, + Some(SortDirection::Asc), + ) + })? + .into_iter() + .map(Into::into) + .collect(); + + results.push(RemoveResolveResult::Resolved(all_installed)); + } + continue; + } + } + + // Normal case: find matching installed packages + let installed_pkgs: Vec = diesel_db + .with_conn(|conn| { + CoreRepository::list_filtered( + conn, + query.repo_name.as_deref(), + query.name.as_deref(), + query.pkg_id.as_deref(), + query.version.as_deref(), + None, + None, + None, + Some(SortDirection::Asc), + ) + })? 
+ .into_iter() + .map(Into::into) + .collect(); + + if installed_pkgs.is_empty() { + results.push(RemoveResolveResult::NotInstalled(package.clone())); + } else if installed_pkgs.len() > 1 && query.pkg_id.is_none() { + results.push(RemoveResolveResult::Ambiguous { + query: query.name.clone().unwrap_or(package.clone()), + candidates: installed_pkgs, + }); + } else { + results.push(RemoveResolveResult::Resolved(installed_pkgs)); + } + } + + Ok(results) +} + +/// Remove installed packages. Emits events through the context's event sink. +pub async fn perform_removal( + ctx: &SoarContext, + packages: Vec, +) -> SoarResult { + debug!(count = packages.len(), "performing removal"); + let diesel_db = ctx.diesel_core_db()?.clone(); + + let mut removed = Vec::new(); + let mut failed = Vec::new(); + + for pkg in packages { + let op_id = next_op_id(); + + ctx.events().emit(SoarEvent::Removing { + op_id, + pkg_name: pkg.pkg_name.clone(), + pkg_id: pkg.pkg_id.clone(), + stage: RemoveStage::RunningHook("pre_remove".into()), + }); + + trace!( + pkg_name = pkg.pkg_name, + pkg_id = pkg.pkg_id, + "removing package" + ); + + let (hooks, sandbox) = get_package_hooks(&pkg.pkg_name); + let remover = PackageRemover::new(pkg.clone(), diesel_db.clone()) + .await + .with_hooks(hooks) + .with_sandbox(sandbox); + + match remover.remove().await { + Ok(()) => { + ctx.events().emit(SoarEvent::Removing { + op_id, + pkg_name: pkg.pkg_name.clone(), + pkg_id: pkg.pkg_id.clone(), + stage: RemoveStage::Complete { + size_freed: None, + }, + }); + ctx.events().emit(SoarEvent::OperationComplete { + op_id, + pkg_name: pkg.pkg_name.clone(), + pkg_id: pkg.pkg_id.clone(), + }); + + removed.push(RemovedInfo { + pkg_name: pkg.pkg_name, + pkg_id: pkg.pkg_id, + repo_name: pkg.repo_name, + version: pkg.version, + }); + } + Err(err) => { + ctx.events().emit(SoarEvent::OperationFailed { + op_id, + pkg_name: pkg.pkg_name.clone(), + pkg_id: pkg.pkg_id.clone(), + error: err.to_string(), + }); + + failed.push(FailedInfo { 
+ pkg_name: pkg.pkg_name, + pkg_id: pkg.pkg_id, + error: err.to_string(), + }); + } + } + } + + Ok(RemoveReport { + removed, + failed, + }) +} diff --git a/crates/soar-operations/src/run.rs b/crates/soar-operations/src/run.rs new file mode 100644 index 00000000..77ea586f --- /dev/null +++ b/crates/soar-operations/src/run.rs @@ -0,0 +1,243 @@ +use std::{fs, path::Path, process::Command, sync::Arc}; + +use soar_core::{ + database::models::Package, + error::{ErrorContext, SoarError}, + package::query::PackageQuery, + utils::get_extract_dir, + SoarResult, +}; +use soar_db::repository::metadata::MetadataRepository; +use soar_dl::{download::Download, oci::OciDownload, types::OverwriteMode}; +use soar_events::SoarEvent; +use soar_utils::hash::calculate_checksum; +use tracing::debug; + +use crate::{ + progress::{create_progress_bridge, next_op_id}, + AmbiguousPackage, PrepareRunResult, RunResult, SoarContext, +}; + +/// Resolve a package and download it to the cache if needed. +/// +/// Returns [`PrepareRunResult::Ready`] with the path to the cached binary, +/// or [`PrepareRunResult::Ambiguous`] if multiple candidates match. 
+pub async fn prepare_run( + ctx: &SoarContext, + package_name: &str, + repo_name: Option<&str>, + pkg_id: Option<&str>, +) -> SoarResult { + debug!(package_name = package_name, "preparing run"); + let config = ctx.config(); + let cache_bin = config.get_cache_path()?.join("bin"); + + let query = PackageQuery::try_from(package_name)?; + let package_name = query.name.as_deref().unwrap_or(package_name); + let repo_name = query.repo_name.as_deref().or(repo_name); + let pkg_id = query.pkg_id.as_deref().or(pkg_id); + let version = query.version.as_deref(); + + let output_path = cache_bin.join(package_name); + if output_path.exists() { + return Ok(PrepareRunResult::Ready(output_path)); + } + + let metadata_mgr = ctx.metadata_manager().await?; + + let packages: Vec = if let Some(repo_name) = repo_name { + metadata_mgr + .query_repo(repo_name, |conn| { + MetadataRepository::find_filtered( + conn, + Some(package_name), + pkg_id, + None, + None, + None, + ) + })? + .unwrap_or_default() + .into_iter() + .map(|p| { + let mut pkg: Package = p.into(); + pkg.repo_name = repo_name.to_string(); + pkg + }) + .collect() + } else { + metadata_mgr.query_all_flat(|repo_name, conn| { + let pkgs = MetadataRepository::find_filtered( + conn, + Some(package_name), + pkg_id, + None, + None, + None, + )?; + Ok(pkgs + .into_iter() + .map(|p| { + let mut pkg: Package = p.into(); + pkg.repo_name = repo_name.to_string(); + pkg + }) + .collect()) + })? 
+ }; + + let packages: Vec = if let Some(version) = version { + packages + .into_iter() + .filter(|p| p.has_version(version)) + .collect() + } else { + packages + }; + + match packages.len() { + 0 => return Err(SoarError::PackageNotFound(package_name.to_string())), + 1 => {} + _ => { + return Ok(PrepareRunResult::Ambiguous(AmbiguousPackage { + query: package_name.to_string(), + candidates: packages, + })); + } + } + + let package = packages.into_iter().next().unwrap().resolve(version); + + fs::create_dir_all(&cache_bin) + .with_context(|| format!("creating directory {}", cache_bin.display()))?; + + let op_id = next_op_id(); + let progress_callback = create_progress_bridge( + ctx.events().clone(), + op_id, + package.pkg_name.clone(), + package.pkg_id.clone(), + ); + + download_to_cache(&package, &output_path, &cache_bin, progress_callback)?; + + // Checksum verification + let checksum = calculate_checksum(&output_path)?; + if let Some(ref bsum) = package.bsum { + if checksum != *bsum { + ctx.events().emit(SoarEvent::Log { + level: soar_events::LogLevel::Warning, + message: format!( + "Checksum mismatch for {}: expected {}, got {}", + package.pkg_name, bsum, checksum + ), + }); + return Err(SoarError::InvalidChecksum); + } + } + + Ok(PrepareRunResult::Ready(output_path)) +} + +/// Execute a binary with the given arguments. 
+pub fn execute_binary(path: &Path, args: &[String]) -> SoarResult { + debug!(path = %path.display(), args = ?args, "executing binary"); + + let status = Command::new(path) + .args(args) + .status() + .with_context(|| format!("executing command {}", path.display()))?; + + Ok(RunResult { + exit_code: status.code().unwrap_or(-1), + }) +} + +fn download_to_cache( + package: &Package, + output_path: &Path, + cache_bin: &Path, + progress_callback: Arc, +) -> SoarResult<()> { + if let Some(ref url) = package.ghcr_blob { + let cb = progress_callback.clone(); + let mut dl = OciDownload::new(url.as_str()) + .output(output_path.to_string_lossy()) + .overwrite(OverwriteMode::Force); + dl = dl.progress(move |p| { + cb(p); + }); + dl.execute()?; + } else { + let extract_dir = get_extract_dir(cache_bin); + let cb = progress_callback.clone(); + let mut dl = Download::new(&package.download_url) + .output(output_path.to_string_lossy()) + .overwrite(OverwriteMode::Force) + .extract(true) + .extract_to(&extract_dir); + dl = dl.progress(move |p| { + cb(p); + }); + + let file_name = dl.execute()?; + if extract_dir.exists() { + fs::remove_file(&file_name).ok(); + + let mut extracted = Vec::new(); + for entry in fs::read_dir(&extract_dir) + .with_context(|| format!("reading {} directory", extract_dir.display()))? + { + let entry = entry.with_context(|| { + format!("reading entry from directory {}", extract_dir.display()) + })?; + let from = entry.path(); + let to = cache_bin.join(entry.file_name()); + fs::rename(&from, &to) + .with_context(|| format!("renaming {} to {}", from.display(), to.display()))?; + extracted.push(to); + } + + fs::remove_dir_all(&extract_dir).ok(); + + if extracted.is_empty() { + return Err(SoarError::Custom(format!( + "Archive contained no files for '{}'", + output_path + .file_name() + .unwrap_or_default() + .to_string_lossy() + ))); + } + + if !output_path.exists() { + if extracted.len() == 1 { + // Single extracted file didn't match the package name; rename it. 
+ fs::rename(&extracted[0], output_path).with_context(|| { + format!( + "renaming {} to {}", + extracted[0].display(), + output_path.display() + ) + })?; + } else if extracted.len() > 1 { + return Err(SoarError::Custom(format!( + "Archive extracted {} files but none matched '{}'. Extracted: {}", + extracted.len(), + output_path + .file_name() + .unwrap_or_default() + .to_string_lossy(), + extracted + .iter() + .filter_map(|p| p.file_name().map(|n| n.to_string_lossy().to_string())) + .collect::>() + .join(", ") + ))); + } + } + } + } + + Ok(()) +} diff --git a/crates/soar-operations/src/search.rs b/crates/soar-operations/src/search.rs new file mode 100644 index 00000000..bbc921d0 --- /dev/null +++ b/crates/soar-operations/src/search.rs @@ -0,0 +1,152 @@ +use std::collections::HashMap; + +use rayon::iter::{IntoParallelIterator, ParallelIterator}; +use soar_config::config::get_config; +use soar_core::{database::models::Package, package::query::PackageQuery, SoarResult}; +use soar_db::repository::{ + core::{CoreRepository, SortDirection}, + metadata::MetadataRepository, +}; +use tracing::{debug, trace}; + +use crate::{SearchEntry, SearchResult, SoarContext}; + +/// Search for packages across all repositories. +pub async fn search_packages( + ctx: &SoarContext, + query: &str, + case_sensitive: bool, + limit: Option, +) -> SoarResult { + debug!( + query = query, + case_sensitive = case_sensitive, + limit = ?limit, + "searching packages" + ); + let metadata_mgr = ctx.metadata_manager().await?; + let diesel_db = ctx.diesel_core_db()?; + + let search_limit = limit.or(get_config().search_limit).unwrap_or(20) as i64; + trace!(search_limit = search_limit, "using search limit"); + + let packages: Vec = metadata_mgr.query_all_flat(|repo_name, conn| { + let pkgs = if case_sensitive { + MetadataRepository::search_case_sensitive(conn, query, Some(search_limit))? + } else { + MetadataRepository::search(conn, query, Some(search_limit))? 
+ }; + Ok(pkgs + .into_iter() + .map(|p| { + let mut pkg: Package = p.into(); + pkg.repo_name = repo_name.to_string(); + pkg + }) + .collect()) + })?; + + let installed_pkgs: HashMap<(String, String, String), bool> = diesel_db + .with_conn(|conn| { + CoreRepository::list_filtered(conn, None, None, None, None, None, None, None, None) + })? + .into_par_iter() + .map(|pkg| ((pkg.repo_name, pkg.pkg_id, pkg.pkg_name), pkg.is_installed)) + .collect(); + + let total_count = packages.len(); + + let entries: Vec = packages + .into_iter() + .take(search_limit as usize) + .map(|package| { + let key = ( + package.repo_name.clone(), + package.pkg_id.clone(), + package.pkg_name.clone(), + ); + let installed = installed_pkgs.get(&key).copied().unwrap_or(false); + + SearchEntry { + package, + installed, + } + }) + .collect(); + + Ok(SearchResult { + packages: entries, + total_count, + }) +} + +/// Query detailed package information. +/// +/// Accepts query strings in the format `name#pkg_id@version:repo`. +/// Returns all matching packages with full metadata. +pub async fn query_package(ctx: &SoarContext, query_str: &str) -> SoarResult> { + debug!(query = query_str, "querying package info"); + let metadata_mgr = ctx.metadata_manager().await?; + + let query = PackageQuery::try_from(query_str)?; + trace!( + name = ?query.name, + pkg_id = ?query.pkg_id, + version = ?query.version, + repo = ?query.repo_name, + "parsed query" + ); + + let packages: Vec = if let Some(ref repo_name) = query.repo_name { + metadata_mgr + .query_repo(repo_name, |conn| { + MetadataRepository::find_filtered( + conn, + query.name.as_deref(), + query.pkg_id.as_deref(), + None, + None, + Some(SortDirection::Asc), + ) + })? 
+ .unwrap_or_default() + .into_iter() + .map(|p| { + let mut pkg: Package = p.into(); + pkg.repo_name = repo_name.clone(); + pkg + }) + .collect() + } else { + metadata_mgr.query_all_flat(|repo_name, conn| { + let pkgs = MetadataRepository::find_filtered( + conn, + query.name.as_deref(), + query.pkg_id.as_deref(), + None, + None, + Some(SortDirection::Asc), + )?; + Ok(pkgs + .into_iter() + .map(|p| { + let mut pkg: Package = p.into(); + pkg.repo_name = repo_name.to_string(); + pkg + }) + .collect()) + })? + }; + + let packages: Vec = if let Some(ref version) = query.version { + packages + .into_iter() + .filter(|p| p.has_version(version)) + .map(|p| p.resolve(query.version.as_deref())) + .collect() + } else { + packages + }; + + Ok(packages) +} diff --git a/crates/soar-operations/src/switch.rs b/crates/soar-operations/src/switch.rs new file mode 100644 index 00000000..e05fe478 --- /dev/null +++ b/crates/soar-operations/src/switch.rs @@ -0,0 +1,171 @@ +use std::path::PathBuf; + +use soar_core::{ + database::models::{InstalledPackage, Package}, + error::SoarError, + SoarResult, +}; +use soar_db::repository::{ + core::{CoreRepository, SortDirection}, + metadata::MetadataRepository, +}; +use soar_package::{formats::common::setup_portable_dir, integrate_package}; +use tracing::debug; + +use crate::{ + utils::{has_desktop_integration, mangle_package_symlinks}, + SoarContext, VariantInfo, +}; + +/// List all installed variants (pkg_ids) for a given package name. 
+pub fn list_variants(ctx: &SoarContext, name: &str) -> SoarResult> { + debug!(name = name, "listing variants"); + let diesel_db = ctx.diesel_core_db()?; + + let packages = diesel_db.with_conn(|conn| { + CoreRepository::list_filtered( + conn, + None, + Some(name), + None, + None, + None, + None, + None, + Some(SortDirection::Asc), + ) + })?; + + Ok(packages + .into_iter() + .map(|p| { + let is_active = !p.unlinked; + let package = p.into(); + VariantInfo { + package, + is_active, + } + }) + .collect()) +} + +/// Switch the active variant for a package name. +/// +/// `selected_index` is the 0-based index into the list returned by [`list_variants`]. +/// This unlinks all other variants and links the selected one, including +/// re-creating symlinks and desktop integration. +pub async fn switch_variant( + ctx: &SoarContext, + name: &str, + selected_index: usize, +) -> SoarResult<()> { + debug!(name = name, index = selected_index, "switching variant"); + let diesel_db = ctx.diesel_core_db()?; + + let packages = diesel_db.with_conn(|conn| { + CoreRepository::list_filtered( + conn, + None, + Some(name), + None, + None, + None, + None, + None, + Some(SortDirection::Asc), + ) + })?; + + let selected_package = packages + .into_iter() + .nth(selected_index) + .ok_or_else(|| SoarError::Custom("Invalid variant index".into()))?; + + let pkg_name = &selected_package.pkg_name; + let pkg_id = &selected_package.pkg_id; + let checksum = selected_package.checksum.as_deref(); + + // Atomically unlink other variants and link the selected one so the DB + // is never left in a state where all variants are unlinked. 
+ diesel_db.transaction(|conn| { + CoreRepository::unlink_others_by_checksum(conn, pkg_name, pkg_id, checksum)?; + CoreRepository::link_by_checksum(conn, pkg_name, pkg_id, checksum) + })?; + + let config = ctx.config(); + let bin_dir = config.get_bin_path()?; + let install_dir = PathBuf::from(&selected_package.installed_path); + + // Re-create symlinks + let symlinks = mangle_package_symlinks( + &install_dir, + &bin_dir, + selected_package.provides.as_deref(), + &selected_package.pkg_name, + &selected_package.version, + None, + None, + ) + .await?; + + let actual_bin = symlinks.first().map(|(src, _)| src.as_path()); + + // Check if desktop integration is needed + let metadata_mgr = ctx.metadata_manager().await?; + let pkg: Vec = metadata_mgr + .query_repo(&selected_package.repo_name, |conn| { + MetadataRepository::find_filtered( + conn, + Some(name), + Some(&selected_package.pkg_id), + None, + Some(1), + None, + ) + })? + .unwrap_or_default() + .into_iter() + .map(|p| { + let mut package: Package = p.into(); + package.repo_name = selected_package.repo_name.clone(); + package + }) + .collect(); + + let installed_pkg: InstalledPackage = selected_package.into(); + + let has_portable = installed_pkg.portable_path.is_some() + || installed_pkg.portable_home.is_some() + || installed_pkg.portable_config.is_some() + || installed_pkg.portable_share.is_some() + || installed_pkg.portable_cache.is_some(); + + if !pkg.is_empty() && pkg.iter().all(|p| has_desktop_integration(p, config)) { + integrate_package( + &install_dir, + &installed_pkg, + actual_bin, + installed_pkg.portable_path.as_deref(), + installed_pkg.portable_home.as_deref(), + installed_pkg.portable_config.as_deref(), + installed_pkg.portable_share.as_deref(), + installed_pkg.portable_cache.as_deref(), + ) + .await?; + } else if has_portable { + let bin_path = actual_bin + .map(|p| p.to_path_buf()) + .unwrap_or_else(|| install_dir.join(&installed_pkg.pkg_name)); + setup_portable_dir( + &bin_path, + &installed_pkg, + 
installed_pkg.portable_path.as_deref(), + installed_pkg.portable_home.as_deref(), + installed_pkg.portable_config.as_deref(), + installed_pkg.portable_share.as_deref(), + installed_pkg.portable_cache.as_deref(), + )?; + } + + Ok(()) +} diff --git a/crates/soar-operations/src/sync.rs b/crates/soar-operations/src/sync.rs new file mode 100644 index 00000000..b9c91a17 --- /dev/null +++ b/crates/soar-operations/src/sync.rs @@ -0,0 +1,11 @@ +use soar_core::SoarResult; + +use crate::SoarContext; + +/// Sync all enabled repositories. +/// +/// Emits `SoarEvent::SyncProgress` events through the context's event sink. +/// This is a convenience wrapper around `SoarContext::sync()`. +pub async fn sync_repos(ctx: &SoarContext) -> SoarResult<()> { + ctx.sync().await +} diff --git a/crates/soar-operations/src/types.rs b/crates/soar-operations/src/types.rs new file mode 100644 index 00000000..6e6c3840 --- /dev/null +++ b/crates/soar-operations/src/types.rs @@ -0,0 +1,227 @@ +use std::path::PathBuf; + +use soar_core::{ + database::models::{InstalledPackage, Package}, + package::install::InstallTarget, +}; + +// ---- Install ---- + +/// Options for an install operation. +#[derive(Debug, Default)] +pub struct InstallOptions { + pub force: bool, + pub portable: Option, + pub portable_home: Option, + pub portable_config: Option, + pub portable_share: Option, + pub portable_cache: Option, + pub binary_only: bool, + pub no_verify: bool, + pub name_override: Option, + pub version_override: Option, + pub pkg_type_override: Option, + pub pkg_id_override: Option, +} + +/// Result of resolving a single package query. +pub enum ResolveResult { + /// Resolved to install targets. + Resolved(Vec), + /// Multiple candidates found; caller must pick one. + Ambiguous(AmbiguousPackage), + /// Package not found. + NotFound(String), + /// Already installed (and not --force). 
+ AlreadyInstalled { + pkg_name: String, + pkg_id: String, + repo_name: String, + version: String, + }, +} + +/// Multiple matching packages for a query. +pub struct AmbiguousPackage { + pub query: String, + pub candidates: Vec, +} + +/// Report returned after installation completes. +pub struct InstallReport { + pub installed: Vec, + pub failed: Vec, + pub warnings: Vec, +} + +/// Info about a successfully installed package. +#[derive(Debug)] +pub struct InstalledInfo { + pub pkg_name: String, + pub pkg_id: String, + pub repo_name: String, + pub version: String, + pub install_dir: PathBuf, + pub symlinks: Vec<(PathBuf, PathBuf)>, + pub notes: Option>, +} + +/// Info about a failed operation. +#[derive(Debug)] +pub struct FailedInfo { + pub pkg_name: String, + pub pkg_id: String, + pub error: String, +} + +// ---- Remove ---- + +/// Result of resolving packages for removal. +pub enum RemoveResolveResult { + Resolved(Vec), + Ambiguous { + query: String, + candidates: Vec, + }, + NotInstalled(String), +} + +pub struct RemoveReport { + pub removed: Vec, + pub failed: Vec, +} + +pub struct RemovedInfo { + pub pkg_name: String, + pub pkg_id: String, + pub repo_name: String, + pub version: String, +} + +// ---- Update ---- + +pub struct UpdateInfo { + pub pkg_name: String, + pub pkg_id: String, + pub repo_name: String, + pub current_version: String, + pub new_version: String, + pub target: InstallTarget, + pub update_toml_url: Option, +} + +pub struct UpdateReport { + pub updated: Vec, + pub failed: Vec, + pub url_updates: Vec, +} + +/// Tracks URL packages that need their packages.toml updated after successful update. 
+pub struct UrlUpdateInfo { + pub pkg_name: String, + pub new_version: String, + pub new_url: Option, +} + +// ---- Search / List ---- + +pub struct SearchResult { + pub packages: Vec, + pub total_count: usize, +} + +pub struct SearchEntry { + pub package: Package, + pub installed: bool, +} + +pub struct PackageListResult { + pub packages: Vec, + pub total: usize, +} + +pub struct PackageListEntry { + pub package: Package, + pub installed: bool, +} + +pub struct InstalledListResult { + pub packages: Vec, + pub total_count: usize, + pub total_size: u64, +} + +pub struct InstalledEntry { + pub package: InstalledPackage, + pub disk_size: u64, + pub is_healthy: bool, +} + +// ---- Health ---- + +pub struct HealthReport { + pub path_configured: bool, + pub bin_path: PathBuf, + pub broken_packages: Vec, + pub broken_symlinks: Vec, +} + +pub struct BrokenPackage { + pub pkg_name: String, + pub pkg_id: String, + pub installed_path: String, +} + +// ---- Apply ---- + +/// Result of comparing declared packages vs installed packages. +#[derive(Default)] +pub struct ApplyDiff { + /// Packages to install (declared but not installed). + pub to_install: Vec<(soar_config::packages::ResolvedPackage, InstallTarget)>, + /// Packages to update (version mismatch). + pub to_update: Vec<(soar_config::packages::ResolvedPackage, InstallTarget)>, + /// Packages to remove (installed but not declared, only with --prune). + pub to_remove: Vec, + /// Packages already in sync. + pub in_sync: Vec, + /// Packages not found in metadata. + pub not_found: Vec, + /// Pending version updates for packages.toml (package_name, version). 
+ pub pending_version_updates: Vec<(String, String)>, +} + +impl ApplyDiff { + pub fn has_changes(&self) -> bool { + !self.to_install.is_empty() || !self.to_update.is_empty() || !self.to_remove.is_empty() + } + + pub fn has_toml_updates(&self) -> bool { + !self.pending_version_updates.is_empty() + } +} + +pub struct ApplyReport { + pub installed_count: usize, + pub updated_count: usize, + pub removed_count: usize, + pub failed_count: usize, +} + +// ---- Run ---- + +pub enum PrepareRunResult { + Ready(PathBuf), + Ambiguous(AmbiguousPackage), +} + +pub struct RunResult { + pub exit_code: i32, +} + +// ---- Switch (use) ---- + +pub struct VariantInfo { + pub package: InstalledPackage, + pub is_active: bool, +} diff --git a/crates/soar-operations/src/update.rs b/crates/soar-operations/src/update.rs new file mode 100644 index 00000000..0bef6fd8 --- /dev/null +++ b/crates/soar-operations/src/update.rs @@ -0,0 +1,534 @@ +use std::collections::HashSet; + +use soar_config::packages::{PackagesConfig, ResolvedPackage}; +use soar_core::{ + database::{ + connection::DieselDatabase, + models::{InstalledPackage, Package}, + }, + package::{ + install::InstallTarget, + query::PackageQuery, + release_source::{run_version_command, ReleaseSource}, + update::remove_old_versions, + url::UrlPackage, + }, + utils::substitute_placeholders, + SoarResult, +}; +use soar_db::repository::{ + core::{CoreRepository, SortDirection}, + metadata::MetadataRepository, +}; +use soar_events::{SoarEvent, UpdateCheckStatus, UpdateCleanupStage}; +use tracing::{debug, warn}; + +use crate::{ + install::perform_installation, progress::next_op_id, InstallOptions, SoarContext, UpdateInfo, + UpdateReport, UrlUpdateInfo, +}; + +/// Check for available updates. +/// +/// If `packages` is `Some`, only checks the specified packages. +/// If `None`, checks all updatable packages. 
+pub async fn check_updates( + ctx: &SoarContext, + packages: Option<&[String]>, +) -> SoarResult> { + debug!("checking for updates"); + let metadata_mgr = ctx.metadata_manager().await?; + let diesel_db = ctx.diesel_core_db()?.clone(); + + let packages_config = PackagesConfig::load(None).ok(); + let resolved_packages = packages_config + .as_ref() + .map(|c| c.resolved_packages()) + .unwrap_or_default(); + + let mut updates = Vec::new(); + + if let Some(packages) = packages { + for package in packages { + let query = PackageQuery::try_from(package.as_str())?; + + let installed_pkgs: Vec = diesel_db + .with_conn(|conn| { + CoreRepository::list_filtered( + conn, + query.repo_name.as_deref(), + query.name.as_deref(), + query.pkg_id.as_deref(), + query.version.as_deref(), + Some(true), + None, + Some(1), + Some(SortDirection::Asc), + ) + })? + .into_iter() + .map(Into::into) + .collect(); + + for pkg in installed_pkgs { + if pkg.repo_name == "local" { + if let Some(update_info) = check_local_update(&pkg, &resolved_packages, ctx)? { + updates.push(update_info); + } + continue; + } + + if let Some(update_info) = check_repo_update(&pkg, metadata_mgr, &diesel_db, ctx)? { + updates.push(update_info); + } + } + } + } else { + // Check all updatable packages + let installed_packages: Vec = diesel_db + .with_conn(CoreRepository::list_updatable)? + .into_iter() + .map(Into::into) + .collect(); + + let local_packages: Vec = diesel_db + .with_conn(|conn| { + CoreRepository::list_filtered( + conn, + Some("local"), + None, + None, + None, + Some(true), + None, + None, + None, + ) + })? + .into_iter() + .map(Into::into) + .collect(); + + for pkg in local_packages { + if let Some(update_info) = check_local_update(&pkg, &resolved_packages, ctx)? { + updates.push(update_info); + } + } + + for pkg in installed_packages { + if pkg.repo_name == "local" { + continue; + } + + if let Some(update_info) = check_repo_update(&pkg, metadata_mgr, &diesel_db, ctx)? 
{ + updates.push(update_info); + } + } + } + + Ok(updates) +} + +fn check_repo_update( + pkg: &InstalledPackage, + metadata_mgr: &soar_core::database::connection::MetadataManager, + diesel_db: &DieselDatabase, + ctx: &SoarContext, +) -> SoarResult> { + let new_pkg: Option = metadata_mgr + .query_repo(&pkg.repo_name, |conn| { + MetadataRepository::find_newer_version(conn, &pkg.pkg_name, &pkg.pkg_id, &pkg.version) + })? + .flatten() + .map(|p| { + let mut package: Package = p.into(); + package.repo_name = pkg.repo_name.clone(); + package + }); + + let Some(package) = new_pkg else { + ctx.events().emit(SoarEvent::UpdateCheck { + pkg_name: pkg.pkg_name.clone(), + pkg_id: pkg.pkg_id.clone(), + status: UpdateCheckStatus::UpToDate { + version: pkg.version.clone(), + }, + }); + return Ok(None); + }; + + // Check if the new version is already installed + let new_version_installed = get_existing(&package, diesel_db)?; + if let Some(ref installed) = new_version_installed { + if installed.is_installed { + return Ok(None); + } + } + + ctx.events().emit(SoarEvent::UpdateCheck { + pkg_name: pkg.pkg_name.clone(), + pkg_id: pkg.pkg_id.clone(), + status: UpdateCheckStatus::Available { + current_version: pkg.version.clone(), + new_version: package.version.clone(), + }, + }); + + Ok(Some(UpdateInfo { + pkg_name: pkg.pkg_name.clone(), + pkg_id: pkg.pkg_id.clone(), + repo_name: pkg.repo_name.clone(), + current_version: pkg.version.clone(), + new_version: package.version.clone(), + target: InstallTarget { + package, + existing_install: Some(pkg.clone()), + pinned: pkg.pinned, + profile: Some(pkg.profile.clone()), + portable: pkg.portable_path.clone(), + portable_home: pkg.portable_home.clone(), + portable_config: pkg.portable_config.clone(), + portable_share: pkg.portable_share.clone(), + portable_cache: pkg.portable_cache.clone(), + ..Default::default() + }, + update_toml_url: None, + })) +} + +fn check_local_update( + pkg: &InstalledPackage, + resolved_packages: &[ResolvedPackage], + 
ctx: &SoarContext, +) -> SoarResult> { + let resolved = resolved_packages + .iter() + .find(|r| r.name == pkg.pkg_name && has_update_source(r)); + + let Some(resolved) = resolved else { + ctx.events().emit(SoarEvent::UpdateCheck { + pkg_name: pkg.pkg_name.clone(), + pkg_id: pkg.pkg_id.clone(), + status: UpdateCheckStatus::Skipped { + reason: "no update source configured".into(), + }, + }); + return Ok(None); + }; + + if resolved.pinned { + ctx.events().emit(SoarEvent::UpdateCheck { + pkg_name: pkg.pkg_name.clone(), + pkg_id: pkg.pkg_id.clone(), + status: UpdateCheckStatus::Skipped { + reason: "pinned".into(), + }, + }); + return Ok(None); + } + + let is_github_or_gitlab = resolved.github.is_some() || resolved.gitlab.is_some(); + + let (version, download_url, size, update_toml_url) = + if let Some(ref cmd) = resolved.version_command { + let result = match run_version_command(cmd) { + Ok(r) => r, + Err(e) => { + warn!("Failed to run version_command for {}: {}", pkg.pkg_name, e); + return Ok(None); + } + }; + + let v = result + .version + .strip_prefix('v') + .unwrap_or(&result.version) + .to_string(); + + let installed_version = pkg.version.strip_prefix('v').unwrap_or(&pkg.version); + if v == installed_version { + ctx.events().emit(SoarEvent::UpdateCheck { + pkg_name: pkg.pkg_name.clone(), + pkg_id: pkg.pkg_id.clone(), + status: UpdateCheckStatus::UpToDate { + version: pkg.version.clone(), + }, + }); + return Ok(None); + } + + let (url, should_update_toml_url) = match result.download_url { + Some(url) => (url, true), + None => { + match &resolved.url { + Some(url) => (substitute_placeholders(url, Some(&v)), false), + None => { + warn!( + "version_command returned no URL and no url field configured for {}", + pkg.pkg_name + ); + return Ok(None); + } + } + } + }; + + let toml_url = if is_github_or_gitlab || !should_update_toml_url { + None + } else { + Some(url.clone()) + }; + (v, url, result.size, toml_url) + } else { + let release_source = match 
ReleaseSource::from_resolved(resolved) { + Some(s) => s, + None => { + warn!("No release source configured for {}", pkg.pkg_name); + return Ok(None); + } + }; + let release = match release_source.resolve() { + Ok(r) => r, + Err(e) => { + warn!("Failed to check for updates for {}: {}", pkg.pkg_name, e); + return Ok(None); + } + }; + + let v = release + .version + .strip_prefix('v') + .unwrap_or(&release.version) + .to_string(); + + let installed_version = pkg.version.strip_prefix('v').unwrap_or(&pkg.version); + if v == installed_version { + ctx.events().emit(SoarEvent::UpdateCheck { + pkg_name: pkg.pkg_name.clone(), + pkg_id: pkg.pkg_id.clone(), + status: UpdateCheckStatus::UpToDate { + version: pkg.version.clone(), + }, + }); + return Ok(None); + } + + let url = if is_github_or_gitlab { + None + } else { + Some(release.download_url.clone()) + }; + (v, release.download_url, release.size, url) + }; + + let mut updated_url_pkg = UrlPackage::from_remote( + &download_url, + Some(&pkg.pkg_name), + Some(&version), + pkg.pkg_type.as_deref(), + Some(&pkg.pkg_id), + )?; + updated_url_pkg.size = size; + + ctx.events().emit(SoarEvent::UpdateCheck { + pkg_name: pkg.pkg_name.clone(), + pkg_id: pkg.pkg_id.clone(), + status: UpdateCheckStatus::Available { + current_version: pkg.version.clone(), + new_version: version.clone(), + }, + }); + + let target = InstallTarget { + package: updated_url_pkg.to_package(), + existing_install: Some(pkg.clone()), + pinned: resolved.pinned, + profile: resolved.profile.clone(), + portable: resolved.portable.as_ref().and_then(|p| p.path.clone()), + portable_home: resolved.portable.as_ref().and_then(|p| p.home.clone()), + portable_config: resolved.portable.as_ref().and_then(|p| p.config.clone()), + portable_share: resolved.portable.as_ref().and_then(|p| p.share.clone()), + portable_cache: resolved.portable.as_ref().and_then(|p| p.cache.clone()), + entrypoint: resolved.entrypoint.clone(), + binaries: resolved.binaries.clone(), + nested_extract: 
resolved.nested_extract.clone(), + extract_root: resolved.extract_root.clone(), + hooks: resolved.hooks.clone(), + build: resolved.build.clone(), + sandbox: resolved.sandbox.clone(), + }; + + Ok(Some(UpdateInfo { + pkg_name: pkg.pkg_name.clone(), + pkg_id: pkg.pkg_id.clone(), + repo_name: pkg.repo_name.clone(), + current_version: pkg.version.clone(), + new_version: version, + target, + update_toml_url, + })) +} + +fn has_update_source(resolved: &ResolvedPackage) -> bool { + resolved.version_command.is_some() || resolved.github.is_some() || resolved.gitlab.is_some() +} + +fn get_existing( + package: &Package, + diesel_db: &DieselDatabase, +) -> SoarResult> { + let existing = diesel_db.with_conn(|conn| { + CoreRepository::find_exact( + conn, + &package.repo_name, + &package.pkg_name, + &package.pkg_id, + &package.version, + ) + })?; + + Ok(existing.map(Into::into)) +} + +/// Perform updates for the given update targets. +/// +/// Each update is essentially an install of the new version followed by +/// cleanup of old versions (unless `keep_old` is true). 
+pub async fn perform_update( + ctx: &SoarContext, + updates: Vec, + keep_old: bool, +) -> SoarResult { + debug!( + count = updates.len(), + keep_old = keep_old, + "performing updates" + ); + + let packages_config = PackagesConfig::load(None).ok(); + let resolved_packages = packages_config + .as_ref() + .map(|c| c.resolved_packages()) + .unwrap_or_default(); + + // Collect URL update tracking info before we consume the updates + let url_tracking: Vec<(String, String, Option)> = updates + .iter() + .filter(|u| u.repo_name == "local") + .filter_map(|u| { + resolved_packages + .iter() + .find(|r| r.name == u.pkg_name && has_update_source(r)) + .map(|_| { + ( + u.pkg_name.clone(), + u.new_version.clone(), + u.update_toml_url.clone(), + ) + }) + }) + .collect(); + + let targets: Vec = updates.into_iter().map(|u| u.target).collect(); + + let options = InstallOptions { + no_verify: false, + ..Default::default() + }; + + let install_report = perform_installation(ctx, targets.clone(), &options).await?; + + // Clean up old versions only for successfully updated packages + if !keep_old { + let diesel_db = ctx.diesel_core_db()?.clone(); + let succeeded: HashSet<(&str, &str)> = install_report + .installed + .iter() + .map(|i| (i.pkg_name.as_str(), i.pkg_id.as_str())) + .collect(); + + for target in &targets { + let pkg = &target.package; + if !succeeded.contains(&(pkg.pkg_name.as_str(), pkg.pkg_id.as_str())) { + continue; + } + + let op_id = next_op_id(); + ctx.events().emit(SoarEvent::UpdateCleanup { + op_id, + pkg_name: pkg.pkg_name.clone(), + pkg_id: pkg.pkg_id.clone(), + old_version: target + .existing_install + .as_ref() + .map(|e| e.version.clone()) + .unwrap_or_default(), + stage: UpdateCleanupStage::Removing, + }); + + let _ = remove_old_versions(pkg, &diesel_db, false); + + ctx.events().emit(SoarEvent::UpdateCleanup { + op_id, + pkg_name: pkg.pkg_name.clone(), + pkg_id: pkg.pkg_id.clone(), + old_version: target + .existing_install + .as_ref() + .map(|e| 
e.version.clone()) + .unwrap_or_default(), + stage: UpdateCleanupStage::Complete { + size_freed: None, + }, + }); + } + } + + // Update packages.toml for URL packages + let mut url_updates = Vec::new(); + let diesel_db = ctx.diesel_core_db()?; + for (pkg_name, new_version, new_url) in url_tracking { + let is_installed = diesel_db + .with_conn(|conn| { + CoreRepository::list_filtered( + conn, + Some("local"), + Some(&pkg_name), + None, + Some(&new_version), + Some(true), + None, + Some(1), + None, + ) + }) + .map(|pkgs| !pkgs.is_empty()) + .unwrap_or(false); + + if is_installed { + if let Err(e) = PackagesConfig::update_package( + &pkg_name, + new_url.as_deref(), + Some(&new_version), + None, + ) { + warn!( + "Failed to update version for '{}' in packages.toml: {}", + pkg_name, e + ); + } + + url_updates.push(UrlUpdateInfo { + pkg_name, + new_version, + new_url, + }); + } + } + + Ok(UpdateReport { + updated: install_report.installed, + failed: install_report.failed, + url_updates, + }) +} diff --git a/crates/soar-operations/src/utils.rs b/crates/soar-operations/src/utils.rs new file mode 100644 index 00000000..ae2aa035 --- /dev/null +++ b/crates/soar-operations/src/utils.rs @@ -0,0 +1,320 @@ +use std::{ + collections::HashSet, + fs, + os::{unix, unix::fs::PermissionsExt}, + path::{Path, PathBuf}, +}; + +use soar_config::{ + config::Config, + packages::{BinaryMapping, PackageHooks, PackagesConfig, SandboxConfig}, +}; +use soar_core::{ + database::models::Package, + error::{ErrorContext, SoarError}, + utils::substitute_placeholders, + SoarResult, +}; +use soar_db::models::types::{PackageProvide, ProvideStrategy}; +use soar_utils::fs::is_elf; + +/// Check if a package should have desktop integration (desktop files, icons). 
+pub fn has_desktop_integration(package: &Package, config: &Config) -> bool { + match package.desktop_integration { + Some(false) => false, + _ => config.has_desktop_integration(&package.repo_name), + } +} + +/// Look up hooks and sandbox configuration for a package from packages.toml. +pub fn get_package_hooks(pkg_name: &str) -> (Option, Option) { + let config = match PackagesConfig::load(None) { + Ok(c) => c, + Err(_) => return (None, None), + }; + + config + .resolved_packages() + .into_iter() + .find(|p| p.name == pkg_name) + .map(|p| (p.hooks, p.sandbox)) + .unwrap_or((None, None)) +} + +/// Creates symlinks from installed package binaries to the bin directory. +pub async fn mangle_package_symlinks( + install_dir: &Path, + bin_dir: &Path, + provides: Option<&[PackageProvide]>, + pkg_name: &str, + version: &str, + entrypoint: Option<&str>, + binaries: Option<&[BinaryMapping]>, +) -> SoarResult> { + let mut symlinks = Vec::new(); + + if let Some(bins) = binaries { + if !bins.is_empty() { + for mapping in bins { + let source_pattern = substitute_placeholders(&mapping.source, Some(version)); + let source_paths: Vec = fs::read_dir(install_dir) + .with_context(|| format!("reading directory {}", install_dir.display()))? 
+ .filter_map(|entry| entry.ok()) + .filter(|entry| { + let name = entry.file_name(); + fast_glob::glob_match(&source_pattern, name.to_string_lossy().to_string()) + }) + .map(|entry| entry.path()) + .collect(); + + if source_paths.is_empty() { + return Err(SoarError::Custom(format!( + "Binary source '{}' not found in package", + source_pattern + ))); + } + + let single_match = source_paths.len() == 1; + for source_path in source_paths { + let link_name = if single_match { + mapping.link_as.as_deref() + } else { + None + } + .unwrap_or_else(|| { + source_path + .file_name() + .and_then(|n| n.to_str()) + .unwrap_or(&mapping.source) + }); + let link_path = bin_dir.join(link_name); + + set_executable(&source_path)?; + + if link_path.is_symlink() || link_path.is_file() { + std::fs::remove_file(&link_path).with_context(|| { + format!("removing existing file/symlink at {}", link_path.display()) + })?; + } + + unix::fs::symlink(&source_path, &link_path).with_context(|| { + format!( + "creating symlink {} -> {}", + source_path.display(), + link_path.display() + ) + })?; + symlinks.push((source_path, link_path)); + } + } + return Ok(symlinks); + } + } + + let mut processed_paths = HashSet::new(); + let provides = provides.unwrap_or_default(); + for provide in provides { + let real_path = install_dir.join(provide.name.clone()); + let mut symlink_targets = Vec::new(); + + if let Some(ref target) = provide.target { + if provide.strategy.is_some() { + let target_path = bin_dir.join(target); + if processed_paths.insert(target_path.clone()) { + symlink_targets.push(target_path); + } + } + }; + + let needs_original_symlink = matches!( + (provide.target.as_ref(), provide.strategy.clone()), + (Some(_), Some(ProvideStrategy::KeepBoth)) | (None, _) + ); + + if needs_original_symlink { + let original_path = bin_dir.join(&provide.name); + if processed_paths.insert(original_path.clone()) { + symlink_targets.push(original_path); + } + } + + for target_path in symlink_targets { + if 
target_path.is_symlink() || target_path.is_file() { + std::fs::remove_file(&target_path) + .with_context(|| format!("removing provide {}", target_path.display()))?; + } + unix::fs::symlink(&real_path, &target_path).with_context(|| { + format!( + "creating symlink {} -> {}", + real_path.display(), + target_path.display() + ) + })?; + symlinks.push((real_path.clone(), target_path)); + } + } + + if provides.is_empty() { + let soar_syms = install_dir.join("SOAR_SYMS"); + let (is_syms, binaries_dir) = if soar_syms.is_dir() { + (true, soar_syms.as_path()) + } else { + (false, install_dir) + }; + + if let Some(executable) = + find_executable(install_dir, binaries_dir, is_syms, pkg_name, entrypoint)? + { + set_executable(&executable)?; + + let symlink_name = bin_dir.join(pkg_name); + if symlink_name.is_symlink() || symlink_name.is_file() { + std::fs::remove_file(&symlink_name).with_context(|| { + format!( + "removing existing file/symlink at {}", + symlink_name.display() + ) + })?; + } + unix::fs::symlink(&executable, &symlink_name).with_context(|| { + format!( + "creating symlink {} -> {}", + executable.display(), + symlink_name.display() + ) + })?; + symlinks.push((executable, symlink_name)); + } + } + Ok(symlinks) +} + +fn set_executable(path: &Path) -> SoarResult<()> { + let metadata = + fs::metadata(path).with_context(|| format!("reading metadata for {}", path.display()))?; + let mut perms = metadata.permissions(); + let mode = perms.mode(); + if mode & 0o111 == 0 { + perms.set_mode(mode | 0o111); + fs::set_permissions(path, perms) + .with_context(|| format!("setting executable permissions on {}", path.display()))?; + } + Ok(()) +} + +fn find_executable( + install_dir: &Path, + binaries_dir: &Path, + is_syms: bool, + pkg_name: &str, + entrypoint: Option<&str>, +) -> SoarResult> { + if let Some(entry) = entrypoint { + let entrypoint_path = install_dir.join(entry); + if entrypoint_path.is_file() { + return Ok(Some(entrypoint_path)); + } + if binaries_dir != install_dir 
{ + let entrypoint_in_syms = binaries_dir.join(entry); + if entrypoint_in_syms.is_file() { + return Ok(Some(entrypoint_in_syms)); + } + } + } + + let files: Vec = fs::read_dir(binaries_dir) + .with_context(|| { + format!( + "reading directory {} for executable discovery", + binaries_dir.display() + ) + })? + .filter_map(|e| e.ok()) + .map(|e| e.path()) + .filter(|p| p.is_file() && (is_syms || is_elf(p))) + .collect(); + + let pkg_name_lower = pkg_name.to_lowercase(); + + if let Some(found) = find_matching_executable(&files, pkg_name, &pkg_name_lower) { + return Ok(Some(found)); + } + + let fallback_dirs = ["bin", "usr/bin", "usr/local/bin"]; + for fallback in fallback_dirs { + let fallback_path = install_dir.join(fallback); + if fallback_path.is_dir() { + let exact_path = fallback_path.join(pkg_name); + if exact_path.is_file() && is_elf(&exact_path) { + return Ok(Some(exact_path)); + } + if let Ok(entries) = fs::read_dir(&fallback_path) { + let fallback_files: Vec = entries + .filter_map(|e| e.ok()) + .map(|e| e.path()) + .filter(|p| p.is_file() && is_elf(p)) + .collect(); + if let Some(found) = + find_matching_executable(&fallback_files, pkg_name, &pkg_name_lower) + { + return Ok(Some(found)); + } + } + } + } + + let mut all_files = Vec::new(); + collect_executables_recursive(install_dir, &mut all_files); + + if let Some(found) = find_matching_executable(&all_files, pkg_name, &pkg_name_lower) { + return Ok(Some(found)); + } + + Ok(all_files.into_iter().next()) +} + +fn collect_executables_recursive(dir: &Path, files: &mut Vec) { + let Ok(entries) = fs::read_dir(dir) else { + return; + }; + for entry in entries.filter_map(|e| e.ok()) { + let path = entry.path(); + if path.is_dir() { + collect_executables_recursive(&path, files); + } else if path.is_file() && is_elf(&path) { + files.push(path); + } + } +} + +fn find_matching_executable( + files: &[PathBuf], + pkg_name: &str, + pkg_name_lower: &str, +) -> Option { + files + .iter() + .find(|p| { + p.file_name() + 
.and_then(|n| n.to_str()) + .map(|n| n == pkg_name) + .unwrap_or(false) + }) + .or_else(|| { + files.iter().find(|p| { + p.file_name() + .and_then(|n| n.to_str()) + .map(|n| n.to_lowercase() == *pkg_name_lower) + .unwrap_or(false) + }) + }) + .or_else(|| { + files.iter().find(|p| { + p.file_stem() + .and_then(|n| n.to_str()) + .map(|n| n.to_lowercase() == *pkg_name_lower) + .unwrap_or(false) + }) + }) + .cloned() +}