diff --git a/Cargo.lock b/Cargo.lock index fe03fb8845..9c0f21c118 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2245,6 +2245,15 @@ version = "2.7.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "32a282da65faaf38286cf3be983213fcf1d2e2a58700e808f83f4ea9a4804bc0" +[[package]] +name = "memmap2" +version = "0.9.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "714098028fe011992e1c3962653c96b2d578c4b4bce9036e15ff220319b1e0e3" +dependencies = [ + "libc", +] + [[package]] name = "miette" version = "7.6.0" @@ -4955,16 +4964,19 @@ dependencies = [ "arc-swap", "async-stream", "async-trait", + "bytes", "colored", "dashmap", "data-encoding", "enum_dispatch", + "flatbuffers", "format_serde_error", "futures", "glob", "ignore", "indexmap 2.11.0", "itertools 0.14.0", + "memmap2", "miette", "nom", "nom-supreme", @@ -4980,7 +4992,10 @@ dependencies = [ "serde_yaml 0.9.34+deprecated", "spfs", "spfstest", + "spk-config", + "spk-proto", "spk-schema", + "spk-solve-macros", "sys-info", "tar", "tempfile", diff --git a/Cargo.toml b/Cargo.toml index c69e74278e..b3bfb54917 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -80,6 +80,7 @@ indicatif = "0.17.8" is_default_derive_macro = { path = "crates/is_default_derive_macro" } itertools = "0.14" libc = "0.2.172" +memmap2 = "0.9.10" miette = "7.0" nix = { version = "0.29", features = ["mount", "sched", "user"] } nom = "7.1" diff --git a/crates/spk-build/src/archive_test.rs b/crates/spk-build/src/archive_test.rs index f9bd3562ba..a6be70b3b8 100644 --- a/crates/spk-build/src/archive_test.rs +++ b/crates/spk-build/src/archive_test.rs @@ -42,7 +42,9 @@ async fn test_archive_create_parents(#[case] solver: SolverImpl) { let filename = rt.tmpdir.path().join("deep/nested/path/archive.spk"); let repo = match &*rt.tmprepo { spk_solve::RepositoryHandle::SPFS(repo) => repo, - spk_solve::RepositoryHandle::Mem(_) | spk_solve::RepositoryHandle::Runtime(_) => { + spk_solve::RepositoryHandle::Mem(_) + | 
spk_solve::RepositoryHandle::Runtime(_) + | spk_solve::RepositoryHandle::Indexed(_) => { panic!("only spfs repositories are supported") } }; diff --git a/crates/spk-cli/group3/src/cmd_export.rs b/crates/spk-cli/group3/src/cmd_export.rs index 02147bf4da..d4b5e3027b 100644 --- a/crates/spk-cli/group3/src/cmd_export.rs +++ b/crates/spk-cli/group3/src/cmd_export.rs @@ -52,7 +52,9 @@ impl Run for Export { .iter() .map(|repo| match &**repo { storage::RepositoryHandle::SPFS(repo) => Ok(repo), - storage::RepositoryHandle::Mem(_) | storage::RepositoryHandle::Runtime(_) => { + storage::RepositoryHandle::Mem(_) + | storage::RepositoryHandle::Runtime(_) + | storage::RepositoryHandle::Indexed(_) => { bail!("Only spfs repositories are supported") } }) diff --git a/crates/spk-cli/group3/src/cmd_import_test.rs b/crates/spk-cli/group3/src/cmd_import_test.rs index b1b15ee189..0022c4a753 100644 --- a/crates/spk-cli/group3/src/cmd_import_test.rs +++ b/crates/spk-cli/group3/src/cmd_import_test.rs @@ -44,7 +44,9 @@ async fn test_archive_io(#[case] solver: SolverImpl) { filename.ensure(); let repo = match &*rt.tmprepo { spk_solve::RepositoryHandle::SPFS(repo) => repo, - spk_solve::RepositoryHandle::Mem(_) | spk_solve::RepositoryHandle::Runtime(_) => { + spk_solve::RepositoryHandle::Mem(_) + | spk_solve::RepositoryHandle::Runtime(_) + | spk_solve::RepositoryHandle::Indexed(_) => { panic!("only spfs repositories are supported") } }; diff --git a/crates/spk-config/src/config.rs b/crates/spk-config/src/config.rs index 59e40dce22..c7224f69f2 100644 --- a/crates/spk-config/src/config.rs +++ b/crates/spk-config/src/config.rs @@ -87,6 +87,41 @@ pub struct Solver { /// Name of the solver whose output to show when multiple solvers are being run. pub solver_to_show: String, + + /// Whether to get the solver to use repository indexes, if + /// available, instead of the repository directly. + pub use_indexes: bool, + + /// Default setting for indexes, if using indexes is enabled for + /// the solver. 
+ pub indexes: Index, +} + +/// The settings for one or more indexes +#[derive(Clone, Default, Debug, Deserialize, Serialize)] +#[serde(default)] +pub struct Index { + /// Whether to validate the index data before using it. + /// Validating is safer but can add overhead at the start of a + /// solve that uses indexes. + pub verify_before_use: bool, + + /// What kind of index to use. Only applies if there is more than + /// one kind of index available for the repository. The default is + /// 'flatb', a flatbuffers file based index. + pub kind: String, +} + +/// The settings for a single repository +#[derive(Clone, Default, Debug, Deserialize, Serialize)] +#[serde(default)] +pub struct Repository { + /// Whether to use an index with this repository, if one is + /// available. + pub use_index: bool, + + /// Setting for the repository's index, if an index is enabled. + pub index: Index, } #[derive(Clone, Default, Debug, Deserialize, Serialize)] #[serde(default)] pub struct Config { @@ -142,6 +177,7 @@ pub struct Config { // with environment variables. 
pub sentry: Sentry, pub solver: Solver, + pub repositories: HashMap, pub statsd: Statsd, pub metadata: Metadata, pub cli: Cli, diff --git a/crates/spk-schema/src/fb_converter.rs b/crates/spk-schema/src/fb_converter.rs index 2a65aecf63..31d4aa4fc2 100644 --- a/crates/spk-schema/src/fb_converter.rs +++ b/crates/spk-schema/src/fb_converter.rs @@ -4,7 +4,6 @@ use std::collections::{BTreeMap, BTreeSet}; use std::str::FromStr; -use std::sync::Arc; use spk_schema_foundation::IsDefault; use spk_schema_foundation::ident::{ @@ -1096,7 +1095,7 @@ pub fn component_specs_to_fb_component_specs<'a>( pub fn embedded_pkg_specs_to_fb_embedded_package_specs<'a>( builder: &mut flatbuffers::FlatBufferBuilder<'a>, - embedded: Arc>, + embedded: &EmbeddedPackagesList, ) -> Option< flatbuffers::WIPOffset< flatbuffers::Vector< diff --git a/crates/spk-schema/src/v0/indexed_package.rs b/crates/spk-schema/src/v0/indexed_package.rs index 40a60f05e8..b77f61e200 100644 --- a/crates/spk-schema/src/v0/indexed_package.rs +++ b/crates/spk-schema/src/v0/indexed_package.rs @@ -43,6 +43,7 @@ use crate::{ DownstreamRequirements, EmbeddedPackagesList, Error, + Inheritance, Opt, Package, PackageMut, @@ -503,15 +504,42 @@ impl DownstreamRequirements for IndexedPackage { &self, _components: impl IntoIterator, ) -> Cow<'_, RequirementsList> { - // This is also for build var requirements and inheritance - // used in building. This package has no build data stored. - let err = Error::SpkIndexedPackageDoesNotImplement( - "DownstreamRequirements".to_string(), - "downstream_runtime_requirements".to_string(), - ); - // TODO: should this change the return value, update all the - // caller's handling, and return an error for this implementation? - unreachable!("{err}"); + // This is used when deprecating an embedded stub/package + // and this is exercised during the automated repository tests. 
+ + // This is a version of downstream_runtime_requirements() and + // downstream_requirements() from v0/package_spec.rs modified + // for this kind of flatbuffer backed package. + let build_options = self.build_options(); + let embedded = self.embedded(); + + let requests = build_options + .iter() + .filter_map(|opt| match opt { + Opt::Var(v) => Some(v.with_default_namespace(self.name())), + Opt::Pkg(_) => None, + }) + .chain(embedded.iter().flat_map(|embed| { + embed.build().options.iter().filter_map(|opt| match opt { + Opt::Var(v) => Some(v.with_default_namespace(embed.name())), + Opt::Pkg(_) => None, + }) + })) + .filter(|o| o.inheritance() == Inheritance::Strong || o.required) + .map(|o| { + VarRequest { + // we are assuming that the var here will have a value because + // this is a built binary package + value: o.get_value(None).unwrap_or_default().into(), + var: o.var, + // Index doesn't store an option's description + description: None, + } + }) + .map(RequestWithOptions::Var); + RequirementsList::::try_from_iter(requests) + .map(Cow::Owned) + .expect("build opts (from a RepoIndex) do not contain duplicates") } } diff --git a/crates/spk-solve/src/solvers/resolvo/spk_provider.rs b/crates/spk-solve/src/solvers/resolvo/spk_provider.rs index e0a4788253..3bd0eec16e 100644 --- a/crates/spk-solve/src/solvers/resolvo/spk_provider.rs +++ b/crates/spk-solve/src/solvers/resolvo/spk_provider.rs @@ -138,7 +138,21 @@ impl ResolvoPackageName { // "requires_build_from_source" being true. let mut located_builds = HashSet::new(); + // This gets the current request with the same pkg_name, so we can + // get the build from it. Then if there is a digest in this request, + // and if the pkg build ident matches this one's build ident, then + // this is a direct request for a build, which has implications + // later for deprecated builds processing. 
let root_pkg_request = provider.global_pkg_requests.get(&pkg_name.name); + let direct_build_request = if let Some(pkg_request) = root_pkg_request { + if pkg_request.pkg.build.is_some() { + pkg_request.pkg.build.clone() + } else { + None + } + } else { + None + }; for repo in &provider.repos { let versions = repo @@ -319,6 +333,41 @@ impl ResolvoPackageName { continue; } + // TODO: These checks, and the earlier ones for direct_build_requests, + // may be rendered obsolete with indexes storing the normal data for + // deprecated packages, and indexes being fast. + // + // TODO: if going forward with this, test it with command line added + // package build requests. + // + // A short circuit for avoiding some processing if this build + // is deprecated. Works for an IndexedRepo; all other SPK repos + // return an error. By asking the IndexedRepo directly if the + // build is deprecated, we can avoid reading the full build + // to check. The two cases are: + // + // if build is deprecated and not used in any known direct build reqs, + // then add to excludes and don't read it. + // if build is deprecated and used in a known direct build request, + // then keep it around just in case that build appears in the resolve. + // + if let Ok(true) = repo.is_build_deprecated(ident.as_build()).await { + if let Some(ref build_ident) = direct_build_request + && build_ident == ident.build() + { + // Allow it to continue because, even though it is deprecated, + // the current request is for a specific build and this + // build matches the request. + } else { + // Exclude this deprecated build. 
+ let reason = provider + .pool + .intern_string(format!("{} is deprecated", ident)); + candidates.excluded.push((solvable_id, reason)); + continue; + } + } + match repo.read_package(ident.target()).await { Ok(package) => { // Filter builds that don't satisfy global var requests diff --git a/crates/spk-solve/src/solvers/solver_test.rs b/crates/spk-solve/src/solvers/solver_test.rs index 5379906a5e..12f77907be 100644 --- a/crates/spk-solve/src/solvers/solver_test.rs +++ b/crates/spk-solve/src/solvers/solver_test.rs @@ -44,6 +44,25 @@ fn solver() -> StepSolver { StepSolver::default() } +// Helper for indexed test cases +async fn wrap_repo_for_test(repo: RepositoryHandle, use_index: bool) -> RepositoryHandle { + if use_index { + let mut ir = match spk_storage::IndexedRepository::generate_from_repo(Arc::new(repo)).await + { + Ok(ir) => ir, + Err(err) => { + panic!( + "Unable to make IndexedRepository: Failed to generate an in-mem index from a repo: {err}" + ) + } + }; + ir.set_update_index_after_any_publish(true); + ir.into() + } else { + repo + } +} + /// Asserts that a package exists in the solution at a specific version, /// or that the solution contains a specific set of packages by name. 
/// @@ -180,6 +199,7 @@ async fn test_solver_no_requests(#[case] mut solver: SolverImpl) { #[tokio::test] async fn test_solver_package_with_no_recipe( #[case] mut solver: SolverImpl, + #[values(true, false)] use_index: bool, random_build_id: BuildId, ) { let repo = RepositoryHandle::new_mem(); @@ -197,6 +217,7 @@ async fn test_solver_package_with_no_recipe( repo.publish_package(&spec.into(), &components) .await .unwrap(); + let repo = wrap_repo_for_test(repo, use_index).await; solver.update_options(options); solver.add_repository(Arc::new(repo)); @@ -216,6 +237,7 @@ async fn test_solver_package_with_no_recipe( #[tokio::test] async fn test_solver_package_with_no_recipe_and_impossible_initial_checks( #[case] mut solver: SolverImpl, + #[values(true, false)] use_index: bool, random_build_id: BuildId, ) { init_logging(); @@ -229,6 +251,7 @@ async fn test_solver_package_with_no_recipe_and_impossible_initial_checks( .into_iter() .collect(); repo.publish_package(&spec, &components).await.unwrap(); + let repo = wrap_repo_for_test(repo, use_index).await; solver.update_options(options); solver.add_repository(Arc::new(repo)); @@ -253,7 +276,10 @@ async fn test_solver_package_with_no_recipe_and_impossible_initial_checks( #[case::step(step_solver())] #[case::resolvo(resolvo_solver())] #[tokio::test] -async fn test_solver_package_with_no_recipe_from_cmd_line(#[case] mut solver: SolverImpl) { +async fn test_solver_package_with_no_recipe_from_cmd_line( + #[case] mut solver: SolverImpl, + #[values(true, false)] use_index: bool, +) { let repo = RepositoryHandle::new_mem(); let spec = spec!({"pkg": "my-pkg/1.0.0/4OYMIQUY"}); @@ -266,6 +292,7 @@ async fn test_solver_package_with_no_recipe_from_cmd_line(#[case] mut solver: So .into_iter() .collect(); repo.publish_package(&spec, &components).await.unwrap(); + let repo = wrap_repo_for_test(repo, use_index).await; solver.add_repository(Arc::new(repo)); // Create this one as requested by the command line, rather than the tests @@ -289,9 
+316,11 @@ async fn test_solver_package_with_no_recipe_from_cmd_line(#[case] mut solver: So #[tokio::test] async fn test_solver_package_with_no_recipe_from_cmd_line_and_impossible_initial_checks( #[case] mut solver: SolverImpl, + #[values(true, false)] use_index: bool, ) { init_logging(); let repo = RepositoryHandle::new_mem(); + let repo = wrap_repo_for_test(repo, use_index).await; let spec = spec!({"pkg": "my-pkg/1.0.0/4OYMIQUY"}); @@ -328,9 +357,13 @@ async fn test_solver_package_with_no_recipe_from_cmd_line_and_impossible_initial #[case::step(step_solver())] #[case::resolvo(resolvo_solver())] #[tokio::test] -async fn test_solver_single_package_no_deps(#[case] mut solver: SolverImpl) { +async fn test_solver_single_package_no_deps( + #[case] mut solver: SolverImpl, + #[values(true, false)] use_index: bool, +) { let options = option_map! {}; let repo = make_repo!([{"pkg": "my-pkg/1.0.0"}], options=options.clone()); + let repo = wrap_repo_for_test(repo, use_index).await; solver.update_options(options); solver.add_repository(Arc::new(repo)); @@ -347,7 +380,10 @@ async fn test_solver_single_package_no_deps(#[case] mut solver: SolverImpl) { #[case::step(step_solver())] #[case::resolvo(resolvo_solver())] #[tokio::test] -async fn test_solver_single_package_simple_deps(#[case] mut solver: SolverImpl) { +async fn test_solver_single_package_simple_deps( + #[case] mut solver: SolverImpl, + #[values(true, false)] use_index: bool, +) { let options = option_map! 
{}; let repo = make_repo!( [ @@ -360,6 +396,7 @@ async fn test_solver_single_package_simple_deps(#[case] mut solver: SolverImpl) {"pkg": "pkg-b/1.1.0", "install": {"requirements": [{"pkg": "pkg-a/1.2"}]}}, ] ); + let repo = wrap_repo_for_test(repo, use_index).await; solver.update_options(options); solver.add_repository(Arc::new(repo)); @@ -375,7 +412,10 @@ async fn test_solver_single_package_simple_deps(#[case] mut solver: SolverImpl) #[case::step(step_solver())] #[case::resolvo(resolvo_solver())] #[tokio::test] -async fn test_solver_dependency_abi_compat(#[case] mut solver: SolverImpl) { +async fn test_solver_dependency_abi_compat( + #[case] mut solver: SolverImpl, + #[values(true, false)] use_index: bool, +) { let options = option_map! {}; let repo = make_repo!( [ @@ -391,6 +431,7 @@ async fn test_solver_dependency_abi_compat(#[case] mut solver: SolverImpl) { {"pkg": "pkg-a/0.9.0", "compat": "x.a.b"}, ] ); + let repo = wrap_repo_for_test(repo, use_index).await; solver.update_options(options); solver.add_repository(Arc::new(repo)); @@ -406,7 +447,10 @@ async fn test_solver_dependency_abi_compat(#[case] mut solver: SolverImpl) { #[case::step(step_solver())] #[case::resolvo(resolvo_solver())] #[tokio::test] -async fn test_solver_dependency_incompatible(#[case] mut solver: SolverImpl) { +async fn test_solver_dependency_incompatible( + #[case] mut solver: SolverImpl, + #[values(true, false)] use_index: bool, +) { // test what happens when a dependency is added which is incompatible // with an existing request in the stack let repo = make_repo!( @@ -419,6 +463,7 @@ async fn test_solver_dependency_incompatible(#[case] mut solver: SolverImpl) { }, ] ); + let repo = wrap_repo_for_test(repo, use_index).await; solver.add_repository(Arc::new(repo)); solver.add_request(pinned_request!("my-plugin/1")); @@ -434,7 +479,10 @@ async fn test_solver_dependency_incompatible(#[case] mut solver: SolverImpl) { #[case::step(step_solver())] #[case::resolvo(resolvo_solver())] 
#[tokio::test] -async fn test_solver_dependency_incompatible_stepback(#[case] mut solver: SolverImpl) { +async fn test_solver_dependency_incompatible_stepback( + #[case] mut solver: SolverImpl, + #[values(true, false)] use_index: bool, +) { // test what happens when a dependency is added which is incompatible // with an existing request in the stack - in this case we want the solver // to successfully step back into an older package version with @@ -453,6 +501,7 @@ async fn test_solver_dependency_incompatible_stepback(#[case] mut solver: Solver }, ] ); + let repo = wrap_repo_for_test(repo, use_index).await; solver.add_repository(Arc::new(repo)); solver.add_request(pinned_request!("my-plugin/1")); @@ -469,7 +518,10 @@ async fn test_solver_dependency_incompatible_stepback(#[case] mut solver: Solver #[case::step(step_solver())] #[case::resolvo(resolvo_solver())] #[tokio::test] -async fn test_solver_dependency_already_satisfied(#[case] mut solver: SolverImpl) { +async fn test_solver_dependency_already_satisfied( + #[case] mut solver: SolverImpl, + #[values(true, false)] use_index: bool, +) { // test what happens when a dependency is added which represents // a package which has already been resolved // - and the resolved version satisfies the request @@ -489,6 +541,8 @@ async fn test_solver_dependency_already_satisfied(#[case] mut solver: SolverImpl {"pkg": "dep-2/1.0.0", "install": {"requirements": [{"pkg": "dep-1/1"}]}}, ] ); + let repo = wrap_repo_for_test(repo, use_index).await; + solver.add_repository(Arc::new(repo)); solver.add_request(pinned_request!("pkg-top")); @@ -504,6 +558,7 @@ async fn test_solver_dependency_already_satisfied(#[case] mut solver: SolverImpl #[tokio::test] async fn test_solver_dependency_already_satisfied_conflicting_components( #[case] mut solver: SolverImpl, + #[values(true, false)] use_index: bool, ) { // like test_solver_dependency_already_satisfied but with conflicting components @@ -532,6 +587,8 @@ async fn 
test_solver_dependency_already_satisfied_conflicting_components( {"pkg": "dep-2/1.0.0", "install": {"requirements": [{"pkg": "dep-1:comp2/1"}]}}, ] ); + let repo = wrap_repo_for_test(repo, use_index).await; + solver.add_repository(Arc::new(repo)); solver.add_request(pinned_request!("pkg-top")); @@ -550,7 +607,10 @@ async fn test_solver_dependency_already_satisfied_conflicting_components( #[case::step(step_solver())] #[case::resolvo(resolvo_solver())] #[tokio::test] -async fn test_solver_dependency_reopen_solvable(#[case] mut solver: SolverImpl) { +async fn test_solver_dependency_reopen_solvable( + #[case] mut solver: SolverImpl, + #[values(true, false)] use_index: bool, +) { // test what happens when a dependency is added which represents // a package which has already been resolved // - and the resolved version does not satisfy the request @@ -575,6 +635,8 @@ async fn test_solver_dependency_reopen_solvable(#[case] mut solver: SolverImpl) }, ] ); + let repo = wrap_repo_for_test(repo, use_index).await; + solver.add_repository(Arc::new(repo)); solver.add_request(pinned_request!("my-plugin")); @@ -587,7 +649,10 @@ async fn test_solver_dependency_reopen_solvable(#[case] mut solver: SolverImpl) #[case::step(step_solver())] #[case::resolvo(resolvo_solver())] #[tokio::test] -async fn test_solver_dependency_reiterate(#[case] mut solver: SolverImpl) { +async fn test_solver_dependency_reiterate( + #[case] mut solver: SolverImpl, + #[values(true, false)] use_index: bool, +) { // test what happens when a package iterator must be run through twice // - walking back up the solve graph should reset the iterator to where it was @@ -611,6 +676,8 @@ async fn test_solver_dependency_reiterate(#[case] mut solver: SolverImpl) { }, ] ); + let repo = wrap_repo_for_test(repo, use_index).await; + solver.add_repository(Arc::new(repo)); solver.add_request(pinned_request!("my-plugin")); @@ -623,7 +690,10 @@ async fn test_solver_dependency_reiterate(#[case] mut solver: SolverImpl) { 
#[case::step(step_solver())] #[case::resolvo(resolvo_solver())] #[tokio::test] -async fn test_solver_dependency_reopen_unsolvable(#[case] mut solver: SolverImpl) { +async fn test_solver_dependency_reopen_unsolvable( + #[case] mut solver: SolverImpl, + #[values(true, false)] use_index: bool, +) { // test what happens when a dependency is added which represents // a package which has already been resolved // - and the resolved version does not satisfy the request @@ -646,6 +716,8 @@ async fn test_solver_dependency_reopen_unsolvable(#[case] mut solver: SolverImpl }, ] ); + let repo = wrap_repo_for_test(repo, use_index).await; + solver.add_repository(Arc::new(repo)); solver.add_request(pinned_request!("pkg-top")); @@ -657,7 +729,10 @@ async fn test_solver_dependency_reopen_unsolvable(#[case] mut solver: SolverImpl #[case::step(step_solver())] #[case::resolvo(resolvo_solver())] #[tokio::test] -async fn test_solver_pre_release_config(#[case] mut solver: SolverImpl) { +async fn test_solver_pre_release_config( + #[case] mut solver: SolverImpl, + #[values(true, false)] use_index: bool, +) { let repo = make_repo!( [ {"pkg": "my-pkg/0.9.0"}, @@ -666,6 +741,7 @@ async fn test_solver_pre_release_config(#[case] mut solver: SolverImpl) { {"pkg": "my-pkg/1.0.0-pre.2"}, ] ); + let repo = wrap_repo_for_test(repo, use_index).await; let repo = Arc::new(repo); solver.add_repository(repo.clone()); @@ -693,7 +769,10 @@ async fn test_solver_pre_release_config(#[case] mut solver: SolverImpl) { #[case::step(step_solver())] #[case::resolvo(resolvo_solver())] #[tokio::test] -async fn test_solver_pre_release_config_with_requirements(#[case] mut solver: SolverImpl) { +async fn test_solver_pre_release_config_with_requirements( + #[case] mut solver: SolverImpl, + #[values(true, false)] use_index: bool, +) { let repo = make_repo!( [ {"pkg": "my-pkg/1.0.0-pre.2"}, @@ -705,6 +784,7 @@ async fn test_solver_pre_release_config_with_requirements(#[case] mut solver: So }, ] ); + let repo = 
wrap_repo_for_test(repo, use_index).await; let repo = Arc::new(repo); solver.add_repository(repo); @@ -721,7 +801,10 @@ async fn test_solver_pre_release_config_with_requirements(#[case] mut solver: So #[case::step(step_solver())] #[case::resolvo(resolvo_solver())] #[tokio::test] -async fn test_solver_constraint_only(#[case] mut solver: SolverImpl) { +async fn test_solver_constraint_only( + #[case] mut solver: SolverImpl, + #[values(true, false)] use_index: bool, +) { // test what happens when a dependency is marked as a constraint/optional // and no other request is added // - the constraint is noted @@ -739,6 +822,8 @@ async fn test_solver_constraint_only(#[case] mut solver: SolverImpl) { } ] ); + let repo = wrap_repo_for_test(repo, use_index).await; + solver.add_repository(Arc::new(repo)); solver.add_request(pinned_request!("vnp3")); @@ -750,7 +835,10 @@ async fn test_solver_constraint_only(#[case] mut solver: SolverImpl) { #[case::step(step_solver())] #[case::resolvo(resolvo_solver())] #[tokio::test] -async fn test_solver_constraint_and_request(#[case] mut solver: SolverImpl) { +async fn test_solver_constraint_and_request( + #[case] mut solver: SolverImpl, + #[values(true, false)] use_index: bool, +) { // test what happens when a dependency is marked as a constraint/optional // and also requested by another package // - the constraint is noted @@ -774,6 +862,8 @@ async fn test_solver_constraint_and_request(#[case] mut solver: SolverImpl) { {"pkg": "python/3.8.1"}, ] ); + let repo = wrap_repo_for_test(repo, use_index).await; + solver.add_repository(Arc::new(repo)); solver.add_request(pinned_request!("my-tool")); @@ -788,6 +878,7 @@ async fn test_solver_constraint_and_request(#[case] mut solver: SolverImpl) { #[tokio::test] async fn test_solver_option_compatibility( #[case] mut solver: SolverImpl, + #[values(true, false)] use_index: bool, #[values("~2.0", "~2.7", "~2.7.5", "2,<3", "2.7,<3", "3", "3.7", "3.7.3")] pyver: &str, ) { // test what happens when an option 
is given in the solver @@ -820,6 +911,8 @@ async fn test_solver_option_compatibility( let for_py37 = make_build!(spec, [py37], { "python" => "3.7.3" }); let repo = make_repo!([for_py27, for_py26, for_py37, for_py371]); repo.publish_recipe(&spec).await.unwrap(); + + let repo = wrap_repo_for_test(repo, use_index).await; let repo = Arc::new(repo); // The 'by_build_option_values' build sorting method does not use @@ -866,7 +959,10 @@ async fn test_solver_option_compatibility( #[case::step(step_solver())] #[case::resolvo(resolvo_solver())] #[tokio::test] -async fn test_solver_option_injection(#[case] mut solver: SolverImpl) { +async fn test_solver_option_injection( + #[case] mut solver: SolverImpl, + #[values(true, false)] use_index: bool, +) { // test the options that are defined when a package is resolved // - options are namespaced and added to the environment init_logging(); @@ -893,6 +989,7 @@ async fn test_solver_option_injection(#[case] mut solver: SolverImpl) { let build = make_build!(spec, [pybuild]); let repo = make_repo!([build]); repo.publish_recipe(&spec).await.unwrap(); + let repo = wrap_repo_for_test(repo, use_index).await; solver.add_repository(Arc::new(repo)); solver.add_request(pinned_request!("vnp3")); @@ -921,7 +1018,10 @@ async fn test_solver_option_injection(#[case] mut solver: SolverImpl) { #[case::step(step_solver())] #[case::resolvo(resolvo_solver())] #[tokio::test] -async fn test_solver_build_from_source(#[case] mut solver: SolverImpl) { +async fn test_solver_build_from_source( + #[case] mut solver: SolverImpl, + #[values(true, false)] use_index: bool, +) { init_logging(); // test when no appropriate build exists but the source is available // - the build is skipped @@ -941,6 +1041,7 @@ async fn test_solver_build_from_source(#[case] mut solver: SolverImpl) { ], options={"debug" => "off"} ); + let repo = wrap_repo_for_test(repo, use_index).await; let repo = Arc::new(repo); solver.add_repository(repo.clone()); @@ -978,7 +1079,10 @@ async fn 
test_solver_build_from_source(#[case] mut solver: SolverImpl) { #[case::step(step_solver())] #[case::resolvo(resolvo_solver())] #[tokio::test] -async fn test_solver_build_from_source_unsolvable(#[case] mut solver: SolverImpl) { +async fn test_solver_build_from_source_unsolvable( + #[case] mut solver: SolverImpl, + #[values(true, false)] use_index: bool, +) { let log = init_logging(); // test when no appropriate build exists but the source is available @@ -1008,6 +1112,7 @@ async fn test_solver_build_from_source_unsolvable(#[case] mut solver: SolverImpl // TODO: why is this the case, can we avoid this in the macro? repo.remove_recipe(recipe.ident()).await.ok(); repo.publish_recipe(&recipe).await.unwrap(); + let repo = wrap_repo_for_test(repo, use_index).await; solver.add_repository(Arc::new(repo)); solver.set_binary_only(false); @@ -1109,7 +1214,10 @@ async fn test_solver_build_from_source_dependency(#[case] mut solver: SolverImpl #[case::step(step_solver())] #[case::resolvo(resolvo_solver())] #[tokio::test] -async fn test_solver_build_from_source_dependency_but_hit_loop(#[case] mut solver: SolverImpl) { +async fn test_solver_build_from_source_dependency_but_hit_loop( + #[case] mut solver: SolverImpl, + #[values(true, false)] use_index: bool, +) { // test when no appropriate build exists but the source is available // - the existing build is skipped // - the source package is checked for current options @@ -1151,6 +1259,7 @@ async fn test_solver_build_from_source_dependency_but_hit_loop(#[case] mut solve }, }); repo.force_publish_recipe(&recipe).await.unwrap(); + let repo = wrap_repo_for_test(repo, use_index).await; // the new option value should disqualify the existing build // but a new one should be generated for this set of options @@ -1171,7 +1280,10 @@ async fn test_solver_build_from_source_dependency_but_hit_loop(#[case] mut solve #[case::step(step_solver())] #[case::resolvo(resolvo_solver())] #[tokio::test] -async fn test_solver_deprecated_build(#[case] mut 
solver: SolverImpl) { +async fn test_solver_deprecated_build( + #[case] mut solver: SolverImpl, + #[values(true, false)] use_index: bool, +) { let deprecated = make_build!({"pkg": "my-pkg/1.0.0", "deprecated": true}); let deprecated_build = deprecated.ident().clone(); let repo = make_repo!([ @@ -1179,6 +1291,7 @@ async fn test_solver_deprecated_build(#[case] mut solver: SolverImpl) { {"pkg": "my-pkg/1.0.0"}, deprecated, ]); + let repo = wrap_repo_for_test(repo, use_index).await; let repo = Arc::new(repo); solver.add_repository(repo.clone()); @@ -1215,11 +1328,15 @@ async fn test_solver_deprecated_build(#[case] mut solver: SolverImpl) { #[case::step(step_solver())] #[case::resolvo(resolvo_solver())] #[tokio::test] -async fn test_solver_deprecated_version(#[case] mut solver: SolverImpl) { +async fn test_solver_deprecated_version( + #[case] mut solver: SolverImpl, + #[values(true, false)] use_index: bool, +) { let deprecated = make_build!({"pkg": "my-pkg/1.0.0", "deprecated": true}); let repo = make_repo!( [{"pkg": "my-pkg/0.9.0"}, {"pkg": "my-pkg/1.0.0", "deprecated": true}, deprecated] ); + let repo = wrap_repo_for_test(repo, use_index).await; let repo = Arc::new(repo); solver.add_repository(repo.clone()); @@ -1256,7 +1373,10 @@ async fn test_solver_deprecated_version(#[case] mut solver: SolverImpl) { #[case::step(step_solver())] #[case::resolvo(resolvo_solver())] #[tokio::test] -async fn test_solver_build_from_source_deprecated(#[case] mut solver: SolverImpl) { +async fn test_solver_build_from_source_deprecated( + #[case] mut solver: SolverImpl, + #[values(true, false)] use_index: bool, +) { // test when no appropriate build exists and the main package // has been deprecated, no source build should be allowed @@ -1280,6 +1400,7 @@ async fn test_solver_build_from_source_deprecated(#[case] mut solver: SolverImpl .await .unwrap(); repo.force_publish_recipe(&spec).await.unwrap(); + let repo = wrap_repo_for_test(repo, use_index).await; 
solver.add_repository(Arc::new(repo)); solver.set_binary_only(false); @@ -1306,6 +1427,7 @@ async fn test_solver_build_from_source_deprecated(#[case] mut solver: SolverImpl #[tokio::test] async fn test_solver_build_from_source_deprecated_and_impossible_initial_checks( #[case] mut solver: SolverImpl, + #[values(true, false)] use_index: bool, ) { // test when no appropriate build exists and the main package // has been deprecated, no source build should be allowed @@ -1330,6 +1452,7 @@ async fn test_solver_build_from_source_deprecated_and_impossible_initial_checks( .await .unwrap(); repo.force_publish_recipe(&spec).await.unwrap(); + let repo = wrap_repo_for_test(repo, use_index).await; solver.add_repository(Arc::new(repo)); solver.set_binary_only(false); @@ -1363,7 +1486,10 @@ async fn test_solver_build_from_source_deprecated_and_impossible_initial_checks( #[case::step(step_solver())] #[case::resolvo(resolvo_solver())] #[tokio::test] -async fn test_solver_embedded_package_adds_request(#[case] mut solver: SolverImpl) { +async fn test_solver_embedded_package_adds_request( + #[case] mut solver: SolverImpl, + #[values(true, false)] use_index: bool, +) { // test when there is an embedded package // - the embedded package is added to the solution // - the embedded package is also added as a request in the resolve @@ -1377,6 +1503,7 @@ async fn test_solver_embedded_package_adds_request(#[case] mut solver: SolverImp }, ] ); + let repo = wrap_repo_for_test(repo, use_index).await; solver.add_repository(Arc::new(repo)); solver.add_request(pinned_request!("maya")); @@ -1403,7 +1530,10 @@ async fn test_solver_embedded_package_adds_request(#[case] mut solver: SolverImp #[case::step(step_solver())] #[case::resolvo(resolvo_solver())] #[tokio::test] -async fn test_solver_embedded_package_solvable(#[case] mut solver: SolverImpl) { +async fn test_solver_embedded_package_solvable( + #[case] mut solver: SolverImpl, + #[values(true, false)] use_index: bool, +) { // test when there is an 
embedded package // - the embedded package is added to the solution // - the embedded package resolves existing requests @@ -1422,6 +1552,7 @@ async fn test_solver_embedded_package_solvable(#[case] mut solver: SolverImpl) { }, ] ); + let repo = wrap_repo_for_test(repo, use_index).await; solver.add_repository(Arc::new(repo)); solver.add_request(pinned_request!("qt")); @@ -1443,7 +1574,10 @@ async fn test_solver_embedded_package_solvable(#[case] mut solver: SolverImpl) { #[case::step(step_solver())] #[case::resolvo(resolvo_solver())] #[tokio::test] -async fn test_solver_embedded_package_brings_parent(#[case] mut solver: SolverImpl) { +async fn test_solver_embedded_package_brings_parent( + #[case] mut solver: SolverImpl, + #[values(true, false)] use_index: bool, +) { let repo = make_repo!( [ { @@ -1453,6 +1587,7 @@ async fn test_solver_embedded_package_brings_parent(#[case] mut solver: SolverIm }, ] ); + let repo = wrap_repo_for_test(repo, use_index).await; solver.add_repository(Arc::new(repo)); // Only requesting the embedded package here. @@ -1477,6 +1612,7 @@ async fn test_solver_embedded_package_brings_parent(#[case] mut solver: SolverIm #[tokio::test] async fn test_solver_embedded_package_brings_parent_with_required_var( #[case] mut solver: SolverImpl, + #[values(true, false)] use_index: bool, ) { let repo = make_repo!( [ @@ -1501,6 +1637,7 @@ async fn test_solver_embedded_package_brings_parent_with_required_var( }, ] ); + let repo = wrap_repo_for_test(repo, use_index).await; solver.add_repository(Arc::new(repo)); // Only requesting the embedded package here. 
@@ -1523,7 +1660,10 @@ async fn test_solver_embedded_package_brings_parent_with_required_var( #[case::step(step_solver())] #[case::resolvo(resolvo_solver())] #[tokio::test] -async fn test_solver_resolves_embedded_package_with_required_var(#[case] mut solver: SolverImpl) { +async fn test_solver_resolves_embedded_package_with_required_var( + #[case] mut solver: SolverImpl, + #[values(true, false)] use_index: bool, +) { let repo = make_repo!( [ { @@ -1553,6 +1693,7 @@ async fn test_solver_resolves_embedded_package_with_required_var(#[case] mut sol } ] ); + let repo = wrap_repo_for_test(repo, use_index).await; solver.add_repository(Arc::new(repo)); solver.add_request(pinned_request!("maya")); @@ -1576,7 +1717,10 @@ async fn test_solver_resolves_embedded_package_with_required_var(#[case] mut sol #[case::step(step_solver())] #[case::resolvo(resolvo_solver())] #[tokio::test] -async fn resolve_src_package_with_embedded_package(#[case] mut solver: SolverImpl) { +async fn resolve_src_package_with_embedded_package( + #[case] mut solver: SolverImpl, + #[values(true, false)] use_index: bool, +) { let repo = make_repo!( [ { @@ -1589,6 +1733,7 @@ async fn resolve_src_package_with_embedded_package(#[case] mut solver: SolverImp }, ] ); + let repo = wrap_repo_for_test(repo, use_index).await; solver.add_repository(Arc::new(repo)); solver.add_request(pinned_request!("maya:src/2019.2/src")); @@ -1602,7 +1747,10 @@ async fn resolve_src_package_with_embedded_package(#[case] mut solver: SolverImp #[case::step(step_solver())] #[case::resolvo(resolvo_solver())] #[tokio::test] -async fn test_solver_embedded_package_unsolvable(#[case] mut solver: SolverImpl) { +async fn test_solver_embedded_package_unsolvable( + #[case] mut solver: SolverImpl, + #[values(true, false)] use_index: bool, +) { // test when there is an embedded package // - the embedded package is added to the solution // - the embedded package conflicts with existing requests @@ -1625,6 +1773,7 @@ async fn 
test_solver_embedded_package_unsolvable(#[case] mut solver: SolverImpl) }, ] ); + let repo = wrap_repo_for_test(repo, use_index).await; solver.add_repository(Arc::new(repo)); solver.add_request(pinned_request!("my-plugin")); @@ -1637,7 +1786,10 @@ async fn test_solver_embedded_package_unsolvable(#[case] mut solver: SolverImpl) #[case::step(step_solver())] #[case::resolvo(resolvo_solver())] #[tokio::test] -async fn test_solver_embedded_package_replaces_real_package(#[case] mut solver: SolverImpl) { +async fn test_solver_embedded_package_replaces_real_package( + #[case] mut solver: SolverImpl, + #[values(true, false)] use_index: bool, +) { // test when there is an embedded package // - the embedded package is added to the solution // - any dependencies from the "real" package aren't part of the solution @@ -1671,6 +1823,7 @@ async fn test_solver_embedded_package_replaces_real_package(#[case] mut solver: }, ] ); + let repo = wrap_repo_for_test(repo, use_index).await; solver.add_repository(Arc::new(repo)); // Add qt to the request so "unwanted-dep" becomes part of the solution @@ -1701,6 +1854,7 @@ async fn test_solver_embedded_package_replaces_real_package(#[case] mut solver: #[tokio::test] async fn test_solver_initial_request_impossible_masks_embedded_package_solution( #[case] mut solver: SolverImpl, + #[values(true, false)] use_index: bool, ) { // test when an embedded package and its parent package are // requested and impossible checks are enabled for initial @@ -1723,6 +1877,7 @@ async fn test_solver_initial_request_impossible_masks_embedded_package_solution( }, ] ); + let repo = wrap_repo_for_test(repo, use_index).await; solver.add_repository(Arc::new(repo)); // Ask for the embedded qt package first to ensure the embedded @@ -1755,6 +1910,7 @@ async fn test_solver_initial_request_impossible_masks_embedded_package_solution( #[tokio::test] async fn test_solver_impossible_request_but_embedded_package_makes_solvable( #[case] mut solver: SolverImpl, + #[values(true, 
false)] use_index: bool, ) { // test when there is an embedded package // - the initial request depends on the same package as the embedded package @@ -1799,6 +1955,7 @@ async fn test_solver_impossible_request_but_embedded_package_makes_solvable( } ] ); + let repo = wrap_repo_for_test(repo, use_index).await; solver.add_repository(Arc::new(repo)); solver.add_request(pinned_request!("needs")); @@ -1840,6 +1997,7 @@ async fn test_solver_impossible_request_but_embedded_package_makes_solvable( #[tokio::test] async fn test_multiple_packages_embed_same_package( #[case] mut solver: SolverImpl, + #[values(true, false)] use_index: bool, #[values(true, false)] resolve_validation_impossible_checks: bool, ) { init_logging(); @@ -1874,6 +2032,7 @@ async fn test_multiple_packages_embed_same_package( }, ] ); + let repo = wrap_repo_for_test(repo, use_index).await; solver.add_repository(Arc::new(repo)); solver.add_request(pinned_request!("top-level")); @@ -1900,7 +2059,10 @@ async fn test_multiple_packages_embed_same_package( #[case::step(step_solver())] #[case::resolvo(resolvo_solver())] #[tokio::test] -async fn test_solver_with_impossible_checks_in_build_keys(#[case] mut solver: SolverImpl) { +async fn test_solver_with_impossible_checks_in_build_keys( + #[case] mut solver: SolverImpl, + #[values(true, false)] use_index: bool, +) { let options1 = option_map! {"dep" => "1.0.0"}; let options2 = option_map! 
{"dep" => "2.0.0"}; @@ -1925,6 +2087,7 @@ async fn test_solver_with_impossible_checks_in_build_keys(#[case] mut solver: So build1, build2]); repo.publish_recipe(&a_spec).await.unwrap(); + let repo = wrap_repo_for_test(repo, use_index).await; solver.add_repository(Arc::new(repo)); solver.add_request(pinned_request!("pkg-top")); @@ -1943,7 +2106,10 @@ async fn test_solver_with_impossible_checks_in_build_keys(#[case] mut solver: So #[case::step(step_solver())] #[case::resolvo(resolvo_solver())] #[tokio::test] -async fn test_solver_some_versions_conflicting_requests(#[case] mut solver: SolverImpl) { +async fn test_solver_some_versions_conflicting_requests( + #[case] mut solver: SolverImpl, + #[values(true, false)] use_index: bool, +) { // test when there is a package with some version that have a conflicting dependency // - the solver passes over the one with conflicting // - the solver logs compat info for versions with conflicts @@ -1969,6 +2135,7 @@ async fn test_solver_some_versions_conflicting_requests(#[case] mut solver: Solv {"pkg": "python/3.7.3"}, ] ); + let repo = wrap_repo_for_test(repo, use_index).await; solver.add_repository(Arc::new(repo)); solver.add_request(pinned_request!("my-lib")); @@ -1982,7 +2149,10 @@ async fn test_solver_some_versions_conflicting_requests(#[case] mut solver: Solv #[case::step(step_solver())] #[case::resolvo(resolvo_solver())] #[tokio::test] -async fn test_solver_embedded_request_invalidates(#[case] mut solver: SolverImpl) { +async fn test_solver_embedded_request_invalidates( + #[case] mut solver: SolverImpl, + #[values(true, false)] use_index: bool, +) { // test when a package is resolved with an incompatible embedded pkg // - the solver tries to resolve the package // - there is a conflict in the embedded request @@ -2004,6 +2174,7 @@ async fn test_solver_embedded_request_invalidates(#[case] mut solver: SolverImpl {"pkg": "python/3.7.3"}, ] ); + let repo = wrap_repo_for_test(repo, use_index).await; 
solver.add_repository(Arc::new(repo)); solver.add_request(pinned_request!("python")); @@ -2018,12 +2189,16 @@ async fn test_solver_embedded_request_invalidates(#[case] mut solver: SolverImpl #[case::step(step_solver())] #[case::resolvo(resolvo_solver())] #[tokio::test] -async fn test_solver_unknown_package_options(#[case] mut solver: SolverImpl) { +async fn test_solver_unknown_package_options( + #[case] mut solver: SolverImpl, + #[values(true, false)] use_index: bool, +) { // test when a package is requested with specific options (eg: pkg.opt) // - the solver ignores versions that don't define the option // - the solver resolves versions that do define the option let repo = make_repo!([{"pkg": "my-lib/2.0.0"}]); + let repo = wrap_repo_for_test(repo, use_index).await; let repo = Arc::new(repo); solver.add_repository(repo.clone()); @@ -2045,7 +2220,10 @@ async fn test_solver_unknown_package_options(#[case] mut solver: SolverImpl) { #[case::step(step_solver())] #[case::resolvo(resolvo_solver())] #[tokio::test] -async fn test_solver_var_requirements(#[case] mut solver: SolverImpl) { +async fn test_solver_var_requirements( + #[case] mut solver: SolverImpl, + #[values(true, false)] use_index: bool, +) { // test what happens when a dependency is added which is incompatible // with an existing request in the stack let repo = make_repo!( @@ -2072,6 +2250,7 @@ async fn test_solver_var_requirements(#[case] mut solver: SolverImpl) { }, ] ); + let repo = wrap_repo_for_test(repo, use_index).await; let repo = Arc::new(repo); solver.add_repository(repo.clone()); @@ -2096,7 +2275,10 @@ async fn test_solver_var_requirements(#[case] mut solver: SolverImpl) { #[case::step(step_solver())] #[case::resolvo(resolvo_solver())] #[tokio::test] -async fn test_solver_var_requirements_unresolve(#[case] mut solver: SolverImpl) { +async fn test_solver_var_requirements_unresolve( + #[case] mut solver: SolverImpl, + #[values(true, false)] use_index: bool, +) { // test when a package is resolved 
that conflicts in var requirements // - the solver should unresolve the solved package // - the solver should resolve a new version of the package with the right version @@ -2122,6 +2304,7 @@ async fn test_solver_var_requirements_unresolve(#[case] mut solver: SolverImpl) }, ] ); + let repo = wrap_repo_for_test(repo, use_index).await; let repo = Arc::new(repo); solver.add_repository(repo.clone()); @@ -2152,7 +2335,10 @@ async fn test_solver_var_requirements_unresolve(#[case] mut solver: SolverImpl) #[case::step(step_solver())] #[case::resolvo(resolvo_solver())] #[tokio::test] -async fn test_solver_build_options_dont_affect_compat(#[case] mut solver: SolverImpl) { +async fn test_solver_build_options_dont_affect_compat( + #[case] mut solver: SolverImpl, + #[values(true, false)] use_index: bool, +) { // test when a package is resolved with some build option // - that option can conflict with another packages build options // - as long as there is no explicit requirement on that option's value @@ -2175,6 +2361,7 @@ async fn test_solver_build_options_dont_affect_compat(#[case] mut solver: Solver let repo = make_repo!([a_build, b_build,]); repo.publish_recipe(&a_spec).await.unwrap(); repo.publish_recipe(&b_spec).await.unwrap(); + let repo = wrap_repo_for_test(repo, use_index).await; let repo = Arc::new(repo); solver.add_repository(repo.clone()); @@ -2202,7 +2389,10 @@ async fn test_solver_build_options_dont_affect_compat(#[case] mut solver: Solver #[case::step(step_solver())] #[case::resolvo(resolvo_solver())] #[tokio::test] -async fn test_solver_option_compat_intersection(#[case] mut solver: SolverImpl) { +async fn test_solver_option_compat_intersection( + #[case] mut solver: SolverImpl, + #[values(true, false)] use_index: bool, +) { // A var option for spi-platform/~2022.4.1.4 should be able to resolve // with a build of openimageio that requires spi-platform/~2022.4.1.3. 
@@ -2229,6 +2419,7 @@ async fn test_solver_option_compat_intersection(#[case] mut solver: SolverImpl) spi_platform_2022_4_1_4, openimageio_1_2_3, ]); + let repo = wrap_repo_for_test(repo, use_index).await; solver.add_repository(Arc::new(repo)); solver.add_request(pinned_request!({"var": "spi-platform/~2022.4.1.4"})); @@ -2241,7 +2432,10 @@ async fn test_solver_option_compat_intersection(#[case] mut solver: SolverImpl) #[case::step(step_solver())] // #[case::resolvo(resolvo_solver())] #[tokio::test] -async fn test_solver_components(#[case] mut solver: SolverImpl) { +async fn test_solver_components( + #[case] mut solver: SolverImpl, + #[values(true, false)] use_index: bool, +) { // test when a package is requested with specific components // - all the aggregated components are selected in the resolve // - the final build has published layers for each component @@ -2270,6 +2464,7 @@ async fn test_solver_components(#[case] mut solver: SolverImpl) { }, ] ); + let repo = wrap_repo_for_test(repo, use_index).await; solver.add_repository(Arc::new(repo)); solver.add_request(pinned_request!("pkga")); @@ -2296,7 +2491,10 @@ async fn test_solver_components(#[case] mut solver: SolverImpl) { #[case::step(step_solver())] #[case::resolvo(resolvo_solver())] #[tokio::test] -async fn test_solver_components_interaction_with_embeds(#[case] mut solver: SolverImpl) { +async fn test_solver_components_interaction_with_embeds( + #[case] mut solver: SolverImpl, + #[values(true, false)] use_index: bool, +) { // Test that a package can have a component that embeds a specific // component of some other package. This package must be included in a // solution to satisfy a request for that package+component combo. @@ -2350,6 +2548,7 @@ async fn test_solver_components_interaction_with_embeds(#[case] mut solver: Solv }, ] ); + let repo = wrap_repo_for_test(repo, use_index).await; solver.add_repository(Arc::new(repo)); // Deliberately not asking for comp2 of fake-pkg. 
This should be @@ -2383,7 +2582,10 @@ async fn test_solver_components_interaction_with_embeds(#[case] mut solver: Solv #[case::step(step_solver())] #[case::resolvo(resolvo_solver())] #[tokio::test] -async fn test_solver_components_when_no_components_requested(#[case] mut solver: SolverImpl) { +async fn test_solver_components_when_no_components_requested( + #[case] mut solver: SolverImpl, + #[values(true, false)] use_index: bool, +) { // test when a package is requested with no components and the // package is one that has components // - the default component(s) should be the ones in the resolve @@ -2412,6 +2614,7 @@ async fn test_solver_components_when_no_components_requested(#[case] mut solver: }, ] ); + let repo = wrap_repo_for_test(repo, use_index).await; solver.add_repository(Arc::new(repo)); solver.add_request(pinned_request!("pkga")); @@ -2440,6 +2643,7 @@ async fn test_solver_components_when_no_components_requested(#[case] mut solver: #[tokio::test] async fn test_solver_src_package_request_when_no_components_requested( #[case] mut solver: SolverImpl, + #[values(true, false)] use_index: bool, ) { // test when a /src package build is requested with no components // and a matching package with a /src package build exists in the repo @@ -2454,6 +2658,8 @@ async fn test_solver_src_package_request_when_no_components_requested( }, ] ); + let repo = wrap_repo_for_test(repo, use_index).await; + solver.add_repository(Arc::new(repo)); let req = pinned_request!("mypkg/1.2.3/src"); @@ -2470,7 +2676,10 @@ async fn test_solver_src_package_request_when_no_components_requested( #[case::step(step_solver())] #[case::resolvo(resolvo_solver())] #[tokio::test] -async fn test_solver_all_component(#[case] mut solver: SolverImpl) { +async fn test_solver_all_component( + #[case] mut solver: SolverImpl, + #[values(true, false)] use_index: bool, +) { // test when a package is requested with the 'all' component // - all the specs components are selected in the resolve // - the final 
build has published layers for each component @@ -2490,6 +2699,7 @@ async fn test_solver_all_component(#[case] mut solver: SolverImpl) { }, ] ); + let repo = wrap_repo_for_test(repo, use_index).await; solver.add_repository(Arc::new(repo)); solver.add_request(pinned_request!("python:all")); @@ -2513,7 +2723,10 @@ async fn test_solver_all_component(#[case] mut solver: SolverImpl) { #[case::step(step_solver())] #[case::resolvo(resolvo_solver())] #[tokio::test] -async fn test_solver_component_availability(#[case] mut solver: SolverImpl) { +async fn test_solver_component_availability( + #[case] mut solver: SolverImpl, + #[values(true, false)] use_index: bool, +) { // test when a package is requested with some component // - all the specs components are selected in the resolve // - the final build has published layers for each component @@ -2562,6 +2775,7 @@ async fn test_solver_component_availability(#[case] mut solver: SolverImpl) { repo.publish_recipe(&spec373).await.unwrap(); repo.publish_recipe(&spec372).await.unwrap(); repo.publish_recipe(&spec371).await.unwrap(); + let repo = wrap_repo_for_test(repo, use_index).await; solver.add_repository(Arc::new(repo)); solver.add_request(pinned_request!("python:bin")); @@ -2584,7 +2798,10 @@ async fn test_solver_component_availability(#[case] mut solver: SolverImpl) { #[case::step(step_solver())] #[case::resolvo(resolvo_solver())] #[tokio::test] -async fn test_solver_component_requirements(#[case] mut solver: SolverImpl) { +async fn test_solver_component_requirements( + #[case] mut solver: SolverImpl, + #[values(true, false)] use_index: bool, +) { // test when a component has its own list of requirements // - the requirements are added to the existing set of requirements // - the additional requirements are resolved @@ -2608,6 +2825,7 @@ async fn test_solver_component_requirements(#[case] mut solver: SolverImpl) { {"pkg": "depr"}, ] ); + let repo = wrap_repo_for_test(repo, use_index).await; let repo = Arc::new(repo); 
solver.add_repository(repo.clone()); @@ -2634,7 +2852,10 @@ async fn test_solver_component_requirements(#[case] mut solver: SolverImpl) { #[case::step(step_solver())] #[case::resolvo(resolvo_solver())] #[tokio::test] -async fn test_solver_component_requirements_extending(#[case] mut solver: SolverImpl) { +async fn test_solver_component_requirements_extending( + #[case] mut solver: SolverImpl, + #[values(true, false)] use_index: bool, +) { // test when an additional component is requested after a package is resolved // - the new components requirements are still added and resolved @@ -2652,6 +2873,7 @@ async fn test_solver_component_requirements_extending(#[case] mut solver: Solver {"pkg": "depc"}, ] ); + let repo = wrap_repo_for_test(repo, use_index).await; solver.add_repository(Arc::new(repo)); // the initial resolve of this component will add no new requirements @@ -2669,7 +2891,10 @@ async fn test_solver_component_requirements_extending(#[case] mut solver: Solver #[case::step(step_solver())] #[case::resolvo(resolvo_solver())] #[tokio::test] -async fn test_solver_component_embedded(#[case] mut solver: SolverImpl) { +async fn test_solver_component_embedded( + #[case] mut solver: SolverImpl, + #[values(true, false)] use_index: bool, +) { // test when a component has its own list of embedded packages // - the embedded package is immediately selected // - it must be compatible with any previous requirements @@ -2707,6 +2932,7 @@ async fn test_solver_component_embedded(#[case] mut solver: SolverImpl) { }, ] ); + let repo = wrap_repo_for_test(repo, use_index).await; let repo = Arc::new(repo); solver.add_repository(repo.clone()); @@ -2738,6 +2964,7 @@ async fn test_solver_component_embedded(#[case] mut solver: SolverImpl) { #[tokio::test] async fn test_solver_component_embedded_component_requirements( #[values(step_solver(), resolvo_solver())] mut solver: SolverImpl, + #[values(false, true)] use_index: bool, #[case] packages_to_request: &[&str], #[case] 
expected_solve_result: bool, ) { @@ -2768,11 +2995,13 @@ async fn test_solver_component_embedded_component_requirements( {"pkg": "dep-e2/1.0.0"}, ] ); + let repo = wrap_repo_for_test(repo, use_index).await; let repo = Arc::new(repo); solver.add_repository(repo); for package_to_request in packages_to_request { solver.add_request(pinned_request!(package_to_request)); + println!("Requesting: {package_to_request}"); } match run_and_print_resolve_for_tests(&mut solver) @@ -2797,6 +3026,7 @@ async fn test_solver_component_embedded_component_requirements( #[tokio::test] async fn test_solver_component_embedded_multiple_versions( #[values(step_solver(), resolvo_solver())] mut solver: SolverImpl, + #[values(false, true)] use_index: bool, #[case] package_to_request: &str, #[case] expected_solve_result: bool, ) { @@ -2822,18 +3052,21 @@ async fn test_solver_component_embedded_multiple_versions( }, {"pkg": "dep-e1/1.0.0"}, {"pkg": "dep-e1/2.0.0"}, + // Should solve { "pkg": "downstream1", "install": { "requirements": [{"pkg": "dep-e1/1.0.0"}, {"pkg": "mypkg:build"}] }, }, + // Should solve { "pkg": "downstream2", "install": { "requirements": [{"pkg": "dep-e1/2.0.0"}, {"pkg": "mypkg:run"}] }, }, + // Should not solve { "pkg": "downstream3", "install": { @@ -2842,6 +3075,7 @@ async fn test_solver_component_embedded_multiple_versions( }, ] ); + let repo = wrap_repo_for_test(repo, use_index).await; let repo = Arc::new(repo); solver.add_repository(repo); @@ -2870,7 +3104,10 @@ async fn test_solver_component_embedded_multiple_versions( #[case::step(step_solver())] #[case::resolvo(resolvo_solver())] #[tokio::test] -async fn test_solver_component_embedded_incompatible_requests(#[case] mut solver: SolverImpl) { +async fn test_solver_component_embedded_incompatible_requests( + #[case] mut solver: SolverImpl, + #[values(true, false)] use_index: bool, +) { // test when different components of a package embedded packages that // make incompatible requests @@ -2895,6 +3132,7 @@ async fn 
test_solver_component_embedded_incompatible_requests(#[case] mut solver }, ] ); + let repo = wrap_repo_for_test(repo, use_index).await; let repo = Arc::new(repo); solver.add_repository(repo); @@ -3078,7 +3316,7 @@ async fn test_version_number_masking( #[values(step_solver(), resolvo_solver())] mut solver: SolverImpl, #[case] color_to_solve_for: &str, #[case] expected_resolved_version: &str, - #[values(RepoKind::Mem, RepoKind::Spfs)] repo: RepoKind, + #[values(RepoKind::Mem, RepoKind::Spfs, RepoKind::IndexedMem)] repo: RepoKind, ) { init_logging(); @@ -3162,6 +3400,7 @@ async fn test_version_number_masking( #[tokio::test] async fn request_for_all_component_picks_correct_version( #[case] mut solver: SolverImpl, + #[values(true, false)] use_index: bool, #[values("1.0.0", "2.0.0", "3.0.0")] version: &str, ) { // A request for :all component still controls for version compatibility @@ -3173,6 +3412,7 @@ async fn request_for_all_component_picks_correct_version( { "pkg": "mypkg/3.0.0" }, ] ); + let repo = wrap_repo_for_test(repo, use_index).await; let repo = Arc::new(repo); solver.add_repository(repo); @@ -3189,7 +3429,10 @@ async fn request_for_all_component_picks_correct_version( #[case::step(step_solver())] #[case::resolvo(resolvo_solver())] #[tokio::test] -async fn build_options_not_checked_on_dependencies(#[case] mut solver: SolverImpl) { +async fn build_options_not_checked_on_dependencies( + #[case] mut solver: SolverImpl, + #[values(true, false)] use_index: bool, +) { // Suppose a platform exists let spi_platform_2024_1_1_1 = make_build!({"pkg": "spi-platform/2024.1.1.1", "compat": "x.x.a.b"}); @@ -3232,6 +3475,8 @@ async fn build_options_not_checked_on_dependencies(#[case] mut solver: SolverImp spi_platform_2025_1_1_1, ]); + let repo = wrap_repo_for_test(repo, use_index).await; + // Then we want to solve for my-app using the new platform version. 
// Although the only build of openimageio was built using the old platform, // it is expected to be resolvable when using the new platform. @@ -3258,6 +3503,7 @@ async fn build_options_not_checked_on_dependencies(#[case] mut solver: SolverImp #[tokio::test] async fn install_requirement_vars_found_in_solution( #[case] mut solver: SolverImpl, + #[values(true, false)] use_index: bool, // test both non-namespaced and namespaced var names #[values(opt_name!("varname"), opt_name!("pkg.varname"))] var_name: &OptName, ) { @@ -3271,6 +3517,7 @@ async fn install_requirement_vars_found_in_solution( }, ] ); + let repo = wrap_repo_for_test(repo, use_index).await; let repo = Arc::new(repo); solver.add_repository(repo); diff --git a/crates/spk-storage/Cargo.toml b/crates/spk-storage/Cargo.toml index ece0cfea9e..51cff3b019 100644 --- a/crates/spk-storage/Cargo.toml +++ b/crates/spk-storage/Cargo.toml @@ -18,10 +18,12 @@ workspace = true arc-swap = { workspace = true } async-trait = { workspace = true } async-stream = { workspace = true } +bytes = { workspace = true } colored = { workspace = true } dashmap = { workspace = true } data-encoding = "2.3.0" enum_dispatch = { workspace = true } +flatbuffers = { workspace = true } format_serde_error = { workspace = true, default-features = false, features = [ "colored", "serde_yaml", @@ -31,6 +33,7 @@ glob = { workspace = true } ignore = "0.4.18" indexmap = { workspace = true } itertools = { workspace = true } +memmap2 = { workspace = true } miette = { workspace = true } nom = { workspace = true } nom-supreme = { workspace = true } @@ -45,6 +48,8 @@ serde = { workspace = true, features = ["derive"] } serde_json = { workspace = true } serde_yaml = { workspace = true } spfs = { workspace = true } +spk-config = { workspace = true } +spk-proto = { workspace = true } spk-schema = { workspace = true } sys-info = "0.9.0" tar = "0.4.45" @@ -59,3 +64,5 @@ variantly = { workspace = true } [dev-dependencies] spfstest = { workspace = true } +rstest = 
{ workspace = true } +spk-solve-macros = { workspace = true } diff --git a/crates/spk-storage/src/error.rs b/crates/spk-storage/src/error.rs index 61cf1eeacb..d41d00875d 100644 --- a/crates/spk-storage/src/error.rs +++ b/crates/spk-storage/src/error.rs @@ -63,10 +63,35 @@ pub enum Error { #[error(transparent)] #[diagnostic(forward(0))] SpkSpecError(Box), +#[error(transparent)] + #[diagnostic(forward(0))] + SpkConfigError(#[from] spk_config::Error), #[error("No disk usage: version '{0}' not found")] DiskUsageVersionNotFound(String), #[error("No disk usage: build '{0}' not found")] DiskUsageBuildNotFound(String), + + #[error("Unable to open flatbuffer index file for repo: {0}")] + IndexOpenError(#[source] std::io::Error), + #[error("Unable to memory map flatbuffer index from repo file: {0}")] + IndexMemMapError(#[source] std::io::Error), + #[error("Unable to write '{0}' repo's index, '{1}': {2}")] + IndexWriteError(String, String, #[source] std::io::Error), + #[error( + "Cannot generate an index from this repo: It is not a spk MemoryRepository or SpfsRepository" + )] + IndexGenerationInMemError(), + #[error("'{0}' repo does not have an index file location: {1}")] + IndexNoRepoPathError(String, String), + #[error("No index location for the '{0}' repo. 
It is a {1} repository")] + IndexNoRepoLocationError(String, String), + #[error("Failed to load flatbuffer index: {0}")] + IndexFailedToLoad(String), + #[error("Failed to generate flatbuffer index in memory: {0}")] + IndexFailedToGenerate(String), + #[error("Unknown index kind: '{0}', unable to {1}load that kind of index")] + IndexUnknownKind(String, String), + #[error("{0}")] String(String), } diff --git a/crates/spk-storage/src/fixtures.rs b/crates/spk-storage/src/fixtures.rs index 9ba97719db..cdf39b74c2 100644 --- a/crates/spk-storage/src/fixtures.rs +++ b/crates/spk-storage/src/fixtures.rs @@ -13,8 +13,7 @@ use spfs::prelude::*; use spk_schema::foundation::fixtures::*; use tokio::sync::{Mutex, MutexGuard}; -use crate as storage; -use crate::NameAndRepository; +use crate::{self as storage, IndexedRepository, NameAndRepository}; static SPFS_RUNTIME_LOCK: Lazy> = Lazy::new(|| Mutex::new(())); @@ -54,6 +53,7 @@ impl Drop for RuntimeLock { pub enum RepoKind { Mem, Spfs, + IndexedMem, } /// A temporary repository of some type for use in testing @@ -134,6 +134,17 @@ pub async fn make_repo(kind: RepoKind) -> TempRepo { ) } RepoKind::Mem => storage::RepositoryHandle::new_mem(), + RepoKind::IndexedMem => { + let repo = storage::RepositoryHandle::new_mem(); + + let mut indexed_repo = IndexedRepository::generate_from_repo(repo.into()) + .await + .unwrap(); + // For the publishing/writing tests + indexed_repo.set_update_index_after_any_publish(true); + + storage::RepositoryHandle::Indexed(indexed_repo) + } }; let repo = Arc::new(repo); diff --git a/crates/spk-storage/src/lib.rs b/crates/spk-storage/src/lib.rs index 200f4bfb12..9337ff3842 100644 --- a/crates/spk-storage/src/lib.rs +++ b/crates/spk-storage/src/lib.rs @@ -24,10 +24,13 @@ pub use disk_usage::{ pub use error::{Error, InvalidPackageSpec, Result}; pub use storage::{ CachePolicy, + FlatBufferRepoIndex, + IndexedRepository, MemRepository, NameAndRepository, Repository, RepositoryHandle, + RepositoryIndexMut, 
RuntimeRepository, SpfsRepository, Storage, diff --git a/crates/spk-storage/src/storage/flatbuffer_index.rs b/crates/spk-storage/src/storage/flatbuffer_index.rs new file mode 100644 index 0000000000..15ddce4cd9 --- /dev/null +++ b/crates/spk-storage/src/storage/flatbuffer_index.rs @@ -0,0 +1,1289 @@ +// Copyright (c) Contributors to the SPK project. +// SPDX-License-Identifier: Apache-2.0 +// https://github.com/spkenv/spk + +use std::collections::{HashMap, HashSet}; +#[cfg(unix)] +use std::fs::Permissions; +#[cfg(unix)] +use std::os::unix::fs::PermissionsExt; +use std::path::PathBuf; +use std::sync::Arc; +use std::time::Instant; + +use futures::TryStreamExt; +use itertools::Itertools; +use spfs::prelude::FromUrl; +use spk_schema::foundation::ident_component::Component; +use spk_schema::foundation::name::{PkgName, PkgNameBuf}; +use spk_schema::foundation::version::Version; +use spk_schema::ident::VersionIdent; +use spk_schema::name::{OptNameBuf, RepositoryName}; +use spk_schema::prelude::Versioned; +use spk_schema::{ + BuildIdent, + Components, + Deprecate, + IndexedPackage, + OptionValues, + Package, + PinnedRequest, + RequestWithOptions, + Spec, + SpecTest, + build_to_fb_build, + compat_to_fb_compat, + component_specs_to_fb_component_specs, + components_to_fb_components, + embedded_pkg_specs_to_fb_embedded_package_specs, + fb_component_names_to_component_names, + fb_version_to_version, + flatbuffer_vector, + get_build_from_fb_build_index, + opts_to_fb_opts, + requirements_with_options_to_fb_requirements_with_options, + version_to_fb_version, +}; + +use super::repository::Repository; +use crate::storage::{RepositoryIndex, RepositoryIndexMut}; +use crate::{Error, RepoWalkerBuilder, RepoWalkerItem, Result}; + +#[cfg(test)] +#[path = "./flatbuffer_index_test.rs"] +mod flatbuffer_index_test; + +// Index schema version supported by spk +const COMPATIBLE_INDEX_SCHEMA_VERSION: u32 = 1; + +// Index name and kind constants +pub const FLATBUFFER_INDEX: &str = "flatb"; + 
+const INDEX_FILE_PREFIX: &str = "index"; +const INDEX_FILE_EXT: &str = "fb"; + +const INDEX_SUB_DIR: &str = "index"; +const SPK_INDEX_SUB_DIR_NAME: &str = "spk"; + +// Flatbuffer builder constants +const DEFAULT_CAPACITY: usize = 1024; + +// Flatbuffer verifier constants +const MAX_FLATBUFFER_TABLES: usize = 100000000; +const DO_NOT_VERIFY: bool = false; +const VERIFY: bool = true; + +/// Helper function for removing a file with error conversion +async fn remove_index_file(filepath: &PathBuf) -> Result<()> { + if let Err(err) = tokio::fs::remove_file(filepath).await { + match err.kind() { + std::io::ErrorKind::NotFound => Ok(()), + _ => Err(Error::String(format!( + "Unable to remove existing index file '{}' due to: {err}", + filepath.display() + ))), + } + } else { + Ok(()) + } +} + +#[derive(Debug, Default)] +pub struct FlatBufferRepoIndex { + // The bytes of the index, usually read from a file + data_buffer: bytes::Bytes, +} + +impl Clone for FlatBufferRepoIndex { + fn clone(&self) -> Self { + Self { + data_buffer: self.data_buffer.clone(), + } + } +} +impl FlatBufferRepoIndex { + /// Verify the flatbuffer data is a valid repository index + fn check_fb_index(&self) -> Result> { + let verifier_opts = flatbuffers::VerifierOptions { + // The default number of tables seems to be 1 million, and + // that isn't enough for the current index. Tried 100 + // million and that didn't error, but this is not a great + // solution long term, especially once VersionFilters are + // converted from strings. 
+ max_tables: MAX_FLATBUFFER_TABLES, + ..Default::default() + }; + spk_proto::root_as_repository_index_with_opts(&verifier_opts, &self.data_buffer) + .map_err(|err| Error::String(format!("Error checking flatbuffer index: {err}"))) + // Note: the _unchecked version would be: + // spk_proto::root_as_repository_index(&self.data_buffer) + // .map_err(|err| Error::String(format!("Error checking flatbuffer index: {err}"))) + } + + /// Return the RepositoryIndex data root + fn fb_index(&self) -> spk_proto::RepositoryIndex<'_> { + // Assumes check_fb_index was called and the buffer is valid + unsafe { spk_proto::root_as_repository_index_unchecked(&self.data_buffer) } + } + + /// Make a flatbuffer repo index from the given bytes + fn try_from_bytes( + name: &str, + data_buffer: bytes::Bytes, + verify_before_use: bool, + ) -> Result { + let start = Instant::now(); + let index = FlatBufferRepoIndex { data_buffer }; + tracing::debug!( + "'{name}' repo index flatbuffer to RI struct : {} secs", + start.elapsed().as_secs_f64() + ); + + // Optionally, verify the flatbuffers data before use + if verify_before_use { + let start_check_fb = Instant::now(); + index.check_fb_index()?; + tracing::debug!( + "'{name}' repo index checked as flatb RepositoryIndex: {} secs", + start_check_fb.elapsed().as_secs_f64() + ); + } else { + tracing::debug!("'{name}' repo index not verified before use : 0.0 secs"); + } + + // Check the index's schema version to ensure it is compatible + // with this spk's version. 
+ if index.fb_index().index_schema_version() != COMPATIBLE_INDEX_SCHEMA_VERSION { + return Err(Error::String(format!( + "Index schema is version ({}) is not compatible with spk schema version ({})", + index.fb_index().index_schema_version(), + COMPATIBLE_INDEX_SCHEMA_VERSION + ))); + } + + tracing::debug!( + "'{name}' repo index flatbuffer total time : {} secs", + start.elapsed().as_secs_f64() + ); + + Ok(index) + } + + /// Create a FlatBufferRepoIndex from an index file in the repository + pub async fn from_repo_file(repo: &crate::RepositoryHandle) -> Result { + let filepath = Self::repo_index_location(repo).await?; + + let name = repo.name(); + tracing::debug!( + "Reading repo index file for: '{name}' from filepath: '{}'", + filepath.display() + ); + + // Based on the configuration setting, decide whether to + // verify the flatbuffer data before use. + let config = spk_config::get_config()?; + + FlatBufferRepoIndex::read_index_from_file( + name, + &filepath, + config.solver.indexes.verify_before_use, + ) + .await + } + + async fn read_index_from_file( + name: &RepositoryName, + filepath: &PathBuf, + verify_index: bool, + ) -> Result { + let start = Instant::now(); + + // Open and Memory map the data file + let file = match std::fs::File::open(filepath) { + Ok(f) => f, + Err(err) => { + return Err(Error::IndexOpenError(err)); + } + }; + + let memory_map = match unsafe { memmap2::Mmap::map(&file) } { + Ok(mm) => mm, + Err(err) => { + return Err(Error::IndexMemMapError(err)); + } + }; + tracing::debug!( + "'{name}' repo index flatbuffer memmapped in : {} secs", + start.elapsed().as_secs_f64() + ); + + let start_bytes = Instant::now(); + let data_buffer = bytes::Bytes::from_owner(memory_map); + tracing::debug!( + "'{name}' repo index flatbuffer bytes from_ow: {} secs", + start_bytes.elapsed().as_secs_f64() + ); + + let index = FlatBufferRepoIndex::try_from_bytes(name, data_buffer, verify_index)?; + + Ok(index) + } + + /// Helper for generating indexes for testing. 
The index returned
+    /// will not have been saved to a file.
+    pub async fn from_repo_in_memory(
+        repo: &crate::RepositoryHandle,
+    ) -> miette::Result {
+        let start = Instant::now();
+
+        let source_repo: crate::RepositoryHandle = match repo {
+            crate::RepositoryHandle::SPFS(_) | crate::RepositoryHandle::Mem(_) => repo.clone(),
+            _ => {
+                return Err(Error::IndexGenerationInMemError().into());
+            }
+        };
+
+        let name = repo.name();
+        let repos = vec![(repo.name().to_string(), source_repo)];
+
+        let (packages, global_vars) =
+            FlatBufferRepoIndex::gather_all_data_from_repo(&repos).await?;
+
+        tracing::info!(
+            "'{name}' repo data gathered from memory in: {} secs",
+            start.elapsed().as_secs_f64()
+        );
+
+        let start_bytes = Instant::now();
+        let builder =
+            FlatBufferRepoIndex::generate_index_builder(repo, packages, global_vars).await?;
+
+        let bytes = builder.finished_data().to_vec();
+        let data_buffer = bytes::Bytes::from(bytes);
+        tracing::info!(
+            "'{name}' repo index bytes generated in: {} secs",
+            start_bytes.elapsed().as_secs_f64()
+        );
+
+        // This does not verify the index because we just made it, in
+        // memory, and we know it is valid.
+        let index = FlatBufferRepoIndex::try_from_bytes(name, data_buffer, DO_NOT_VERIFY)?;
+
+        Ok(index)
+    }
+
+    /// Internal method to create a valid Spec from a package held in the
+    /// index data as a flatbuffer backed IndexedPackage.
+    fn make_solver_package_spec(
+        &self,
+        package_build: BuildIdent,
+        fb_build_index: &spk_proto::BuildIndex,
+    ) -> Result {
+        let build_spec = IndexedPackage::new(
+            package_build,
+            // This is cheap because it is a bytes::Bytes
+            self.data_buffer.clone(),
+            fb_build_index._tab.loc(),
+        );
+
+        Ok(Spec::V0IndexedPackage(Box::new(build_spec)))
+    }
+
+    /// Gather packages and global vars from the given repos.
+ async fn gather_all_data_from_repo( + repos: &Vec<(String, crate::RepositoryHandle)>, + ) -> miette::Result<(HashMap, GlobalVarsInfo)> { + if repos.len() != 1 { + return Err(Error::String( + "FlatBufferRepoIndex's gather_all_data_from_repo() method only works on one repo at a time" + .to_string(), + ).into()); + } + + // Gather all the recipes and builds, including src and deprecated + let mut repo_walker_builder = RepoWalkerBuilder::new(repos); + let repo_walker = repo_walker_builder + .with_report_on_versions(true) + .with_report_on_builds(true) + .with_report_src_builds(true) + .with_report_deprecated_builds(true) + .with_report_embedded_builds(true) + .with_end_of_markers(true) + .with_sort_objects(true) + .with_continue_on_error(true) + .build(); + + let mut packages: HashMap = HashMap::new(); + let mut global_vars = GlobalVarsInfo::default(); + + // Needed for filtering global vars and checking the real + // published components. + let repo = &repos[0].1; + + let package_names = HashSet::from_iter(repo.list_packages().await?); + + let mut num_versions = 0; + let mut num_builds = 0; + + let mut traversal = repo_walker.walk(); + while let Some(item) = traversal.try_next().await? 
{ + match item { + RepoWalkerItem::Version(version) => { + let name = version.ident.name(); + let v = version.ident.version().clone(); + + let pkg_info = packages.entry(name.into()).or_default(); + pkg_info.versions.push(v.clone()); + let _ver_info = pkg_info.version_builds.entry(v).or_default(); + num_versions += 1; + } + RepoWalkerItem::Build(build) => { + // Add a build spec and related things + let build_ident = build.spec.ident(); + + let pkg_info = packages.entry(build_ident.name().into()).or_default(); + let ver_info = pkg_info + .version_builds + .entry(build_ident.version().clone()) + .or_default(); + let spec = build.spec.clone(); + + let component_map = match repo.read_components(build.spec.ident()).await { + Ok(c) => c, + Err(err) => { + tracing::warn!( + "Problem reading published components for '{}': {err}. Skipping it.", + build.spec.ident() + ); + continue; + } + }; + let published_components = component_map.keys().cloned().collect(); + + let build_info = BuildInfo { + spec, + published_components, + }; + ver_info.build_specs.push(build_info); + + global_vars.extract_global_vars(&build.spec, &package_names)?; + + num_builds += 1; + } + + // Ignore everything else + _ => {} + } + } + + // Debugging and logging + let mut vars: Vec = global_vars.keys().map(|k| k.to_string()).collect(); + vars.sort(); + tracing::info!("Globals found:\n\t{}", vars.into_iter().join("\n\t")); + + tracing::info!( + "Index for '{}' repo consists of {} packages, {} versions, {} builds, with {} global vars", + repo.name(), + packages.len(), + num_versions, + num_builds, + global_vars.keys().len() + ); + + Ok((packages, global_vars)) + } + + /// Internal method to get the current information on the builds + /// of package version and any global vars they provide. Useful + /// when updating an existing index. 
+ async fn gather_updates_from_repo( + &self, + repo: &crate::RepositoryHandle, + package_version: &VersionIdent, + ) -> miette::Result<(HashMap, GlobalVarsInfo)> { + let start = Instant::now(); + + let package_name_to_update = package_version.name().to_owned(); + let arc_version_to_update = Arc::new(package_version.version().clone()); + + let mut packages: HashMap = HashMap::new(); + + let mut num_versions = 0; + let mut num_builds = 0; + + // Gather the existing global vars. Any new ones from the updated + // package version will be added when its builds are processed. + let mut global_vars = GlobalVarsInfo(self.get_global_var_values()); + + // Used to work out the order of processing and where to pull + // in the updates from. + let mut package_names = self.list_packages().await?; + + // Check update package name and make sure it is in the names list. + if !package_names.contains(&package_name_to_update) { + tracing::debug!("'{package_name_to_update}' not in package_names, injecting it",); + package_names.push(package_name_to_update.clone()); + } + + // Now all the names are present, make a set to help with global variables + let package_names_set = HashSet::from_iter(package_names.clone()); + + // Process the packages, checking for the one to update and + // pulling from the correct data source for each. 
+ for name in &package_names { + let p_v = self.list_package_versions(name).await?; + let mut package_versions = (*p_v).clone(); + + if package_name_to_update == *name && !package_versions.contains(&arc_version_to_update) + { + tracing::debug!("{arc_version_to_update} not in package_versions, injecting it",); + package_versions.push(arc_version_to_update.clone()); + package_versions.sort_by_cached_key(|v| std::cmp::Reverse(v.clone())); + } + + let pkg_info = packages.entry(name.clone()).or_default(); + + for version in package_versions.iter() { + pkg_info.versions.push((**version).clone()); + + let ver_info = pkg_info + .version_builds + .entry((**version).clone()) + .or_default(); + num_versions += 1; + + let version_ident = VersionIdent::new(name.clone(), (**version).clone()); + + // Check if this is the version we want to update + if package_name_to_update == *name && arc_version_to_update == *version { + tracing::info!("Reached the {version} of the package to update"); + // Get the updated data from the repo + let version_builds = repo.list_package_builds(&version_ident).await?; + + for build_ident in version_builds { + let build_spec = repo.read_package_from_storage(&build_ident).await?; + let spec = build_spec.clone(); + + let component_map = repo + .read_components_from_storage(build_spec.ident()) + .await?; + let published_components = component_map.keys().cloned().collect(); + + let build_info = BuildInfo { + spec, + published_components, + }; + + ver_info.build_specs.push(build_info); + num_builds += 1; + + // Check the updated build spec for an additional global vars + global_vars.extract_global_vars(&build_spec, &package_names_set)?; + } + } else { + // Use what's there in the flatbuffer index + let version_builds = self.list_package_builds(&version_ident).await?; + + for build_ident in version_builds { + let build_spec = self.get_package_build_spec(&build_ident)?; + let spec = build_spec.clone(); + let published_components = build_spec + .components() 
+ .iter() + .map(|c| c.name.clone()) + .collect(); + let build_info = BuildInfo { + spec, + published_components, + }; + + ver_info.build_specs.push(build_info); + num_builds += 1; + } + }; + } + } + + tracing::info!( + "Updated index data gathered in in: {} secs", + start.elapsed().as_secs_f64() + ); + + tracing::info!( + "Index for '{}' repo consists of {} packages, {} versions, {} builds, with {} global vars", + repo.name(), + packages.len(), + num_versions, + num_builds, + global_vars.keys().len() + ); + + Ok((packages, global_vars)) + } + + /// Internal method to create a flatbuffers builder for an index + /// without saving it anywhere. + async fn generate_index_builder( + repo: &crate::RepositoryHandle, + repo_packages: HashMap, + global_vars_info: GlobalVarsInfo, + ) -> Result> { + let start = Instant::now(); + + // Gather up the data for the packages field + let mut packages = Vec::new(); + let mut builder = flatbuffers::FlatBufferBuilder::with_capacity(DEFAULT_CAPACITY); + + let mut package_names: Vec = repo_packages.keys().map(Clone::clone).collect(); + package_names.sort(); + + for name in package_names { + if let Some(pkg_info) = repo_packages.get(&name) { + tracing::debug!("package: {name} [{} versions]", pkg_info.versions.len()); + let package = pkg_info.to_fb_package_index(&mut builder, &name); + packages.push(package); + }; + } + let fb_packages = flatbuffer_vector!(builder, packages); + + // Gather up the global vars field data + let fb_global_vars = global_vars_info.convert_to_fb(&mut builder); + + // Build the repository index, the root object for the flatbuffer + let index = spk_proto::RepositoryIndex::create( + &mut builder, + &spk_proto::RepositoryIndexArgs { + index_schema_version: COMPATIBLE_INDEX_SCHEMA_VERSION, + packages: fb_packages, + global_vars: fb_global_vars, + }, + ); + + // Finish out the flatbuffer generation + builder.finish(index, None); + + tracing::info!( + "flatbuffer index for '{}' assembled in : {} secs", + repo.name(), + 
start.elapsed().as_secs_f64() + ); + + Ok(builder) + } + + /// This will create the index path inside the repo, for spk + /// indexes, if it does not exist. + async fn get_index_path_from_repo_address( + repo_name: &str, + address_url: &url::Url, + ) -> Result { + // Only handles urls that can parse as fs repo configs. Other + // repository types do not support storing index files. + let spfs_repo_config = match spfs::storage::fs::Config::from_url(address_url).await { + Ok(c) => c, + Err(err) => { + return Err(Error::IndexNoRepoPathError( + repo_name.to_string(), + err.to_string(), + )); + } + }; + + // TODO: consider making the base index path configurable, + // with the default being the repo base path + /index/spk. + let mut index_path = PathBuf::new(); + index_path.push(spfs_repo_config.path); + + index_path.push(INDEX_SUB_DIR); + spfs::runtime::makedirs_with_perms(&index_path, 0o777).map_err(|source| { + Error::String(format!( + "Unable to make '{INDEX_SUB_DIR}' sub-dir in '{repo_name}' repo: {source}" + )) + })?; + + index_path.push(SPK_INDEX_SUB_DIR_NAME); + spfs::runtime::makedirs_with_perms(&index_path, 0o777) + .map_err(|source| Error::String(format!("Unable to make {SPK_INDEX_SUB_DIR_NAME} sub-dir in '{repo_name}'s index directory: {source}")))?; + + Ok(index_path) + } + + async fn repo_index_location(repo: &crate::RepositoryHandle) -> Result { + let base_path = match repo { + crate::RepositoryHandle::SPFS(spfs_repo) => { + Self::get_index_path_from_repo_address(spfs_repo.name(), spfs_repo.address()) + .await? + } + + crate::RepositoryHandle::Mem(mem_repo) => { + // A mem repo doesn't have a usable location for files + return Err(Error::IndexNoRepoLocationError( + mem_repo.name().to_string(), + "Spk Mem".to_string(), + )); + } + + crate::RepositoryHandle::Runtime(runtime_repo) => { + // A spfs runtime repo doesn't have a usable location + // for files. 
+                return Err(Error::IndexNoRepoLocationError(
+                    runtime_repo.name().to_string(),
+                    "Spk Runtime".to_string(),
+                ));
+            }
+
+            crate::RepositoryHandle::Indexed(indexed_repo) => {
+                // Indexed repositories store their index data
+                // based on the repo they wrap so use the underlying
+                // repo's location for indexes.
+                Self::get_index_path_from_repo_address(indexed_repo.name(), indexed_repo.address())
+                    .await?
+            }
+        };
+
+        let mut index_path = PathBuf::new();
+        index_path.push(base_path);
+        // Index file name contains the index schema version for ease
+        // of identifying a compatible index. The index version is
+        // also checked later when the bytes are turned into an index
+        // in memory.
+        index_path.push(format!(
+            "{INDEX_FILE_PREFIX}_v{COMPATIBLE_INDEX_SCHEMA_VERSION}.{INDEX_FILE_EXT}",
+        ));
+
+        tracing::debug!(
+            "{}'s index file location is: {}",
+            repo.name(),
+            index_path.display()
+        );
+
+        Ok(index_path)
+    }
+
+    /// Save the index data to a file in the repo. This will save the
+    /// index data to a temporary file, read it back in and verify it,
+    /// remove the old index file if any, and move the new temp file
+    /// into its place.
+ async fn save_index<'a>( + repo: &crate::RepositoryHandle, + builder: &flatbuffers::FlatBufferBuilder<'a>, + ) -> Result { + let name = repo.name(); + + let filepath = Self::repo_index_location(repo).await?; + let temp_file = PathBuf::from(format!( + "{}_being_generated_{}", + filepath.display(), + ulid::Ulid::new() + )); + + tracing::debug!("Index file path: {}", filepath.display()); + tracing::debug!("Index temp file: {}", temp_file.display()); + + // Create the new index in a temp file with the correct permissions + if let Err(err) = tokio::fs::write(&temp_file, builder.finished_data()).await { + // Clean up the temp file after the error + if let Err(temp_err) = remove_index_file(&temp_file).await { + tracing::error!("Unable to remove temp index file due to: {temp_err}"); + } + return Err(Error::IndexWriteError( + name.to_string(), + temp_file.display().to_string(), + err, + )); + } else { + // Ensure the file is readable and writable by everyone + #[cfg(unix)] + match tokio::fs::set_permissions(&temp_file, Permissions::from_mode(0o666)).await { + Err(err) => { + // Clean up the temp file after the error + if let Err(temp_err) = remove_index_file(&temp_file).await { + tracing::error!("Unable to remove temp index file due to: {temp_err}"); + }; + Err(Error::FileOpenError(temp_file.clone(), err)) + } + Ok(ok) => Ok(ok), + }?; + + // Read in and verify the index + let _ = FlatBufferRepoIndex::read_index_from_file(name, &temp_file, VERIFY).await?; + } + + // Delete the old index file, if any. This should not impact + // existing processes using that index. + if let Err(err) = remove_index_file(&filepath).await { + // Clean up the temp file after the error + if let Err(temp_err) = remove_index_file(&temp_file).await { + tracing::error!("Unable to remove temp index file due to: {temp_err}"); + } + return Err(err); + } + + // Move the index file to the correct place. 
+ if let Err(err) = tokio::fs::rename(&temp_file, &filepath).await { + // Clean up the temp file after the error + if let Err(temp_err) = remove_index_file(&temp_file).await { + tracing::error!("Unable to remove temp index file due to: {temp_err}"); + } + return Err(Error::String(format!( + "Unable to rename new temp index file '{}' to '{}' due to: {err}", + temp_file.display(), + filepath.display() + ))); + } + + Ok(filepath) + } +} + +/// Helper used while generating an index +#[derive(Debug)] +struct BuildInfo { + spec: Arc, + published_components: Vec, +} + +impl BuildInfo { + /// Convert the data in a BuildInfo object into the flatbuffers + /// equivalent, a BuildIndex + fn to_fb_build_index<'a>( + &self, + builder: &mut flatbuffers::FlatBufferBuilder<'a>, + ) -> flatbuffers::WIPOffset> { + let build_spec = &self.spec; + let ident = build_spec.ident(); + + let fb_build_version = version_to_fb_version(builder, ident.version()); + + let (fb_build_id, fb_build_type) = build_to_fb_build(builder, ident.build()); + + // Deciding whether to store all the data for a deprecated build. + // For now, deprecated builds are being fully stored in the + // index at the same level of detail as non-deprecated packages. + let store_deprecated_as_placeholders = false; + //let store_deprecated_as_placeholders = build_spec.is_deprecated(); + + let fb_build_index_args = if store_deprecated_as_placeholders { + let fb_compat = compat_to_fb_compat(builder, &build_spec.compat()); + + spk_proto::BuildIndexArgs { + published_version: Some(fb_build_version), + build: Some(fb_build_id), + build_type: fb_build_type, + is_deprecated: build_spec.is_deprecated(), + compat: fb_compat, + // Does not store any more details because the package + // is deprecated. 
+ build_options: None, + runtime_requirements: None, + embedded: None, + component_specs: None, + published_components: None, + } + } else { + let fb_compat = compat_to_fb_compat(builder, &build_spec.compat()); + + let fb_build_options = opts_to_fb_opts(builder, &build_spec.get_build_options()); + + let fb_requirements = requirements_with_options_to_fb_requirements_with_options( + builder, + &build_spec.runtime_requirements(), + ); + + let fb_embedded_specs = + embedded_pkg_specs_to_fb_embedded_package_specs(builder, &build_spec.embedded()); + + let fb_published_components = + components_to_fb_components(builder, &self.published_components); + + let fb_component_specs = + component_specs_to_fb_component_specs(builder, &build_spec.components()); + + spk_proto::BuildIndexArgs { + published_version: Some(fb_build_version), + build: Some(fb_build_id), + build_type: fb_build_type, + is_deprecated: build_spec.is_deprecated(), + compat: fb_compat, + build_options: fb_build_options, + runtime_requirements: fb_requirements, + embedded: fb_embedded_specs, + component_specs: fb_component_specs, + published_components: fb_published_components, + } + }; + + spk_proto::BuildIndex::create(builder, &fb_build_index_args) + } +} + +/// Helper used while generating an index +#[derive(Debug, Default)] +struct VersionInfo { + build_specs: Vec, +} + +impl VersionInfo { + /// Convert a VersionInfo object and a Version number into a + /// flatbuffers VersionIndex. 
+ fn to_fb_version_index<'a>( + &self, + builder: &mut flatbuffers::FlatBufferBuilder<'a>, + version: &Version, + ) -> flatbuffers::WIPOffset> { + let fb_version = version_to_fb_version(builder, version); + + // Get the builds together + let mut builds = Vec::new(); + for build_info in &self.build_specs { + let build_index = build_info.to_fb_build_index(builder); + builds.push(build_index); + } + + let fb_builds = flatbuffer_vector!(builder, builds); + + // Put this version together + spk_proto::VersionIndex::create( + builder, + &spk_proto::VersionIndexArgs { + version: Some(fb_version), + builds: fb_builds, + }, + ) + } +} + +/// Helper used while generating an index +#[derive(Debug, Default)] +struct PackageInfo { + // To keep the highest to lowest version order from the walker + versions: Vec, + version_builds: HashMap, +} + +impl PackageInfo { + /// Convert a PackageInfo object and a name into a flatbuffers + /// PackageIndex object. + fn to_fb_package_index<'a>( + &self, + builder: &mut flatbuffers::FlatBufferBuilder<'a>, + name: &PkgNameBuf, + ) -> flatbuffers::WIPOffset> { + let package_name = builder.create_string(name.as_ref()); + + // Get the versions together + let mut versions = Vec::new(); + for v in &self.versions { + let version_info = if let Some(ver_info) = self.version_builds.get(v) { + ver_info + } else { + // No builds in this version. So no version data to + // index, skipping it. 
+ continue; + }; + + let version_index = version_info.to_fb_version_index(builder, v); + versions.push(version_index); + } + let fb_versions = flatbuffer_vector!(builder, versions); + + // Put the package together + spk_proto::PackageIndex::create( + builder, + &spk_proto::PackageIndexArgs { + name: Some(package_name), + versions: fb_versions, + }, + ) + } +} + +/// Helper used while generating an index +#[derive(Clone, Debug, Default)] +pub struct GlobalVarsInfo(HashMap>); + +impl GlobalVarsInfo { + /// Helper for seeing the names of the vars + fn keys(&self) -> std::collections::hash_map::Keys<'_, OptNameBuf, HashSet> { + self.0.keys() + } + + /// Store a new value for a particular named option. This doesn't + /// do any checking it assumes the caller did that. + fn add_new_value(&mut self, name: OptNameBuf, new_value: String) -> &mut Self { + let entry = self.0.entry(name).or_default(); + entry.insert(new_value); + self + } + + /// Extract and store all the global variables and possible values + /// from the given package build spec. The package names set is + /// used to filter out package specific variables. + /// Note: This cannot be run on package build specs from an index + /// because they don't implement get_all_tests(). + fn extract_global_vars( + &mut self, + build_spec: &Spec, + package_names: &HashSet, + ) -> Result<()> { + // Check the build options for global vars + for (name, value) in build_spec.option_values() { + if name.namespace().is_none() { + let var_name = name.without_namespace().to_owned(); + // Filter out packages + if package_names.contains(&var_name.to_string()) { + continue; + } + self.add_new_value(var_name, value); + } + } + + // Check install requirements for var requests that could also + // be global vars. 
+ for var_req in build_spec + .runtime_requirements() + .iter() + .filter_map(|r| match r { + RequestWithOptions::Pkg(_p) => None, + RequestWithOptions::Var(v) => Some(v), + }) + { + let var_name = var_req.var.without_namespace().to_owned(); + // Filter out packages + if package_names.contains(&var_name.to_string()) { + continue; + } + self.add_new_value(var_name, var_req.value.to_string()); + } + + // Requirements from embedded are skipped because they are + // returned with all the other walked builds. + + // Check the test install requirements + for test_spec in build_spec.get_all_tests() { + match test_spec { + SpecTest::V0(ts) => { + ts.requirements + .iter() + .filter_map(|r| match r { + PinnedRequest::Pkg(_p) => None, + PinnedRequest::Var(v) => Some(v), + }) + .for_each(|var_req| { + let var_name = var_req.var.without_namespace().to_owned(); + // Filter out variables that use packages names + if !package_names.contains(&var_name.to_string()) { + self.add_new_value(var_name, var_req.value.to_string()); + } + }); + } + } + } + + Ok(()) + } + + /// Adds this global variables data to the given flatbuffer + /// repository index builder. 
+ fn convert_to_fb<'a>( + &self, + builder: &mut flatbuffers::FlatBufferBuilder<'a>, + ) -> Option< + flatbuffers::WIPOffset< + flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset>>, + >, + > { + let mut global_vars = Vec::new(); + + let mut var_names: Vec<_> = self.0.keys().collect(); + var_names.sort(); + + for name in var_names.into_iter() { + if let Some(values) = self.0.get(name) { + let mut sorted_values: Vec<_> = values.iter().collect(); + sorted_values.sort(); + + // Create the name and value strings + let var_name = builder.create_string(name); + let mut values = Vec::new(); + for v in sorted_values { + let val = builder.create_string(v); + values.push(val); + } + let fb_values = flatbuffer_vector!(builder, values); + + // Create this global var record + let global_var = spk_proto::GlobalVar::create( + builder, + &spk_proto::GlobalVarArgs { + name: Some(var_name), + values: fb_values, + }, + ); + + global_vars.push(global_var); + }; + } + + flatbuffer_vector!(builder, global_vars) + } +} + +#[async_trait::async_trait] +impl RepositoryIndexMut for FlatBufferRepoIndex { + async fn index_repo(repos: &Vec<(String, crate::RepositoryHandle)>) -> miette::Result<()> { + if repos.len() != 1 { + return Err(Error::String( + "FlatBufferRepoIndex's index_repo() method only works on one repo at a time" + .to_string(), + ) + .into()); + } + + let (packages, global_vars) = FlatBufferRepoIndex::gather_all_data_from_repo(repos).await?; + + // Assemble the data into a flatbuffer index and save it + let repo = &repos[0].1; + let builder = + FlatBufferRepoIndex::generate_index_builder(repo, packages, global_vars).await?; + + let start = Instant::now(); + let _filepath = FlatBufferRepoIndex::save_index(repo, &builder).await?; + tracing::info!( + "flatbuffer index for '{}' saved in : {} secs", + repo.name(), + start.elapsed().as_secs_f64() + ); + + Ok(()) + } + + // Index and the package version to update within it. 
The package + // version to update will have its data gathered from the + // repository rather than the current index. + async fn update_repo_with_package_version( + &self, + repo: &crate::RepositoryHandle, + package_version: &VersionIdent, + ) -> miette::Result<()> { + let (packages, global_vars) = self.gather_updates_from_repo(repo, package_version).await?; + + // Assemble the data into a flatbuffer index and save it + let builder = + FlatBufferRepoIndex::generate_index_builder(repo, packages, global_vars).await?; + + let start = Instant::now(); + let _filepath = FlatBufferRepoIndex::save_index(repo, &builder).await?; + tracing::info!( + "flatbuffer index for '{}' saved in: {} secs", + repo.name(), + start.elapsed().as_secs_f64() + ); + + Ok(()) + } +} + +impl RepositoryIndex for FlatBufferRepoIndex { + fn get_global_var_values(&self) -> HashMap> { + let fb_index = self.fb_index(); + if let Some(globals) = fb_index.global_vars() { + let mut global_vars: HashMap> = + HashMap::with_capacity(globals.len()); + for global in globals { + let opt_name = unsafe { OptNameBuf::from_string(global.name().to_string()) }; + if let Some(vs) = global.values() { + let values: HashSet = vs.iter().map(String::from).collect(); + global_vars.insert(opt_name, values); + } + } + return global_vars; + } + + Default::default() + } + + async fn list_packages(&self) -> Result> { + let fb_index = self.fb_index(); + let pkg_list = if let Some(packages) = fb_index.packages() { + packages + .iter() + .map(|p| unsafe { PkgNameBuf::from_string(p.name().to_string()) }) + .collect() + } else { + Default::default() + }; + + Ok(pkg_list) + } + + async fn list_package_versions(&self, name: &PkgName) -> Result>>> { + let fb_index = self.fb_index(); + let versions = if let Some(packages) = fb_index.packages() { + // binary search - because the name is key in flatbuffer schema + if let Some(packages) = packages.lookup_by_key(name, |pi, n| pi.name().cmp(n)) { + if let Some(versions) = packages.versions() 
{ + versions + .iter() + .map(|version_index| { + let ver = version_index.version(); + let version = fb_version_to_version(ver); + Arc::new(version) + }) + .collect() + } else { + Vec::new() + } + } else { + Vec::new() + } + } else { + Vec::new() + }; + + Ok(Arc::new(versions)) + } + + async fn list_package_builds(&self, pkg: &VersionIdent) -> Result> { + let fb_index = self.fb_index(); + if let Some(packages) = fb_index.packages() + && let Some(package) = packages.lookup_by_key(pkg.name(), |pi, n| pi.name().cmp(n)) + && let Some(versions) = package.versions() + { + // linear search - versions are highest to lowest, but have + // lots of parts so that may be the time constraint. + for version_index in versions { + let ver = version_index.version(); + let version = fb_version_to_version(ver); + + if version == *pkg.version() + && let Some(builds) = version_index.builds() + && !builds.is_empty() + { + let build_ids: Vec<_> = builds + .iter() + .map(|b| { + let build_id = get_build_from_fb_build_index(b); + BuildIdent::new(pkg.clone(), build_id) + }) + .collect(); + + return Ok(build_ids); + } + } + } + + Ok(Vec::new()) + } + + async fn list_build_components(&self, pkg: &BuildIdent) -> Result> { + let fb_index = self.fb_index(); + if let Some(packages) = fb_index.packages() { + // binary search - because the name is key in flatbuffer schema + if let Some(package) = packages.lookup_by_key(pkg.name(), |pi, n| pi.name().cmp(n)) + && let Some(versions) = package.versions() + { + // linear search - versions are highest to lowest, but + // have lots of parts, see above. 
+ for version_index in versions { + let ver = version_index.version(); + let version = fb_version_to_version(ver); + + if version == *pkg.version() + && let Some(builds) = version_index.builds() + { + // linear search - the builds aren't ordered + for build_index in builds { + let build_id = get_build_from_fb_build_index(build_index); + if build_id == *pkg.build() + && let Some(component_names) = build_index.published_components() + { + return Ok(fb_component_names_to_component_names(&component_names)); + } + } + } + } + } + } + + Ok(Vec::new()) + } + + async fn is_build_deprecated(&self, pkg: &BuildIdent) -> Result { + let fb_index = self.fb_index(); + + if let Some(packages) = fb_index.packages() + && let Some(package) = packages.lookup_by_key(pkg.name(), |pi, n| pi.name().cmp(n)) + && let Some(versions) = package.versions() + { + // linear search - the versions are highest to lowest, but have lots of parts + for version_index in versions { + let ver = version_index.version(); + let version = fb_version_to_version(ver); + + if version == *pkg.version() + && let Some(builds) = version_index.builds() + { + // linear search - builds aren't ordered + for build_index in builds { + let build_id = get_build_from_fb_build_index(build_index); + if build_id == *pkg.build() { + return Ok(build_index.is_deprecated()); + } + } + } + } + } + + Err(Error::String(format!( + "{pkg} not found while calling is_build_deprecated" + ))) + } + + fn get_package_build_spec(&self, pkg: &BuildIdent) -> Result> { + let fb_index = self.fb_index(); + if let Some(packages) = fb_index.packages() + && let Some(package) = packages.lookup_by_key(pkg.name(), |pi, n| pi.name().cmp(n)) + && let Some(versions) = package.versions() + { + // linear search - versions are highest to lowest, but + // versions have lots of parts + for version_index in versions { + let ver = version_index.version(); + let version = fb_version_to_version(ver); + + if version == *pkg.version() + && let Some(builds) = 
version_index.builds() + { + // linear search - the builds aren't ordered + for build_index in builds { + let build_id = get_build_from_fb_build_index(build_index); + if build_id == *pkg.build() { + // Use the build's published version instead of the + // version index's version to ensure the build ident + // for this build is correct. e.g it may have been + // published with a 1.0 version number instead of 1.0.0 + let package_build = if let Some(pv) = build_index.published_version() { + let published_version = fb_version_to_version(pv); + let ver_id = + VersionIdent::new(pkg.name().into(), published_version); + BuildIdent::new(ver_id, build_id) + } else { + // This should never happen because the published versions + // are stored with each build. + pkg.clone() + }; + + let build_spec = + self.make_solver_package_spec(package_build, &build_index)?; + + return Ok(Arc::new(build_spec)); + }; + } + } + } + } + + Err(Error::PackageNotFound(Box::new(pkg.to_any_ident()))) + } +} diff --git a/crates/spk-storage/src/storage/flatbuffer_index_test.rs b/crates/spk-storage/src/storage/flatbuffer_index_test.rs new file mode 100644 index 0000000000..a72869aa0d --- /dev/null +++ b/crates/spk-storage/src/storage/flatbuffer_index_test.rs @@ -0,0 +1,512 @@ +// Copyright (c) Contributors to the SPK project. 
+// SPDX-License-Identifier: Apache-2.0 +// https://github.com/spkenv/spk + +use std::collections::HashMap; +use std::convert::TryFrom; +use std::sync::Arc; + +use futures::TryStreamExt; +use itertools::Itertools; +use rstest::rstest; +use spfs::encoding::EMPTY_DIGEST; +use spk_schema::foundation::build_ident; +use spk_schema::foundation::fixtures::*; +use spk_schema::ident_build::BuildId; +use spk_schema::ident_component::Component; +use spk_schema::name::OptNameBuf; +use spk_schema::prelude::{HasVersion, Named, Versioned}; +use spk_schema::spec_ops::HasBuildIdent; +use spk_schema::{ + ComponentEmbeddedPackage, + ComponentSpec, + Components, + Deprecate, + OptionMap, + OptionValues, + Package, + Spec, + v0, +}; +use spk_solve_macros::{ + make_build, + //make_build_and_components, + make_package, + //pinned_request, +}; + +use crate::{IndexedRepository, RepoWalkerBuilder, RepoWalkerItem, RepositoryHandle}; + +// A copy of the one in spk-solve-macros because using it directly +// here cases an import loop +#[macro_export] +macro_rules! make_repo { + ( [ $( $spec:tt ),+ $(,)? ] ) => {{ + make_repo!([ $( $spec ),* ], options={}) + }}; + ( [ $( $spec:tt ),+ $(,)? ], options={ $($k:expr => $v:expr),* } ) => {{ + let options = spk_schema::foundation::option_map!{$($k => $v),*}; + make_repo!([ $( $spec ),* ], options=options) + }}; + ( [ $( $spec:tt ),+ $(,)? ], options=$options:expr ) => {{ + tracing::debug!("creating in-memory repository"); + // This line changed from the copy in spk_solve_macros + let repo = RepositoryHandle::new_mem(); + let _opts = $options; + $( + // This line changed from the copy in spk_solve_macros + let (s, cmpts) = make_package!(repo, $spec, &_opts); + tracing::trace!(pkg=%spk_schema::Package::ident(&s), cmpts=?cmpts.keys(), "adding package to repo"); + repo.publish_package(&s, &cmpts).await.unwrap(); + )* + repo + }}; +} + +// A cutdown copy of the one used by solver_tests +#[macro_export] +macro_rules! 
option_map { + ($($k:expr => $v:expr),* $(,)?) => {{ + #[allow(unused_mut)] + let mut opts = OptionMap::default(); + $(opts.insert( + OptNameBuf::try_from($k).expect("invalid option name"), + $v.into() + );)* + opts + }}; +} + +// Helper for making an IndexedRepository from a RepositoryHandle for testing with +async fn index_for_test(repo: RepositoryHandle) -> RepositoryHandle { + let ir = match IndexedRepository::generate_from_repo(Arc::new(repo)).await { + Ok(ir) => ir, + Err(err) => { + panic!( + "Unable to make IndexedRepository: Failed to generate an in-mem index from a repo: {err}" + ) + } + }; + ir.into() +} + +// Helper for comparing 2 Specs, from a repo and its index, to check +// the index one is equivalent to the repo one based on the data +// returned from the trait methods of each of them. +fn assert_packages_are_equivalent(build_from_repo: Arc, build_from_index: Arc) { + // Compare the trait methods that both packages implement + let pkg = build_from_repo.ident(); + + // BuildOptions - not implemented by Spec + + // Deprecate trait + assert_eq!( + build_from_repo.is_deprecated(), + build_from_index.is_deprecated(), + "is_deprecated() methods don't match [{pkg}]" + ); + + // Satisfy - need a sample request + + // Satisfy> - need a sample request + + // HasVersion + assert_eq!( + build_from_repo.version(), + build_from_index.version(), + "version() methods don't match [{pkg}]" + ); + + // HasBuild - not implemented by spec + + // BuildIdent + assert_eq!( + build_from_repo.build_ident(), + build_from_index.build_ident(), + "build_ident() methods don't match [{pkg}]" + ); + + // Named + assert_eq!( + build_from_repo.name(), + build_from_index.name(), + "name() methods don't match [{pkg}]" + ); + + // RuntimeEnvironment - not implemented by SolverPackageSpec + + // Versioned + assert_eq!( + build_from_repo.compat(), + build_from_index.compat(), + "compat() methods don't match [{pkg}]" + ); + + // Components - compared in pieces to exclude the parts that 
are + // not stored in the index. + let repo_components = build_from_repo.components(); + assert_eq!( + repo_components.len(), + build_from_index.components().len(), + "components() methods do not return the same number of components in index package: '{pkg}': [{}] vs [{}]", + repo_components + .iter() + .map(|cs| cs.name.to_string()) + .join(", "), + build_from_index + .components() + .iter() + .map(|cs| cs.name.to_string()) + .join(", "), + ); + + let index_components: HashMap = build_from_index + .components() + .iter() + .map(|cs| (cs.name.clone(), cs.clone())) + .collect(); + + for repo_comp in repo_components.iter() { + let index_comp = index_components.get(&repo_comp.name).unwrap_or_else(|| { + panic!( + "'{}' component should exist in index package: '{pkg}'", + repo_comp.name + ) + }); + + assert_eq!( + repo_comp.uses, index_comp.uses, + "components() methods don't match [{pkg}:{}] in .uses field", + repo_comp.name, + ); + assert_eq!( + repo_comp.requirements(), + index_comp.requirements(), + "components() methods don't match [{pkg}:{}] in requirements() method", + repo_comp.name, + ); + + let repo_comp_embedded: Vec = repo_comp.embedded.to_vec(); + let index_comp_embedded: Vec = index_comp.embedded.to_vec(); + + assert_eq!( + repo_comp_embedded, index_comp_embedded, + "components() methods don't match [{pkg}:{}] in embedded field (ignoring fabricated)", + repo_comp.name, + ); + assert_eq!( + repo_comp.requirements_with_options(), + index_comp.requirements_with_options(), + "components() methods don't match [{pkg}:{}] in requirements_with_options() method", + repo_comp.name, + ); + } + + // Package trait + assert_eq!( + build_from_repo.ident(), + build_from_index.ident(), + "ident() methods don't match [{pkg}]" + ); + + // metadata() - not implemented by SolverPackageSpec + // matches_all_filters(&self, filter_by: &Option>) -> bool - need sample filters + // sources(&self) -> &Vec - not implemented by SolverPackageSpec + + let repo_embedded = 
build_from_repo.embedded(); + assert_eq!( + repo_embedded.len(), + build_from_index.embedded().len(), + "embedded() methods do not return the same number of packages in index package: '{pkg}': [{}] vs [{}]", + repo_embedded + .iter() + .map(|es| es.ident().to_string()) + .join(", "), + build_from_index + .embedded() + .iter() + .map(|es| es.ident().to_string()) + .join(", "), + ); + + assert_eq!( + build_from_repo.embedded(), + build_from_index.embedded(), + "embedded() methods don't match [{pkg}]" + ); + + assert_eq!( + build_from_repo.embedded_as_packages(), + // TODO: could recursive compare these too? all the way down, might need to + build_from_index.embedded_as_packages(), + "embedded_as_packages() don't match [{pkg}]" + ); + + let re = build_from_repo.embedded_as_packages(); + let ie = build_from_index.embedded_as_packages(); + tracing::error!("RE:\n{re:?}\nIE:\n{ie:?}\n"); + + assert_eq!( + build_from_repo.get_build_options(), + build_from_index.get_build_options(), + "get_build_options() don't match [{pkg}]", + ); + + // get_build_requirements(&self) -> crate::Result>> - not implemented by SolverPackageSpec + + assert_eq!( + build_from_repo.runtime_requirements(), + build_from_index.runtime_requirements(), + "runtime_requirements() don't match [{pkg}]", + ); + + // fn get_all_tests(&self) -> Vec - not implemented by SolverPackageSpec + + // DownstreamRequirements - not implemented by SolverPackageSpec + + // OptionValues + assert_eq!( + build_from_repo.option_values(), + build_from_index.option_values(), + "option_values() don't match [{pkg}]", + ); +} + +// Helper for comparing the packages in the original repo with the +// ones in an indexed repo constructed from the original repo. 
+async fn assert_repo_has_same_packages_as_other_repo( + repo1: &RepositoryHandle, + repo2: &RepositoryHandle, +) { + // Spin thru' the first repo and get all the builds and look up + // each build in the second repo and then compare by calling all + // the relevant trait methods on both package builds and checking + // they give the same results. + let repos = vec![(format!("{}", repo1.name()), repo1.clone())]; + let mut repo_walker_builder = RepoWalkerBuilder::new(&repos); + let repo_walker = repo_walker_builder + .with_report_on_versions(true) + .with_report_on_builds(true) + .with_report_src_builds(true) + .with_report_deprecated_builds(true) + .with_report_embedded_builds(true) + .with_end_of_markers(true) + .with_sort_objects(true) + .with_continue_on_error(true) + .build(); + + let mut traversal = repo_walker.walk(); + while let Some(item) = traversal.try_next().await.unwrap() { + if let RepoWalkerItem::Build(build) = item { + let build_from_repo1 = build.spec; + println!( + "Original Build: '{}' '{:#}'", + build_from_repo1.ident(), + build_from_repo1.ident(), + ); + + let build_from_repo2 = repo2.read_package(build_from_repo1.ident()).await.unwrap(); + println!( + "Indexed Build: '{}' '{:#}'", + build_from_repo2.ident(), + build_from_repo2.ident() + ); + + assert_packages_are_equivalent(build_from_repo1, build_from_repo2) + } + } +} + +// Helper for comparing the packages in the original repo with the +// ones in an indexed repo constructed from the original repo. 
+async fn assert_repo_and_index_have_same_packages( + repo: RepositoryHandle, + indexed_repo: RepositoryHandle, +) { + println!("\nCheck one"); + assert_repo_has_same_packages_as_other_repo(&repo, &indexed_repo).await; + println!("\nCheck two"); + assert_repo_has_same_packages_as_other_repo(&indexed_repo, &repo).await; +} + +#[rstest] +#[tokio::test] +async fn test_flatbuffer_index_one_package_with_no_recipe(random_build_id: BuildId) { + let repo = RepositoryHandle::new_mem(); + + let spec = v0::PackageSpec::new(build_ident!(format!("my-pkg/1.0.0/{random_build_id}"))); + + // publish package without publishing spec + let components = vec![ + (Component::Run, EMPTY_DIGEST.into()), + (Component::Build, EMPTY_DIGEST.into()), + ] + .into_iter() + .collect(); + repo.publish_package(&spec.into(), &components) + .await + .unwrap(); + let indexed_repo = index_for_test(repo.clone()).await; + + assert_repo_and_index_have_same_packages(repo, indexed_repo).await; +} + +#[rstest] +#[tokio::test] +async fn test_flatbuffer_index_pre_release_config() { + let repo = make_repo!( + [ + {"pkg": "my-pkg/0.9.0"}, + {"pkg": "my-pkg/1.0.0-pre.0"}, + {"pkg": "my-pkg/1.0.0-pre.1"}, + {"pkg": "my-pkg/1.0.0-pre.2"}, + ] + ); + let indexed_repo = index_for_test(repo.clone()).await; + + assert_repo_and_index_have_same_packages(repo, indexed_repo).await; +} + +#[rstest] +#[tokio::test] +async fn test_flatbuffer_index_component_embedded_component_requirements() { + let repo = make_repo!( + [ + { + "pkg": "mypkg/1.0.0", + "install": { + "components": [ + {"name": "comp1"}, + {"name": "comp2"}, + ], + "embedded": [ + {"pkg": "dep-e1/1.0.0", + "install": {"components": [ + // comp1 requires a package that exists + {"name": "comp1", "requirements": [{"pkg": "dep-e2/1.0.0"}]}, + // comp2 requires a package that does not exist + {"name": "comp2", "requirements": [{"pkg": "dep-e3/1.0.0"}]} + ]} + }, + ], + }, + }, + {"pkg": "dep-e2/1.0.0"}, + ] + ); + + let indexed_repo = 
index_for_test(repo.clone()).await; + + assert_repo_and_index_have_same_packages(repo, indexed_repo).await; +} + +#[rstest] +#[tokio::test] +async fn test_flatbuffer_index_component_embedded_multiple_versions() { + // test when different components embed different versions of the same + // embedded package + // - requesting individual components should select the correct version of + // the embedded package + let repo = make_repo!( + [ + { + "pkg": "mypkg/1.0.0", + "install": { + "components": [ + {"name": "build", "embedded": ["dep-e1:all/1.0.0"]}, + {"name": "run", "embedded": ["dep-e1:all/2.0.0"]}, + ], + "embedded": [ + {"pkg": "dep-e1/1.0.0"}, + {"pkg": "dep-e1/2.0.0"}, + ], + }, + }, + {"pkg": "dep-e1/1.0.0"}, + {"pkg": "dep-e1/2.0.0"}, + // Should solve + { + "pkg": "downstream1", + "install": { + "requirements": [{"pkg": "dep-e1/1.0.0"}, {"pkg": "mypkg:build"}] + }, + }, + // Should solve + { + "pkg": "downstream2", + "install": { + "requirements": [{"pkg": "dep-e1/2.0.0"}, {"pkg": "mypkg:run"}] + }, + }, + // Should not solve + { + "pkg": "downstream3", + "install": { + "requirements": [{"pkg": "dep-e1/1.0.0"}, {"pkg": "mypkg:run"}] + }, + }, + ] + ); + let indexed_repo = index_for_test(repo.clone()).await; + + assert_repo_and_index_have_same_packages(repo, indexed_repo).await; +} + +#[rstest] +#[tokio::test] +async fn test_flatbuffers_index_version_number_masking() { + // Uses a dummy package to prime the repo + let repo1 = make_repo!([{"pkg": "python/3.9.7"}]); + let repo2 = make_repo!([{"pkg": "python/3.9.7"}]); + + // One repo gets 1.0.0 version and the other gets a 1.0 version + let options = option_map! { "color" => "red" }; + let (s, cmpts) = make_package!( + repo1, + { + "pkg": "my-pkg/1.0.0", + "build": {"options": [{"var": "color"}]}, + }, + options + ); + repo1.publish_package(&s, &cmpts).await.unwrap(); + tracing::info!(pkg=%spk_schema::Package::ident(&s), "published package to repo1"); + let options = option_map! 
{ "color" => "blue" }; + let (s, cmpts) = make_package!( + repo2, + { + "pkg": "my-pkg/1.0", + "build": {"options": [{"var": "color"}]}, + }, + options + ); + repo2.publish_package(&s, &cmpts).await.unwrap(); + tracing::info!(pkg=%spk_schema::Package::ident(&s), "published package to repo2"); + + println!("Repo1's"); + + let indexed_repo1 = index_for_test(repo1.clone()).await; + + assert_repo_and_index_have_same_packages(repo1, indexed_repo1).await; + + println!("Repo2's"); + + let indexed_repo2 = index_for_test(repo2.clone()).await; + + assert_repo_and_index_have_same_packages(repo2, indexed_repo2).await; +} + +#[rstest] +#[tokio::test] +async fn test_flatbuffer_deprecated_version() { + let deprecated = make_build!({"pkg": "my-pkg/1.0.0", "deprecated": true}); + let repo = make_repo!( + [{"pkg": "my-pkg/0.9.0"}, {"pkg": "my-pkg/1.0.0", "deprecated": true}, deprecated] + ); + + let indexed_repo = index_for_test(repo.clone()).await; + + assert_repo_and_index_have_same_packages(repo, indexed_repo).await; +} + +// TODO: add rest of solves sample repos to this as tests diff --git a/crates/spk-storage/src/storage/handle.rs b/crates/spk-storage/src/storage/handle.rs index 3c332c717c..1be215714d 100644 --- a/crates/spk-storage/src/storage/handle.rs +++ b/crates/spk-storage/src/storage/handle.rs @@ -3,17 +3,19 @@ // https://github.com/spkenv/spk use spk_schema::{Spec, SpecRecipe}; +use variantly::Variantly; use super::Repository; type Handle = dyn Repository; -#[derive(Debug, Eq, Hash, Ord, PartialEq, PartialOrd)] +#[derive(Debug, Eq, Hash, Ord, PartialEq, PartialOrd, Clone, Variantly)] #[allow(clippy::large_enum_variant)] pub enum RepositoryHandle { SPFS(super::SpfsRepository), Mem(super::MemRepository), Runtime(super::RuntimeRepository), + Indexed(super::IndexedRepository), } impl RepositoryHandle { @@ -30,23 +32,12 @@ impl RepositoryHandle { Self::Runtime(Default::default()) } - pub fn is_spfs(&self) -> bool { - matches!(self, Self::SPFS(_)) - } - - pub fn is_mem(&self) 
-> bool { - matches!(self, Self::Mem(_)) - } - - pub fn is_runtime(&self) -> bool { - matches!(self, Self::Runtime(_)) - } - pub fn to_repo(self) -> Box { match self { Self::SPFS(repo) => Box::new(repo), Self::Mem(repo) => Box::new(repo), Self::Runtime(repo) => Box::new(repo), + Self::Indexed(repo) => Box::new(repo), } } } @@ -59,6 +50,7 @@ impl std::ops::Deref for RepositoryHandle { RepositoryHandle::SPFS(repo) => repo, RepositoryHandle::Mem(repo) => repo, RepositoryHandle::Runtime(repo) => repo, + RepositoryHandle::Indexed(repo) => repo, } } } @@ -69,6 +61,7 @@ impl std::ops::DerefMut for RepositoryHandle { RepositoryHandle::SPFS(repo) => repo, RepositoryHandle::Mem(repo) => repo, RepositoryHandle::Runtime(repo) => repo, + RepositoryHandle::Indexed(repo) => repo, } } } @@ -90,3 +83,9 @@ impl From for RepositoryHandle { RepositoryHandle::Runtime(repo) } } + +impl From for RepositoryHandle { + fn from(repo: super::IndexedRepository) -> Self { + RepositoryHandle::Indexed(repo) + } +} diff --git a/crates/spk-storage/src/storage/indexed.rs b/crates/spk-storage/src/storage/indexed.rs new file mode 100644 index 0000000000..796d867318 --- /dev/null +++ b/crates/spk-storage/src/storage/indexed.rs @@ -0,0 +1,378 @@ +// Copyright (c) Contributors to the SPK project. 
+// SPDX-License-Identifier: Apache-2.0 +// https://github.com/spkenv/spk + +use std::collections::{HashMap, HashSet}; +use std::sync::Arc; + +use arc_swap::ArcSwap; +use spk_schema::foundation::ident_build::Build; +use spk_schema::foundation::ident_component::Component; +use spk_schema::foundation::name::{PkgName, PkgNameBuf, RepositoryName}; +use spk_schema::foundation::version::Version; +use spk_schema::ident::VersionIdent; +use spk_schema::ident_build::EmbeddedSource; +use spk_schema::name::OptNameBuf; +use spk_schema::{BuildIdent, Spec, SpecRecipe}; + +use super::repository::{PublishPolicy, Repository, Storage}; +use crate::storage::{FLATBUFFER_INDEX, FlatBufferRepoIndex, RepoIndex, RepositoryIndex}; +use crate::{Error, Result}; + +/// A spk repository that wraps another repository with that +/// repository's index. Operations that solvers need will use the +/// index and typically be faster, other operations, especially +/// writes, will pass through to the underlying wrapped repository. +#[derive(Debug)] +pub struct IndexedRepository { + /// The index of the wrapped repo + index: ArcSwap, + /// The underlying repo that has an index + wrapped_repo: Arc, + /// For automated tests + update_index_after_any_publish: bool, +} + +impl Clone for IndexedRepository { + fn clone(&self) -> Self { + Self { + index: ArcSwap::new(self.index.load_full()), + wrapped_repo: self.wrapped_repo.clone(), + update_index_after_any_publish: self.update_index_after_any_publish, + } + } +} + +impl IndexedRepository { + /// Get the name of the kind of index from spk's config. This is + /// used to work out what kind of index to load or save. 
+ fn get_index_kind_from_config() -> Result { + let config = spk_config::get_config()?; + + let index_kind = if config.solver.indexes.kind != String::default() { + config.solver.indexes.kind.clone() + } else { + String::from(FLATBUFFER_INDEX) + }; + + tracing::debug!("Index kind from config: '{index_kind}'"); + Ok(index_kind) + } + + /// Set whether to update the internal index after any publish or + /// write operation on this repository. This is meant for use by + /// automated testing. Updating the index continuously may be costly. + pub fn set_update_index_after_any_publish(&mut self, value: bool) { + self.update_index_after_any_publish = value + } + + /// Create an IndexedRepository for the given repo by loading the + /// index from, and for, that repo. + pub async fn load_from_repo( + repo_to_wrap: Arc, + ) -> Result { + let index_kind = IndexedRepository::get_index_kind_from_config()?; + + let index = match index_kind.as_ref() { + FLATBUFFER_INDEX => { + tracing::debug!("Flatbuffer index selected"); + + match FlatBufferRepoIndex::from_repo_file(&repo_to_wrap).await { + Ok(i) => RepoIndex::Flat(i), + Err(err) => { + return Err(Error::IndexFailedToLoad(err.to_string())); + } + } + } + _ => { + return Err(Error::IndexUnknownKind( + index_kind, + "load from file".to_string(), + )); + } + }; + + Ok(IndexedRepository { + index: ArcSwap::new(Arc::new(index)), + wrapped_repo: repo_to_wrap, + update_index_after_any_publish: false, + }) + } + + /// Internal helper method for generating an index in memory. 
+ async fn generate_in_memory_index_from_repo( + repo_to_wrap: &Arc, + ) -> Result { + let index_kind = IndexedRepository::get_index_kind_from_config()?; + + match index_kind.as_ref() { + FLATBUFFER_INDEX => { + tracing::debug!("Flatbuffer index selected"); + + match FlatBufferRepoIndex::from_repo_in_memory(repo_to_wrap).await { + Ok(i) => Ok(RepoIndex::Flat(i)), + Err(err) => Err(Error::IndexFailedToGenerate(err.to_string())), + } + } + _ => Err(Error::IndexUnknownKind( + index_kind, + "create in memory".to_string(), + )), + } + } + + /// Create an IndexedRepository from the given repo by generating + /// an in-memory index from the repository's data. This may take + /// some time and does not save the index anywhere else. + pub async fn generate_from_repo( + repo_to_wrap: Arc, + ) -> Result { + let index = IndexedRepository::generate_in_memory_index_from_repo(&repo_to_wrap).await?; + + Ok(IndexedRepository { + index: ArcSwap::new(Arc::new(index)), + wrapped_repo: repo_to_wrap, + update_index_after_any_publish: false, + }) + } + + /// Rebuild the index in memory from the underlying repo. + async fn rebuild_internal_index(&self) -> Result<()> { + let new_index = + IndexedRepository::generate_in_memory_index_from_repo(&self.wrapped_repo).await?; + + self.index.store(Arc::new(new_index)); + Ok(()) + } + + /// Returns a mapping of all the global vars and their values from + /// all the builds in the repo. This is used to prime the resolvo solver. 
+ pub fn get_global_var_values(&self) -> HashMap> { + self.index.load().get_global_var_values() + } +} + +impl std::hash::Hash for IndexedRepository { + fn hash(&self, state: &mut H) { + // Pass through to the wrapped repo + self.wrapped_repo.address().hash(state); + } +} + +impl Ord for IndexedRepository { + fn cmp(&self, other: &Self) -> std::cmp::Ordering { + // Pass through to the wrapped repo + self.wrapped_repo + .address() + .cmp(other.wrapped_repo.address()) + } +} + +impl PartialOrd for IndexedRepository { + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } +} + +impl PartialEq for IndexedRepository { + fn eq(&self, other: &Self) -> bool { + // Pass through to the wrapped repo + self.wrapped_repo.address() == other.wrapped_repo.address() + } +} + +impl Eq for IndexedRepository {} + +#[async_trait::async_trait] +impl Storage for IndexedRepository { + type Recipe = SpecRecipe; + type Package = Spec; + + async fn get_concrete_package_builds(&self, pkg: &VersionIdent) -> Result> { + // This almost identical to embedded builds method below, but + // with the filter_map function swapped around. + let all_builds = self.index.load().list_package_builds(pkg).await?; + Ok(all_builds + .into_iter() + .filter_map(|b| if b.is_embedded() { None } else { Some(b) }) + .collect()) + } + + async fn get_embedded_package_builds(&self, pkg: &VersionIdent) -> Result> { + // This almost identical to concrete builds method above, but + // with the filter_map functions swapped around. 
+ let all_builds = self.index.load().list_package_builds(pkg).await?; + Ok(all_builds + .into_iter() + .filter_map(|b| if b.is_embedded() { Some(b) } else { None }) + .collect()) + } + + async fn publish_embed_stub_to_storage(&self, spec: &Self::Package) -> Result<()> { + // Pass through to the wrapped repo to do the update + self.wrapped_repo + .publish_embed_stub_to_storage(spec) + .await?; + + // Rebuild the index only if enabled + if self.update_index_after_any_publish { + self.rebuild_internal_index().await?; + } + + Ok(()) + } + + async fn publish_package_to_storage( + &self, + package: &::Output, + components: &HashMap, + ) -> Result<()> { + // Pass thru to the wrapped repo to do the update + self.wrapped_repo + .publish_package_to_storage(package, components) + .await?; + + // Rebuild the index only if enabled + if self.update_index_after_any_publish { + self.rebuild_internal_index().await?; + } + + Ok(()) + } + + async fn publish_recipe_to_storage( + &self, + spec: &Self::Recipe, + publish_policy: PublishPolicy, + ) -> Result<()> { + // Pass thru to the wrapped repo to do the update + self.wrapped_repo + .publish_recipe_to_storage(spec, publish_policy) + .await?; + + // Rebuild the index only if enabled + if self.update_index_after_any_publish { + self.rebuild_internal_index().await?; + } + + Ok(()) + } + + async fn read_components_from_storage( + &self, + pkg: &BuildIdent, + ) -> Result> { + // Pass through to the wrapped repo + self.wrapped_repo.read_components_from_storage(pkg).await + } + + async fn read_package_from_storage( + &self, + pkg: &BuildIdent, + ) -> Result::Output>> { + // TODO: remove or put back, this commented out code block + // once deprecation and indexing is decided on. + // if self.is_build_deprecated(pkg).await? { + // // Deprecated builds are stored as partial entries in the + // // index so they have to be read from the underlying repo. 
+ // self.wrapped_repo.read_package_from_storage(pkg).await + // } else { + self.index.load().get_package_build_spec(pkg) + //} + } + + async fn remove_embed_stub_from_storage(&self, pkg: &BuildIdent) -> Result<()> { + // Pass through to the wrapped repo to do the update + self.wrapped_repo + .remove_embed_stub_from_storage(pkg) + .await?; + + // Rebuild the index only if enabled + if self.update_index_after_any_publish { + self.rebuild_internal_index().await?; + } + + Ok(()) + } + + async fn remove_package_from_storage(&self, pkg: &BuildIdent) -> Result<()> { + // Pass through to the wrapped repo to do the update + self.wrapped_repo.remove_package_from_storage(pkg).await?; + + // Rebuild the index only if enabled + if self.update_index_after_any_publish { + self.rebuild_internal_index().await?; + } + + Ok(()) + } +} + +#[async_trait::async_trait] +impl Repository for IndexedRepository { + fn address(&self) -> &url::Url { + // Pass through to the wrapped repo + self.wrapped_repo.address() + } + + async fn list_packages(&self) -> Result> { + self.index.load().list_packages().await + } + + async fn list_package_versions(&self, name: &PkgName) -> Result>>> { + self.index.load().list_package_versions(name).await + } + + async fn list_package_builds(&self, pkg: &VersionIdent) -> Result> { + self.index.load().list_package_builds(pkg).await + } + + async fn list_build_components(&self, pkg: &BuildIdent) -> Result> { + self.index.load().list_build_components(pkg).await + } + + async fn is_build_deprecated(&self, build: &BuildIdent) -> Result { + self.index.load().is_build_deprecated(build).await + } + + fn name(&self) -> &RepositoryName { + // Pass through to the wrapped repo + self.wrapped_repo.name() + } + + async fn read_embed_stub(&self, pkg: &BuildIdent) -> Result> { + match pkg.build() { + Build::Embedded(EmbeddedSource::Package { .. 
}) => { + // Allow embedded stubs to be read as a "package" + } + _ => { + return Err(format!("Cannot read this ident as an embed stub: {pkg}").into()); + } + }; + + if self.is_build_deprecated(pkg).await? { + // Deprecated builds are stored as partial entries in the + // index so they have to be read from the underlying repo. + self.wrapped_repo.read_package_from_storage(pkg).await + } else { + self.index.load().get_package_build_spec(pkg) + } + } + + async fn read_recipe(&self, pkg: &VersionIdent) -> Result> { + // Pass through to the wrapped repo + self.wrapped_repo.read_recipe(pkg).await + } + + async fn remove_recipe(&self, pkg: &VersionIdent) -> Result<()> { + // Pass through to the wrapped repo to do the update + self.wrapped_repo.remove_recipe(pkg).await?; + + // Rebuild the index only if enabled + if self.update_index_after_any_publish { + self.rebuild_internal_index().await?; + } + + Ok(()) + } +} diff --git a/crates/spk-storage/src/storage/mem.rs b/crates/spk-storage/src/storage/mem.rs index d7dd09d1b4..60820e9a25 100644 --- a/crates/spk-storage/src/storage/mem.rs +++ b/crates/spk-storage/src/storage/mem.rs @@ -10,7 +10,7 @@ use spk_schema::foundation::ident_build::Build; use spk_schema::foundation::ident_component::Component; use spk_schema::foundation::name::{PkgName, PkgNameBuf, RepositoryName, RepositoryNameBuf}; use spk_schema::foundation::version::Version; -use spk_schema::{BuildIdent, Spec, SpecRecipe, VersionIdent}; +use spk_schema::{BuildIdent, Deprecate, Spec, SpecRecipe, VersionIdent}; use tokio::sync::RwLock; use super::Repository; @@ -328,6 +328,11 @@ where .unwrap_or_default()) } + async fn is_build_deprecated(&self, build: &BuildIdent) -> Result { + let spec = self.read_package(build).await?; + Ok(spec.is_deprecated()) + } + fn name(&self) -> &RepositoryName { self.name.as_ref() } diff --git a/crates/spk-storage/src/storage/mod.rs b/crates/spk-storage/src/storage/mod.rs index 2b3d7240d4..25c9d7a3c6 100644 --- 
a/crates/spk-storage/src/storage/mod.rs +++ b/crates/spk-storage/src/storage/mod.rs @@ -3,16 +3,22 @@ // https://github.com/spkenv/spk mod archive; +mod flatbuffer_index; mod handle; +mod indexed; mod mem; mod repository; +mod repository_index; mod runtime; mod spfs; pub use archive::export_package; +pub use flatbuffer_index::{FLATBUFFER_INDEX, FlatBufferRepoIndex}; pub use handle::RepositoryHandle; +pub use indexed::IndexedRepository; pub use mem::MemRepository; pub use repository::{CachePolicy, Repository, Storage}; +pub use repository_index::{RepoIndex, RepositoryIndex, RepositoryIndexMut}; pub use runtime::{RuntimeRepository, find_path_providers, pretty_print_filepath}; pub use self::spfs::{ diff --git a/crates/spk-storage/src/storage/repository.rs b/crates/spk-storage/src/storage/repository.rs index 43de1d1d68..c0a6a07c14 100644 --- a/crates/spk-storage/src/storage/repository.rs +++ b/crates/spk-storage/src/storage/repository.rs @@ -336,6 +336,9 @@ pub trait Repository: Storage + Sync { /// Returns the set of components published for a package build async fn list_build_components(&self, pkg: &BuildIdent) -> Result>; + /// Returns the true if the given package/version/build is deprecated. + async fn is_build_deprecated(&self, _build: &BuildIdent) -> Result; + /// Return the repository's name, as in "local" or its name in the config file. fn name(&self) -> &RepositoryName; diff --git a/crates/spk-storage/src/storage/repository_index.rs b/crates/spk-storage/src/storage/repository_index.rs new file mode 100644 index 0000000000..453bd8663f --- /dev/null +++ b/crates/spk-storage/src/storage/repository_index.rs @@ -0,0 +1,106 @@ +// Copyright (c) Contributors to the SPK project. 
+// SPDX-License-Identifier: Apache-2.0 +// https://github.com/spkenv/spk + +use std::collections::{HashMap, HashSet}; +use std::sync::Arc; + +use spk_schema::foundation::ident_component::Component; +use spk_schema::foundation::name::{PkgName, PkgNameBuf}; +use spk_schema::foundation::version::Version; +use spk_schema::ident::VersionIdent; +use spk_schema::name::OptNameBuf; +use spk_schema::{BuildIdent, Spec}; + +use crate::Result; +use crate::storage::FlatBufferRepoIndex; + +/// Index read operations for a repository index +pub trait RepositoryIndex: Sync { + /// To help the resolvo solver + fn get_global_var_values(&self) -> HashMap>; + + /// For solving, closely related to the Repository trait methods of the same names + async fn list_packages(&self) -> Result>; + + async fn list_package_versions(&self, name: &PkgName) -> Result>>>; + + async fn list_package_builds(&self, pkg: &VersionIdent) -> Result>; + + async fn list_build_components(&self, pkg: &BuildIdent) -> Result>; + + async fn is_build_deprecated(&self, build: &BuildIdent) -> Result; + + /// Returns a valid package build spec for the given package build + /// ident from the index. + fn get_package_build_spec(&self, pkg: &BuildIdent) -> Result>; +} + +/// Index creation and updating operations for a repository index +#[async_trait::async_trait] +pub trait RepositoryIndexMut { + /// Generate the index for the given repo and store it for later use + #[allow(clippy::ptr_arg)] + async fn index_repo(repos: &Vec<(String, crate::RepositoryHandle)>) -> miette::Result<()>; + + // TODO: possible rename, should it be something like + // update_package_entry() instead? + /// Update an existing index for the given package/version. For + /// use when a package has been published to a repo to add it to + /// the index without generating the entire index from scratch. 
+ async fn update_repo_with_package_version( + &self, + repo: &crate::RepositoryHandle, + package_version: &VersionIdent, + ) -> miette::Result<()>; +} + +/// Type for wrapping different kinds of indexes +#[derive(Debug, Clone)] +pub enum RepoIndex { + Flat(FlatBufferRepoIndex), +} + +impl RepositoryIndex for RepoIndex { + fn get_global_var_values(&self) -> HashMap<OptNameBuf, HashSet<String>> { + match self { + RepoIndex::Flat(i) => i.get_global_var_values(), + } + } + + async fn list_packages(&self) -> Result<Vec<PkgNameBuf>> { + match self { + RepoIndex::Flat(i) => i.list_packages().await, + } + } + + async fn list_package_versions(&self, name: &PkgName) -> Result<Arc<Vec<Arc<Version>>>> { + match self { + RepoIndex::Flat(i) => i.list_package_versions(name).await, + } + } + + async fn list_package_builds(&self, pkg: &VersionIdent) -> Result<Vec<BuildIdent>> { + match self { + RepoIndex::Flat(i) => i.list_package_builds(pkg).await, + } + } + + async fn list_build_components(&self, pkg: &BuildIdent) -> Result<Vec<Component>> { + match self { + RepoIndex::Flat(i) => i.list_build_components(pkg).await, + } + } + + async fn is_build_deprecated(&self, build: &BuildIdent) -> Result<bool> { + match self { + RepoIndex::Flat(i) => i.is_build_deprecated(build).await, + } + } + + fn get_package_build_spec(&self, pkg: &BuildIdent) -> Result<Arc<Spec>> { + match self { + RepoIndex::Flat(i) => i.get_package_build_spec(pkg), + } + } +} diff --git a/crates/spk-storage/src/storage/repository_test.rs b/crates/spk-storage/src/storage/repository_test.rs index 9c4208e6d8..c632eded4d 100644 --- a/crates/spk-storage/src/storage/repository_test.rs +++ b/crates/spk-storage/src/storage/repository_test.rs @@ -28,6 +28,7 @@ use crate::fixtures::*; #[rstest] #[case::mem(RepoKind::Mem)] #[case::spfs(RepoKind::Spfs)] +#[case::indexed(RepoKind::IndexedMem)] #[tokio::test] async fn test_repo_list_empty(#[case] repo: RepoKind) { let repo = make_repo(repo).await; @@ -40,6 +41,7 @@ async fn test_repo_list_empty(#[case] repo: RepoKind) { #[rstest] #[case::mem(RepoKind::Mem)] #[case::spfs(RepoKind::Spfs)] 
+#[case::indexed(RepoKind::IndexedMem)] #[tokio::test] async fn test_repo_list_package_versions_empty(#[case] repo: RepoKind) { let repo = make_repo(repo).await; @@ -55,6 +57,7 @@ async fn test_repo_list_package_versions_empty(#[case] repo: RepoKind) { #[rstest] #[case::mem(RepoKind::Mem)] #[case::spfs(RepoKind::Spfs)] +#[case::indexed(RepoKind::IndexedMem)] #[tokio::test] async fn test_repo_list_package_builds_empty(#[case] repo: RepoKind) { let repo = make_repo(repo).await; @@ -68,6 +71,7 @@ async fn test_repo_list_package_builds_empty(#[case] repo: RepoKind) { #[rstest] #[case::mem(RepoKind::Mem)] #[case::spfs(RepoKind::Spfs)] +#[case::indexed(RepoKind::IndexedMem)] #[tokio::test] async fn test_repo_read_recipe_empty(#[case] repo: RepoKind) { let repo = make_repo(repo).await; @@ -81,6 +85,7 @@ async fn test_repo_read_recipe_empty(#[case] repo: RepoKind) { #[rstest] #[case::mem(RepoKind::Mem)] #[case::spfs(RepoKind::Spfs)] +#[case::indexed(RepoKind::IndexedMem)] #[tokio::test] async fn test_repo_read_package_empty(#[case] repo: RepoKind) { let repo = make_repo(repo).await; @@ -94,6 +99,7 @@ async fn test_repo_read_package_empty(#[case] repo: RepoKind) { #[rstest] #[case::mem(RepoKind::Mem)] #[case::spfs(RepoKind::Spfs)] +#[case::indexed(RepoKind::IndexedMem)] #[tokio::test] async fn test_repo_publish_recipe(#[case] repo: RepoKind) { let repo = make_repo(repo).await; @@ -134,6 +140,7 @@ async fn test_repo_publish_recipe(#[case] repo: RepoKind) { #[rstest] #[case::mem(RepoKind::Mem)] #[case::spfs(RepoKind::Spfs)] +#[case::indexed(RepoKind::IndexedMem)] #[tokio::test] async fn test_repo_publish_package(#[case] repo: RepoKind) { let repo = make_repo(repo).await; @@ -209,6 +216,7 @@ async fn create_repo_for_embed_stubs_test(repo: &TempRepo) -> (SpecRecipe, Spec) #[rstest] #[case::mem(RepoKind::Mem)] #[case::spfs(RepoKind::Spfs)] +#[case::indexed(RepoKind::IndexedMem)] #[tokio::test] async fn test_repo_publish_spec_updates_embed_stubs(#[case] repo: RepoKind) { let repo 
= make_repo(repo).await; @@ -377,6 +385,7 @@ async fn test_repo_update_and_deprecate_spec_updates_embed_stubs(#[case] repo: R #[rstest] #[case::mem(RepoKind::Mem)] #[case::spfs(RepoKind::Spfs)] +#[case::indexed(RepoKind::IndexedMem)] #[tokio::test] async fn test_repo_publish_package_creates_embed_stubs(#[case] repo: RepoKind) { let repo = make_repo(repo).await; @@ -397,6 +406,7 @@ async fn test_repo_publish_package_creates_embed_stubs(#[case] repo: RepoKind) { #[rstest] #[case::mem(RepoKind::Mem)] #[case::spfs(RepoKind::Spfs)] +#[case::indexed(RepoKind::IndexedMem)] #[tokio::test] async fn test_embedded_stub_build_version_is_normalized(#[case] repo: RepoKind) { let repo = make_repo(repo).await; @@ -447,6 +457,7 @@ async fn test_embedded_stub_build_version_is_normalized(#[case] repo: RepoKind) #[rstest] #[case::mem(RepoKind::Mem)] #[case::spfs(RepoKind::Spfs)] +#[case::indexed(RepoKind::IndexedMem)] #[tokio::test] async fn test_repo_remove_package_removes_embed_stubs(#[case] repo: RepoKind) { let repo = make_repo(repo).await; diff --git a/crates/spk-storage/src/storage/runtime.rs b/crates/spk-storage/src/storage/runtime.rs index e7eee7c3d0..8c2a58b2b6 100644 --- a/crates/spk-storage/src/storage/runtime.rs +++ b/crates/spk-storage/src/storage/runtime.rs @@ -17,7 +17,7 @@ use spk_schema::foundation::ident_component::Component; use spk_schema::foundation::name::{PkgName, PkgNameBuf, RepositoryName, RepositoryNameBuf}; use spk_schema::foundation::version::{Version, parse_version}; use spk_schema::ident_build::{Build, EmbeddedSource}; -use spk_schema::{BuildIdent, Components, FromYaml, Spec, SpecRecipe, VersionIdent}; +use spk_schema::{BuildIdent, Components, Deprecate, FromYaml, Spec, SpecRecipe, VersionIdent}; use super::Repository; use super::repository::{PublishPolicy, Storage}; @@ -450,6 +450,11 @@ impl Repository for RuntimeRepository { .collect() } + async fn is_build_deprecated(&self, build: &BuildIdent) -> Result<bool> { + let spec = self.read_package(build).await?; + 
Ok(spec.is_deprecated()) + } + + async fn read_embed_stub(&self, pkg: &BuildIdent) -> Result<Arc<Spec>> { + Err(Error::PackageNotFound(Box::new(pkg.to_any_ident()))) + } diff --git a/crates/spk-storage/src/storage/spfs.rs b/crates/spk-storage/src/storage/spfs.rs index 2b565fc281..6e7a7629dd 100644 --- a/crates/spk-storage/src/storage/spfs.rs +++ b/crates/spk-storage/src/storage/spfs.rs @@ -26,7 +26,17 @@ use spk_schema::ident::{AsVersionIdent, VersionIdent}; use spk_schema::ident_build::parsing::embedded_source_package; use spk_schema::ident_build::{EmbeddedSource, EmbeddedSourcePackage}; use spk_schema::ident_ops::TagPath; -use spk_schema::{AnyIdent, BuildIdent, FromYaml, Opt, Package, Recipe, Spec, SpecRecipe}; +use spk_schema::{ + AnyIdent, + BuildIdent, + Deprecate, + FromYaml, + Opt, + Package, + Recipe, + Spec, + SpecRecipe, +}; use tokio::io::AsyncReadExt; use super::CachePolicy; @@ -763,6 +773,11 @@ impl crate::Repository for SpfsRepository { r } + async fn is_build_deprecated(&self, build: &BuildIdent) -> Result<bool> { + let spec = self.read_package(build).await?; + Ok(spec.is_deprecated()) + } + fn name(&self) -> &RepositoryName { &self.name } diff --git a/cspell.json b/cspell.json index b81cb15c5e..cbc565a5c9 100644 --- a/cspell.json +++ b/cspell.json @@ -243,6 +243,7 @@ "flatbuf", "flatbuffer", "flatbuffers", + "flatb", "flatc", "FLATC", "flifetime", @@ -417,6 +418,8 @@ "maxmin", "MDQOIZUV", "MEJOYQ", + "memmap", + "memmapped", "mesg", "metacopy", "metatdata", @@ -432,6 +435,8 @@ "mkrecipe", "mksource", "mksrc", + "mmap", + "Mmap", "modversions", "mountpoint", "mpfr",