From 1a254008e4eaad425c83554cae7b3fc006b4c82d Mon Sep 17 00:00:00 2001
From: Ed Page
Date: Thu, 30 May 2024 09:17:41 -0500
Subject: [PATCH 1/5] test(git): Show unused duplicate package warning

---
 tests/testsuite/git.rs | 113 ++++++++++++++++++++++++++++++++++++++---
 1 file changed, 106 insertions(+), 7 deletions(-)

diff --git a/tests/testsuite/git.rs b/tests/testsuite/git.rs
index 84dce88a062..3ffe937b4b4 100644
--- a/tests/testsuite/git.rs
+++ b/tests/testsuite/git.rs
@@ -1132,23 +1132,115 @@ fn ambiguous_published_deps() {
     let git_project = git::new("dep", |project| {
         project
             .file(
-                "aaa/Cargo.toml",
+                "duplicate1/Cargo.toml",
                 &format!(
                     r#"
                     [package]
-                    name = "bar"
+                    name = "duplicate"
                     version = "0.5.0"
                     edition = "2015"
                     publish = true
                     "#
                 ),
             )
-            .file("aaa/src/lib.rs", "")
+            .file("duplicate1/src/lib.rs", "")
             .file(
-                "bbb/Cargo.toml",
+                "duplicate2/Cargo.toml",
                 &format!(
                     r#"
                     [package]
+                    name = "duplicate"
+                    version = "0.5.0"
+                    edition = "2015"
+                    publish = true
+                    "#
+                ),
+            )
+            .file("duplicate2/src/lib.rs", "")
+    });
+
+    let p = project
+        .file(
+            "Cargo.toml",
+            &format!(
+                r#"
+                [package]
+
+                name = "foo"
+                version = "0.5.0"
+                edition = "2015"
+                authors = ["wycats@example.com"]
+
+                [dependencies.duplicate]
+                git = '{}'
+            "#,
+                git_project.url()
+            ),
+        )
+        .file("src/main.rs", "fn main() { }")
+        .build();
+
+    p.cargo("build").run();
+    p.cargo("run")
+        .with_stderr_data(str![[r#"
+[WARNING] skipping duplicate package `duplicate` found at `[ROOT]/home/.cargo/git/checkouts/dep-[HASH]/e916fc8/duplicate2`
+[FINISHED] `dev` profile [unoptimized + debuginfo] target(s) in [ELAPSED]s
+[RUNNING] `target/debug/foo[EXE]`
+
+"#]],
+        )
+        .run();
+}
+
+#[cargo_test]
+fn unused_ambiguous_published_deps() {
+    let project = project();
+    let git_project = git::new("dep", |project| {
+        project
+            .file(
+                "unique/Cargo.toml",
+                &format!(
+                    r#"
+                    [package]
+                    name = "unique"
+                    version = "0.5.0"
+                    edition = "2015"
+                    publish = true
+                    "#
+                ),
+            )
+            .file("unique/src/lib.rs", "")
+            .file(
+                "duplicate1/Cargo.toml",
+                &format!(
+                    r#"
+                    [package]
+                    name = "duplicate"
+                    version = "0.5.0"
+                    edition = "2015"
+                    publish = true
+                    "#
+                ),
+            )
+            .file("duplicate1/src/lib.rs", "")
+            .file(
+                "duplicate2/Cargo.toml",
+                &format!(
+                    r#"
+                    [package]
+                    name = "duplicate"
+                    version = "0.5.0"
+                    edition = "2015"
+                    publish = true
+                    "#
+                ),
+            )
+            .file("duplicate2/src/lib.rs", "")
+            .file(
+                "invalid/Cargo.toml",
+                &format!(
+                    r#"
+                    [package
                     name = "bar"
                     version = "0.5.0"
                     edition = "2015"
@@ -1156,7 +1248,7 @@ fn ambiguous_published_deps() {
                     "#
                 ),
             )
-            .file("bbb/src/lib.rs", "")
+            .file("invalid/src/lib.rs", "")
     });
 
     let p = project
@@ -1171,7 +1263,7 @@ fn ambiguous_published_deps() {
                 edition = "2015"
                 authors = ["wycats@example.com"]
 
-                [dependencies.bar]
+                [dependencies.unique]
                 git = '{}'
             "#,
                 git_project.url()
@@ -1183,7 +1275,14 @@ fn ambiguous_published_deps() {
     p.cargo("build").run();
     p.cargo("run")
         .with_stderr_data(str![[r#"
-[WARNING] skipping duplicate package `bar` found at `[ROOT]/home/.cargo/git/checkouts/dep-[HASH]/[..]`
+[ERROR] invalid table header
+expected `.`, `]`
+ --> ../home/.cargo/git/checkouts/dep-[HASH]/caf7f52/invalid/Cargo.toml:2:29
+  |
+2 |                     [package
+  |                             ^
+  |
+[WARNING] skipping duplicate package `duplicate` found at `[ROOT]/home/.cargo/git/checkouts/dep-[HASH]/caf7f52/duplicate2`
 [FINISHED] `dev` profile [unoptimized + debuginfo] target(s) in [ELAPSED]s
 [RUNNING] `target/debug/foo[EXE]`
 

From 6c0f14c245406e09c1ed7bda8ff770bdd8c3c54e Mon Sep 17 00:00:00 2001
From: Ed Page
Date: Thu, 11 Jul 2024 20:29:39 -0500
Subject: [PATCH 2/5] fix(source): Make duplicate selection predictable

---
 src/cargo/sources/path.rs | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/src/cargo/sources/path.rs b/src/cargo/sources/path.rs
index 43dc794c807..d7e80e214f2 100644
--- a/src/cargo/sources/path.rs
+++ b/src/cargo/sources/path.rs
@@ -882,6 +882,8 @@ fn walk(path: &Path, callback: &mut dyn FnMut(&Path) -> CargoResult<bool>) -> Ca
             return Err(e.context(cx));
         }
     };
+    let mut dirs = dirs.collect::<Vec<_>>();
+    dirs.sort_unstable_by_key(|d| d.as_ref().ok().map(|d| d.file_name()));
     for dir in dirs {
         let dir = dir?;
         if dir.file_type()?.is_dir() {

From 58ee6353371b7411301042e54007e69cd50abc78 Mon Sep 17 00:00:00 2001
From: Ed Page
Date: Wed, 10 Jul 2024 16:18:26 -0500
Subject: [PATCH 3/5] fix(source): Consolidate duplicate package warnings

---
 src/cargo/sources/path.rs | 64 ++++++++++++++++++++++++---------------
 tests/testsuite/git.rs    | 15 ++++++---
 2 files changed, 50 insertions(+), 29 deletions(-)

diff --git a/src/cargo/sources/path.rs b/src/cargo/sources/path.rs
index d7e80e214f2..0740b8c66d9 100644
--- a/src/cargo/sources/path.rs
+++ b/src/cargo/sources/path.rs
@@ -226,7 +226,9 @@ pub struct RecursivePathSource<'gctx> {
     /// Whether this source has loaded all package information it may contain.
     loaded: bool,
     /// Packages that this sources has discovered.
-    packages: HashMap<PackageId, Package>,
+    ///
+    /// Tracking all packages for a given ID to warn on-demand for unused packages
+    packages: HashMap<PackageId, Vec<Package>>,
     gctx: &'gctx GlobalContext,
 }
 
@@ -253,7 +255,7 @@ impl<'gctx> RecursivePathSource<'gctx> {
     /// filesystem if package information haven't yet loaded.
     pub fn read_packages(&mut self) -> CargoResult<Vec<Package>> {
         self.load()?;
-        Ok(self.packages.iter().map(|(_, v)| v.clone()).collect())
+        Ok(self.packages.iter().map(|(_, v)| v[0].clone()).collect())
     }
 
     /// List all files relevant to building this package inside this source.
@@ -290,6 +292,32 @@ impl<'gctx> RecursivePathSource<'gctx> {
     pub fn load(&mut self) -> CargoResult<()> {
         if !self.loaded {
             self.packages = read_packages(&self.path, self.source_id, self.gctx)?;
+            for (pkg_id, pkgs) in self.packages.iter() {
+                if 1 < pkgs.len() {
+                    let ignored = pkgs[1..]
+                        .iter()
+                        // We can assume a package with publish = false isn't intended to be seen
+                        // by users so we can hide the warning about those since the user is unlikely
+                        // to care about those cases.
+                        .filter(|pkg| pkg.publish().is_none())
+                        .collect::<Vec<_>>();
+                    if !ignored.is_empty() {
+                        use std::fmt::Write as _;
+
+                        let plural = if ignored.len() == 1 { "" } else { "s" };
+                        let mut msg = String::new();
+                        let _ =
+                            writeln!(&mut msg, "skipping duplicate package{plural} `{pkg_id}`:");
+                        for ignored in ignored {
+                            let manifest_path = ignored.manifest_path().display();
+                            let _ = writeln!(&mut msg, "  {manifest_path}");
+                        }
+                        let manifest_path = pkgs[0].manifest_path().display();
+                        let _ = writeln!(&mut msg, "in favor of {manifest_path}");
+                        let _ = self.gctx.shell().warn(msg);
+                    }
+                }
+            }
             self.loaded = true;
         }
 
@@ -311,7 +339,12 @@ impl<'gctx> Source for RecursivePathSource<'gctx> {
         f: &mut dyn FnMut(IndexSummary),
     ) -> Poll<CargoResult<()>> {
         self.load()?;
-        for s in self.packages.values().map(|p| p.summary()) {
+        for s in self
+            .packages
+            .values()
+            .map(|pkgs| &pkgs[0])
+            .map(|p| p.summary())
+        {
             let matched = match kind {
                 QueryKind::Exact => dep.matches(s),
                 QueryKind::Alternatives => true,
@@ -340,7 +373,7 @@ impl<'gctx> Source for RecursivePathSource<'gctx> {
         trace!("getting packages; id={}", id);
         self.load()?;
         let pkg = self.packages.get(&id);
-        pkg.cloned()
+        pkg.map(|pkgs| pkgs[0].clone())
             .map(MaybePackage::Ready)
             .ok_or_else(|| internal(format!("failed to find {} in path source", id)))
     }
@@ -758,7 +791,7 @@ fn read_packages(
     path: &Path,
     source_id: SourceId,
     gctx: &GlobalContext,
-) -> CargoResult<Vec<Package>> {
+) -> CargoResult<HashMap<PackageId, Vec<Package>>> {
     let mut all_packages = HashMap::new();
     let mut visited = HashSet::<PathBuf>::new();
     let mut errors = Vec::<anyhow::Error>::new();
@@ -899,7 +932,7 @@ fn has_manifest(path: &Path) -> bool {
 
 fn read_nested_packages(
     path: &Path,
-    all_packages: &mut HashMap<PackageId, Package>,
+    all_packages: &mut HashMap<PackageId, Vec<Package>>,
     source_id: SourceId,
     gctx: &GlobalContext,
     visited: &mut HashSet<PathBuf>,
@@ -938,24 +971,7 @@ fn read_nested_packages(
 
     let pkg = Package::new(manifest, &manifest_path);
     let pkg_id = pkg.package_id();
-    use std::collections::hash_map::Entry;
-    match all_packages.entry(pkg_id) {
-        Entry::Vacant(v) => {
-            v.insert(pkg);
-        }
-        Entry::Occupied(_) => {
-            // We can assume a package with publish = false isn't intended to be seen
-            // by users so we can hide the warning about those since the user is unlikely
-            // to care about those cases.
-            if pkg.publish().is_none() {
-                let _ = gctx.shell().warn(format!(
-                    "skipping duplicate package `{}` found at `{}`",
-                    pkg.name(),
-                    path.display()
-                ));
-            }
-        }
-    }
+    all_packages.entry(pkg_id).or_default().push(pkg);
 
     // Registry sources are not allowed to have `path=` dependencies because
     // they're all translated to actual registry dependencies.
diff --git a/tests/testsuite/git.rs b/tests/testsuite/git.rs
index 3ffe937b4b4..4232704b803 100644
--- a/tests/testsuite/git.rs
+++ b/tests/testsuite/git.rs
@@ -1183,12 +1183,14 @@ fn ambiguous_published_deps() {
     p.cargo("build").run();
     p.cargo("run")
         .with_stderr_data(str![[r#"
-[WARNING] skipping duplicate package `duplicate` found at `[ROOT]/home/.cargo/git/checkouts/dep-[HASH]/e916fc8/duplicate2`
+[WARNING] skipping duplicate package `duplicate v0.5.0 ([ROOTURL]/dep#[..])`:
+  [ROOT]/home/.cargo/git/checkouts/dep-[HASH]/[..]/duplicate2/Cargo.toml
+in favor of [ROOT]/home/.cargo/git/checkouts/dep-[HASH]/[..]/duplicate1/Cargo.toml
+
 [FINISHED] `dev` profile [unoptimized + debuginfo] target(s) in [ELAPSED]s
 [RUNNING] `target/debug/foo[EXE]`
 
-"#]],
-        )
+"#]])
         .run();
 }
 
@@ -1277,12 +1279,15 @@ fn unused_ambiguous_published_deps() {
         .with_stderr_data(str![[r#"
 [ERROR] invalid table header
 expected `.`, `]`
- --> ../home/.cargo/git/checkouts/dep-[HASH]/caf7f52/invalid/Cargo.toml:2:29
+ --> ../home/.cargo/git/checkouts/dep-[HASH]/[..]/invalid/Cargo.toml:2:29
   |
 2 |                     [package
   |                             ^
   |
-[WARNING] skipping duplicate package `duplicate` found at `[ROOT]/home/.cargo/git/checkouts/dep-[HASH]/caf7f52/duplicate2`
+[WARNING] skipping duplicate package `duplicate v0.5.0 ([ROOTURL]/dep#[..])`:
+  [ROOT]/home/.cargo/git/checkouts/dep-[HASH]/[..]/duplicate2/Cargo.toml
+in favor of [ROOT]/home/.cargo/git/checkouts/dep-[HASH]/[..]/duplicate1/Cargo.toml
+
 [FINISHED] `dev` profile [unoptimized + debuginfo] target(s) in [ELAPSED]s
 [RUNNING] `target/debug/foo[EXE]`
 

From 3124b0c33a4b9ac092e9e44db26a3b037606cf17 Mon Sep 17 00:00:00 2001
From: Ed Page
Date: Thu, 11 Jul 2024 15:44:45 -0500
Subject: [PATCH 4/5] refactor(source): Delay the duplicate name check

---
 src/cargo/sources/path.rs | 77 ++++++++++++++++++++++++---------------
 1 file changed, 47 insertions(+), 30 deletions(-)

diff --git a/src/cargo/sources/path.rs b/src/cargo/sources/path.rs
index 0740b8c66d9..0242ef8563c 100644
--- a/src/cargo/sources/path.rs
+++ b/src/cargo/sources/path.rs
@@ -229,6 +229,8 @@ pub struct RecursivePathSource<'gctx> {
     ///
     /// Tracking all packages for a given ID to warn on-demand for unused packages
     packages: HashMap<PackageId, Vec<Package>>,
+    /// Avoid redundant unused package warnings
+    warned_duplicate: HashSet<PackageId>,
     gctx: &'gctx GlobalContext,
 }
 
@@ -247,6 +249,7 @@ impl<'gctx> RecursivePathSource<'gctx> {
             path: root.to_path_buf(),
             loaded: false,
             packages: Default::default(),
+            warned_duplicate: Default::default(),
             gctx,
         }
     }
@@ -255,7 +258,13 @@ impl<'gctx> RecursivePathSource<'gctx> {
     /// filesystem if package information haven't yet loaded.
     pub fn read_packages(&mut self) -> CargoResult<Vec<Package>> {
         self.load()?;
-        Ok(self.packages.iter().map(|(_, v)| v[0].clone()).collect())
+        Ok(self
+            .packages
+            .iter()
+            .map(|(pkg_id, v)| {
+                first_package(*pkg_id, v, &mut self.warned_duplicate, self.gctx).clone()
+            })
+            .collect())
     }
 
     /// List all files relevant to building this package inside this source.
@@ -292,32 +301,6 @@ impl<'gctx> RecursivePathSource<'gctx> {
     pub fn load(&mut self) -> CargoResult<()> {
         if !self.loaded {
             self.packages = read_packages(&self.path, self.source_id, self.gctx)?;
-            for (pkg_id, pkgs) in self.packages.iter() {
-                if 1 < pkgs.len() {
-                    let ignored = pkgs[1..]
-                        .iter()
-                        // We can assume a package with publish = false isn't intended to be seen
-                        // by users so we can hide the warning about those since the user is unlikely
-                        // to care about those cases.
-                        .filter(|pkg| pkg.publish().is_none())
-                        .collect::<Vec<_>>();
-                    if !ignored.is_empty() {
-                        use std::fmt::Write as _;
-
-                        let plural = if ignored.len() == 1 { "" } else { "s" };
-                        let mut msg = String::new();
-                        let _ =
-                            writeln!(&mut msg, "skipping duplicate package{plural} `{pkg_id}`:");
-                        for ignored in ignored {
-                            let manifest_path = ignored.manifest_path().display();
-                            let _ = writeln!(&mut msg, "  {manifest_path}");
-                        }
-                        let manifest_path = pkgs[0].manifest_path().display();
-                        let _ = writeln!(&mut msg, "in favor of {manifest_path}");
-                        let _ = self.gctx.shell().warn(msg);
-                    }
-                }
-            }
             self.loaded = true;
         }
 
@@ -341,8 +324,10 @@ impl<'gctx> Source for RecursivePathSource<'gctx> {
         self.load()?;
         for s in self
             .packages
-            .values()
-            .map(|pkgs| &pkgs[0])
+            .iter()
+            .map(|(pkg_id, pkgs)| {
+                first_package(*pkg_id, pkgs, &mut self.warned_duplicate, self.gctx)
+            })
             .map(|p| p.summary())
         {
             let matched = match kind {
@@ -373,7 +358,7 @@ impl<'gctx> Source for RecursivePathSource<'gctx> {
         trace!("getting packages; id={}", id);
         self.load()?;
         let pkg = self.packages.get(&id);
-        pkg.map(|pkgs| pkgs[0].clone())
+        pkg.map(|pkgs| first_package(id, pkgs, &mut self.warned_duplicate, self.gctx).clone())
             .map(MaybePackage::Ready)
             .ok_or_else(|| internal(format!("failed to find {} in path source", id)))
     }
@@ -417,6 +402,38 @@ impl<'gctx> Source for RecursivePathSource<'gctx> {
     }
 }
 
+fn first_package<'p>(
+    pkg_id: PackageId,
+    pkgs: &'p Vec<Package>,
+    warned_duplicate: &mut HashSet<PackageId>,
+    gctx: &GlobalContext,
+) -> &'p Package {
+    if pkgs.len() != 1 && warned_duplicate.insert(pkg_id) {
+        let ignored = pkgs[1..]
+            .iter()
+            // We can assume a package with publish = false isn't intended to be seen
+            // by users so we can hide the warning about those since the user is unlikely
+            // to care about those cases.
+            .filter(|pkg| pkg.publish().is_none())
+            .collect::<Vec<_>>();
+        if !ignored.is_empty() {
+            use std::fmt::Write as _;
+
+            let plural = if ignored.len() == 1 { "" } else { "s" };
+            let mut msg = String::new();
+            let _ = writeln!(&mut msg, "skipping duplicate package{plural} `{pkg_id}`:");
+            for ignored in ignored {
+                let manifest_path = ignored.manifest_path().display();
+                let _ = writeln!(&mut msg, "  {manifest_path}");
+            }
+            let manifest_path = pkgs[0].manifest_path().display();
+            let _ = writeln!(&mut msg, "in favor of {manifest_path}");
+            let _ = gctx.shell().warn(msg);
+        }
+    }
+    &pkgs[0]
+}
+
 /// List all files relevant to building this package inside this source.
 ///
 /// This function will use the appropriate methods to determine the

From ed56f1ec1446b4687bbee6367ced876bb4be44f7 Mon Sep 17 00:00:00 2001
From: Ed Page
Date: Thu, 11 Jul 2024 16:27:54 -0500
Subject: [PATCH 5/5] fix(source): Don't warn about unreferenced duplicate packages

Fixes #10752
---
 src/cargo/sources/path.rs | 1 +
 tests/testsuite/git.rs    | 4 ----
 2 files changed, 1 insertion(+), 4 deletions(-)

diff --git a/src/cargo/sources/path.rs b/src/cargo/sources/path.rs
index 0242ef8563c..85036c8101f 100644
--- a/src/cargo/sources/path.rs
+++ b/src/cargo/sources/path.rs
@@ -325,6 +325,7 @@ impl<'gctx> Source for RecursivePathSource<'gctx> {
         for s in self
             .packages
             .iter()
+            .filter(|(pkg_id, _)| pkg_id.name() == dep.package_name())
             .map(|(pkg_id, pkgs)| {
                 first_package(*pkg_id, pkgs, &mut self.warned_duplicate, self.gctx)
             })
diff --git a/tests/testsuite/git.rs b/tests/testsuite/git.rs
index 4232704b803..eee52795622 100644
--- a/tests/testsuite/git.rs
+++ b/tests/testsuite/git.rs
@@ -1284,10 +1284,6 @@ expected `.`, `]`
 2 |                     [package
   |                             ^
   |
-[WARNING] skipping duplicate package `duplicate v0.5.0 ([ROOTURL]/dep#[..])`:
-  [ROOT]/home/.cargo/git/checkouts/dep-[HASH]/[..]/duplicate2/Cargo.toml
-in favor of [ROOT]/home/.cargo/git/checkouts/dep-[HASH]/[..]/duplicate1/Cargo.toml
-
 [FINISHED] `dev` profile [unoptimized + debuginfo] target(s) in [ELAPSED]s
 [RUNNING] `target/debug/foo[EXE]`
 