diff -Nru cargo-0.53.0/Cargo.toml cargo-0.54.0/Cargo.toml --- cargo-0.53.0/Cargo.toml 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/Cargo.toml 2021-04-27 14:35:53.000000000 +0000 @@ -1,6 +1,6 @@ [package] name = "cargo" -version = "0.53.0" +version = "0.54.0" edition = "2018" authors = ["Yehuda Katz ", "Carl Lerche ", @@ -22,9 +22,9 @@ atty = "0.2" bytesize = "1.0" cargo-platform = { path = "crates/cargo-platform", version = "0.1.1" } +cargo-util = { path = "crates/cargo-util", version = "0.1.0" } crates-io = { path = "crates/crates-io", version = "0.33.0" } crossbeam-utils = "0.8" -crypto-hash = "0.3.1" curl = { version = "0.4.23", features = ["http2"] } curl-sys = "0.4.22" env_logger = "0.8.1" @@ -50,9 +50,8 @@ opener = "0.4" percent-encoding = "2.0" rustfix = "0.5.0" -same-file = "1" semver = { version = "0.10", features = ["serde"] } -serde = { version = "1.0.82", features = ["derive"] } +serde = { version = "1.0.123", features = ["derive"] } serde_ignored = "0.1.0" serde_json = { version = "1.0.30", features = ["raw_value"] } shell-escape = "0.1.4" @@ -75,11 +74,7 @@ rustc-workspace-hack = "1.0.0" rand = "0.8.3" -[target.'cfg(target_os = "macos")'.dependencies] -core-foundation = { version = "0.9.0", features = ["mac_os_10_7_support"] } - [target.'cfg(windows)'.dependencies] -miow = "0.3.6" fwdansi = "1.1.0" [target.'cfg(windows)'.dependencies.winapi] diff -Nru cargo-0.53.0/CHANGELOG.md cargo-0.54.0/CHANGELOG.md --- cargo-0.53.0/CHANGELOG.md 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/CHANGELOG.md 2021-04-27 14:35:53.000000000 +0000 @@ -1,21 +1,118 @@ # Changelog -## Cargo 1.52 (2021-05-06) -[34170fcd...HEAD](https://github.com/rust-lang/cargo/compare/34170fcd...HEAD) +## Cargo 1.53 (2021-06-17) +[90691f2b...HEAD](https://github.com/rust-lang/cargo/compare/90691f2b...HEAD) ### Added ### Changed + - 🔥 Cargo now supports git repositories where the default `HEAD` branch is not "master". 
This also includes a switch to the version 3 `Cargo.lock` format which can handle default branches correctly. [#9133](https://github.com/rust-lang/cargo/pull/9133) +- 🔥 macOS targets now default to `unpacked` debuginfo. + [#9298](https://github.com/rust-lang/cargo/pull/9298) +- ❗ The `authors` field is no longer included in `Cargo.toml` for new + projects. + [#9282](https://github.com/rust-lang/cargo/pull/9282) +- `cargo update` may now work with the `--offline` flag. + [#9279](https://github.com/rust-lang/cargo/pull/9279) +- `cargo doc` will now erase the `doc` directory when switching between + different toolchain versions. There are shared, unversioned files (such as + the search index) that can become broken when using different versions. + [#8640](https://github.com/rust-lang/cargo/pull/8640) + +### Fixed + +### Nightly only +- Fixed config includes not working. + [#9299](https://github.com/rust-lang/cargo/pull/9299) +- Emit note when `--future-incompat-report` had nothing to report. + [#9263](https://github.com/rust-lang/cargo/pull/9263) +- Error messages for nightly features flags (like `-Z` and `cargo-features`) + should now provide more information. + [#9290](https://github.com/rust-lang/cargo/pull/9290) + +## Cargo 1.52 (2021-05-06) +[34170fcd...rust-1.52.0](https://github.com/rust-lang/cargo/compare/34170fcd...rust-1.52.0) + +### Added +- Added the `"manifest_path"` field to JSON messages for a package. + [#9022](https://github.com/rust-lang/cargo/pull/9022) + [#9247](https://github.com/rust-lang/cargo/pull/9247) + +### Changed +- Build scripts are now forbidden from setting `RUSTC_BOOTSTRAP` on stable. + [#9181](https://github.com/rust-lang/cargo/pull/9181) + [#9385](https://github.com/rust-lang/cargo/pull/9385) +- crates.io now supports SPDX 3.11 licenses. + [#9209](https://github.com/rust-lang/cargo/pull/9209) +- An error is now reported if `CARGO_TARGET_DIR` is an empty string. 
+ [#8939](https://github.com/rust-lang/cargo/pull/8939) +- Doc tests now pass the `--message-format` flag into the test so that the + "short" format can now be used for doc tests. + [#9128](https://github.com/rust-lang/cargo/pull/9128) +- `cargo test` now prints a clearer indicator of which target is currently running. + [#9195](https://github.com/rust-lang/cargo/pull/9195) +- The `CARGO_TARGET_` environment variable will now issue a warning if + it is using lowercase letters. + [#9169](https://github.com/rust-lang/cargo/pull/9169) ### Fixed +- Fixed publication of packages with metadata and resolver fields in `Cargo.toml`. + [#9300](https://github.com/rust-lang/cargo/pull/9300) + [#9304](https://github.com/rust-lang/cargo/pull/9304) +- Fixed logic for determining prefer-dynamic for a dylib which differed in a + workspace vs a single package. + [#9252](https://github.com/rust-lang/cargo/pull/9252) +- Fixed an issue where exclusive target-specific dependencies that overlapped + across dependency kinds (like regular and build-dependencies) would + incorrectly include the dependencies in both. + [#9255](https://github.com/rust-lang/cargo/pull/9255) +- Fixed panic with certain styles of Package IDs when passed to the `-p` flag. + [#9188](https://github.com/rust-lang/cargo/pull/9188) +- When running cargo with output not going to a TTY, and with the progress bar + and color force-enabled, the output will now correctly clear the progress + line. + [#9231](https://github.com/rust-lang/cargo/pull/9231) +- Error instead of panic when JSON may contain non-utf8 paths. + [#9226](https://github.com/rust-lang/cargo/pull/9226) +- Fixed a hang that can happen on broken stderr. + [#9201](https://github.com/rust-lang/cargo/pull/9201) +- Fixed thin-local LTO not being disabled correctly when `lto=off` is set. + [#9182](https://github.com/rust-lang/cargo/pull/9182) ### Nightly only - The `strip` profile option now supports `true` and `false` values. 
[#9153](https://github.com/rust-lang/cargo/pull/9153) +- `cargo fix --edition` now displays a report when switching to 2021 if the + new resolver changes features. + [#9268](https://github.com/rust-lang/cargo/pull/9268) +- Added `[patch]` table support in `.cargo/config` files. + [#9204](https://github.com/rust-lang/cargo/pull/9204) +- Added `cargo describe-future-incompatibilities` for generating a report on + dependencies that contain future-incompatible warnings. + [#8825](https://github.com/rust-lang/cargo/pull/8825) +- Added easier support for testing the 2021 edition. + [#9184](https://github.com/rust-lang/cargo/pull/9184) +- Switch the default resolver to "2" in the 2021 edition. + [#9184](https://github.com/rust-lang/cargo/pull/9184) +- `cargo fix --edition` now supports 2021. + [#9184](https://github.com/rust-lang/cargo/pull/9184) +- Added `--print` flag to `cargo rustc` to pass along to `rustc` to display + information from rustc. + [#9002](https://github.com/rust-lang/cargo/pull/9002) +- Added `-Zdoctest-in-workspace` for changing the directory where doctests are + *run* versus where they are *compiled*. + [#9105](https://github.com/rust-lang/cargo/pull/9105) +- Added support for an `[env]` section in `.cargo/config.toml` to set + environment variables when running cargo. + [#9175](https://github.com/rust-lang/cargo/pull/9175) +- Added a schema field and `features2` field to the index. + [#9161](https://github.com/rust-lang/cargo/pull/9161) +- Changes to JSON spec targets will now trigger a rebuild. + [#9223](https://github.com/rust-lang/cargo/pull/9223) ## Cargo 1.51 (2021-03-25) [75d5d8cf...rust-1.51.0](https://github.com/rust-lang/cargo/compare/75d5d8cf...rust-1.51.0) @@ -72,6 +169,19 @@ [#9059](https://github.com/rust-lang/cargo/pull/9059) - Fixed to use `http.proxy` setting in `~/.gitconfig`. [#8986](https://github.com/rust-lang/cargo/pull/8986) +- Fixed --feature pkg/feat for V1 resolver for non-member. 
+ [#9275](https://github.com/rust-lang/cargo/pull/9275) + [#9277](https://github.com/rust-lang/cargo/pull/9277) +- Fixed panic in `cargo doc` when there are colliding output filenames in a workspace. + [#9276](https://github.com/rust-lang/cargo/pull/9276) + [#9277](https://github.com/rust-lang/cargo/pull/9277) +- Fixed `cargo install` from exiting with success if one of several packages + did not install successfully. + [#9185](https://github.com/rust-lang/cargo/pull/9185) + [#9196](https://github.com/rust-lang/cargo/pull/9196) +- Fix panic with doc collision orphan. + [#9142](https://github.com/rust-lang/cargo/pull/9142) + [#9196](https://github.com/rust-lang/cargo/pull/9196) ### Nightly only - Removed the `publish-lockfile` unstable feature, it was stabilized without @@ -85,6 +195,9 @@ [#8922](https://github.com/rust-lang/cargo/pull/8922) - Added support for the `rust-version` field in project metadata. [#8037](https://github.com/rust-lang/cargo/pull/8037) +- Added a schema field to the index. 
+ [#9161](https://github.com/rust-lang/cargo/pull/9161) + [#9196](https://github.com/rust-lang/cargo/pull/9196) ## Cargo 1.50 (2021-02-11) [8662ab42...rust-1.50.0](https://github.com/rust-lang/cargo/compare/8662ab42...rust-1.50.0) diff -Nru cargo-0.53.0/crates/cargo-test-support/build.rs cargo-0.54.0/crates/cargo-test-support/build.rs --- cargo-0.53.0/crates/cargo-test-support/build.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.54.0/crates/cargo-test-support/build.rs 2021-04-27 14:35:53.000000000 +0000 @@ -0,0 +1,6 @@ +fn main() { + println!( + "cargo:rustc-env=NATIVE_ARCH={}", + std::env::var("TARGET").unwrap() + ); +} diff -Nru cargo-0.53.0/crates/cargo-test-support/Cargo.toml cargo-0.54.0/crates/cargo-test-support/Cargo.toml --- cargo-0.53.0/crates/cargo-test-support/Cargo.toml 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/crates/cargo-test-support/Cargo.toml 2021-04-27 14:35:53.000000000 +0000 @@ -9,8 +9,9 @@ doctest = false [dependencies] -cargo = { path = "../.." } +anyhow = "1.0.34" cargo-test-macro = { path = "../cargo-test-macro" } +cargo-util = { path = "../cargo-util" } filetime = "0.2" flate2 = { version = "1.0", default-features = false, features = ["zlib"] } git2 = "0.13.16" diff -Nru cargo-0.53.0/crates/cargo-test-support/src/cross_compile.rs cargo-0.54.0/crates/cargo-test-support/src/cross_compile.rs --- cargo-0.53.0/crates/cargo-test-support/src/cross_compile.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/crates/cargo-test-support/src/cross_compile.rs 2021-04-27 14:35:53.000000000 +0000 @@ -10,8 +10,7 @@ //! These tests are all disabled on rust-lang/rust's CI, but run in Cargo's CI. 
use crate::{basic_manifest, main_file, project}; -use cargo::util::ProcessError; -use cargo::CargoResult; +use cargo_util::ProcessError; use std::env; use std::fmt::Write; use std::process::{Command, Output}; @@ -41,7 +40,7 @@ let cross_target = alternate(); - let run_cross_test = || -> CargoResult { + let run_cross_test = || -> anyhow::Result { let p = project() .at("cross_test") .file("Cargo.toml", &basic_manifest("cross_test", "1.0.0")) @@ -180,6 +179,23 @@ panic!("{}", message); } +/// The arch triple of the test-running host. +pub fn native() -> &'static str { + env!("NATIVE_ARCH") +} + +pub fn native_arch() -> &'static str { + match native() + .split("-") + .next() + .expect("Target triple has unexpected format") + { + "x86_64" => "x86_64", + "i686" => "x86", + _ => panic!("This test should be gated on cross_compile::disabled."), + } +} + /// The alternate target-triple to build with. /// /// Only use this function on tests that check `cross_compile::disabled`. @@ -205,6 +221,15 @@ } } +/// A target-triple that is neither the host nor the target. +/// +/// Rustc may not work with it and it's alright, apart from being a +/// valid target triple it is supposed to be used only as a +/// placeholder for targets that should not be considered. +pub fn unused() -> &'static str { + "wasm32-unknown-unknown" +} + /// Whether or not the host can run cross-compiled executables. pub fn can_run_on_host() -> bool { if disabled() { diff -Nru cargo-0.53.0/crates/cargo-test-support/src/lib.rs cargo-0.54.0/crates/cargo-test-support/src/lib.rs --- cargo-0.53.0/crates/cargo-test-support/src/lib.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/crates/cargo-test-support/src/lib.rs 2021-04-27 14:35:53.000000000 +0000 @@ -2,8 +2,9 @@ //! //! See https://rust-lang.github.io/cargo/contrib/ for a guide on writing tests. 
-#![allow(clippy::needless_doctest_main)] // according to @ehuss this lint is fussy -#![allow(clippy::inefficient_to_string)] // this causes suggestions that result in `(*s).to_string()` +#![allow(clippy::all)] +#![warn(clippy::needless_borrow)] +#![warn(clippy::redundant_clone)] use std::env; use std::ffi::OsStr; @@ -15,7 +16,7 @@ use std::str; use std::time::{self, Duration}; -use cargo::util::{is_ci, CargoResult, ProcessBuilder, ProcessError, Rustc}; +use cargo_util::{is_ci, ProcessBuilder, ProcessError}; use serde_json::{self, Value}; use url::Url; @@ -701,7 +702,7 @@ self } - pub fn exec_with_output(&mut self) -> CargoResult { + pub fn exec_with_output(&mut self) -> anyhow::Result { self.ran = true; // TODO avoid unwrap let p = (&self.process_builder).clone().unwrap(); @@ -831,8 +832,8 @@ Some(_) => Err(format!( "exited with {:?}\n--- stdout\n{}\n--- stderr\n{}", code, - String::from_utf8_lossy(&stdout), - String::from_utf8_lossy(&stderr) + String::from_utf8_lossy(stdout), + String::from_utf8_lossy(stderr) )), } } @@ -1144,8 +1145,6 @@ } fn match_json(&self, expected: &str, line: &str) -> MatchResult { - let expected = self.normalize_matcher(expected); - let line = self.normalize_matcher(line); let actual = match line.parse() { Err(e) => return Err(format!("invalid json, {}:\n`{}`", e, line)), Ok(actual) => actual, @@ -1155,7 +1154,8 @@ Ok(expected) => expected, }; - find_json_mismatch(&expected, &actual) + let cwd = self.process_builder.as_ref().and_then(|p| p.get_cwd()); + find_json_mismatch(&expected, &actual, cwd) } fn diff_lines<'a>( @@ -1333,8 +1333,12 @@ /// as paths). You can use a `"{...}"` string literal as a wildcard for /// arbitrary nested JSON (useful for parts of object emitted by other programs /// (e.g., rustc) rather than Cargo itself). 
-pub fn find_json_mismatch(expected: &Value, actual: &Value) -> Result<(), String> { - match find_json_mismatch_r(expected, actual) { +pub fn find_json_mismatch( + expected: &Value, + actual: &Value, + cwd: Option<&Path>, +) -> Result<(), String> { + match find_json_mismatch_r(expected, actual, cwd) { Some((expected_part, actual_part)) => Err(format!( "JSON mismatch\nExpected:\n{}\nWas:\n{}\nExpected part:\n{}\nActual part:\n{}\n", serde_json::to_string_pretty(expected).unwrap(), @@ -1349,12 +1353,21 @@ fn find_json_mismatch_r<'a>( expected: &'a Value, actual: &'a Value, + cwd: Option<&Path>, ) -> Option<(&'a Value, &'a Value)> { use serde_json::Value::*; match (expected, actual) { (&Number(ref l), &Number(ref r)) if l == r => None, (&Bool(l), &Bool(r)) if l == r => None, - (&String(ref l), &String(ref r)) if lines_match(l, r) => None, + (&String(ref l), _) if l == "{...}" => None, + (&String(ref l), &String(ref r)) => { + let normalized = normalize_matcher(r, cwd); + if lines_match(l, &normalized) { + None + } else { + Some((expected, actual)) + } + } (&Array(ref l), &Array(ref r)) => { if l.len() != r.len() { return Some((expected, actual)); @@ -1362,7 +1375,7 @@ l.iter() .zip(r.iter()) - .filter_map(|(l, r)| find_json_mismatch_r(l, r)) + .filter_map(|(l, r)| find_json_mismatch_r(l, r, cwd)) .next() } (&Object(ref l), &Object(ref r)) => { @@ -1373,12 +1386,11 @@ l.values() .zip(r.values()) - .filter_map(|(l, r)| find_json_mismatch_r(l, r)) + .filter_map(|(l, r)| find_json_mismatch_r(l, r, cwd)) .next() } (&Null, &Null) => None, // Magic string literal `"{...}"` acts as wildcard for any sub-JSON. 
- (&String(ref l), _) if l == "{...}" => None, _ => Some((expected, actual)), } } @@ -1548,33 +1560,52 @@ pub mod install; -thread_local!( -pub static RUSTC: Rustc = Rustc::new( - PathBuf::from("rustc"), - None, - None, - Path::new("should be path to rustup rustc, but we don't care in tests"), - None, -).unwrap() -); +struct RustcInfo { + verbose_version: String, + host: String, +} + +impl RustcInfo { + fn new() -> RustcInfo { + let output = ProcessBuilder::new("rustc") + .arg("-vV") + .exec_with_output() + .expect("rustc should exec"); + let verbose_version = String::from_utf8(output.stdout).expect("utf8 output"); + let host = verbose_version + .lines() + .filter_map(|line| line.strip_prefix("host: ")) + .next() + .expect("verbose version has host: field") + .to_string(); + RustcInfo { + verbose_version, + host, + } + } +} + +lazy_static::lazy_static! { + static ref RUSTC_INFO: RustcInfo = RustcInfo::new(); +} /// The rustc host such as `x86_64-unknown-linux-gnu`. -pub fn rustc_host() -> String { - RUSTC.with(|r| r.host.to_string()) +pub fn rustc_host() -> &'static str { + &RUSTC_INFO.host } pub fn is_nightly() -> bool { + let vv = &RUSTC_INFO.verbose_version; env::var("CARGO_TEST_DISABLE_NIGHTLY").is_err() - && RUSTC - .with(|r| r.verbose_version.contains("-nightly") || r.verbose_version.contains("-dev")) + && (vv.contains("-nightly") || vv.contains("-dev")) } -pub fn process>(t: T) -> cargo::util::ProcessBuilder { +pub fn process>(t: T) -> ProcessBuilder { _process(t.as_ref()) } -fn _process(t: &OsStr) -> cargo::util::ProcessBuilder { - let mut p = cargo::util::process(t); +fn _process(t: &OsStr) -> ProcessBuilder { + let mut p = ProcessBuilder::new(t); // In general just clear out all cargo-specific configuration already in the // environment. Our tests all assume a "default configuration" unless @@ -1595,14 +1626,6 @@ p.env("PATH", new_path); } - if cfg!(target_os = "macos") { - // This makes the test suite run substantially faster. 
- p.env("CARGO_PROFILE_DEV_SPLIT_DEBUGINFO", "unpacked") - .env("CARGO_PROFILE_TEST_SPLIT_DEBUGINFO", "unpacked") - .env("CARGO_PROFILE_RELEASE_SPLIT_DEBUGINFO", "unpacked") - .env("CARGO_PROFILE_BENCH_SPLIT_DEBUGINFO", "unpacked"); - } - p.cwd(&paths::root()) .env("HOME", paths::home()) .env("CARGO_HOME", paths::home().join(".cargo")) @@ -1643,7 +1666,7 @@ fn masquerade_as_nightly_cargo(&mut self) -> &mut Self; } -impl ChannelChanger for cargo::util::ProcessBuilder { +impl ChannelChanger for ProcessBuilder { fn masquerade_as_nightly_cargo(&mut self) -> &mut Self { self.env("__CARGO_TEST_CHANNEL_OVERRIDE_DO_NOT_USE_THIS", "nightly") } diff -Nru cargo-0.53.0/crates/cargo-test-support/src/publish.rs cargo-0.54.0/crates/cargo-test-support/src/publish.rs --- cargo-0.53.0/crates/cargo-test-support/src/publish.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/crates/cargo-test-support/src/publish.rs 2021-04-27 14:35:53.000000000 +0000 @@ -76,7 +76,7 @@ let actual_json = serde_json::from_slice(&json_bytes).expect("uploaded JSON should be valid"); let expected_json = serde_json::from_str(expected_json).expect("expected JSON does not parse"); - if let Err(e) = find_json_mismatch(&expected_json, &actual_json) { + if let Err(e) = find_json_mismatch(&expected_json, &actual_json, None) { panic!("{}", e); } diff -Nru cargo-0.53.0/crates/cargo-test-support/src/registry.rs cargo-0.54.0/crates/cargo-test-support/src/registry.rs --- cargo-0.53.0/crates/cargo-test-support/src/registry.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/crates/cargo-test-support/src/registry.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,7 +1,6 @@ use crate::git::repo; use crate::paths; -use cargo::sources::CRATES_IO_INDEX; -use cargo::util::Sha256; +use cargo_util::Sha256; use flate2::write::GzEncoder; use flate2::Compression; use std::collections::BTreeMap; @@ -193,7 +192,7 @@ alt_dl_url(), self.alt_api_url .as_ref() - .map_or_else(alt_api_url, |url| Url::parse(&url).expect("valid url")), + 
.map_or_else(alt_api_url, |url| Url::parse(url).expect("valid url")), alt_api_path(), ); } @@ -560,7 +559,7 @@ /// Sets the index schema version for this package. /// - /// See [`cargo::sources::registry::RegistryPackage`] for more information. + /// See `cargo::sources::registry::RegistryPackage` for more information. pub fn schema_version(&mut self, version: u32) -> &mut Package { self.v = Some(version); self @@ -585,7 +584,9 @@ let registry_url = match (self.alternative, dep.registry.as_deref()) { (false, None) => None, (false, Some("alternative")) => Some(alt_registry_url().to_string()), - (true, None) => Some(CRATES_IO_INDEX.to_string()), + (true, None) => { + Some("https://github.com/rust-lang/crates.io-index".to_string()) + } (true, Some("alternative")) => None, _ => panic!("registry_dep currently only supports `alternative`"), }; diff -Nru cargo-0.53.0/crates/cargo-util/Cargo.toml cargo-0.54.0/crates/cargo-util/Cargo.toml --- cargo-0.53.0/crates/cargo-util/Cargo.toml 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.54.0/crates/cargo-util/Cargo.toml 2021-04-27 14:35:53.000000000 +0000 @@ -0,0 +1,29 @@ +[package] +name = "cargo-util" +version = "0.1.0" +authors = ["The Cargo Project Developers"] +edition = "2018" +license = "MIT OR Apache-2.0" +homepage = "https://github.com/rust-lang/cargo" +repository = "https://github.com/rust-lang/cargo" +description = "Miscellaneous support code used by Cargo." 
+ +[dependencies] +anyhow = "1.0.34" +crypto-hash = "0.3.1" +filetime = "0.2.9" +hex = "0.4.2" +jobserver = "0.1.21" +libc = "0.2.88" +log = "0.4.6" +same-file = "1.0.6" +shell-escape = "0.1.4" +tempfile = "3.1.0" +walkdir = "2.3.1" + +[target.'cfg(target_os = "macos")'.dependencies] +core-foundation = { version = "0.9.0", features = ["mac_os_10_7_support"] } + +[target.'cfg(windows)'.dependencies] +miow = "0.3.6" +winapi = { version = "0.3.9", features = ["consoleapi", "minwindef"] } diff -Nru cargo-0.53.0/crates/cargo-util/LICENSE-APACHE cargo-0.54.0/crates/cargo-util/LICENSE-APACHE --- cargo-0.53.0/crates/cargo-util/LICENSE-APACHE 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.54.0/crates/cargo-util/LICENSE-APACHE 2021-04-27 14:35:53.000000000 +0000 @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + https://www.apache.org/licenses/LICENSE-2.0 + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + +4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + +6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + +END OF TERMS AND CONDITIONS + +APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ +Copyright [yyyy] [name of copyright owner] + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + https://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff -Nru cargo-0.53.0/crates/cargo-util/LICENSE-MIT cargo-0.54.0/crates/cargo-util/LICENSE-MIT --- cargo-0.53.0/crates/cargo-util/LICENSE-MIT 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.54.0/crates/cargo-util/LICENSE-MIT 2021-04-27 14:35:53.000000000 +0000 @@ -0,0 +1,23 @@ +Permission is hereby granted, free of charge, to any +person obtaining a copy of this software and associated +documentation files (the "Software"), to deal in the +Software without restriction, including without +limitation the rights to use, copy, modify, merge, +publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software +is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice +shall be included in all copies or substantial portions +of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF +ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED +TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A +PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT +SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR +IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. 
diff -Nru cargo-0.53.0/crates/cargo-util/src/lib.rs cargo-0.54.0/crates/cargo-util/src/lib.rs --- cargo-0.53.0/crates/cargo-util/src/lib.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.54.0/crates/cargo-util/src/lib.rs 2021-04-27 14:35:53.000000000 +0000 @@ -0,0 +1,17 @@ +//! Miscellaneous support code used by Cargo. + +pub use self::read2::read2; +pub use process_builder::ProcessBuilder; +pub use process_error::{exit_status_to_string, is_simple_exit_code, ProcessError}; +pub use sha256::Sha256; + +pub mod paths; +mod process_builder; +mod process_error; +mod read2; +mod sha256; + +/// Whether or not this running in a Continuous Integration environment. +pub fn is_ci() -> bool { + std::env::var("CI").is_ok() || std::env::var("TF_BUILD").is_ok() +} diff -Nru cargo-0.53.0/crates/cargo-util/src/paths.rs cargo-0.54.0/crates/cargo-util/src/paths.rs --- cargo-0.53.0/crates/cargo-util/src/paths.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.54.0/crates/cargo-util/src/paths.rs 2021-04-27 14:35:53.000000000 +0000 @@ -0,0 +1,698 @@ +//! Various utilities for working with files and paths. + +use anyhow::{Context, Result}; +use filetime::FileTime; +use std::env; +use std::ffi::{OsStr, OsString}; +use std::fs::{self, File, OpenOptions}; +use std::io; +use std::io::prelude::*; +use std::iter; +use std::path::{Component, Path, PathBuf}; +use tempfile::Builder as TempFileBuilder; + +/// Joins paths into a string suitable for the `PATH` environment variable. +/// +/// This is equivalent to [`std::env::join_paths`], but includes a more +/// detailed error message. The given `env` argument is the name of the +/// environment variable this is will be used for, which is included in the +/// error message. 
+pub fn join_paths>(paths: &[T], env: &str) -> Result { + env::join_paths(paths.iter()) + .with_context(|| { + let paths = paths.iter().map(Path::new).collect::>(); + format!("failed to join path array: {:?}", paths) + }) + .with_context(|| { + format!( + "failed to join search paths together\n\ + Does ${} have an unterminated quote character?", + env + ) + }) +} + +/// Returns the name of the environment variable used for searching for +/// dynamic libraries. +pub fn dylib_path_envvar() -> &'static str { + if cfg!(windows) { + "PATH" + } else if cfg!(target_os = "macos") { + // When loading and linking a dynamic library or bundle, dlopen + // searches in LD_LIBRARY_PATH, DYLD_LIBRARY_PATH, PWD, and + // DYLD_FALLBACK_LIBRARY_PATH. + // In the Mach-O format, a dynamic library has an "install path." + // Clients linking against the library record this path, and the + // dynamic linker, dyld, uses it to locate the library. + // dyld searches DYLD_LIBRARY_PATH *before* the install path. + // dyld searches DYLD_FALLBACK_LIBRARY_PATH only if it cannot + // find the library in the install path. + // Setting DYLD_LIBRARY_PATH can easily have unintended + // consequences. + // + // Also, DYLD_LIBRARY_PATH appears to have significant performance + // penalty starting in 10.13. Cargo's testsuite ran more than twice as + // slow with it on CI. + "DYLD_FALLBACK_LIBRARY_PATH" + } else { + "LD_LIBRARY_PATH" + } +} + +/// Returns a list of directories that are searched for dynamic libraries. +/// +/// Note that some operating systems will have defaults if this is empty that +/// will need to be dealt with. +pub fn dylib_path() -> Vec { + match env::var_os(dylib_path_envvar()) { + Some(var) => env::split_paths(&var).collect(), + None => Vec::new(), + } +} + +/// Normalize a path, removing things like `.` and `..`. +/// +/// CAUTION: This does not resolve symlinks (unlike +/// [`std::fs::canonicalize`]). This may cause incorrect or surprising +/// behavior at times. 
This should be used carefully. Unfortunately, +/// [`std::fs::canonicalize`] can be hard to use correctly, since it can often +/// fail, or on Windows returns annoying device paths. This is a problem Cargo +/// needs to improve on. +pub fn normalize_path(path: &Path) -> PathBuf { + let mut components = path.components().peekable(); + let mut ret = if let Some(c @ Component::Prefix(..)) = components.peek().cloned() { + components.next(); + PathBuf::from(c.as_os_str()) + } else { + PathBuf::new() + }; + + for component in components { + match component { + Component::Prefix(..) => unreachable!(), + Component::RootDir => { + ret.push(component.as_os_str()); + } + Component::CurDir => {} + Component::ParentDir => { + ret.pop(); + } + Component::Normal(c) => { + ret.push(c); + } + } + } + ret +} + +/// Returns the absolute path of where the given executable is located based +/// on searching the `PATH` environment variable. +/// +/// Returns an error if it cannot be found. +pub fn resolve_executable(exec: &Path) -> Result { + if exec.components().count() == 1 { + let paths = env::var_os("PATH").ok_or_else(|| anyhow::format_err!("no PATH"))?; + let candidates = env::split_paths(&paths).flat_map(|path| { + let candidate = path.join(&exec); + let with_exe = if env::consts::EXE_EXTENSION.is_empty() { + None + } else { + Some(candidate.with_extension(env::consts::EXE_EXTENSION)) + }; + iter::once(candidate).chain(with_exe) + }); + for candidate in candidates { + if candidate.is_file() { + // PATH may have a component like "." in it, so we still need to + // canonicalize. + return Ok(candidate.canonicalize()?); + } + } + + anyhow::bail!("no executable for `{}` found in PATH", exec.display()) + } else { + Ok(exec.canonicalize()?) + } +} + +/// Reads a file to a string. +/// +/// Equivalent to [`std::fs::read_to_string`] with better error messages. +pub fn read(path: &Path) -> Result { + match String::from_utf8(read_bytes(path)?) 
{ + Ok(s) => Ok(s), + Err(_) => anyhow::bail!("path at `{}` was not valid utf-8", path.display()), + } +} + +/// Reads a file into a bytes vector. +/// +/// Equivalent to [`std::fs::read`] with better error messages. +pub fn read_bytes(path: &Path) -> Result> { + fs::read(path).with_context(|| format!("failed to read `{}`", path.display())) +} + +/// Writes a file to disk. +/// +/// Equivalent to [`std::fs::write`] with better error messages. +pub fn write, C: AsRef<[u8]>>(path: P, contents: C) -> Result<()> { + let path = path.as_ref(); + fs::write(path, contents.as_ref()) + .with_context(|| format!("failed to write `{}`", path.display())) +} + +/// Equivalent to [`write`], but does not write anything if the file contents +/// are identical to the given contents. +pub fn write_if_changed, C: AsRef<[u8]>>(path: P, contents: C) -> Result<()> { + (|| -> Result<()> { + let contents = contents.as_ref(); + let mut f = OpenOptions::new() + .read(true) + .write(true) + .create(true) + .open(&path)?; + let mut orig = Vec::new(); + f.read_to_end(&mut orig)?; + if orig != contents { + f.set_len(0)?; + f.seek(io::SeekFrom::Start(0))?; + f.write_all(contents)?; + } + Ok(()) + })() + .with_context(|| format!("failed to write `{}`", path.as_ref().display()))?; + Ok(()) +} + +/// Equivalent to [`write`], but appends to the end instead of replacing the +/// contents. +pub fn append(path: &Path, contents: &[u8]) -> Result<()> { + (|| -> Result<()> { + let mut f = OpenOptions::new() + .write(true) + .append(true) + .create(true) + .open(path)?; + + f.write_all(contents)?; + Ok(()) + })() + .with_context(|| format!("failed to write `{}`", path.display()))?; + Ok(()) +} + +/// Creates a new file. +pub fn create>(path: P) -> Result { + let path = path.as_ref(); + File::create(path).with_context(|| format!("failed to create file `{}`", path.display())) +} + +/// Opens an existing file. 
+pub fn open>(path: P) -> Result { + let path = path.as_ref(); + File::open(path).with_context(|| format!("failed to open file `{}`", path.display())) +} + +/// Returns the last modification time of a file. +pub fn mtime(path: &Path) -> Result { + let meta = + fs::metadata(path).with_context(|| format!("failed to stat `{}`", path.display()))?; + Ok(FileTime::from_last_modification_time(&meta)) +} + +/// Returns the maximum mtime of the given path, recursing into +/// subdirectories, and following symlinks. +pub fn mtime_recursive(path: &Path) -> Result { + let meta = + fs::metadata(path).with_context(|| format!("failed to stat `{}`", path.display()))?; + if !meta.is_dir() { + return Ok(FileTime::from_last_modification_time(&meta)); + } + let max_meta = walkdir::WalkDir::new(path) + .follow_links(true) + .into_iter() + .filter_map(|e| match e { + Ok(e) => Some(e), + Err(e) => { + // Ignore errors while walking. If Cargo can't access it, the + // build script probably can't access it, either. + log::debug!("failed to determine mtime while walking directory: {}", e); + None + } + }) + .filter_map(|e| { + if e.path_is_symlink() { + // Use the mtime of both the symlink and its target, to + // handle the case where the symlink is modified to a + // different target. + let sym_meta = match std::fs::symlink_metadata(e.path()) { + Ok(m) => m, + Err(err) => { + // I'm not sure when this is really possible (maybe a + // race with unlinking?). Regardless, if Cargo can't + // read it, the build script probably can't either. + log::debug!( + "failed to determine mtime while fetching symlink metdata of {}: {}", + e.path().display(), + err + ); + return None; + } + }; + let sym_mtime = FileTime::from_last_modification_time(&sym_meta); + // Walkdir follows symlinks. + match e.metadata() { + Ok(target_meta) => { + let target_mtime = FileTime::from_last_modification_time(&target_meta); + Some(sym_mtime.max(target_mtime)) + } + Err(err) => { + // Can't access the symlink target. 
If Cargo can't + // access it, the build script probably can't access + // it either. + log::debug!( + "failed to determine mtime of symlink target for {}: {}", + e.path().display(), + err + ); + Some(sym_mtime) + } + } + } else { + let meta = match e.metadata() { + Ok(m) => m, + Err(err) => { + // I'm not sure when this is really possible (maybe a + // race with unlinking?). Regardless, if Cargo can't + // read it, the build script probably can't either. + log::debug!( + "failed to determine mtime while fetching metadata of {}: {}", + e.path().display(), + err + ); + return None; + } + }; + Some(FileTime::from_last_modification_time(&meta)) + } + }) + .max() + // or_else handles the case where there are no files in the directory. + .unwrap_or_else(|| FileTime::from_last_modification_time(&meta)); + Ok(max_meta) +} + +/// Record the current time on the filesystem (using the filesystem's clock) +/// using a file at the given directory. Returns the current time. +pub fn set_invocation_time(path: &Path) -> Result { + // note that if `FileTime::from_system_time(SystemTime::now());` is determined to be sufficient, + // then this can be removed. + let timestamp = path.join("invoked.timestamp"); + write( + ×tamp, + "This file has an mtime of when this was started.", + )?; + let ft = mtime(×tamp)?; + log::debug!("invocation time for {:?} is {}", path, ft); + Ok(ft) +} + +/// Converts a path to UTF-8 bytes. +pub fn path2bytes(path: &Path) -> Result<&[u8]> { + #[cfg(unix)] + { + use std::os::unix::prelude::*; + Ok(path.as_os_str().as_bytes()) + } + #[cfg(windows)] + { + match path.as_os_str().to_str() { + Some(s) => Ok(s.as_bytes()), + None => Err(anyhow::format_err!( + "invalid non-unicode path: {}", + path.display() + )), + } + } +} + +/// Converts UTF-8 bytes to a path. 
+pub fn bytes2path(bytes: &[u8]) -> Result { + #[cfg(unix)] + { + use std::os::unix::prelude::*; + Ok(PathBuf::from(OsStr::from_bytes(bytes))) + } + #[cfg(windows)] + { + use std::str; + match str::from_utf8(bytes) { + Ok(s) => Ok(PathBuf::from(s)), + Err(..) => Err(anyhow::format_err!("invalid non-unicode path")), + } + } +} + +/// Returns an iterator that walks up the directory hierarchy towards the root. +/// +/// Each item is a [`Path`]. It will start with the given path, finishing at +/// the root. If the `stop_root_at` parameter is given, it will stop at the +/// given path (which will be the last item). +pub fn ancestors<'a>(path: &'a Path, stop_root_at: Option<&Path>) -> PathAncestors<'a> { + PathAncestors::new(path, stop_root_at) +} + +pub struct PathAncestors<'a> { + current: Option<&'a Path>, + stop_at: Option, +} + +impl<'a> PathAncestors<'a> { + fn new(path: &'a Path, stop_root_at: Option<&Path>) -> PathAncestors<'a> { + let stop_at = env::var("__CARGO_TEST_ROOT") + .ok() + .map(PathBuf::from) + .or_else(|| stop_root_at.map(|p| p.to_path_buf())); + PathAncestors { + current: Some(path), + //HACK: avoid reading `~/.cargo/config` when testing Cargo itself. + stop_at, + } + } +} + +impl<'a> Iterator for PathAncestors<'a> { + type Item = &'a Path; + + fn next(&mut self) -> Option<&'a Path> { + if let Some(path) = self.current { + self.current = path.parent(); + + if let Some(ref stop_at) = self.stop_at { + if path == stop_at { + self.current = None; + } + } + + Some(path) + } else { + None + } + } +} + +/// Equivalent to [`std::fs::create_dir_all`] with better error messages. +pub fn create_dir_all(p: impl AsRef) -> Result<()> { + _create_dir_all(p.as_ref()) +} + +fn _create_dir_all(p: &Path) -> Result<()> { + fs::create_dir_all(p) + .with_context(|| format!("failed to create directory `{}`", p.display()))?; + Ok(()) +} + +/// Recursively remove all files and directories at the given directory. +/// +/// This does *not* follow symlinks. 
+pub fn remove_dir_all>(p: P) -> Result<()> { + _remove_dir_all(p.as_ref()) +} + +fn _remove_dir_all(p: &Path) -> Result<()> { + if p.symlink_metadata() + .with_context(|| format!("could not get metadata for `{}` to remove", p.display()))? + .file_type() + .is_symlink() + { + return remove_file(p); + } + let entries = p + .read_dir() + .with_context(|| format!("failed to read directory `{}`", p.display()))?; + for entry in entries { + let entry = entry?; + let path = entry.path(); + if entry.file_type()?.is_dir() { + remove_dir_all(&path)?; + } else { + remove_file(&path)?; + } + } + remove_dir(&p) +} + +/// Equivalent to [`std::fs::remove_dir`] with better error messages. +pub fn remove_dir>(p: P) -> Result<()> { + _remove_dir(p.as_ref()) +} + +fn _remove_dir(p: &Path) -> Result<()> { + fs::remove_dir(p).with_context(|| format!("failed to remove directory `{}`", p.display()))?; + Ok(()) +} + +/// Equivalent to [`std::fs::remove_file`] with better error messages. +/// +/// If the file is readonly, this will attempt to change the permissions to +/// force the file to be deleted. +pub fn remove_file>(p: P) -> Result<()> { + _remove_file(p.as_ref()) +} + +fn _remove_file(p: &Path) -> Result<()> { + let mut err = match fs::remove_file(p) { + Ok(()) => return Ok(()), + Err(e) => e, + }; + + if err.kind() == io::ErrorKind::PermissionDenied && set_not_readonly(p).unwrap_or(false) { + match fs::remove_file(p) { + Ok(()) => return Ok(()), + Err(e) => err = e, + } + } + + Err(err).with_context(|| format!("failed to remove file `{}`", p.display()))?; + Ok(()) +} + +fn set_not_readonly(p: &Path) -> io::Result { + let mut perms = p.metadata()?.permissions(); + if !perms.readonly() { + return Ok(false); + } + perms.set_readonly(false); + fs::set_permissions(p, perms)?; + Ok(true) +} + +/// Hardlink (file) or symlink (dir) src to dst if possible, otherwise copy it. +/// +/// If the destination already exists, it is removed before linking. 
+pub fn link_or_copy(src: impl AsRef, dst: impl AsRef) -> Result<()> { + let src = src.as_ref(); + let dst = dst.as_ref(); + _link_or_copy(src, dst) +} + +fn _link_or_copy(src: &Path, dst: &Path) -> Result<()> { + log::debug!("linking {} to {}", src.display(), dst.display()); + if same_file::is_same_file(src, dst).unwrap_or(false) { + return Ok(()); + } + + // NB: we can't use dst.exists(), as if dst is a broken symlink, + // dst.exists() will return false. This is problematic, as we still need to + // unlink dst in this case. symlink_metadata(dst).is_ok() will tell us + // whether dst exists *without* following symlinks, which is what we want. + if fs::symlink_metadata(dst).is_ok() { + remove_file(&dst)?; + } + + let link_result = if src.is_dir() { + #[cfg(target_os = "redox")] + use std::os::redox::fs::symlink; + #[cfg(unix)] + use std::os::unix::fs::symlink; + #[cfg(windows)] + // FIXME: This should probably panic or have a copy fallback. Symlinks + // are not supported in all windows environments. Currently symlinking + // is only used for .dSYM directories on macos, but this shouldn't be + // accidentally relied upon. + use std::os::windows::fs::symlink_dir as symlink; + + let dst_dir = dst.parent().unwrap(); + let src = if src.starts_with(dst_dir) { + src.strip_prefix(dst_dir).unwrap() + } else { + src + }; + symlink(src, dst) + } else if env::var_os("__CARGO_COPY_DONT_LINK_DO_NOT_USE_THIS").is_some() { + // This is a work-around for a bug in macOS 10.15. When running on + // APFS, there seems to be a strange race condition with + // Gatekeeper where it will forcefully kill a process launched via + // `cargo run` with SIGKILL. Copying seems to avoid the problem. + // This shouldn't affect anyone except Cargo's test suite because + // it is very rare, and only seems to happen under heavy load and + // rapidly creating lots of executables and running them. + // See https://github.com/rust-lang/cargo/issues/7821 for the + // gory details. 
+ fs::copy(src, dst).map(|_| ()) + } else { + fs::hard_link(src, dst) + }; + link_result + .or_else(|err| { + log::debug!("link failed {}. falling back to fs::copy", err); + fs::copy(src, dst).map(|_| ()) + }) + .with_context(|| { + format!( + "failed to link or copy `{}` to `{}`", + src.display(), + dst.display() + ) + })?; + Ok(()) +} + +/// Copies a file from one location to another. +/// +/// Equivalent to [`std::fs::copy`] with better error messages. +pub fn copy, Q: AsRef>(from: P, to: Q) -> Result { + let from = from.as_ref(); + let to = to.as_ref(); + fs::copy(from, to) + .with_context(|| format!("failed to copy `{}` to `{}`", from.display(), to.display())) +} + +/// Changes the filesystem mtime (and atime if possible) for the given file. +/// +/// This intentionally does not return an error, as this is sometimes not +/// supported on network filesystems. For the current uses in Cargo, this is a +/// "best effort" approach, and errors shouldn't be propagated. +pub fn set_file_time_no_err>(path: P, time: FileTime) { + let path = path.as_ref(); + match filetime::set_file_times(path, time, time) { + Ok(()) => log::debug!("set file mtime {} to {}", path.display(), time), + Err(e) => log::warn!( + "could not set mtime of {} to {}: {:?}", + path.display(), + time, + e + ), + } +} + +/// Strips `base` from `path`. +/// +/// This canonicalizes both paths before stripping. This is useful if the +/// paths are obtained in different ways, and one or the other may or may not +/// have been normalized in some way. +pub fn strip_prefix_canonical>( + path: P, + base: P, +) -> Result { + // Not all filesystems support canonicalize. Just ignore if it doesn't work. 
+ let safe_canonicalize = |path: &Path| match path.canonicalize() { + Ok(p) => p, + Err(e) => { + log::warn!("cannot canonicalize {:?}: {:?}", path, e); + path.to_path_buf() + } + }; + let canon_path = safe_canonicalize(path.as_ref()); + let canon_base = safe_canonicalize(base.as_ref()); + canon_path.strip_prefix(canon_base).map(|p| p.to_path_buf()) +} + +/// Creates an excluded from cache directory atomically with its parents as needed. +/// +/// The atomicity only covers creating the leaf directory and exclusion from cache. Any missing +/// parent directories will not be created in an atomic manner. +/// +/// This function is idempotent and in addition to that it won't exclude ``p`` from cache if it +/// already exists. +pub fn create_dir_all_excluded_from_backups_atomic(p: impl AsRef) -> Result<()> { + let path = p.as_ref(); + if path.is_dir() { + return Ok(()); + } + + let parent = path.parent().unwrap(); + let base = path.file_name().unwrap(); + create_dir_all(parent)?; + // We do this in two steps (first create a temporary directory and exlucde + // it from backups, then rename it to the desired name. If we created the + // directory directly where it should be and then excluded it from backups + // we would risk a situation where cargo is interrupted right after the directory + // creation but before the exclusion the the directory would remain non-excluded from + // backups because we only perform exclusion right after we created the directory + // ourselves. + // + // We need the tempdir created in parent instead of $TMP, because only then we can be + // easily sure that rename() will succeed (the new name needs to be on the same mount + // point as the old one). 
+ let tempdir = TempFileBuilder::new().prefix(base).tempdir_in(parent)?; + exclude_from_backups(tempdir.path()); + // Previously std::fs::create_dir_all() (through paths::create_dir_all()) was used + // here to create the directory directly and fs::create_dir_all() explicitly treats + // the directory being created concurrently by another thread or process as success, + // hence the check below to follow the existing behavior. If we get an error at + // rename() and suddently the directory (which didn't exist a moment earlier) exists + // we can infer from it it's another cargo process doing work. + if let Err(e) = fs::rename(tempdir.path(), path) { + if !path.exists() { + return Err(anyhow::Error::from(e)); + } + } + Ok(()) +} + +/// Marks the directory as excluded from archives/backups. +/// +/// This is recommended to prevent derived/temporary files from bloating backups. There are two +/// mechanisms used to achieve this right now: +/// +/// * A dedicated resource property excluding from Time Machine backups on macOS +/// * CACHEDIR.TAG files supported by various tools in a platform-independent way +fn exclude_from_backups(path: &Path) { + exclude_from_time_machine(path); + let _ = std::fs::write( + path.join("CACHEDIR.TAG"), + "Signature: 8a477f597d28d172789f06886806bc55 +# This file is a cache directory tag created by cargo. +# For information about cache directory tags see https://bford.info/cachedir/ +", + ); + // Similarly to exclude_from_time_machine() we ignore errors here as it's an optional feature. 
+} + +#[cfg(not(target_os = "macos"))] +fn exclude_from_time_machine(_: &Path) {} + +#[cfg(target_os = "macos")] +/// Marks files or directories as excluded from Time Machine on macOS +fn exclude_from_time_machine(path: &Path) { + use core_foundation::base::TCFType; + use core_foundation::{number, string, url}; + use std::ptr; + + // For compatibility with 10.7 a string is used instead of global kCFURLIsExcludedFromBackupKey + let is_excluded_key: Result = "NSURLIsExcludedFromBackupKey".parse(); + let path = url::CFURL::from_path(path, false); + if let (Some(path), Ok(is_excluded_key)) = (path, is_excluded_key) { + unsafe { + url::CFURLSetResourcePropertyForKey( + path.as_concrete_TypeRef(), + is_excluded_key.as_concrete_TypeRef(), + number::kCFBooleanTrue as *const _, + ptr::null_mut(), + ); + } + } + // Errors are ignored, since it's an optional feature and failure + // doesn't prevent Cargo from working +} diff -Nru cargo-0.53.0/crates/cargo-util/src/process_builder.rs cargo-0.54.0/crates/cargo-util/src/process_builder.rs --- cargo-0.53.0/crates/cargo-util/src/process_builder.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.54.0/crates/cargo-util/src/process_builder.rs 2021-04-27 14:35:53.000000000 +0000 @@ -0,0 +1,417 @@ +use crate::process_error::ProcessError; +use crate::read2; +use anyhow::{bail, Context, Result}; +use jobserver::Client; +use shell_escape::escape; +use std::collections::BTreeMap; +use std::env; +use std::ffi::{OsStr, OsString}; +use std::fmt; +use std::iter::once; +use std::path::Path; +use std::process::{Command, Output, Stdio}; + +/// A builder object for an external process, similar to [`std::process::Command`]. +#[derive(Clone, Debug)] +pub struct ProcessBuilder { + /// The program to execute. + program: OsString, + /// A list of arguments to pass to the program. + args: Vec, + /// Any environment variables that should be set for the program. + env: BTreeMap>, + /// The directory to run the program from. 
+ cwd: Option, + /// The `make` jobserver. See the [jobserver crate] for + /// more information. + /// + /// [jobserver crate]: https://docs.rs/jobserver/ + jobserver: Option, + /// `true` to include environment variable in display. + display_env_vars: bool, +} + +impl fmt::Display for ProcessBuilder { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "`")?; + + if self.display_env_vars { + for (key, val) in self.env.iter() { + if let Some(val) = val { + let val = escape(val.to_string_lossy()); + if cfg!(windows) { + write!(f, "set {}={}&& ", key, val)?; + } else { + write!(f, "{}={} ", key, val)?; + } + } + } + } + + write!(f, "{}", self.program.to_string_lossy())?; + + for arg in &self.args { + write!(f, " {}", escape(arg.to_string_lossy()))?; + } + + write!(f, "`") + } +} + +impl ProcessBuilder { + /// Creates a new [`ProcessBuilder`] with the given executable path. + pub fn new>(cmd: T) -> ProcessBuilder { + ProcessBuilder { + program: cmd.as_ref().to_os_string(), + args: Vec::new(), + cwd: None, + env: BTreeMap::new(), + jobserver: None, + display_env_vars: false, + } + } + + /// (chainable) Sets the executable for the process. + pub fn program>(&mut self, program: T) -> &mut ProcessBuilder { + self.program = program.as_ref().to_os_string(); + self + } + + /// (chainable) Adds `arg` to the args list. + pub fn arg>(&mut self, arg: T) -> &mut ProcessBuilder { + self.args.push(arg.as_ref().to_os_string()); + self + } + + /// (chainable) Adds multiple `args` to the args list. + pub fn args>(&mut self, args: &[T]) -> &mut ProcessBuilder { + self.args + .extend(args.iter().map(|t| t.as_ref().to_os_string())); + self + } + + /// (chainable) Replaces the args list with the given `args`. + pub fn args_replace>(&mut self, args: &[T]) -> &mut ProcessBuilder { + self.args = args.iter().map(|t| t.as_ref().to_os_string()).collect(); + self + } + + /// (chainable) Sets the current working directory of the process. 
+ pub fn cwd>(&mut self, path: T) -> &mut ProcessBuilder { + self.cwd = Some(path.as_ref().to_os_string()); + self + } + + /// (chainable) Sets an environment variable for the process. + pub fn env>(&mut self, key: &str, val: T) -> &mut ProcessBuilder { + self.env + .insert(key.to_string(), Some(val.as_ref().to_os_string())); + self + } + + /// (chainable) Unsets an environment variable for the process. + pub fn env_remove(&mut self, key: &str) -> &mut ProcessBuilder { + self.env.insert(key.to_string(), None); + self + } + + /// Gets the executable name. + pub fn get_program(&self) -> &OsString { + &self.program + } + + /// Gets the program arguments. + pub fn get_args(&self) -> &[OsString] { + &self.args + } + + /// Gets the current working directory for the process. + pub fn get_cwd(&self) -> Option<&Path> { + self.cwd.as_ref().map(Path::new) + } + + /// Gets an environment variable as the process will see it (will inherit from environment + /// unless explicitally unset). + pub fn get_env(&self, var: &str) -> Option { + self.env + .get(var) + .cloned() + .or_else(|| Some(env::var_os(var))) + .and_then(|s| s) + } + + /// Gets all environment variables explicitly set or unset for the process (not inherited + /// vars). + pub fn get_envs(&self) -> &BTreeMap> { + &self.env + } + + /// Sets the `make` jobserver. See the [jobserver crate][jobserver_docs] for + /// more information. + /// + /// [jobserver_docs]: https://docs.rs/jobserver/0.1.6/jobserver/ + pub fn inherit_jobserver(&mut self, jobserver: &Client) -> &mut Self { + self.jobserver = Some(jobserver.clone()); + self + } + + /// Enables environment variable display. + pub fn display_env_vars(&mut self) -> &mut Self { + self.display_env_vars = true; + self + } + + /// Runs the process, waiting for completion, and mapping non-success exit codes to an error. 
+ pub fn exec(&self) -> Result<()> { + let mut command = self.build_command(); + let exit = command.status().with_context(|| { + ProcessError::new(&format!("could not execute process {}", self), None, None) + })?; + + if exit.success() { + Ok(()) + } else { + Err(ProcessError::new( + &format!("process didn't exit successfully: {}", self), + Some(exit), + None, + ) + .into()) + } + } + + /// Replaces the current process with the target process. + /// + /// On Unix, this executes the process using the Unix syscall `execvp`, which will block + /// this process, and will only return if there is an error. + /// + /// On Windows this isn't technically possible. Instead we emulate it to the best of our + /// ability. One aspect we fix here is that we specify a handler for the Ctrl-C handler. + /// In doing so (and by effectively ignoring it) we should emulate proxying Ctrl-C + /// handling to the application at hand, which will either terminate or handle it itself. + /// According to Microsoft's documentation at + /// . + /// the Ctrl-C signal is sent to all processes attached to a terminal, which should + /// include our child process. If the child terminates then we'll reap them in Cargo + /// pretty quickly, and if the child handles the signal then we won't terminate + /// (and we shouldn't!) until the process itself later exits. + pub fn exec_replace(&self) -> Result<()> { + imp::exec_replace(self) + } + + /// Executes the process, returning the stdio output, or an error if non-zero exit status. 
+ pub fn exec_with_output(&self) -> Result { + let mut command = self.build_command(); + + let output = command.output().with_context(|| { + ProcessError::new(&format!("could not execute process {}", self), None, None) + })?; + + if output.status.success() { + Ok(output) + } else { + Err(ProcessError::new( + &format!("process didn't exit successfully: {}", self), + Some(output.status), + Some(&output), + ) + .into()) + } + } + + /// Executes a command, passing each line of stdout and stderr to the supplied callbacks, which + /// can mutate the string data. + /// + /// If any invocations of these function return an error, it will be propagated. + /// + /// If `capture_output` is true, then all the output will also be buffered + /// and stored in the returned `Output` object. If it is false, no caching + /// is done, and the callbacks are solely responsible for handling the + /// output. + pub fn exec_with_streaming( + &self, + on_stdout_line: &mut dyn FnMut(&str) -> Result<()>, + on_stderr_line: &mut dyn FnMut(&str) -> Result<()>, + capture_output: bool, + ) -> Result { + let mut stdout = Vec::new(); + let mut stderr = Vec::new(); + + let mut cmd = self.build_command(); + cmd.stdout(Stdio::piped()) + .stderr(Stdio::piped()) + .stdin(Stdio::null()); + + let mut callback_error = None; + let status = (|| { + let mut child = cmd.spawn()?; + let out = child.stdout.take().unwrap(); + let err = child.stderr.take().unwrap(); + read2(out, err, &mut |is_out, data, eof| { + let idx = if eof { + data.len() + } else { + match data.iter().rposition(|b| *b == b'\n') { + Some(i) => i + 1, + None => return, + } + }; + { + // scope for new_lines + let new_lines = if capture_output { + let dst = if is_out { &mut stdout } else { &mut stderr }; + let start = dst.len(); + let data = data.drain(..idx); + dst.extend(data); + &dst[start..] 
+ } else { + &data[..idx] + }; + for line in String::from_utf8_lossy(new_lines).lines() { + if callback_error.is_some() { + break; + } + let callback_result = if is_out { + on_stdout_line(line) + } else { + on_stderr_line(line) + }; + if let Err(e) = callback_result { + callback_error = Some(e); + } + } + } + if !capture_output { + data.drain(..idx); + } + })?; + child.wait() + })() + .with_context(|| { + ProcessError::new(&format!("could not execute process {}", self), None, None) + })?; + let output = Output { + status, + stdout, + stderr, + }; + + { + let to_print = if capture_output { Some(&output) } else { None }; + if let Some(e) = callback_error { + let cx = ProcessError::new( + &format!("failed to parse process output: {}", self), + Some(output.status), + to_print, + ); + bail!(anyhow::Error::new(cx).context(e)); + } else if !output.status.success() { + bail!(ProcessError::new( + &format!("process didn't exit successfully: {}", self), + Some(output.status), + to_print, + )); + } + } + + Ok(output) + } + + /// Converts `ProcessBuilder` into a `std::process::Command`, and handles the jobserver, if + /// present. + pub fn build_command(&self) -> Command { + let mut command = Command::new(&self.program); + if let Some(cwd) = self.get_cwd() { + command.current_dir(cwd); + } + for arg in &self.args { + command.arg(arg); + } + for (k, v) in &self.env { + match *v { + Some(ref v) => { + command.env(k, v); + } + None => { + command.env_remove(k); + } + } + } + if let Some(ref c) = self.jobserver { + c.configure(&mut command); + } + command + } + + /// Wraps an existing command with the provided wrapper, if it is present and valid. 
+ /// + /// # Examples + /// + /// ```rust + /// use cargo_util::ProcessBuilder; + /// // Running this would execute `rustc` + /// let cmd = ProcessBuilder::new("rustc"); + /// + /// // Running this will execute `sccache rustc` + /// let cmd = cmd.wrapped(Some("sccache")); + /// ``` + pub fn wrapped(mut self, wrapper: Option>) -> Self { + let wrapper = if let Some(wrapper) = wrapper.as_ref() { + wrapper.as_ref() + } else { + return self; + }; + + if wrapper.is_empty() { + return self; + } + + let args = once(self.program).chain(self.args.into_iter()).collect(); + + self.program = wrapper.to_os_string(); + self.args = args; + + self + } +} + +#[cfg(unix)] +mod imp { + use super::{ProcessBuilder, ProcessError}; + use anyhow::Result; + use std::os::unix::process::CommandExt; + + pub fn exec_replace(process_builder: &ProcessBuilder) -> Result<()> { + let mut command = process_builder.build_command(); + let error = command.exec(); + Err(anyhow::Error::from(error).context(ProcessError::new( + &format!("could not execute process {}", process_builder), + None, + None, + ))) + } +} + +#[cfg(windows)] +mod imp { + use super::{ProcessBuilder, ProcessError}; + use anyhow::Result; + use winapi::shared::minwindef::{BOOL, DWORD, FALSE, TRUE}; + use winapi::um::consoleapi::SetConsoleCtrlHandler; + + unsafe extern "system" fn ctrlc_handler(_: DWORD) -> BOOL { + // Do nothing; let the child process handle it. + TRUE + } + + pub fn exec_replace(process_builder: &ProcessBuilder) -> Result<()> { + unsafe { + if SetConsoleCtrlHandler(Some(ctrlc_handler), TRUE) == FALSE { + return Err(ProcessError::new("Could not set Ctrl-C handler.", None, None).into()); + } + } + + // Just execute the process as normal. 
+ process_builder.exec() + } +} diff -Nru cargo-0.53.0/crates/cargo-util/src/process_error.rs cargo-0.54.0/crates/cargo-util/src/process_error.rs --- cargo-0.53.0/crates/cargo-util/src/process_error.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.54.0/crates/cargo-util/src/process_error.rs 2021-04-27 14:35:53.000000000 +0000 @@ -0,0 +1,194 @@ +//! Error value for [`crate::ProcessBuilder`] when a process fails. + +use std::fmt; +use std::process::{ExitStatus, Output}; +use std::str; + +#[derive(Debug)] +pub struct ProcessError { + /// A detailed description to show to the user why the process failed. + pub desc: String, + + /// The exit status of the process. + /// + /// This can be `None` if the process failed to launch (like process not + /// found) or if the exit status wasn't a code but was instead something + /// like termination via a signal. + pub code: Option, + + /// The stdout from the process. + /// + /// This can be `None` if the process failed to launch, or the output was + /// not captured. + pub stdout: Option>, + + /// The stderr from the process. + /// + /// This can be `None` if the process failed to launch, or the output was + /// not captured. + pub stderr: Option>, +} + +impl fmt::Display for ProcessError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.desc.fmt(f) + } +} + +impl std::error::Error for ProcessError {} + +impl ProcessError { + /// Creates a new [`ProcessError`]. + /// + /// * `status` can be `None` if the process did not launch. + /// * `output` can be `None` if the process did not launch, or output was not captured. 
+ pub fn new(msg: &str, status: Option, output: Option<&Output>) -> ProcessError { + let exit = match status { + Some(s) => exit_status_to_string(s), + None => "never executed".to_string(), + }; + + Self::new_raw( + msg, + status.and_then(|s| s.code()), + &exit, + output.map(|s| s.stdout.as_slice()), + output.map(|s| s.stderr.as_slice()), + ) + } + + /// Creates a new [`ProcessError`] with the raw output data. + /// + /// * `code` can be `None` for situations like being killed by a signal on unix. + pub fn new_raw( + msg: &str, + code: Option, + status: &str, + stdout: Option<&[u8]>, + stderr: Option<&[u8]>, + ) -> ProcessError { + let mut desc = format!("{} ({})", msg, status); + + if let Some(out) = stdout { + match str::from_utf8(out) { + Ok(s) if !s.trim().is_empty() => { + desc.push_str("\n--- stdout\n"); + desc.push_str(s); + } + Ok(..) | Err(..) => {} + } + } + if let Some(out) = stderr { + match str::from_utf8(out) { + Ok(s) if !s.trim().is_empty() => { + desc.push_str("\n--- stderr\n"); + desc.push_str(s); + } + Ok(..) | Err(..) => {} + } + } + + ProcessError { + desc, + code, + stdout: stdout.map(|s| s.to_vec()), + stderr: stderr.map(|s| s.to_vec()), + } + } +} + +/// Converts an [`ExitStatus`] to a human-readable string suitable for +/// displaying to a user. 
+pub fn exit_status_to_string(status: ExitStatus) -> String { + return status_to_string(status); + + #[cfg(unix)] + fn status_to_string(status: ExitStatus) -> String { + use std::os::unix::process::*; + + if let Some(signal) = status.signal() { + let name = match signal as libc::c_int { + libc::SIGABRT => ", SIGABRT: process abort signal", + libc::SIGALRM => ", SIGALRM: alarm clock", + libc::SIGFPE => ", SIGFPE: erroneous arithmetic operation", + libc::SIGHUP => ", SIGHUP: hangup", + libc::SIGILL => ", SIGILL: illegal instruction", + libc::SIGINT => ", SIGINT: terminal interrupt signal", + libc::SIGKILL => ", SIGKILL: kill", + libc::SIGPIPE => ", SIGPIPE: write on a pipe with no one to read", + libc::SIGQUIT => ", SIGQUIT: terminal quit signal", + libc::SIGSEGV => ", SIGSEGV: invalid memory reference", + libc::SIGTERM => ", SIGTERM: termination signal", + libc::SIGBUS => ", SIGBUS: access to undefined memory", + #[cfg(not(target_os = "haiku"))] + libc::SIGSYS => ", SIGSYS: bad system call", + libc::SIGTRAP => ", SIGTRAP: trace/breakpoint trap", + _ => "", + }; + format!("signal: {}{}", signal, name) + } else { + status.to_string() + } + } + + #[cfg(windows)] + fn status_to_string(status: ExitStatus) -> String { + use winapi::shared::minwindef::DWORD; + use winapi::um::winnt::*; + + let mut base = status.to_string(); + let extra = match status.code().unwrap() as DWORD { + STATUS_ACCESS_VIOLATION => "STATUS_ACCESS_VIOLATION", + STATUS_IN_PAGE_ERROR => "STATUS_IN_PAGE_ERROR", + STATUS_INVALID_HANDLE => "STATUS_INVALID_HANDLE", + STATUS_INVALID_PARAMETER => "STATUS_INVALID_PARAMETER", + STATUS_NO_MEMORY => "STATUS_NO_MEMORY", + STATUS_ILLEGAL_INSTRUCTION => "STATUS_ILLEGAL_INSTRUCTION", + STATUS_NONCONTINUABLE_EXCEPTION => "STATUS_NONCONTINUABLE_EXCEPTION", + STATUS_INVALID_DISPOSITION => "STATUS_INVALID_DISPOSITION", + STATUS_ARRAY_BOUNDS_EXCEEDED => "STATUS_ARRAY_BOUNDS_EXCEEDED", + STATUS_FLOAT_DENORMAL_OPERAND => "STATUS_FLOAT_DENORMAL_OPERAND", + 
STATUS_FLOAT_DIVIDE_BY_ZERO => "STATUS_FLOAT_DIVIDE_BY_ZERO", + STATUS_FLOAT_INEXACT_RESULT => "STATUS_FLOAT_INEXACT_RESULT", + STATUS_FLOAT_INVALID_OPERATION => "STATUS_FLOAT_INVALID_OPERATION", + STATUS_FLOAT_OVERFLOW => "STATUS_FLOAT_OVERFLOW", + STATUS_FLOAT_STACK_CHECK => "STATUS_FLOAT_STACK_CHECK", + STATUS_FLOAT_UNDERFLOW => "STATUS_FLOAT_UNDERFLOW", + STATUS_INTEGER_DIVIDE_BY_ZERO => "STATUS_INTEGER_DIVIDE_BY_ZERO", + STATUS_INTEGER_OVERFLOW => "STATUS_INTEGER_OVERFLOW", + STATUS_PRIVILEGED_INSTRUCTION => "STATUS_PRIVILEGED_INSTRUCTION", + STATUS_STACK_OVERFLOW => "STATUS_STACK_OVERFLOW", + STATUS_DLL_NOT_FOUND => "STATUS_DLL_NOT_FOUND", + STATUS_ORDINAL_NOT_FOUND => "STATUS_ORDINAL_NOT_FOUND", + STATUS_ENTRYPOINT_NOT_FOUND => "STATUS_ENTRYPOINT_NOT_FOUND", + STATUS_CONTROL_C_EXIT => "STATUS_CONTROL_C_EXIT", + STATUS_DLL_INIT_FAILED => "STATUS_DLL_INIT_FAILED", + STATUS_FLOAT_MULTIPLE_FAULTS => "STATUS_FLOAT_MULTIPLE_FAULTS", + STATUS_FLOAT_MULTIPLE_TRAPS => "STATUS_FLOAT_MULTIPLE_TRAPS", + STATUS_REG_NAT_CONSUMPTION => "STATUS_REG_NAT_CONSUMPTION", + STATUS_HEAP_CORRUPTION => "STATUS_HEAP_CORRUPTION", + STATUS_STACK_BUFFER_OVERRUN => "STATUS_STACK_BUFFER_OVERRUN", + STATUS_ASSERTION_FAILURE => "STATUS_ASSERTION_FAILURE", + _ => return base, + }; + base.push_str(", "); + base.push_str(extra); + base + } +} + +/// Returns `true` if the given process exit code is something a normal +/// process would exit with. +/// +/// This helps differentiate from abnormal termination codes, such as +/// segmentation faults or signals. +pub fn is_simple_exit_code(code: i32) -> bool { + // Typical unix exit codes are 0 to 127. + // Windows doesn't have anything "typical", and is a + // 32-bit number (which appears signed here, but is really + // unsigned). However, most of the interesting NTSTATUS + // codes are very large. This is just a rough + // approximation of which codes are "normal" and which + // ones are abnormal termination. 
+ code >= 0 && code <= 127 +} diff -Nru cargo-0.53.0/crates/cargo-util/src/read2.rs cargo-0.54.0/crates/cargo-util/src/read2.rs --- cargo-0.53.0/crates/cargo-util/src/read2.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.54.0/crates/cargo-util/src/read2.rs 2021-04-27 14:35:53.000000000 +0000 @@ -0,0 +1,178 @@ +pub use self::imp::read2; + +#[cfg(unix)] +mod imp { + use std::io; + use std::io::prelude::*; + use std::mem; + use std::os::unix::prelude::*; + use std::process::{ChildStderr, ChildStdout}; + + pub fn read2( + mut out_pipe: ChildStdout, + mut err_pipe: ChildStderr, + data: &mut dyn FnMut(bool, &mut Vec, bool), + ) -> io::Result<()> { + unsafe { + libc::fcntl(out_pipe.as_raw_fd(), libc::F_SETFL, libc::O_NONBLOCK); + libc::fcntl(err_pipe.as_raw_fd(), libc::F_SETFL, libc::O_NONBLOCK); + } + + let mut out_done = false; + let mut err_done = false; + let mut out = Vec::new(); + let mut err = Vec::new(); + + let mut fds: [libc::pollfd; 2] = unsafe { mem::zeroed() }; + fds[0].fd = out_pipe.as_raw_fd(); + fds[0].events = libc::POLLIN; + fds[1].fd = err_pipe.as_raw_fd(); + fds[1].events = libc::POLLIN; + let mut nfds = 2; + let mut errfd = 1; + + while nfds > 0 { + // wait for either pipe to become readable using `select` + let r = unsafe { libc::poll(fds.as_mut_ptr(), nfds, -1) }; + if r == -1 { + let err = io::Error::last_os_error(); + if err.kind() == io::ErrorKind::Interrupted { + continue; + } + return Err(err); + } + + // Read as much as we can from each pipe, ignoring EWOULDBLOCK or + // EAGAIN. If we hit EOF, then this will happen because the underlying + // reader will return Ok(0), in which case we'll see `Ok` ourselves. In + // this case we flip the other fd back into blocking mode and read + // whatever's leftover on that file descriptor. 
+ let handle = |res: io::Result<_>| match res { + Ok(_) => Ok(true), + Err(e) => { + if e.kind() == io::ErrorKind::WouldBlock { + Ok(false) + } else { + Err(e) + } + } + }; + if !err_done && fds[errfd].revents != 0 && handle(err_pipe.read_to_end(&mut err))? { + err_done = true; + nfds -= 1; + } + data(false, &mut err, err_done); + if !out_done && fds[0].revents != 0 && handle(out_pipe.read_to_end(&mut out))? { + out_done = true; + fds[0].fd = err_pipe.as_raw_fd(); + errfd = 0; + nfds -= 1; + } + data(true, &mut out, out_done); + } + Ok(()) + } +} + +#[cfg(windows)] +mod imp { + use std::io; + use std::os::windows::prelude::*; + use std::process::{ChildStderr, ChildStdout}; + use std::slice; + + use miow::iocp::{CompletionPort, CompletionStatus}; + use miow::pipe::NamedPipe; + use miow::Overlapped; + use winapi::shared::winerror::ERROR_BROKEN_PIPE; + + struct Pipe<'a> { + dst: &'a mut Vec, + overlapped: Overlapped, + pipe: NamedPipe, + done: bool, + } + + pub fn read2( + out_pipe: ChildStdout, + err_pipe: ChildStderr, + data: &mut dyn FnMut(bool, &mut Vec, bool), + ) -> io::Result<()> { + let mut out = Vec::new(); + let mut err = Vec::new(); + + let port = CompletionPort::new(1)?; + port.add_handle(0, &out_pipe)?; + port.add_handle(1, &err_pipe)?; + + unsafe { + let mut out_pipe = Pipe::new(out_pipe, &mut out); + let mut err_pipe = Pipe::new(err_pipe, &mut err); + + out_pipe.read()?; + err_pipe.read()?; + + let mut status = [CompletionStatus::zero(), CompletionStatus::zero()]; + + while !out_pipe.done || !err_pipe.done { + for status in port.get_many(&mut status, None)? 
{ + if status.token() == 0 { + out_pipe.complete(status); + data(true, out_pipe.dst, out_pipe.done); + out_pipe.read()?; + } else { + err_pipe.complete(status); + data(false, err_pipe.dst, err_pipe.done); + err_pipe.read()?; + } + } + } + + Ok(()) + } + } + + impl<'a> Pipe<'a> { + unsafe fn new(p: P, dst: &'a mut Vec) -> Pipe<'a> { + Pipe { + dst, + pipe: NamedPipe::from_raw_handle(p.into_raw_handle()), + overlapped: Overlapped::zero(), + done: false, + } + } + + unsafe fn read(&mut self) -> io::Result<()> { + let dst = slice_to_end(self.dst); + match self.pipe.read_overlapped(dst, self.overlapped.raw()) { + Ok(_) => Ok(()), + Err(e) => { + if e.raw_os_error() == Some(ERROR_BROKEN_PIPE as i32) { + self.done = true; + Ok(()) + } else { + Err(e) + } + } + } + } + + unsafe fn complete(&mut self, status: &CompletionStatus) { + let prev = self.dst.len(); + self.dst.set_len(prev + status.bytes_transferred() as usize); + if status.bytes_transferred() == 0 { + self.done = true; + } + } + } + + unsafe fn slice_to_end(v: &mut Vec) -> &mut [u8] { + if v.capacity() == 0 { + v.reserve(16); + } + if v.capacity() == v.len() { + v.reserve(1); + } + slice::from_raw_parts_mut(v.as_mut_ptr().add(v.len()), v.capacity() - v.len()) + } +} diff -Nru cargo-0.53.0/crates/cargo-util/src/sha256.rs cargo-0.54.0/crates/cargo-util/src/sha256.rs --- cargo-0.53.0/crates/cargo-util/src/sha256.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.54.0/crates/cargo-util/src/sha256.rs 2021-04-27 14:35:53.000000000 +0000 @@ -0,0 +1,56 @@ +use super::paths; +use anyhow::{Context, Result}; +use crypto_hash::{Algorithm, Hasher}; +use std::fs::File; +use std::io::{self, Read, Write}; +use std::path::Path; + +pub struct Sha256(Hasher); + +impl Sha256 { + pub fn new() -> Sha256 { + let hasher = Hasher::new(Algorithm::SHA256); + Sha256(hasher) + } + + pub fn update(&mut self, bytes: &[u8]) -> &mut Sha256 { + let _ = self.0.write_all(bytes); + self + } + + pub fn update_file(&mut self, mut file: &File) -> 
io::Result<&mut Sha256> { + let mut buf = [0; 64 * 1024]; + loop { + let n = file.read(&mut buf)?; + if n == 0 { + break Ok(self); + } + self.update(&buf[..n]); + } + } + + pub fn update_path>(&mut self, path: P) -> Result<&mut Sha256> { + let path = path.as_ref(); + let file = paths::open(path)?; + self.update_file(&file) + .with_context(|| format!("failed to read `{}`", path.display()))?; + Ok(self) + } + + pub fn finish(&mut self) -> [u8; 32] { + let mut ret = [0u8; 32]; + let data = self.0.finish(); + ret.copy_from_slice(&data[..]); + ret + } + + pub fn finish_hex(&mut self) -> String { + hex::encode(self.finish()) + } +} + +impl Default for Sha256 { + fn default() -> Self { + Self::new() + } +} diff -Nru cargo-0.53.0/crates/crates-io/lib.rs cargo-0.54.0/crates/crates-io/lib.rs --- cargo-0.53.0/crates/crates-io/lib.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/crates/crates-io/lib.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,5 +1,4 @@ -#![allow(unknown_lints)] -#![allow(clippy::identity_op)] // used for vertical alignment +#![allow(clippy::all)] use std::collections::BTreeMap; use std::fmt; diff -Nru cargo-0.53.0/crates/resolver-tests/Cargo.toml cargo-0.54.0/crates/resolver-tests/Cargo.toml --- cargo-0.53.0/crates/resolver-tests/Cargo.toml 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/crates/resolver-tests/Cargo.toml 2021-04-27 14:35:53.000000000 +0000 @@ -6,6 +6,7 @@ [dependencies] cargo = { path = "../.." 
} +cargo-util = { path = "../cargo-util" } proptest = "0.9.1" lazy_static = "1.3.0" varisat = "0.2.1" diff -Nru cargo-0.53.0/crates/resolver-tests/src/lib.rs cargo-0.54.0/crates/resolver-tests/src/lib.rs --- cargo-0.53.0/crates/resolver-tests/src/lib.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/crates/resolver-tests/src/lib.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,5 +1,4 @@ -#![allow(clippy::many_single_char_names)] -#![allow(clippy::needless_range_loop)] // false positives +#![allow(clippy::all)] use std::cell::RefCell; use std::cmp::PartialEq; @@ -123,7 +122,7 @@ struct MyRegistry<'a> { list: &'a [Summary], used: HashSet, - }; + } impl<'a> Registry for MyRegistry<'a> { fn query( &mut self, diff -Nru cargo-0.53.0/crates/resolver-tests/tests/resolve.rs cargo-0.54.0/crates/resolver-tests/tests/resolve.rs --- cargo-0.53.0/crates/resolver-tests/tests/resolve.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/crates/resolver-tests/tests/resolve.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,6 +1,7 @@ use cargo::core::dependency::DepKind; use cargo::core::Dependency; -use cargo::util::{is_ci, Config}; +use cargo::util::Config; +use cargo_util::is_ci; use resolver_tests::{ assert_contains, assert_same, dep, dep_kind, dep_loc, dep_req, dep_req_kind, loc_names, names, diff -Nru cargo-0.53.0/debian/changelog cargo-0.54.0/debian/changelog --- cargo-0.53.0/debian/changelog 2021-10-26 00:42:20.000000000 +0000 +++ cargo-0.54.0/debian/changelog 2021-10-29 11:12:15.000000000 +0000 @@ -1,9 +1,19 @@ -cargo (0.53.0-0ubuntu2~21.04.1) hirsute; urgency=medium +cargo (0.54.0-0ubuntu1~21.04.1) hirsute; urgency=medium + [ Michael Hudson-Doyle ] * Backport to Hirsute. (LP: #1943842) * Drop change to -march on armhf. - -- Michael Hudson-Doyle Tue, 26 Oct 2021 13:42:20 +1300 + -- Olivier Tilloy Fri, 29 Oct 2021 13:12:15 +0200 + +cargo (0.54.0-0ubuntu1) UNRELEASED; urgency=medium + + * Update debian/watch file to account for GitHub changes. + * New upstream version. 
+ * Update patches. + * Cherry pick 2112-handle-4-siphasher-algorithms.patch from Debian. + + -- Michael Hudson-Doyle Wed, 27 Oct 2021 10:18:02 +1300 cargo (0.53.0-0ubuntu2) jammy; urgency=medium diff -Nru cargo-0.53.0/debian/patches/0001-Update-tar-dependency-to-0.4.34.patch cargo-0.54.0/debian/patches/0001-Update-tar-dependency-to-0.4.34.patch --- cargo-0.53.0/debian/patches/0001-Update-tar-dependency-to-0.4.34.patch 2021-10-25 20:30:41.000000000 +0000 +++ cargo-0.54.0/debian/patches/0001-Update-tar-dependency-to-0.4.34.patch 2021-10-28 19:57:51.000000000 +0000 @@ -14,7 +14,7 @@ --- a/Cargo.toml +++ b/Cargo.toml -@@ -57,7 +57,7 @@ +@@ -56,7 +56,7 @@ serde_json = { version = "1.0.30", features = ["raw_value"] } shell-escape = "0.1.4" strip-ansi-escapes = "0.1.0" @@ -25,7 +25,7 @@ toml = "0.5.7" --- a/src/cargo/ops/cargo_package.rs +++ b/src/cargo/ops/cargo_package.rs -@@ -525,6 +525,8 @@ +@@ -524,6 +524,8 @@ header.set_entry_type(EntryType::file()); header.set_mode(0o644); header.set_size(contents.len() as u64); @@ -33,7 +33,7 @@ + header.set_mtime(1); header.set_cksum(); ar.append_data(&mut header, &ar_path, contents.as_bytes()) - .chain_err(|| format!("could not archive source file `{}`", rel_str))?; + .with_context(|| format!("could not archive source file `{}`", rel_str))?; --- a/tests/testsuite/package.rs +++ b/tests/testsuite/package.rs @@ -1947,9 +1947,10 @@ diff -Nru cargo-0.53.0/debian/patches/2112-handle-4-siphasher-algorithms.patch cargo-0.54.0/debian/patches/2112-handle-4-siphasher-algorithms.patch --- cargo-0.53.0/debian/patches/2112-handle-4-siphasher-algorithms.patch 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.54.0/debian/patches/2112-handle-4-siphasher-algorithms.patch 2021-10-28 19:58:51.000000000 +0000 @@ -0,0 +1,19 @@ +Bug: https://github.com/rust-lang/cargo/issues/10004 + +--- a/src/cargo/core/source/source_id.rs ++++ b/src/cargo/core/source/source_id.rs +@@ -584,7 +584,13 @@ + fn test_cratesio_hash() { + let config = 
Config::default().unwrap(); + let crates_io = SourceId::crates_io(&config).unwrap(); +- assert_eq!(crate::util::hex::short_hash(&crates_io), "1ecc6299db9ec823"); ++ assert!([ ++ "1ecc6299db9ec823", // 64 LE ++ "1285ae84e5963aae", // 32 LE ++ "eae4ba8cbf2ce1c7", // 64 BE ++ "b420f105fcaca6de", // 32 BE ++ ] ++ .contains(&crate::util::hex::short_hash(&crates_io).as_str())); + } + + /// A `Display`able view into a `SourceId` that will write it as a url diff -Nru cargo-0.53.0/debian/patches/series cargo-0.54.0/debian/patches/series --- cargo-0.53.0/debian/patches/series 2021-10-26 00:40:40.000000000 +0000 +++ cargo-0.54.0/debian/patches/series 2021-10-29 11:12:06.000000000 +0000 @@ -4,3 +4,4 @@ skip-filter_platform-non-amd64.patch 0001-Update-tar-dependency-to-0.4.34.patch disable-lto-test_profiele.patch +2112-handle-4-siphasher-algorithms.patch diff -Nru cargo-0.53.0/debian/watch cargo-0.54.0/debian/watch --- cargo-0.53.0/debian/watch 2021-10-25 20:30:41.000000000 +0000 +++ cargo-0.54.0/debian/watch 2021-10-28 19:57:51.000000000 +0000 @@ -1,2 +1,4 @@ -version=3 -https://github.com/rust-lang/cargo/releases /rust-lang/cargo/archive.*/(\d+\.\d+\.\d+)\.tar\.gz +version=4 +opts="filenamemangle=s%(?:.*?)?v?(\d[\d.]*)\.tar\.gz%cargo-$1.tar.gz%" \ + https://github.com/rust-lang/cargo/tags \ + (?:.*?/)?v?(\d[\d.]*)\.tar\.gz diff -Nru cargo-0.53.0/.github/workflows/main.yml cargo-0.54.0/.github/workflows/main.yml --- cargo-0.53.0/.github/workflows/main.yml 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/.github/workflows/main.yml 2021-04-27 14:35:53.000000000 +0000 @@ -66,6 +66,7 @@ - run: cargo test --features 'deny-warnings' - run: cargo test --features 'deny-warnings' -p cargo-test-support - run: cargo test -p cargo-platform + - run: cargo test -p cargo-util - run: cargo test --manifest-path crates/mdman/Cargo.toml - run: cargo build --manifest-path crates/credential/cargo-credential-1password/Cargo.toml - run: cargo build --manifest-path 
crates/credential/cargo-credential-gnome-secret/Cargo.toml diff -Nru cargo-0.53.0/publish.py cargo-0.54.0/publish.py --- cargo-0.53.0/publish.py 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/publish.py 2021-04-27 14:35:53.000000000 +0000 @@ -12,6 +12,7 @@ TO_PUBLISH = [ 'crates/cargo-platform', + 'crates/cargo-util', 'crates/crates-io', '.', ] diff -Nru cargo-0.53.0/src/bin/cargo/cli.rs cargo-0.54.0/src/bin/cargo/cli.rs --- cargo-0.53.0/src/bin/cargo/cli.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/bin/cargo/cli.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,10 +1,11 @@ -use cargo::core::features; +use cargo::core::{features, CliUnstable}; use cargo::{self, drop_print, drop_println, CliResult, Config}; use clap::{AppSettings, Arg, ArgMatches}; use super::commands; use super::list_commands; use crate::command_prelude::*; +use cargo::core::features::HIDDEN; pub fn main(config: &mut Config) -> CliResult { // CAUTION: Be careful with using `config` until it is configured below. 
@@ -30,24 +31,38 @@ }; if args.value_of("unstable-features") == Some("help") { + let options = CliUnstable::help(); + let non_hidden_options: Vec<(String, String)> = options + .iter() + .filter(|(_, help_message)| *help_message != HIDDEN) + .map(|(name, help)| (name.to_string(), help.to_string())) + .collect(); + let longest_option = non_hidden_options + .iter() + .map(|(option_name, _)| option_name.len()) + .max() + .unwrap_or(0); + let help_lines: Vec = non_hidden_options + .iter() + .map(|(option_name, option_help_message)| { + let option_name_kebab_case = option_name.replace("_", "-"); + let padding = " ".repeat(longest_option - option_name.len()); // safe to substract + format!( + " -Z {}{} -- {}", + option_name_kebab_case, padding, option_help_message + ) + }) + .collect(); + let joined = help_lines.join("\n"); drop_println!( config, " Available unstable (nightly-only) flags: - -Z avoid-dev-deps -- Avoid installing dev-dependencies if possible - -Z extra-link-arg -- Allow `cargo:rustc-link-arg` in build scripts - -Z minimal-versions -- Install minimal dependency versions instead of maximum - -Z no-index-update -- Do not update the registry, avoids a network request for benchmarking - -Z unstable-options -- Allow the usage of unstable options - -Z timings -- Display concurrency information - -Z doctest-xcompile -- Compile and run doctests for non-host target using runner config - -Z terminal-width -- Provide a terminal width to rustc for error truncation - -Z namespaced-features -- Allow features with `dep:` prefix - -Z weak-dep-features -- Allow `dep_name?/feature` feature syntax - -Z patch-in-config -- Allow `[patch]` sections in .cargo/config.toml files +{} -Run with 'cargo -Z [FLAG] [SUBCOMMAND]'" +Run with 'cargo -Z [FLAG] [SUBCOMMAND]'", + joined ); if !config.nightly_features_allowed { drop_println!( diff -Nru cargo-0.53.0/src/bin/cargo/commands/config.rs cargo-0.54.0/src/bin/cargo/commands/config.rs --- cargo-0.53.0/src/bin/cargo/commands/config.rs 
1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.54.0/src/bin/cargo/commands/config.rs 2021-04-27 14:35:53.000000000 +0000 @@ -0,0 +1,48 @@ +use crate::command_prelude::*; +use cargo::ops::cargo_config; + +pub fn cli() -> App { + subcommand("config") + .about("Inspect configuration values") + .after_help("Run `cargo help config` for more detailed information.\n") + .setting(clap::AppSettings::SubcommandRequiredElseHelp) + .subcommand( + subcommand("get") + .arg(Arg::with_name("key").help("The config key to display")) + .arg( + opt("format", "Display format") + .possible_values(cargo_config::ConfigFormat::POSSIBLE_VALUES) + .default_value("toml"), + ) + .arg(opt( + "show-origin", + "Display where the config value is defined", + )) + .arg( + opt("merged", "Whether or not to merge config values") + .possible_values(&["yes", "no"]) + .default_value("yes"), + ), + ) +} + +pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult { + config + .cli_unstable() + .fail_if_stable_command(config, "config", 9301)?; + match args.subcommand() { + ("get", Some(args)) => { + let opts = cargo_config::GetOptions { + key: args.value_of("key"), + format: args.value_of("format").unwrap().parse()?, + show_origin: args.is_present("show-origin"), + merged: args.value_of("merged") == Some("yes"), + }; + cargo_config::get(config, &opts)?; + } + (cmd, _) => { + panic!("unexpected command `{}`", cmd) + } + } + Ok(()) +} diff -Nru cargo-0.53.0/src/bin/cargo/commands/describe_future_incompatibilities.rs cargo-0.54.0/src/bin/cargo/commands/describe_future_incompatibilities.rs --- cargo-0.53.0/src/bin/cargo/commands/describe_future_incompatibilities.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/bin/cargo/commands/describe_future_incompatibilities.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,8 +1,7 @@ use crate::command_prelude::*; -use anyhow::anyhow; +use anyhow::{anyhow, Context as _}; use cargo::core::compiler::future_incompat::{OnDiskReport, FUTURE_INCOMPAT_FILE}; use 
cargo::drop_eprint; -use cargo::util::CargoResultExt; use std::io::Read; pub fn cli() -> App { @@ -37,9 +36,9 @@ report_file .file() .read_to_string(&mut file_contents) - .chain_err(|| "failed to read report")?; + .with_context(|| "failed to read report")?; let on_disk_report: OnDiskReport = - serde_json::from_str(&file_contents).chain_err(|| "failed to load report")?; + serde_json::from_str(&file_contents).with_context(|| "failed to load report")?; let id = args.value_of("id").unwrap(); if id != on_disk_report.id { diff -Nru cargo-0.53.0/src/bin/cargo/commands/help.rs cargo-0.54.0/src/bin/cargo/commands/help.rs --- cargo-0.53.0/src/bin/cargo/commands/help.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/bin/cargo/commands/help.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,7 +1,7 @@ use crate::aliased_command; use cargo::util::errors::CargoResult; -use cargo::util::paths::resolve_executable; use cargo::Config; +use cargo_util::paths::resolve_executable; use flate2::read::GzDecoder; use std::ffi::OsString; use std::io::Read; @@ -56,16 +56,16 @@ Some(man) => man, None => return Ok(false), }; - write_and_spawn(&man, "man")?; + write_and_spawn(&subcommand, &man, "man")?; } else { let txt = match extract_man(&subcommand, "txt") { Some(txt) => txt, None => return Ok(false), }; if resolve_executable(Path::new("less")).is_ok() { - write_and_spawn(&txt, "less")?; + write_and_spawn(&subcommand, &txt, "less")?; } else if resolve_executable(Path::new("more")).is_ok() { - write_and_spawn(&txt, "more")?; + write_and_spawn(&subcommand, &txt, "more")?; } else { drop(std::io::stdout().write_all(&txt)); } @@ -117,13 +117,20 @@ /// Write the contents of a man page to disk and spawn the given command to /// display it. 
-fn write_and_spawn(contents: &[u8], command: &str) -> CargoResult<()> { - let mut tmp = tempfile::Builder::new().prefix("cargo-man").tempfile()?; +fn write_and_spawn(name: &str, contents: &[u8], command: &str) -> CargoResult<()> { + let prefix = format!("cargo-{}.", name); + let mut tmp = tempfile::Builder::new().prefix(&prefix).tempfile()?; let f = tmp.as_file_mut(); f.write_all(contents)?; f.flush()?; + let path = tmp.path(); + // Use a path relative to the temp directory so that it can work on + // cygwin/msys systems which don't handle windows-style paths. + let mut relative_name = std::ffi::OsString::from("./"); + relative_name.push(path.file_name().unwrap()); let mut cmd = std::process::Command::new(command) - .arg(tmp.path()) + .arg(relative_name) + .current_dir(path.parent().unwrap()) .spawn()?; drop(cmd.wait()); Ok(()) diff -Nru cargo-0.53.0/src/bin/cargo/commands/logout.rs cargo-0.54.0/src/bin/cargo/commands/logout.rs --- cargo-0.53.0/src/bin/cargo/commands/logout.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/bin/cargo/commands/logout.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,6 +1,4 @@ use crate::command_prelude::*; -use anyhow::format_err; -use cargo::core::features; use cargo::ops; pub fn cli() -> App { @@ -12,29 +10,10 @@ } pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult { - let unstable = config.cli_unstable(); - if !(unstable.credential_process || unstable.unstable_options) { - const SEE: &str = "See https://github.com/rust-lang/cargo/issues/8933 for more \ - information about the `cargo logout` command."; - if config.nightly_features_allowed { - return Err(format_err!( - "the `cargo logout` command is unstable, pass `-Z unstable-options` to enable it\n\ - {}", - SEE - ) - .into()); - } else { - return Err(format_err!( - "the `cargo logout` command is unstable, and only available on the \ - nightly channel of Cargo, but this is the `{}` channel\n\ - {}\n\ - {}", - features::channel(), - features::SEE_CHANNELS, - 
SEE - ) - .into()); - } + if !config.cli_unstable().credential_process { + config + .cli_unstable() + .fail_if_stable_command(config, "logout", 8933)?; } config.load_credentials()?; ops::registry_logout(config, args.value_of("registry").map(String::from))?; diff -Nru cargo-0.53.0/src/bin/cargo/commands/metadata.rs cargo-0.54.0/src/bin/cargo/commands/metadata.rs --- cargo-0.53.0/src/bin/cargo/commands/metadata.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/bin/cargo/commands/metadata.rs 2021-04-27 14:35:53.000000000 +0000 @@ -44,9 +44,7 @@ }; let options = OutputMetadataOptions { - features: values(args, "features"), - all_features: args.is_present("all-features"), - no_default_features: args.is_present("no-default-features"), + cli_features: args.cli_features()?, no_deps: args.is_present("no-deps"), filter_platforms: args._values_of("filter-platform"), version, diff -Nru cargo-0.53.0/src/bin/cargo/commands/mod.rs cargo-0.54.0/src/bin/cargo/commands/mod.rs --- cargo-0.53.0/src/bin/cargo/commands/mod.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/bin/cargo/commands/mod.rs 2021-04-27 14:35:53.000000000 +0000 @@ -6,6 +6,7 @@ build::cli(), check::cli(), clean::cli(), + config::cli(), describe_future_incompatibilities::cli(), doc::cli(), fetch::cli(), @@ -45,6 +46,7 @@ "build" => build::exec, "check" => check::exec, "clean" => clean::exec, + "config" => config::exec, "describe-future-incompatibilities" => describe_future_incompatibilities::exec, "doc" => doc::exec, "fetch" => fetch::exec, @@ -84,6 +86,7 @@ pub mod build; pub mod check; pub mod clean; +pub mod config; pub mod describe_future_incompatibilities; pub mod doc; pub mod fetch; diff -Nru cargo-0.53.0/src/bin/cargo/commands/package.rs cargo-0.54.0/src/bin/cargo/commands/package.rs --- cargo-0.53.0/src/bin/cargo/commands/package.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/bin/cargo/commands/package.rs 2021-04-27 14:35:53.000000000 +0000 @@ -45,9 +45,7 @@ allow_dirty: 
args.is_present("allow-dirty"), targets: args.targets(), jobs: args.jobs()?, - features: args._values_of("features"), - all_features: args.is_present("all-features"), - no_default_features: args.is_present("no-default-features"), + cli_features: args.cli_features()?, }, )?; Ok(()) diff -Nru cargo-0.53.0/src/bin/cargo/commands/publish.rs cargo-0.54.0/src/bin/cargo/commands/publish.rs --- cargo-0.53.0/src/bin/cargo/commands/publish.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/bin/cargo/commands/publish.rs 2021-04-27 14:35:53.000000000 +0000 @@ -45,9 +45,7 @@ jobs: args.jobs()?, dry_run: args.is_present("dry-run"), registry, - features: args._values_of("features"), - all_features: args.is_present("all-features"), - no_default_features: args.is_present("no-default-features"), + cli_features: args.cli_features()?, }, )?; Ok(()) diff -Nru cargo-0.53.0/src/bin/cargo/commands/run.rs cargo-0.54.0/src/bin/cargo/commands/run.rs --- cargo-0.53.0/src/bin/cargo/commands/run.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/bin/cargo/commands/run.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,8 +1,8 @@ use crate::command_prelude::*; use crate::util::restricted_names::is_glob_pattern; -use crate::util::ProcessError; use cargo::core::Verbosity; use cargo::ops::{self, CompileFilter, Packages}; +use cargo_util::ProcessError; pub fn cli() -> App { subcommand("run") diff -Nru cargo-0.53.0/src/bin/cargo/commands/test.rs cargo-0.54.0/src/bin/cargo/commands/test.rs --- cargo-0.53.0/src/bin/cargo/commands/test.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/bin/cargo/commands/test.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,7 +1,6 @@ use crate::command_prelude::*; use anyhow::Error; use cargo::ops::{self, CompileFilter, FilterRule, LibRule}; -use cargo::util::errors; pub fn cli() -> App { subcommand("test") @@ -128,7 +127,7 @@ let context = anyhow::format_err!("{}", err.hint(&ws, &ops.compile_opts)); let e = match err.code { // Don't show "process didn't 
exit successfully" for simple errors. - Some(i) if errors::is_simple_exit_code(i) => CliError::new(context, i), + Some(i) if cargo_util::is_simple_exit_code(i) => CliError::new(context, i), Some(i) => CliError::new(Error::from(err).context(context), i), None => CliError::new(Error::from(err).context(context), 101), }; diff -Nru cargo-0.53.0/src/bin/cargo/commands/tree.rs cargo-0.54.0/src/bin/cargo/commands/tree.rs --- cargo-0.53.0/src/bin/cargo/commands/tree.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/bin/cargo/commands/tree.rs 2021-04-27 14:35:53.000000000 +0000 @@ -190,9 +190,7 @@ let charset = tree::Charset::from_str(args.value_of("charset").unwrap()) .map_err(|e| anyhow::anyhow!("{}", e))?; let opts = tree::TreeOptions { - features: values(args, "features"), - all_features: args.is_present("all-features"), - no_default_features: args.is_present("no-default-features"), + cli_features: args.cli_features()?, packages, target, edge_kinds, diff -Nru cargo-0.53.0/src/bin/cargo/main.rs cargo-0.54.0/src/bin/cargo/main.rs --- cargo-0.53.0/src/bin/cargo/main.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/bin/cargo/main.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,17 +1,17 @@ #![warn(rust_2018_idioms)] // while we're getting used to 2018 -#![allow(clippy::redundant_closure)] // there's a false positive +#![allow(clippy::all)] #![warn(clippy::needless_borrow)] #![warn(clippy::redundant_clone)] +use cargo::core::shell::Shell; +use cargo::util::CliError; +use cargo::util::{self, closest_msg, command_prelude, CargoResult, CliResult, Config}; +use cargo_util::{ProcessBuilder, ProcessError}; use std::collections::{BTreeMap, BTreeSet}; use std::env; use std::fs; use std::path::{Path, PathBuf}; -use cargo::core::shell::Shell; -use cargo::util::{self, closest_msg, command_prelude, CargoResult, CliResult, Config}; -use cargo::util::{CliError, ProcessError}; - mod cli; mod commands; @@ -159,7 +159,7 @@ }; let cargo_exe = config.cargo_exe()?; - let err = 
match util::process(&command) + let err = match ProcessBuilder::new(&command) .env(cargo::CARGO_ENV, cargo_exe) .args(args) .exec_replace() diff -Nru cargo-0.53.0/src/cargo/core/compiler/build_config.rs cargo-0.54.0/src/cargo/core/compiler/build_config.rs --- cargo-0.53.0/src/cargo/core/compiler/build_config.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/cargo/core/compiler/build_config.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,8 +1,8 @@ use crate::core::compiler::CompileKind; use crate::util::interning::InternedString; -use crate::util::ProcessBuilder; use crate::util::{CargoResult, Config, RustfixDiagnosticServer}; use anyhow::bail; +use cargo_util::ProcessBuilder; use serde::ser; use std::cell::RefCell; use std::path::PathBuf; diff -Nru cargo-0.53.0/src/cargo/core/compiler/build_context/mod.rs cargo-0.54.0/src/cargo/core/compiler/build_context/mod.rs --- cargo-0.53.0/src/cargo/core/compiler/build_context/mod.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/cargo/core/compiler/build_context/mod.rs 2021-04-27 14:35:53.000000000 +0000 @@ -11,7 +11,9 @@ use std::path::PathBuf; mod target_info; -pub use self::target_info::{FileFlavor, FileType, RustcTargetData, TargetInfo}; +pub use self::target_info::{ + FileFlavor, FileType, RustDocFingerprint, RustcTargetData, TargetInfo, +}; /// The build context, containing all information about a build task. /// @@ -37,7 +39,7 @@ pub packages: PackageSet<'cfg>, /// Information about rustc and the target platform. - pub target_data: RustcTargetData, + pub target_data: RustcTargetData<'cfg>, /// The root units of `unit_graph` (units requested on the command-line). 
pub roots: Vec, @@ -56,7 +58,7 @@ build_config: &'a BuildConfig, profiles: Profiles, extra_compiler_args: HashMap>, - target_data: RustcTargetData, + target_data: RustcTargetData<'cfg>, roots: Vec, unit_graph: UnitGraph, ) -> CargoResult> { diff -Nru cargo-0.53.0/src/cargo/core/compiler/build_context/target_info.rs cargo-0.54.0/src/cargo/core/compiler/build_context/target_info.rs --- cargo-0.53.0/src/cargo/core/compiler/build_context/target_info.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/cargo/core/compiler/build_context/target_info.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,12 +1,17 @@ -use crate::core::compiler::{BuildOutput, CompileKind, CompileMode, CompileTarget, CrateType}; +use crate::core::compiler::{ + BuildOutput, CompileKind, CompileMode, CompileTarget, Context, CrateType, +}; use crate::core::{Dependency, Target, TargetKind, Workspace}; use crate::util::config::{Config, StringList, TargetConfig}; -use crate::util::{CargoResult, CargoResultExt, ProcessBuilder, Rustc}; +use crate::util::{CargoResult, Rustc}; +use anyhow::Context as _; use cargo_platform::{Cfg, CfgExpr}; +use cargo_util::{paths, ProcessBuilder}; +use serde::{Deserialize, Serialize}; use std::cell::RefCell; use std::collections::hash_map::{Entry, HashMap}; use std::env; -use std::path::PathBuf; +use std::path::{Path, PathBuf}; use std::str::{self, FromStr}; /// Information about the platform target gleaned from querying rustc. 
@@ -135,7 +140,7 @@ "RUSTFLAGS", )?; let extra_fingerprint = kind.fingerprint_hash(); - let mut process = rustc.process(); + let mut process = rustc.workspace_process(); process .arg("-") .arg("--crate-name") @@ -172,7 +177,7 @@ let (output, error) = rustc .cached_output(&process, extra_fingerprint) - .chain_err(|| "failed to run `rustc` to learn about target-specific information")?; + .with_context(|| "failed to run `rustc` to learn about target-specific information")?; let mut lines = output.lines(); let mut map = HashMap::new(); @@ -208,7 +213,7 @@ .map(|line| Ok(Cfg::from_str(line)?)) .filter(TargetInfo::not_user_specific_cfg) .collect::>>() - .chain_err(|| { + .with_context(|| { format!( "failed to parse the cfg from `rustc --print=cfg`, got:\n{}", output @@ -409,7 +414,7 @@ process.arg("--crate-type").arg(crate_type.as_str()); - let output = process.exec_with_output().chain_err(|| { + let output = process.exec_with_output().with_context(|| { format!( "failed to run `rustc` to learn about crate-type {} information", crate_type @@ -650,9 +655,14 @@ } /// Collection of information about `rustc` and the host and target. -pub struct RustcTargetData { +pub struct RustcTargetData<'cfg> { /// Information about `rustc` itself. pub rustc: Rustc, + + /// Config + config: &'cfg Config, + requested_kinds: Vec, + /// Build information for the "host", which is information about when /// `rustc` is invoked without a `--target` flag. This is used for /// procedural macros, build scripts, etc. 
@@ -665,27 +675,17 @@ target_info: HashMap, } -impl RustcTargetData { +impl<'cfg> RustcTargetData<'cfg> { pub fn new( - ws: &Workspace<'_>, + ws: &Workspace<'cfg>, requested_kinds: &[CompileKind], - ) -> CargoResult { + ) -> CargoResult> { let config = ws.config(); let rustc = config.load_global_rustc(Some(ws))?; let host_config = config.target_cfg_triple(&rustc.host)?; let host_info = TargetInfo::new(config, requested_kinds, &rustc, CompileKind::Host)?; let mut target_config = HashMap::new(); let mut target_info = HashMap::new(); - for kind in requested_kinds { - if let CompileKind::Target(target) = *kind { - let tcfg = config.target_cfg_triple(target.short_name())?; - target_config.insert(target, tcfg); - target_info.insert( - target, - TargetInfo::new(config, requested_kinds, &rustc, *kind)?, - ); - } - } // This is a hack. The unit_dependency graph builder "pretends" that // `CompileKind::Host` is `CompileKind::Target(host)` if the @@ -698,13 +698,49 @@ target_config.insert(ct, host_config.clone()); } - Ok(RustcTargetData { + let mut res = RustcTargetData { rustc, + config, + requested_kinds: requested_kinds.into(), host_config, host_info, target_config, target_info, - }) + }; + + // Get all kinds we currently know about. + // + // For now, targets can only ever come from the root workspace + // units as artifact dependencies are not a thing yet, so this + // correctly represents all the kinds that can happen. When we + // have artifact dependencies or other ways for targets to + // appear at places that are not the root units, we may have + // to revisit this. 
+ let all_kinds = requested_kinds + .iter() + .copied() + .chain(ws.members().flat_map(|p| { + p.manifest() + .default_kind() + .into_iter() + .chain(p.manifest().forced_kind()) + })); + for kind in all_kinds { + if let CompileKind::Target(target) = kind { + if !res.target_config.contains_key(&target) { + res.target_config + .insert(target, res.config.target_cfg_triple(target.short_name())?); + } + if !res.target_info.contains_key(&target) { + res.target_info.insert( + target, + TargetInfo::new(res.config, &res.requested_kinds, &res.rustc, kind)?, + ); + } + } + } + + Ok(res) } /// Returns a "short" name for the given kind, suitable for keying off @@ -758,3 +794,98 @@ self.target_config(kind).links_overrides.get(lib_name) } } + +/// Structure used to deal with Rustdoc fingerprinting +#[derive(Debug, Serialize, Deserialize)] +pub struct RustDocFingerprint { + pub rustc_vv: String, +} + +impl RustDocFingerprint { + /// This function checks whether the latest version of `Rustc` used to compile this + /// `Workspace`'s docs was the same as the one is currently being used in this `cargo doc` + /// call. + /// + /// In case it's not, it takes care of removing the `doc/` folder as well as overwriting + /// the rustdoc fingerprint info in order to guarantee that we won't end up with mixed + /// versions of the `js/html/css` files that `rustdoc` autogenerates which do not have + /// any versioning. 
+ pub fn check_rustdoc_fingerprint(cx: &Context<'_, '_>) -> CargoResult<()> { + if cx.bcx.config.cli_unstable().skip_rustdoc_fingerprint { + return Ok(()); + } + let actual_rustdoc_target_data = RustDocFingerprint { + rustc_vv: cx.bcx.rustc().verbose_version.clone(), + }; + + let fingerprint_path = cx.files().host_root().join(".rustdoc_fingerprint.json"); + let write_fingerprint = || -> CargoResult<()> { + paths::write( + &fingerprint_path, + serde_json::to_string(&actual_rustdoc_target_data)?, + ) + }; + let rustdoc_data = match paths::read(&fingerprint_path) { + Ok(rustdoc_data) => rustdoc_data, + // If the fingerprint does not exist, do not clear out the doc + // directories. Otherwise this ran into problems where projects + // like rustbuild were creating the doc directory before running + // `cargo doc` in a way that deleting it would break it. + Err(_) => return write_fingerprint(), + }; + match serde_json::from_str::(&rustdoc_data) { + Ok(fingerprint) => { + if fingerprint.rustc_vv == actual_rustdoc_target_data.rustc_vv { + return Ok(()); + } else { + log::debug!( + "doc fingerprint changed:\noriginal:\n{}\nnew:\n{}", + fingerprint.rustc_vv, + actual_rustdoc_target_data.rustc_vv + ); + } + } + Err(e) => { + log::debug!("could not deserialize {:?}: {}", fingerprint_path, e); + } + }; + // Fingerprint does not match, delete the doc directories and write a new fingerprint. + log::debug!( + "fingerprint {:?} mismatch, clearing doc directories", + fingerprint_path + ); + cx.bcx + .all_kinds + .iter() + .map(|kind| cx.files().layout(*kind).doc()) + .filter(|path| path.exists()) + .try_for_each(|path| clean_doc(path))?; + write_fingerprint()?; + return Ok(()); + + fn clean_doc(path: &Path) -> CargoResult<()> { + let entries = path + .read_dir() + .with_context(|| format!("failed to read directory `{}`", path.display()))?; + for entry in entries { + let entry = entry?; + // Don't remove hidden files. Rustdoc does not create them, + // but the user might have. 
+ if entry + .file_name() + .to_str() + .map_or(false, |name| name.starts_with('.')) + { + continue; + } + let path = entry.path(); + if entry.file_type()?.is_dir() { + paths::remove_dir_all(path)?; + } else { + paths::remove_file(path)?; + } + } + Ok(()) + } + } +} diff -Nru cargo-0.53.0/src/cargo/core/compiler/build_plan.rs cargo-0.54.0/src/cargo/core/compiler/build_plan.rs --- cargo-0.53.0/src/cargo/core/compiler/build_plan.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/cargo/core/compiler/build_plan.rs 2021-04-27 14:35:53.000000000 +0000 @@ -14,7 +14,8 @@ use super::context::OutputFile; use super::{CompileKind, CompileMode, Context, Unit}; use crate::core::TargetKind; -use crate::util::{internal, CargoResult, Config, ProcessBuilder}; +use crate::util::{internal, CargoResult, Config}; +use cargo_util::ProcessBuilder; #[derive(Debug, Serialize)] struct Invocation { diff -Nru cargo-0.53.0/src/cargo/core/compiler/compilation.rs cargo-0.54.0/src/cargo/core/compiler/compilation.rs --- cargo-0.53.0/src/cargo/core/compiler/compilation.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/cargo/core/compiler/compilation.rs 2021-04-27 14:35:53.000000000 +0000 @@ -4,12 +4,13 @@ use std::path::PathBuf; use cargo_platform::CfgExpr; +use cargo_util::{paths, ProcessBuilder}; use semver::Version; use super::BuildContext; use crate::core::compiler::{CompileKind, Metadata, Unit}; use crate::core::Package; -use crate::util::{self, config, join_paths, process, CargoResult, Config, ProcessBuilder}; +use crate::util::{config, CargoResult, Config}; /// Structure with enough information to run `rustdoc --test`. 
pub struct Doctest { @@ -126,10 +127,10 @@ sysroot_target_libdir: bcx .all_kinds .iter() - .map(|kind| { + .map(|&kind| { ( - *kind, - bcx.target_data.info(*kind).sysroot_target_libdir.clone(), + kind, + bcx.target_data.info(kind).sysroot_target_libdir.clone(), ) }) .collect(), @@ -184,7 +185,7 @@ unit: &Unit, script_meta: Option, ) -> CargoResult { - let rustdoc = process(&*self.config.rustdoc()?); + let rustdoc = ProcessBuilder::new(&*self.config.rustdoc()?); let cmd = fill_rustc_tool_env(rustdoc, unit); let mut p = self.fill_env(cmd, &unit.pkg, script_meta, unit.kind, true)?; unit.target.edition().cmd_edition_arg(&mut p); @@ -207,7 +208,13 @@ cmd: T, pkg: &Package, ) -> CargoResult { - self.fill_env(process(cmd), pkg, None, CompileKind::Host, false) + self.fill_env( + ProcessBuilder::new(cmd), + pkg, + None, + CompileKind::Host, + false, + ) } pub fn target_runner(&self, kind: CompileKind) -> Option<&(PathBuf, Vec)> { @@ -229,12 +236,12 @@ script_meta: Option, ) -> CargoResult { let builder = if let Some((runner, args)) = self.target_runner(kind) { - let mut builder = process(runner); + let mut builder = ProcessBuilder::new(runner); builder.args(args); builder.arg(cmd); builder } else { - process(cmd) + ProcessBuilder::new(cmd) }; self.fill_env(builder, pkg, script_meta, kind, false) } @@ -272,7 +279,7 @@ } } - let dylib_path = util::dylib_path(); + let dylib_path = paths::dylib_path(); let dylib_path_is_empty = dylib_path.is_empty(); search_path.extend(dylib_path.into_iter()); if cfg!(target_os = "macos") && dylib_path_is_empty { @@ -285,9 +292,9 @@ search_path.push(PathBuf::from("/usr/local/lib")); search_path.push(PathBuf::from("/usr/lib")); } - let search_path = join_paths(&search_path, util::dylib_path_envvar())?; + let search_path = paths::join_paths(&search_path, paths::dylib_path_envvar())?; - cmd.env(util::dylib_path_envvar(), &search_path); + cmd.env(paths::dylib_path_envvar(), &search_path); if let Some(meta) = script_meta { if let Some(env) = 
self.extra_env.get(&meta) { for (k, v) in env { diff -Nru cargo-0.53.0/src/cargo/core/compiler/compile_kind.rs cargo-0.54.0/src/cargo/core/compiler/compile_kind.rs --- cargo-0.53.0/src/cargo/core/compiler/compile_kind.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/cargo/core/compiler/compile_kind.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,8 +1,8 @@ use crate::core::Target; -use crate::util::errors::{CargoResult, CargoResultExt}; +use crate::util::errors::CargoResult; use crate::util::interning::InternedString; use crate::util::{Config, StableHasher}; -use anyhow::bail; +use anyhow::{bail, Context as _}; use serde::Serialize; use std::collections::BTreeSet; use std::fs; @@ -143,7 +143,7 @@ // with different paths always produce the same result. let path = Path::new(name) .canonicalize() - .chain_err(|| anyhow::format_err!("target path {:?} is not a valid file", name))?; + .with_context(|| format!("target path {:?} is not a valid file", name))?; let name = path .into_os_string() diff -Nru cargo-0.53.0/src/cargo/core/compiler/context/compilation_files.rs cargo-0.54.0/src/cargo/core/compiler/context/compilation_files.rs --- cargo-0.53.0/src/cargo/core/compiler/context/compilation_files.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/cargo/core/compiler/context/compilation_files.rs 2021-04-27 14:35:53.000000000 +0000 @@ -80,6 +80,17 @@ } } +/// Information about the metadata hashes used for a `Unit`. +struct MetaInfo { + /// The symbol hash to use. + meta_hash: Metadata, + /// Whether or not the `-C extra-filename` flag is used to generate unique + /// output filenames for this `Unit`. + /// + /// If this is `true`, the `meta_hash` is used for the filename. + use_extra_filename: bool, +} + /// Collection of information about the files emitted by the compiler, and the /// output directory structure. pub struct CompilationFiles<'a, 'cfg> { @@ -94,7 +105,7 @@ roots: Vec, ws: &'a Workspace<'cfg>, /// Metadata hash to use for each unit. 
- metas: HashMap>, + metas: HashMap, /// For each Unit, a list all files produced. outputs: HashMap>>>, } @@ -160,11 +171,14 @@ /// Gets the metadata for the given unit. /// /// See module docs for more details. - /// - /// Returns `None` if the unit should not use a metadata hash (like - /// rustdoc, or some dylibs). - pub fn metadata(&self, unit: &Unit) -> Option { - self.metas[unit] + pub fn metadata(&self, unit: &Unit) -> Metadata { + self.metas[unit].meta_hash + } + + /// Returns whether or not `-C extra-filename` is used to extend the + /// output filenames to make them unique. + pub fn use_extra_filename(&self, unit: &Unit) -> bool { + self.metas[unit].use_extra_filename } /// Gets the short hash based only on the `PackageId`. @@ -201,9 +215,11 @@ /// taken in those cases! fn pkg_dir(&self, unit: &Unit) -> String { let name = unit.pkg.package_id().name(); - match self.metas[unit] { - Some(ref meta) => format!("{}-{}", name, meta), - None => format!("{}-{}", name, self.target_short_hash(unit)), + let meta = &self.metas[unit]; + if meta.use_extra_filename { + format!("{}-{}", name, meta.meta_hash) + } else { + format!("{}-{}", name, self.target_short_hash(unit)) } } @@ -448,8 +464,9 @@ // Convert FileType to OutputFile. 
let mut outputs = Vec::new(); for file_type in file_types { - let meta = self.metadata(unit).map(|m| m.to_string()); - let path = out_dir.join(file_type.output_filename(&unit.target, meta.as_deref())); + let meta = &self.metas[unit]; + let meta_opt = meta.use_extra_filename.then(|| meta.meta_hash.to_string()); + let path = out_dir.join(file_type.output_filename(&unit.target, meta_opt.as_deref())); let hardlink = self.uplift_to(unit, &file_type, &path); let export_path = if unit.target.is_custom_build() { None @@ -471,11 +488,11 @@ } } -fn metadata_of( +fn metadata_of<'a>( unit: &Unit, cx: &Context<'_, '_>, - metas: &mut HashMap>, -) -> Option { + metas: &'a mut HashMap, +) -> &'a MetaInfo { if !metas.contains_key(unit) { let meta = compute_metadata(unit, cx, metas); metas.insert(unit.clone(), meta); @@ -483,18 +500,15 @@ metadata_of(&dep.unit, cx, metas); } } - metas[unit] + &metas[unit] } fn compute_metadata( unit: &Unit, cx: &Context<'_, '_>, - metas: &mut HashMap>, -) -> Option { + metas: &mut HashMap, +) -> MetaInfo { let bcx = &cx.bcx; - if !should_use_metadata(bcx, unit) { - return None; - } let mut hasher = StableHasher::new(); METADATA_VERSION.hash(&mut hasher); @@ -514,7 +528,7 @@ let mut deps_metadata = cx .unit_deps(unit) .iter() - .map(|dep| metadata_of(&dep.unit, cx, metas)) + .map(|dep| metadata_of(&dep.unit, cx, metas).meta_hash) .collect::>(); deps_metadata.sort(); deps_metadata.hash(&mut hasher); @@ -561,7 +575,10 @@ // with user dependencies. unit.is_std.hash(&mut hasher); - Some(Metadata(hasher.finish())) + MetaInfo { + meta_hash: Metadata(hasher.finish()), + use_extra_filename: should_use_metadata(bcx, unit), + } } fn hash_rustc_version(bcx: &BuildContext<'_, '_>, hasher: &mut StableHasher) { @@ -598,7 +615,7 @@ /// Returns whether or not this unit should use a metadata hash. 
fn should_use_metadata(bcx: &BuildContext<'_, '_>, unit: &Unit) -> bool { - if unit.mode.is_doc_test() { + if unit.mode.is_doc_test() || unit.mode.is_doc() { // Doc tests do not have metadata. return false; } diff -Nru cargo-0.53.0/src/cargo/core/compiler/context/mod.rs cargo-0.54.0/src/cargo/core/compiler/context/mod.rs --- cargo-0.53.0/src/cargo/core/compiler/context/mod.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/cargo/core/compiler/context/mod.rs 2021-04-27 14:35:53.000000000 +0000 @@ -2,13 +2,14 @@ use std::path::{Path, PathBuf}; use std::sync::{Arc, Mutex}; +use anyhow::Context as _; use filetime::FileTime; use jobserver::Client; use crate::core::compiler::compilation::{self, UnitOutput}; use crate::core::compiler::{self, Unit}; use crate::core::PackageId; -use crate::util::errors::{CargoResult, CargoResultExt}; +use crate::util::errors::CargoResult; use crate::util::profile; use super::build_plan::BuildPlan; @@ -18,7 +19,9 @@ use super::layout::Layout; use super::lto::Lto; use super::unit_graph::UnitDep; -use super::{BuildContext, Compilation, CompileKind, CompileMode, Executor, FileFlavor}; +use super::{ + BuildContext, Compilation, CompileKind, CompileMode, Executor, FileFlavor, RustDocFingerprint, +}; mod compilation_files; use self::compilation_files::CompilationFiles; @@ -94,7 +97,7 @@ Some(c) => c.clone(), None => { let client = Client::new(bcx.build_config.jobs as usize) - .chain_err(|| "failed to create jobserver")?; + .with_context(|| "failed to create jobserver")?; client.acquire_raw()?; client } @@ -133,6 +136,18 @@ custom_build::build_map(&mut self)?; self.check_collisions()?; + // We need to make sure that if there were any previous docs + // already compiled, they were compiled with the same Rustc version that we're currently + // using. Otherways we must remove the `doc/` folder and compile again forcing a rebuild. 
+ // + // This is important because the `.js`/`.html` & `.css` files that are generated by Rustc don't have + // any versioning (See https://github.com/rust-lang/cargo/issues/8461). + // Therefore, we can end up with weird bugs and behaviours if we mix different + // versions of these files. + if self.bcx.build_config.mode.is_doc() { + RustDocFingerprint::check_rustdoc_fingerprint(&self)? + } + for unit in &self.bcx.roots { // Build up a list of pending jobs, each of which represent // compiling a particular package. No actual work is executed as @@ -310,11 +325,11 @@ self.files_mut() .host .prepare() - .chain_err(|| "couldn't prepare build directories")?; + .with_context(|| "couldn't prepare build directories")?; for target in self.files.as_mut().unwrap().target.values_mut() { target .prepare() - .chain_err(|| "couldn't prepare build directories")?; + .with_context(|| "couldn't prepare build directories")?; } let files = self.files.as_ref().unwrap(); @@ -376,9 +391,7 @@ /// Returns the metadata hash for a RunCustomBuild unit. 
pub fn get_run_build_script_metadata(&self, unit: &Unit) -> Metadata { assert!(unit.mode.is_run_custom_build()); - self.files() - .metadata(unit) - .expect("build script should always have hash") + self.files().metadata(unit) } pub fn is_primary_package(&self, unit: &Unit) -> bool { @@ -545,11 +558,11 @@ pub fn new_jobserver(&mut self) -> CargoResult { let tokens = self.bcx.build_config.jobs as usize; - let client = Client::new(tokens).chain_err(|| "failed to create jobserver")?; + let client = Client::new(tokens).with_context(|| "failed to create jobserver")?; // Drain the client fully for i in 0..tokens { - client.acquire_raw().chain_err(|| { + client.acquire_raw().with_context(|| { format!( "failed to fully drain {}/{} token from jobserver at startup", i, tokens, diff -Nru cargo-0.53.0/src/cargo/core/compiler/custom_build.rs cargo-0.54.0/src/cargo/core/compiler/custom_build.rs --- cargo-0.53.0/src/cargo/core/compiler/custom_build.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/cargo/core/compiler/custom_build.rs 2021-04-27 14:35:53.000000000 +0000 @@ -3,10 +3,12 @@ use crate::core::compiler::context::Metadata; use crate::core::compiler::job_queue::JobState; use crate::core::{profiles::ProfileRoot, PackageId}; -use crate::util::errors::{CargoResult, CargoResultExt}; +use crate::util::errors::CargoResult; use crate::util::machine_message::{self, Message}; -use crate::util::{self, internal, paths, profile}; +use crate::util::{internal, profile}; +use anyhow::Context as _; use cargo_platform::Cfg; +use cargo_util::paths; use std::collections::hash_map::{Entry, HashMap}; use std::collections::{BTreeSet, HashSet}; use std::path::{Path, PathBuf}; @@ -306,7 +308,7 @@ // If we have an old build directory, then just move it into place, // otherwise create it! 
paths::create_dir_all(&script_out_dir) - .chain_err(|| "failed to create script output directory for build command")?; + .with_context(|| "failed to create script output directory for build command")?; // For all our native lib dependencies, pick up their metadata to pass // along to this custom build command. We're also careful to augment our @@ -368,7 +370,7 @@ }, true, ) - .chain_err(|| format!("failed to run custom build command for `{}`", pkg_descr)); + .with_context(|| format!("failed to run custom build command for `{}`", pkg_descr)); if let Err(error) = output { insert_warnings_in_build_outputs( @@ -394,7 +396,7 @@ // modified in the middle of the build. paths::set_file_time_no_err(output_file, timestamp); paths::write(&err_file, &output.stderr)?; - paths::write(&root_output_file, util::path2bytes(&script_out_dir)?)?; + paths::write(&root_output_file, paths::path2bytes(&script_out_dir)?)?; let parsed_output = BuildOutput::parse( &output.stdout, library_name, @@ -866,7 +868,7 @@ let output_file = script_run_dir.join("output"); let prev_script_out_dir = paths::read_bytes(&root_output_file) - .and_then(|bytes| util::bytes2path(&bytes)) + .and_then(|bytes| paths::bytes2path(&bytes)) .unwrap_or_else(|_| script_out_dir.clone()); let extra_link_arg = cx.bcx.config.cli_unstable().extra_link_arg; diff -Nru cargo-0.53.0/src/cargo/core/compiler/fingerprint.rs cargo-0.54.0/src/cargo/core/compiler/fingerprint.rs --- cargo-0.53.0/src/cargo/core/compiler/fingerprint.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/cargo/core/compiler/fingerprint.rs 2021-04-27 14:35:53.000000000 +0000 @@ -321,7 +321,8 @@ use std::sync::{Arc, Mutex}; use std::time::SystemTime; -use anyhow::{bail, format_err}; +use anyhow::{bail, format_err, Context as _}; +use cargo_util::{paths, ProcessBuilder}; use filetime::FileTime; use log::{debug, info}; use serde::de; @@ -331,10 +332,10 @@ use crate::core::compiler::unit_graph::UnitDep; use crate::core::Package; use crate::util; -use 
crate::util::errors::{CargoResult, CargoResultExt}; +use crate::util::errors::CargoResult; use crate::util::interning::InternedString; -use crate::util::paths; -use crate::util::{internal, path_args, profile, ProcessBuilder}; +use crate::util::{internal, path_args, profile}; +use crate::CARGO_ENV; use super::custom_build::BuildDeps; use super::job::{Job, Work}; @@ -712,6 +713,7 @@ mtime_cache: &mut HashMap, pkg_root: &Path, target_root: &Path, + cargo_exe: &Path, ) -> CargoResult> { match self { // We need to parse `dep_info`, learn about the crate's dependencies. @@ -727,7 +729,21 @@ None => return Ok(Some(StaleItem::MissingFile(dep_info))), }; for (key, previous) in info.env.iter() { - let current = env::var(key).ok(); + let current = if key == CARGO_ENV { + Some( + cargo_exe + .to_str() + .ok_or_else(|| { + format_err!( + "cargo exe path {} must be valid UTF-8", + cargo_exe.display() + ) + })? + .to_string(), + ) + } else { + env::var(key).ok() + }; if current == *previous { continue; } @@ -980,6 +996,7 @@ mtime_cache: &mut HashMap, pkg_root: &Path, target_root: &Path, + cargo_exe: &Path, ) -> CargoResult<()> { assert!(!self.fs_status.up_to_date()); @@ -1071,7 +1088,9 @@ // files for this package itself. If we do find something log a helpful // message and bail out so we stay stale. for local in self.local.get_mut().unwrap().iter() { - if let Some(item) = local.find_stale_item(mtime_cache, pkg_root, target_root)? { + if let Some(item) = + local.find_stale_item(mtime_cache, pkg_root, target_root, cargo_exe)? + { item.log(); return Ok(()); } @@ -1256,7 +1275,13 @@ // After we built the initial `Fingerprint` be sure to update the // `fs_status` field of it. 
let target_root = target_root(cx); - fingerprint.check_filesystem(&mut cx.mtime_cache, unit.pkg.root(), &target_root)?; + let cargo_exe = cx.bcx.config.cargo_exe()?; + fingerprint.check_filesystem( + &mut cx.mtime_cache, + unit.pkg.root(), + &target_root, + cargo_exe, + )?; let fingerprint = Arc::new(fingerprint); cx.fingerprints @@ -1286,7 +1311,7 @@ let target_root = target_root(cx); let local = if unit.mode.is_doc() { // rustdoc does not have dep-info files. - let fingerprint = pkg_fingerprint(cx.bcx, &unit.pkg).chain_err(|| { + let fingerprint = pkg_fingerprint(cx.bcx, &unit.pkg).with_context(|| { format!( "failed to determine package fingerprint for documenting {}", unit.pkg @@ -1327,14 +1352,18 @@ // Include metadata since it is exposed as environment variables. let m = unit.pkg.manifest().metadata(); let metadata = util::hash_u64((&m.authors, &m.description, &m.homepage, &m.repository)); - let config = if unit.mode.is_doc() && cx.bcx.config.cli_unstable().rustdoc_map { - cx.bcx - .config - .doc_extern_map() - .map_or(0, |map| util::hash_u64(map)) - } else { - 0 - }; + let mut config = 0u64; + if unit.mode.is_doc() && cx.bcx.config.cli_unstable().rustdoc_map { + config = config.wrapping_add( + cx.bcx + .config + .doc_extern_map() + .map_or(0, |map| util::hash_u64(map)), + ); + } + if let Some(allow_features) = &cx.bcx.config.cli_unstable().allow_features { + config = config.wrapping_add(util::hash_u64(allow_features)); + } let compile_kind = unit.kind.fingerprint_hash(); Ok(Fingerprint { rustc: util::hash_u64(&cx.bcx.rustc().verbose_version), @@ -1371,7 +1400,7 @@ let local = (gen_local)( deps, Some(&|| { - pkg_fingerprint(cx.bcx, &unit.pkg).chain_err(|| { + pkg_fingerprint(cx.bcx, &unit.pkg).with_context(|| { format!( "failed to determine package fingerprint for build script for {}", unit.pkg @@ -1639,7 +1668,7 @@ let old_fingerprint_json = paths::read(&loc.with_extension("json"))?; let old_fingerprint: Fingerprint = 
serde_json::from_str(&old_fingerprint_json) - .chain_err(|| internal("failed to deserialize json"))?; + .with_context(|| internal("failed to deserialize json"))?; // Fingerprint can be empty after a failed rebuild (see comment in prepare_target). if !old_fingerprint_short.is_empty() { debug_assert_eq!(util::to_hex(old_fingerprint.hash()), old_fingerprint_short); @@ -1846,9 +1875,13 @@ // you write a binary that does `println!("{}", env!("OUT_DIR"))` we won't // recompile that if you move the target directory. Hopefully that's not too // bad of an issue for now... + // + // This also includes `CARGO` since if the code is explicitly wanting to + // know that path, it should be rebuilt if it changes. The CARGO path is + // not tracked elsewhere in the fingerprint. on_disk_info .env - .retain(|(key, _)| !rustc_cmd.get_envs().contains_key(key)); + .retain(|(key, _)| !rustc_cmd.get_envs().contains_key(key) || key == CARGO_ENV); for file in depinfo.files { // The path may be absolute or relative, canonical or not. 
Make sure @@ -1915,7 +1948,7 @@ _ => return None, }; let bytes = read_bytes(bytes)?; - files.push((ty, util::bytes2path(bytes).ok()?)); + files.push((ty, paths::bytes2path(bytes).ok()?)); } let nenv = read_usize(bytes)?; @@ -1960,7 +1993,7 @@ DepInfoPathType::PackageRootRelative => dst.push(0), DepInfoPathType::TargetRootRelative => dst.push(1), } - write_bytes(dst, util::path2bytes(file)?); + write_bytes(dst, paths::path2bytes(file)?); } write_usize(dst, self.env.len()); diff -Nru cargo-0.53.0/src/cargo/core/compiler/job_queue.rs cargo-0.54.0/src/cargo/core/compiler/job_queue.rs --- cargo-0.53.0/src/cargo/core/compiler/job_queue.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/cargo/core/compiler/job_queue.rs 2021-04-27 14:35:53.000000000 +0000 @@ -56,7 +56,8 @@ use std::sync::Arc; use std::time::Duration; -use anyhow::format_err; +use anyhow::{format_err, Context as _}; +use cargo_util::ProcessBuilder; use crossbeam_utils::thread::Scope; use jobserver::{Acquired, Client, HelperThread}; use log::{debug, info, trace}; @@ -77,8 +78,8 @@ use crate::drop_eprint; use crate::util::diagnostic_server::{self, DiagnosticPrinter}; use crate::util::machine_message::{self, Message as _}; +use crate::util::CargoResult; use crate::util::{self, internal, profile}; -use crate::util::{CargoResult, CargoResultExt, ProcessBuilder}; use crate::util::{Config, DependencyQueue, Progress, ProgressStyle, Queue}; /// This structure is backed by the `DependencyQueue` type and manages the @@ -439,7 +440,7 @@ .into_helper_thread(move |token| { messages.push(Message::Token(token)); }) - .chain_err(|| "failed to create helper thread for jobserver management")?; + .with_context(|| "failed to create helper thread for jobserver management")?; // Create a helper thread to manage the diagnostics for rustfix if // necessary. 
@@ -536,7 +537,7 @@ .push(token); client .release_raw() - .chain_err(|| "failed to release jobserver token")?; + .with_context(|| "failed to release jobserver token")?; } Ok(()) @@ -616,7 +617,7 @@ .push(FutureIncompatReportCrate { package_id, report }); } Message::Token(acquired_token) => { - let token = acquired_token.chain_err(|| "failed to acquire jobserver token")?; + let token = acquired_token.with_context(|| "failed to acquire jobserver token")?; self.tokens.push(token); } Message::NeedsToken(id) => { @@ -807,9 +808,16 @@ } fn emit_future_incompat(&mut self, cx: &mut Context<'_, '_>) { - if cx.bcx.config.cli_unstable().enable_future_incompat_feature - && !self.per_crate_future_incompat_reports.is_empty() - { + if cx.bcx.config.cli_unstable().future_incompat_report { + if self.per_crate_future_incompat_reports.is_empty() { + drop( + cx.bcx + .config + .shell() + .note("0 dependencies had future-incompat warnings"), + ); + return; + } self.per_crate_future_incompat_reports .sort_by_key(|r| r.package_id); diff -Nru cargo-0.53.0/src/cargo/core/compiler/layout.rs cargo-0.54.0/src/cargo/core/compiler/layout.rs --- cargo-0.53.0/src/cargo/core/compiler/layout.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/cargo/core/compiler/layout.rs 2021-04-27 14:35:53.000000000 +0000 @@ -100,8 +100,8 @@ use crate::core::compiler::CompileTarget; use crate::core::Workspace; -use crate::util::paths; use crate::util::{CargoResult, FileLock}; +use cargo_util::paths; use std::path::{Path, PathBuf}; /// Contains the paths of all target output locations. 
diff -Nru cargo-0.53.0/src/cargo/core/compiler/mod.rs cargo-0.54.0/src/cargo/core/compiler/mod.rs --- cargo-0.53.0/src/cargo/core/compiler/mod.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/cargo/core/compiler/mod.rs 2021-04-27 14:35:53.000000000 +0000 @@ -28,12 +28,14 @@ use std::path::{Path, PathBuf}; use std::sync::Arc; -use anyhow::Error; +use anyhow::{Context as _, Error}; use lazycell::LazyCell; use log::debug; pub use self::build_config::{BuildConfig, CompileMode, MessageFormat}; -pub use self::build_context::{BuildContext, FileFlavor, FileType, RustcTargetData, TargetInfo}; +pub use self::build_context::{ + BuildContext, FileFlavor, FileType, RustDocFingerprint, RustcTargetData, TargetInfo, +}; use self::build_plan::BuildPlan; pub use self::compilation::{Compilation, Doctest, UnitOutput}; pub use self::compile_kind::{CompileKind, CompileTarget}; @@ -52,11 +54,11 @@ use crate::core::manifest::TargetSourcePath; use crate::core::profiles::{PanicStrategy, Profile, Strip}; use crate::core::{Feature, PackageId, Target}; -use crate::util::errors::{self, CargoResult, CargoResultExt, ProcessError, VerboseError}; +use crate::util::errors::{CargoResult, VerboseError}; use crate::util::interning::InternedString; -use crate::util::machine_message::Message; -use crate::util::{self, machine_message, ProcessBuilder}; -use crate::util::{add_path_args, internal, join_paths, paths, profile}; +use crate::util::machine_message::{self, Message}; +use crate::util::{add_path_args, internal, iter_join_onto, profile}; +use cargo_util::{paths, ProcessBuilder, ProcessError}; const RUSTDOC_CRATE_VERSION_FLAG: &str = "--crate-version"; @@ -227,9 +229,14 @@ let pass_l_flag = unit.target.is_lib() || !unit.pkg.targets().iter().any(|t| t.is_lib()); let link_type = (&unit.target).into(); - let dep_info_name = match cx.files().metadata(unit) { - Some(metadata) => format!("{}-{}.d", unit.target.crate_name(), metadata), - None => format!("{}.d", unit.target.crate_name()), + let 
dep_info_name = if cx.files().use_extra_filename(unit) { + format!( + "{}-{}.d", + unit.target.crate_name(), + cx.files().metadata(unit) + ) + } else { + format!("{}.d", unit.target.crate_name()) }; let rustc_dep_info_loc = root.join(dep_info_name); let dep_info_loc = fingerprint::dep_info_loc(cx, unit); @@ -301,7 +308,7 @@ .as_ref() .and_then(|perr| perr.code) { - Some(n) if errors::is_simple_exit_code(n) => VerboseError::new(err).into(), + Some(n) if cargo_util::is_simple_exit_code(n) => VerboseError::new(err).into(), _ => err, } } @@ -329,7 +336,7 @@ }, ) .map_err(verbose_if_simple_exit_code) - .chain_err(|| format!("could not compile `{}`", name))?; + .with_context(|| format!("could not compile `{}`", name))?; } if rustc_dep_info_loc.exists() { @@ -343,7 +350,7 @@ // Do not track source files in the fingerprint for registry dependencies. is_local, ) - .chain_err(|| { + .with_context(|| { internal(format!( "could not parse/generate dep info at: {}", rustc_dep_info_loc.display() @@ -502,7 +509,7 @@ build_scripts: &BuildScripts, root_output: &Path, ) -> CargoResult<()> { - let var = util::dylib_path_envvar(); + let var = paths::dylib_path_envvar(); let search_path = rustc.get_env(var).unwrap_or_default(); let mut search_path = env::split_paths(&search_path).collect::>(); for (pkg_id, metadata) in &build_scripts.plugins { @@ -514,7 +521,7 @@ root_output, )); } - let search_path = join_paths(&search_path, var)?; + let search_path = paths::join_paths(&search_path, var)?; rustc.env(var, &search_path); Ok(()) } @@ -592,14 +599,14 @@ // script_metadata is not needed here, it is only for tests. 
let mut rustdoc = cx.compilation.rustdoc_process(unit, None)?; rustdoc.inherit_jobserver(&cx.jobserver); - rustdoc.arg("--crate-name").arg(&unit.target.crate_name()); + let crate_name = unit.target.crate_name(); + rustdoc.arg("--crate-name").arg(&crate_name); add_path_args(bcx.ws, unit, &mut rustdoc); add_cap_lints(bcx, unit, &mut rustdoc); if let CompileKind::Target(target) = unit.kind { rustdoc.arg("--target").arg(target.rustc_target()); } - let doc_dir = cx.files().out_dir(unit); // Create the documentation directory ahead of time as rustdoc currently has @@ -607,13 +614,14 @@ // it doesn't already exist. paths::create_dir_all(&doc_dir)?; - rustdoc.arg("-o").arg(doc_dir); + rustdoc.arg("-o").arg(&doc_dir); for feat in &unit.features { rustdoc.arg("--cfg").arg(&format!("feature=\"{}\"", feat)); } add_error_format_and_color(cx, &mut rustdoc, false); + add_allow_features(cx, &mut rustdoc); if let Some(args) = cx.bcx.extra_args_for(unit) { rustdoc.args(args); @@ -646,6 +654,13 @@ } } } + let crate_dir = doc_dir.join(&crate_name); + if crate_dir.exists() { + // Remove output from a previous build. This ensures that stale + // files for removed items are removed. + log::debug!("removing pre-existing doc directory {:?}", crate_dir); + paths::remove_dir_all(crate_dir)?; + } state.running(&rustdoc); rustdoc @@ -663,7 +678,7 @@ }, false, ) - .chain_err(|| format!("could not document `{}`", name))?; + .with_context(|| format!("could not document `{}`", name))?; Ok(()) })) } @@ -696,6 +711,15 @@ } } +/// Forward -Zallow-features if it is set for cargo. +fn add_allow_features(cx: &Context<'_, '_>, cmd: &mut ProcessBuilder) { + if let Some(allow) = &cx.bcx.config.cli_unstable().allow_features { + let mut arg = String::from("-Zallow-features="); + let _ = iter_join_onto(&mut arg, allow, ","); + cmd.arg(&arg); + } +} + /// Add error-format flags to the command. /// /// Cargo always uses JSON output. 
This has several benefits, such as being @@ -772,6 +796,7 @@ add_path_args(bcx.ws, unit, cmd); add_error_format_and_color(cx, cmd, cx.rmeta_required(unit)); + add_allow_features(cx, cmd); if !test { for crate_type in crate_types.iter() { @@ -869,15 +894,10 @@ cmd.arg("--cfg").arg(&format!("feature=\"{}\"", feat)); } - match cx.files().metadata(unit) { - Some(m) => { - cmd.arg("-C").arg(&format!("metadata={}", m)); - cmd.arg("-C").arg(&format!("extra-filename=-{}", m)); - } - None => { - cmd.arg("-C") - .arg(&format!("metadata={}", cx.files().target_short_hash(unit))); - } + let meta = cx.files().metadata(unit); + cmd.arg("-C").arg(&format!("metadata={}", meta)); + if cx.files().use_extra_filename(unit) { + cmd.arg("-C").arg(&format!("extra-filename=-{}", meta)); } if rpath { @@ -924,7 +944,7 @@ .env("RUSTC_BOOTSTRAP", "1"); } - if bcx.config.cli_unstable().enable_future_incompat_feature { + if bcx.config.cli_unstable().future_incompat_report { cmd.arg("-Z").arg("emit-future-incompat-report"); } diff -Nru cargo-0.53.0/src/cargo/core/compiler/output_depinfo.rs cargo-0.54.0/src/cargo/core/compiler/output_depinfo.rs --- cargo-0.53.0/src/cargo/core/compiler/output_depinfo.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/cargo/core/compiler/output_depinfo.rs 2021-04-27 14:35:53.000000000 +0000 @@ -26,11 +26,10 @@ use std::io::{BufWriter, Write}; use std::path::{Path, PathBuf}; -use log::debug; - use super::{fingerprint, Context, FileFlavor, Unit}; -use crate::util::paths; use crate::util::{internal, CargoResult}; +use cargo_util::paths; +use log::debug; fn render_filename>(path: P, basedir: Option<&str>) -> CargoResult { let path = path.as_ref(); diff -Nru cargo-0.53.0/src/cargo/core/compiler/rustdoc.rs cargo-0.54.0/src/cargo/core/compiler/rustdoc.rs --- cargo-0.53.0/src/cargo/core/compiler/rustdoc.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/cargo/core/compiler/rustdoc.rs 2021-04-27 14:35:53.000000000 +0000 @@ -5,7 +5,7 @@ use 
crate::core::compiler::CompileKind; use crate::sources::CRATES_IO_REGISTRY; use crate::util::errors::{internal, CargoResult}; -use crate::util::ProcessBuilder; +use cargo_util::ProcessBuilder; use std::collections::HashMap; use std::fmt; use std::hash; diff -Nru cargo-0.53.0/src/cargo/core/compiler/standard_lib.rs cargo-0.54.0/src/cargo/core/compiler/standard_lib.rs --- cargo-0.53.0/src/cargo/core/compiler/standard_lib.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/cargo/core/compiler/standard_lib.rs 2021-04-27 14:35:53.000000000 +0000 @@ -3,8 +3,8 @@ use crate::core::compiler::UnitInterner; use crate::core::compiler::{CompileKind, CompileMode, RustcTargetData, Unit}; use crate::core::profiles::{Profiles, UnitFor}; -use crate::core::resolver::features::{FeaturesFor, RequestedFeatures, ResolvedFeatures}; -use crate::core::resolver::{HasDevUnits, ResolveOpts}; +use crate::core::resolver::features::{CliFeatures, FeaturesFor, ResolvedFeatures}; +use crate::core::resolver::HasDevUnits; use crate::core::{Dependency, PackageId, PackageSet, Resolve, SourceId, Workspace}; use crate::ops::{self, Packages}; use crate::util::errors::CargoResult; @@ -33,7 +33,7 @@ /// Resolve the standard library dependencies. pub fn resolve_std<'cfg>( ws: &Workspace<'cfg>, - target_data: &RustcTargetData, + target_data: &RustcTargetData<'cfg>, requested_targets: &[CompileKind], crates: &[String], ) -> CargoResult<(PackageSet<'cfg>, Resolve, ResolvedFeatures)> { @@ -107,18 +107,14 @@ "default".to_string(), ], }; - // dev_deps setting shouldn't really matter here. 
- let opts = ResolveOpts::new( - /*dev_deps*/ false, - RequestedFeatures::from_command_line( - &features, /*all_features*/ false, /*uses_default_features*/ false, - ), - ); + let cli_features = CliFeatures::from_command_line( + &features, /*all_features*/ false, /*uses_default_features*/ false, + )?; let resolve = ops::resolve_ws_with_opts( &std_ws, target_data, requested_targets, - &opts, + &cli_features, &specs, HasDevUnits::No, crate::core::resolver::features::ForceAllTargets::No, @@ -162,17 +158,18 @@ // in time is minimal, and the difference in caching is // significant. let mode = CompileMode::Build; - let profile = profiles.get_profile( - pkg.package_id(), - /*is_member*/ false, - /*is_local*/ false, - unit_for, - mode, - ); let features = std_features.activated_features(pkg.package_id(), FeaturesFor::NormalOrDev); for kind in kinds { let list = ret.entry(*kind).or_insert_with(Vec::new); + let profile = profiles.get_profile( + pkg.package_id(), + /*is_member*/ false, + /*is_local*/ false, + unit_for, + mode, + *kind, + ); list.push(interner.intern( pkg, lib, @@ -188,7 +185,7 @@ Ok(ret) } -fn detect_sysroot_src_path(target_data: &RustcTargetData) -> CargoResult { +fn detect_sysroot_src_path(target_data: &RustcTargetData<'_>) -> CargoResult { if let Some(s) = env::var_os("__CARGO_TESTS_ONLY_SRC_ROOT") { return Ok(s.into()); } diff -Nru cargo-0.53.0/src/cargo/core/compiler/timings.rs cargo-0.54.0/src/cargo/core/compiler/timings.rs --- cargo-0.53.0/src/cargo/core/compiler/timings.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/cargo/core/compiler/timings.rs 2021-04-27 14:35:53.000000000 +0000 @@ -8,7 +8,9 @@ use crate::core::PackageId; use crate::util::cpu::State; use crate::util::machine_message::{self, Message}; -use crate::util::{paths, CargoResult, CargoResultExt, Config}; +use crate::util::{CargoResult, Config}; +use anyhow::Context as _; +use cargo_util::paths; use std::collections::HashMap; use std::io::{BufWriter, Write}; use 
std::time::{Duration, Instant, SystemTime}; @@ -323,7 +325,7 @@ .sort_unstable_by(|a, b| a.start.partial_cmp(&b.start).unwrap()); if self.report_html { self.report_html(bcx, error) - .chain_err(|| "failed to save timing report")?; + .with_context(|| "failed to save timing report")?; } Ok(()) } diff -Nru cargo-0.53.0/src/cargo/core/compiler/unit_dependencies.rs cargo-0.54.0/src/cargo/core/compiler/unit_dependencies.rs --- cargo-0.53.0/src/cargo/core/compiler/unit_dependencies.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/cargo/core/compiler/unit_dependencies.rs 2021-04-27 14:35:53.000000000 +0000 @@ -44,7 +44,7 @@ /// library. is_std: bool, global_mode: CompileMode, - target_data: &'a RustcTargetData, + target_data: &'a RustcTargetData<'cfg>, profiles: &'a Profiles, interner: &'a UnitInterner, @@ -63,7 +63,7 @@ roots: &[Unit], std_roots: &HashMap>, global_mode: CompileMode, - target_data: &'a RustcTargetData, + target_data: &'a RustcTargetData<'cfg>, profiles: &'a Profiles, interner: &'a UnitInterner, ) -> CargoResult { @@ -585,6 +585,7 @@ is_local, unit_for, mode, + kind, ); new_unit_dep_with_profile(state, parent, pkg, target, unit_for, kind, mode, profile) } diff -Nru cargo-0.53.0/src/cargo/core/dependency.rs cargo-0.54.0/src/cargo/core/dependency.rs --- cargo-0.53.0/src/cargo/core/dependency.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/cargo/core/dependency.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,3 +1,4 @@ +use anyhow::Context as _; use cargo_platform::Platform; use log::trace; use semver::ReqParseError; @@ -8,7 +9,7 @@ use std::rc::Rc; use crate::core::{PackageId, SourceId, Summary}; -use crate::util::errors::{CargoResult, CargoResultExt}; +use crate::util::errors::CargoResult; use crate::util::interning::InternedString; use crate::util::Config; @@ -132,7 +133,7 @@ } Err(e) => { let err: CargoResult = Err(e.into()); - let v: VersionReq = err.chain_err(|| { + let v: VersionReq = err.with_context(|| { format!( "failed to parse 
the version requirement `{}` for dependency `{}`", req, name diff -Nru cargo-0.53.0/src/cargo/core/features.rs cargo-0.54.0/src/cargo/core/features.rs --- cargo-0.53.0/src/cargo/core/features.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/cargo/core/features.rs 2021-04-27 14:35:53.000000000 +0000 @@ -68,7 +68,6 @@ //! get an instance of `CliUnstable` and check if the option has been //! enabled on the `CliUnstable` instance. Nightly gating is already //! handled, so no need to worry about that. -//! 4. Update the `-Z help` documentation in the `main` function. //! //! ## Stabilization //! @@ -83,26 +82,29 @@ //! 2. `-Z unstable-options`: Find the call to `fail_if_stable_opt` and //! remove it. Be sure to update the man pages if necessary. //! 3. `-Z` flag: Change the parsing code in [`CliUnstable::add`] to call -//! `stabilized_warn` or `stabilized_err`. Remove it from the `-Z help` -//! docs in the `main` function. Remove the `(unstable)` note in the -//! clap help text if necessary. +//! `stabilized_warn` or `stabilized_err` and remove the field from +//! `CliUnstable. Remove the `(unstable)` note in the clap help text if +//! necessary. //! 2. Remove `masquerade_as_nightly_cargo` from any tests, and remove //! `cargo-features` from `Cargo.toml` test files if any. //! 3. Remove the docs from unstable.md and update the redirect at the bottom //! of that page. Update the rest of the documentation to add the new //! feature. 
+use std::collections::BTreeSet; use std::env; use std::fmt; use std::str::FromStr; use anyhow::{bail, Error}; +use cargo_util::ProcessBuilder; use serde::{Deserialize, Serialize}; use crate::util::errors::CargoResult; -use crate::util::{indented_lines, ProcessBuilder}; +use crate::util::{indented_lines, iter_join}; use crate::Config; +pub const HIDDEN: &str = ""; pub const SEE_CHANNELS: &str = "See https://doc.rust-lang.org/book/appendix-07-nightly-rust.html for more information \ about Rust release channels."; @@ -388,6 +390,9 @@ // Support for 2021 edition. (unstable, edition2021, "", "reference/unstable.html#edition-2021"), + + // Allow to specify per-package targets (compile kinds) + (unstable, per_package_target, "", "reference/unstable.html#per-package-target"), } const PUBLISH_LOCKFILE_REMOVED: &str = "The publish-lockfile key in Cargo.toml \ @@ -415,13 +420,18 @@ let mut ret = Features::default(); ret.nightly_features_allowed = config.nightly_features_allowed; for feature in features { - ret.add(feature, warnings)?; + ret.add(feature, config, warnings)?; ret.activated.push(feature.to_string()); } Ok(ret) } - fn add(&mut self, feature_name: &str, warnings: &mut Vec) -> CargoResult<()> { + fn add( + &mut self, + feature_name: &str, + config: &Config, + warnings: &mut Vec, + ) -> CargoResult<()> { let nightly_features_allowed = self.nightly_features_allowed; let (slot, feature) = match self.status(feature_name) { Some(p) => p, @@ -469,7 +479,17 @@ SEE_CHANNELS, see_docs() ), - Status::Unstable => {} + Status::Unstable => { + if let Some(allow) = &config.cli_unstable().allow_features { + if !allow.contains(feature_name) { + bail!( + "the feature `{}` is not in the list of allowed features: [{}]", + feature_name, + iter_join(allow, ", "), + ); + } + } + } Status::Removed => bail!( "the cargo feature `{}` has been removed\n\ Remove the feature from Cargo.toml to remove this error.\n\ @@ -521,47 +541,75 @@ } } -/// A parsed representation of all unstable flags 
that Cargo accepts. -/// -/// Cargo, like `rustc`, accepts a suite of `-Z` flags which are intended for -/// gating unstable functionality to Cargo. These flags are only available on -/// the nightly channel of Cargo. -#[derive(Default, Debug, Deserialize)] -#[serde(default, rename_all = "kebab-case")] -pub struct CliUnstable { - pub print_im_a_teapot: bool, - pub unstable_options: bool, - pub no_index_update: bool, - pub avoid_dev_deps: bool, - pub minimal_versions: bool, - pub advanced_env: bool, - pub config_include: bool, - pub dual_proc_macros: bool, - pub mtime_on_use: bool, - pub named_profiles: bool, - pub binary_dep_depinfo: bool, - #[serde(deserialize_with = "deserialize_build_std")] - pub build_std: Option>, - pub build_std_features: Option>, - pub timings: Option>, - pub doctest_xcompile: bool, - pub doctest_in_workspace: bool, - pub panic_abort_tests: bool, - pub jobserver_per_rustc: bool, - pub features: Option>, - pub separate_nightlies: bool, - pub multitarget: bool, - pub rustdoc_map: bool, - pub terminal_width: Option>, - pub namespaced_features: bool, - pub weak_dep_features: bool, - pub extra_link_arg: bool, - pub patch_in_config: bool, - pub credential_process: bool, - pub configurable_env: bool, - pub enable_future_incompat_feature: bool, +macro_rules! unstable_cli_options { + ( + $( + $(#[$meta:meta])? + $element: ident: $ty: ty = ($help: expr ), + )* + ) => { + /// A parsed representation of all unstable flags that Cargo accepts. + /// + /// Cargo, like `rustc`, accepts a suite of `-Z` flags which are intended for + /// gating unstable functionality to Cargo. These flags are only available on + /// the nightly channel of Cargo. + #[derive(Default, Debug, Deserialize)] + #[serde(default, rename_all = "kebab-case")] + pub struct CliUnstable { + $( + $(#[$meta])? 
+ pub $element: $ty + ),* + } + impl CliUnstable { + pub fn help() -> Vec<(&'static str, &'static str)> { + let fields = vec![$((stringify!($element), $help)),*]; + fields + } + } + } } +unstable_cli_options!( + // Permanently unstable features: + allow_features: Option> = ("Allow *only* the listed unstable features"), + print_im_a_teapot: bool= (HIDDEN), + + // All other unstable features. + // Please keep this list lexiographically ordered. + advanced_env: bool = (HIDDEN), + avoid_dev_deps: bool = ("Avoid installing dev-dependencies if possible"), + binary_dep_depinfo: bool = ("Track changes to dependency artifacts"), + #[serde(deserialize_with = "deserialize_build_std")] + build_std: Option> = ("Enable Cargo to compile the standard library itself as part of a crate graph compilation"), + build_std_features: Option> = ("Configure features enabled for the standard library itself when building the standard library"), + config_include: bool = ("Enable the `include` key in config files"), + configurable_env: bool = ("Enable the [env] section in the .cargo/config.toml file"), + credential_process: bool = ("Add a config setting to fetch registry authentication tokens by calling an external process"), + doctest_in_workspace: bool = ("Compile doctests with paths relative to the workspace root"), + doctest_xcompile: bool = ("Compile and run doctests for non-host target using runner config"), + dual_proc_macros: bool = ("Build proc-macros for both the host and the target"), + future_incompat_report: bool = ("Enable creation of a future-incompat report for all dependencies"), + extra_link_arg: bool = ("Allow `cargo:rustc-link-arg` in build scripts"), + features: Option> = (HIDDEN), + jobserver_per_rustc: bool = (HIDDEN), + minimal_versions: bool = ("Resolve minimal dependency versions instead of maximum"), + mtime_on_use: bool = ("Configure Cargo to update the mtime of used files"), + multitarget: bool = ("Allow passing multiple `--target` flags to the cargo subcommand 
selected"), + named_profiles: bool = ("Allow defining custom profiles"), + namespaced_features: bool = ("Allow features with `dep:` prefix"), + no_index_update: bool = ("Do not update the registry index even if the cache is outdated"), + panic_abort_tests: bool = ("Enable support to run tests with -Cpanic=abort"), + patch_in_config: bool = ("Allow `[patch]` sections in .cargo/config.toml files"), + rustdoc_map: bool = ("Allow passing external documentation mappings to rustdoc"), + separate_nightlies: bool = (HIDDEN), + terminal_width: Option> = ("Provide a terminal width to rustc for error truncation"), + timings: Option> = ("Display concurrency information"), + unstable_options: bool = ("Allow the usage of unstable options"), + weak_dep_features: bool = ("Allow `dep_name?/feature` feature syntax"), + skip_rustdoc_fingerprint: bool = (HIDDEN), +); + const STABILIZED_COMPILE_PROGRESS: &str = "The progress bar is now always \ enabled when used on an interactive console.\n\ See https://doc.rust-lang.org/cargo/reference/config.html#termprogresswhen \ @@ -626,6 +674,13 @@ ); } let mut warnings = Vec::new(); + // We read flags twice, first to get allowed-features (if specified), + // and then to read the remaining unstable flags. 
+ for flag in flags { + if flag.starts_with("allow-features=") { + self.add(flag, &mut warnings)?; + } + } for flag in flags { self.add(flag, &mut warnings)?; } @@ -655,6 +710,7 @@ fn parse_features(value: Option<&str>) -> Vec { match value { None => Vec::new(), + Some("") => Vec::new(), Some(v) => v.split(',').map(|s| s.to_string()).collect(), } } @@ -697,8 +753,19 @@ )) }; + if let Some(allowed) = &self.allow_features { + if k != "allow-features" && !allowed.contains(k) { + bail!( + "the feature `{}` is not in the list of allowed features: [{}]", + k, + iter_join(allowed, ", ") + ); + } + } + match k { "print-im-a-teapot" => self.print_im_a_teapot = parse_bool(k, v)?, + "allow-features" => self.allow_features = Some(parse_features(v).into_iter().collect()), "unstable-options" => self.unstable_options = parse_empty(k, v)?, "no-index-update" => self.no_index_update = parse_empty(k, v)?, "avoid-dev-deps" => self.avoid_dev_deps = parse_empty(k, v)?, @@ -750,6 +817,7 @@ "weak-dep-features" => self.weak_dep_features = parse_empty(k, v)?, "extra-link-arg" => self.extra_link_arg = parse_empty(k, v)?, "credential-process" => self.credential_process = parse_empty(k, v)?, + "skip-rustdoc-fingerprint" => self.skip_rustdoc_fingerprint = parse_empty(k, v)?, "compile-progress" => stabilized_warn(k, "1.30", STABILIZED_COMPILE_PROGRESS), "offline" => stabilized_err(k, "1.36", STABILIZED_OFFLINE)?, "cache-messages" => stabilized_warn(k, "1.40", STABILIZED_CACHE_MESSAGES), @@ -757,16 +825,15 @@ "config-profile" => stabilized_warn(k, "1.43", STABILIZED_CONFIG_PROFILE), "crate-versions" => stabilized_warn(k, "1.47", STABILIZED_CRATE_VERSIONS), "package-features" => stabilized_warn(k, "1.51", STABILIZED_PACKAGE_FEATURES), - "future-incompat-report" => self.enable_future_incompat_feature = parse_empty(k, v)?, + "future-incompat-report" => self.future_incompat_report = parse_empty(k, v)?, _ => bail!("unknown `-Z` flag specified: {}", k), } Ok(()) } - /// Generates an error if `-Z 
unstable-options` was not used. - /// Intended to be used when a user passes a command-line flag that - /// requires `-Z unstable-options`. + /// Generates an error if `-Z unstable-options` was not used for a new, + /// unstable command-line flag. pub fn fail_if_stable_opt(&self, flag: &str, issue: u32) -> CargoResult<()> { if !self.unstable_options { let see = format!( @@ -798,6 +865,43 @@ } Ok(()) } + + /// Generates an error if `-Z unstable-options` was not used for a new, + /// unstable subcommand. + pub fn fail_if_stable_command( + &self, + config: &Config, + command: &str, + issue: u32, + ) -> CargoResult<()> { + if self.unstable_options { + return Ok(()); + } + let see = format!( + "See https://github.com/rust-lang/cargo/issues/{} for more \ + information about the `cargo {}` command.", + issue, command + ); + if config.nightly_features_allowed { + bail!( + "the `cargo {}` command is unstable, pass `-Z unstable-options` to enable it\n\ + {}", + command, + see + ); + } else { + bail!( + "the `cargo {}` command is unstable, and only available on the \ + nightly channel of Cargo, but this is the `{}` channel\n\ + {}\n\ + {}", + command, + channel(), + SEE_CHANNELS, + see + ); + } + } } /// Returns the current release channel ("stable", "beta", "nightly", "dev"). 
diff -Nru cargo-0.53.0/src/cargo/core/manifest.rs cargo-0.54.0/src/cargo/core/manifest.rs --- cargo-0.53.0/src/cargo/core/manifest.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/cargo/core/manifest.rs 2021-04-27 14:35:53.000000000 +0000 @@ -5,12 +5,13 @@ use std::rc::Rc; use std::sync::Arc; +use anyhow::Context as _; use semver::Version; use serde::ser; use serde::Serialize; use url::Url; -use crate::core::compiler::CrateType; +use crate::core::compiler::{CompileKind, CrateType}; use crate::core::resolver::ResolveBehavior; use crate::core::{Dependency, PackageId, PackageIdSpec, SourceId, Summary}; use crate::core::{Edition, Feature, Features, WorkspaceConfig}; @@ -31,6 +32,8 @@ pub struct Manifest { summary: Summary, targets: Vec, + default_kind: Option, + forced_kind: Option, links: Option, warnings: Warnings, exclude: Vec, @@ -365,6 +368,8 @@ impl Manifest { pub fn new( summary: Summary, + default_kind: Option, + forced_kind: Option, targets: Vec, exclude: Vec, include: Vec, @@ -387,6 +392,8 @@ ) -> Manifest { Manifest { summary, + default_kind, + forced_kind, targets, warnings: Warnings::new(), exclude, @@ -413,6 +420,12 @@ pub fn dependencies(&self) -> &[Dependency] { self.summary.dependencies() } + pub fn default_kind(&self) -> Option { + self.default_kind + } + pub fn forced_kind(&self) -> Option { + self.forced_kind + } pub fn exclude(&self) -> &[String] { &self.exclude } @@ -496,11 +509,18 @@ if self.im_a_teapot.is_some() { self.unstable_features .require(Feature::test_dummy_unstable()) - .chain_err(|| { - anyhow::format_err!( - "the `im-a-teapot` manifest key is unstable and may \ - not work properly in England" - ) + .with_context(|| { + "the `im-a-teapot` manifest key is unstable and may \ + not work properly in England" + })?; + } + + if self.default_kind.is_some() || self.forced_kind.is_some() { + self.unstable_features + .require(Feature::per_package_target()) + .with_context(|| { + "the `package.default-target` and 
`package.forced-target` \ + manifest keys are unstable and may not work properly" })?; } diff -Nru cargo-0.53.0/src/cargo/core/package_id_spec.rs cargo-0.54.0/src/cargo/core/package_id_spec.rs --- cargo-0.53.0/src/cargo/core/package_id_spec.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/cargo/core/package_id_spec.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,13 +1,13 @@ use std::collections::HashMap; use std::fmt; -use anyhow::bail; +use anyhow::{bail, Context as _}; use semver::Version; use serde::{de, ser}; use url::Url; use crate::core::PackageId; -use crate::util::errors::{CargoResult, CargoResultExt}; +use crate::util::errors::CargoResult; use crate::util::interning::InternedString; use crate::util::lev_distance; use crate::util::{validate_package_name, IntoUrl, ToSemver}; @@ -85,9 +85,9 @@ I: IntoIterator, { let i: Vec<_> = i.into_iter().collect(); - let spec = PackageIdSpec::parse(spec).chain_err(|| { + let spec = PackageIdSpec::parse(spec).with_context(|| { let suggestion = lev_distance::closest_msg(spec, i.iter(), |id| id.name().as_str()); - anyhow::format_err!("invalid package ID specification: `{}`{}", spec, suggestion) + format!("invalid package ID specification: `{}`{}", spec, suggestion) })?; spec.query(i) } diff -Nru cargo-0.53.0/src/cargo/core/package.rs cargo-0.54.0/src/cargo/core/package.rs --- cargo-0.53.0/src/cargo/core/package.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/cargo/core/package.rs 2021-04-27 14:35:53.000000000 +0000 @@ -26,7 +26,7 @@ use crate::core::{SourceMap, Summary, Workspace}; use crate::ops; use crate::util::config::PackageCacheLock; -use crate::util::errors::{CargoResult, CargoResultExt, HttpNot200}; +use crate::util::errors::{CargoResult, HttpNot200}; use crate::util::interning::InternedString; use crate::util::network::Retry; use crate::util::{self, internal, Config, Progress, ProgressStyle}; @@ -420,7 +420,7 @@ let multiplexing = config.http_config()?.multiplexing.unwrap_or(true); multi 
.pipelining(false, multiplexing) - .chain_err(|| "failed to enable multiplexing/pipelining in curl")?; + .with_context(|| "failed to enable multiplexing/pipelining in curl")?; // let's not flood crates.io with connections multi.set_max_host_connections(2)?; @@ -500,7 +500,7 @@ root_ids: &[PackageId], has_dev_units: HasDevUnits, requested_kinds: &[CompileKind], - target_data: &RustcTargetData, + target_data: &RustcTargetData<'cfg>, force_all_targets: ForceAllTargets, ) -> CargoResult<()> { fn collect_used_deps( @@ -509,7 +509,7 @@ pkg_id: PackageId, has_dev_units: HasDevUnits, requested_kinds: &[CompileKind], - target_data: &RustcTargetData, + target_data: &RustcTargetData<'_>, force_all_targets: ForceAllTargets, ) -> CargoResult<()> { if !used.insert(pkg_id) { @@ -616,7 +616,7 @@ /// the package is ready and doesn't need to be downloaded. pub fn start(&mut self, id: PackageId) -> CargoResult> { self.start_inner(id) - .chain_err(|| format!("failed to download `{}`", id)) + .with_context(|| format!("failed to download `{}`", id)) } fn start_inner(&mut self, id: PackageId) -> CargoResult> { @@ -640,7 +640,7 @@ .ok_or_else(|| internal(format!("couldn't find source for `{}`", id)))?; let pkg = source .download(id) - .chain_err(|| anyhow::format_err!("unable to get packages from source"))?; + .with_context(|| "unable to get packages from source")?; let (url, descriptor) = match pkg { MaybePackage::Ready(pkg) => { debug!("{} doesn't need a download", id); @@ -814,7 +814,7 @@ } Ok(()) }) - .chain_err(|| format!("failed to download from `{}`", dl.url))? + .with_context(|| format!("failed to download from `{}`", dl.url))? 
}; match ret { Some(()) => break (dl, data), @@ -912,7 +912,7 @@ self.set .multi .perform() - .chain_err(|| "failed to perform http requests") + .with_context(|| "failed to perform http requests") })?; debug!("handles remaining: {}", n); let results = &mut self.results; @@ -939,7 +939,7 @@ self.set .multi .wait(&mut [], timeout) - .chain_err(|| "failed to wait on curl `Multi`")?; + .with_context(|| "failed to wait on curl `Multi`")?; } } diff -Nru cargo-0.53.0/src/cargo/core/profiles.rs cargo-0.54.0/src/cargo/core/profiles.rs --- cargo-0.53.0/src/cargo/core/profiles.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/cargo/core/profiles.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,11 +1,10 @@ -use crate::core::compiler::{CompileMode, Unit}; +use crate::core::compiler::{CompileKind, CompileMode, Unit}; use crate::core::resolver::features::FeaturesFor; use crate::core::{Feature, PackageId, PackageIdSpec, Resolve, Shell, Target, Workspace}; -use crate::util::errors::CargoResultExt; use crate::util::interning::InternedString; use crate::util::toml::{ProfilePackageSpec, StringOrBool, TomlProfile, TomlProfiles, U32OrBool}; use crate::util::{closest_msg, config, CargoResult, Config}; -use anyhow::bail; +use anyhow::{bail, Context as _}; use std::collections::{BTreeMap, HashMap, HashSet}; use std::hash::Hash; use std::{cmp, env, fmt, hash}; @@ -291,6 +290,7 @@ is_local: bool, unit_for: UnitFor, mode: CompileMode, + kind: CompileKind, ) -> Profile { let (profile_name, inherits) = if !self.named_profiles_enabled { // With the feature disabled, we degrade `--profile` back to the @@ -346,6 +346,23 @@ } } + // Default macOS debug information to being stored in the "unpacked" + // split-debuginfo format. At the time of this writing that's the only + // platform which has a stable `-Csplit-debuginfo` option for rustc, + // and it's typically much faster than running `dsymutil` on all builds + // in incremental cases. 
+ if let Some(debug) = profile.debuginfo { + if profile.split_debuginfo.is_none() && debug > 0 { + let target = match &kind { + CompileKind::Host => self.rustc_host.as_str(), + CompileKind::Target(target) => target.short_name(), + }; + if target.contains("-apple-") { + profile.split_debuginfo = Some(InternedString::new("unpacked")); + } + } + } + // Incremental can be globally overridden. if let Some(v) = self.incremental { profile.incremental = v; @@ -1124,11 +1141,10 @@ profile .val .validate(name, ws.unstable_features(), &mut warnings) - .chain_err(|| { - anyhow::format_err!( + .with_context(|| { + format!( "config profile `{}` is not valid (defined in `{}`)", - name, - profile.definition + name, profile.definition ) })?; for warning in warnings { diff -Nru cargo-0.53.0/src/cargo/core/registry.rs cargo-0.54.0/src/cargo/core/registry.rs --- cargo-0.53.0/src/cargo/core/registry.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/cargo/core/registry.rs 2021-04-27 14:35:53.000000000 +0000 @@ -3,10 +3,10 @@ use crate::core::PackageSet; use crate::core::{Dependency, PackageId, Source, SourceId, SourceMap, Summary}; use crate::sources::config::SourceConfigMap; -use crate::util::errors::{CargoResult, CargoResultExt}; +use crate::util::errors::CargoResult; use crate::util::interning::InternedString; use crate::util::{profile, CanonicalUrl, Config}; -use anyhow::bail; +use anyhow::{bail, Context as _}; use log::{debug, trace}; use semver::VersionReq; use url::Url; @@ -107,6 +107,21 @@ Normal, } +/// Argument to `PackageRegistry::patch` which is information about a `[patch]` +/// directive that we found in a lockfile, if present. +pub struct LockedPatchDependency { + /// The original `Dependency` directive, except "locked" so its version + /// requirement is `=foo` and its `SourceId` has a "precise" listed. + pub dependency: Dependency, + /// The `PackageId` that was previously found in a lock file which + /// `dependency` matches. 
+ pub package_id: PackageId, + /// Something only used for backwards compatibility with the v2 lock file + /// format where `branch=master` is considered the same as `DefaultBranch`. + /// For more comments on this see the code in `ops/resolve.rs`. + pub alt_package_id: Option, +} + impl<'cfg> PackageRegistry<'cfg> { pub fn new(config: &'cfg Config) -> CargoResult> { let source_config = SourceConfigMap::new(config)?; @@ -240,7 +255,7 @@ pub fn patch( &mut self, url: &Url, - deps: &[(&Dependency, Option<(Dependency, PackageId)>)], + deps: &[(&Dependency, Option)], ) -> CargoResult> { // NOTE: None of this code is aware of required features. If a patch // is missing a required feature, you end up with an "unused patch" @@ -268,7 +283,7 @@ let orig_patch = *orig_patch; // Use the locked patch if it exists, otherwise use the original. let dep = match locked { - Some((locked_patch, _locked_id)) => locked_patch, + Some(lock) => &lock.dependency, None => orig_patch, }; debug!( @@ -281,8 +296,8 @@ // normally would and then ask it directly for the list of summaries // corresponding to this `dep`. 
self.ensure_loaded(dep.source_id(), Kind::Normal) - .chain_err(|| { - anyhow::format_err!( + .with_context(|| { + format!( "failed to load source for dependency `{}`", dep.package_name() ) @@ -293,14 +308,16 @@ .get_mut(dep.source_id()) .expect("loaded source not present"); let summaries = source.query_vec(dep)?; - let (summary, should_unlock) = - summary_for_patch(orig_patch, locked, summaries, source).chain_err(|| { - format!( - "patch for `{}` in `{}` failed to resolve", - orig_patch.package_name(), - url, - ) - })?; + let (summary, should_unlock) = summary_for_patch( + orig_patch, locked, summaries, source, + ) + .with_context(|| { + format!( + "patch for `{}` in `{}` failed to resolve", + orig_patch.package_name(), + url, + ) + })?; debug!( "patch summary is {:?} should_unlock={:?}", summary, should_unlock @@ -320,7 +337,7 @@ Ok(summary) }) .collect::>>() - .chain_err(|| anyhow::format_err!("failed to resolve patches for `{}`", url))?; + .with_context(|| format!("failed to resolve patches for `{}`", url))?; let mut name_and_version = HashSet::new(); for summary in unlocked_summaries.iter() { @@ -336,13 +353,36 @@ } } + // Calculate a list of all patches available for this source which is + // then used later during calls to `lock` to rewrite summaries to point + // directly at these patched entries. + // + // Note that this is somewhat subtle where the list of `ids` for a + // canonical URL is extended with possibly two ids per summary. This is done + // to handle the transition from the v2->v3 lock file format where in + // v2 DefaultBranch was either DefaultBranch or Branch("master") for + // git dependencies. In this case if `summary.package_id()` is + // Branch("master") then alt_package_id will be DefaultBranch. This + // signifies that there's a patch available for either of those + // dependency directives if we see them in the dependency graph. 
+ // + // This is a bit complicated and hopefully an edge case we can remove + // in the future, but for now it hopefully doesn't cause too much + // harm... + let mut ids = Vec::new(); + for (summary, (_, lock)) in unlocked_summaries.iter().zip(deps) { + ids.push(summary.package_id()); + if let Some(lock) = lock { + ids.extend(lock.alt_package_id); + } + } + self.patches_available.insert(canonical.clone(), ids); + // Note that we do not use `lock` here to lock summaries! That step // happens later once `lock_patches` is invoked. In the meantime though // we want to fill in the `patches_available` map (later used in the // `lock` method) and otherwise store the unlocked summaries in // `patches` to get locked in a future call to `lock_patches`. - let ids = unlocked_summaries.iter().map(|s| s.package_id()).collect(); - self.patches_available.insert(canonical.clone(), ids); self.patches.insert(canonical, unlocked_summaries); Ok(unlock_patches) @@ -388,7 +428,7 @@ let _p = profile::start(format!("updating: {}", source_id)); self.sources.get_mut(source_id).unwrap().update() })() - .chain_err(|| anyhow::format_err!("Unable to update {}", source_id))?; + .with_context(|| format!("Unable to update {}", source_id))?; Ok(()) } @@ -539,8 +579,8 @@ // Ensure the requested source_id is loaded self.ensure_loaded(dep.source_id(), Kind::Normal) - .chain_err(|| { - anyhow::format_err!( + .with_context(|| { + format!( "failed to load source for dependency `{}`", dep.package_name() ) @@ -745,7 +785,7 @@ /// This is a helper for selecting the summary, or generating a helpful error message. fn summary_for_patch( orig_patch: &Dependency, - locked: &Option<(Dependency, PackageId)>, + locked: &Option, mut summaries: Vec, source: &mut dyn Source, ) -> CargoResult<(Summary, Option)> { @@ -777,7 +817,7 @@ } assert!(summaries.is_empty()); // No summaries found, try to help the user figure out what is wrong. 
- if let Some((_locked_patch, locked_id)) = locked { + if let Some(locked) = locked { // Since the locked patch did not match anything, try the unlocked one. let orig_matches = source.query_vec(orig_patch).unwrap_or_else(|e| { log::warn!( @@ -790,7 +830,7 @@ let (summary, _) = summary_for_patch(orig_patch, &None, orig_matches, source)?; // The unlocked version found a match. This returns a value to // indicate that this entry should be unlocked. - return Ok((summary, Some(*locked_id))); + return Ok((summary, Some(locked.package_id))); } // Try checking if there are *any* packages that match this by name. let name_only_dep = Dependency::new_override(orig_patch.package_name(), orig_patch.source_id()); diff -Nru cargo-0.53.0/src/cargo/core/resolver/context.rs cargo-0.54.0/src/cargo/core/resolver/context.rs --- cargo-0.53.0/src/cargo/core/resolver/context.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/cargo/core/resolver/context.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,6 +1,7 @@ use super::dep_cache::RegistryQueryer; use super::errors::ActivateResult; use super::types::{ConflictMap, ConflictReason, FeaturesSet, ResolveOpts}; +use super::RequestedFeatures; use crate::core::{Dependency, PackageId, SourceId, Summary}; use crate::util::interning::InternedString; use crate::util::Graph; @@ -160,23 +161,32 @@ } } debug!("checking if {} is already activated", summary.package_id()); - if opts.features.all_features { - return Ok(false); - } - - let has_default_feature = summary.features().contains_key("default"); - Ok(match self.resolve_features.get(&id) { - Some(prev) => { - opts.features.features.is_subset(prev) - && (!opts.features.uses_default_features - || prev.contains("default") - || !has_default_feature) - } - None => { - opts.features.features.is_empty() - && (!opts.features.uses_default_features || !has_default_feature) + match &opts.features { + // This returns `false` for CliFeatures just for simplicity. 
It + // would take a bit of work to compare since they are not in the + // same format as DepFeatures (and that may be expensive + // performance-wise). Also, it should only occur once for a root + // package. The only drawback is that it may re-activate a root + // package again, which should only affect performance, but that + // should be rare. Cycles should still be detected since those + // will have `DepFeatures` edges. + RequestedFeatures::CliFeatures(_) => Ok(false), + RequestedFeatures::DepFeatures { + features, + uses_default_features, + } => { + let has_default_feature = summary.features().contains_key("default"); + Ok(match self.resolve_features.get(&id) { + Some(prev) => { + features.is_subset(prev) + && (!uses_default_features + || prev.contains("default") + || !has_default_feature) + } + None => features.is_empty() && (!uses_default_features || !has_default_feature), + }) } - }) + } } /// If the package is active returns the `ContextAge` when it was added diff -Nru cargo-0.53.0/src/cargo/core/resolver/dep_cache.rs cargo-0.54.0/src/cargo/core/resolver/dep_cache.rs --- cargo-0.53.0/src/cargo/core/resolver/dep_cache.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/cargo/core/resolver/dep_cache.rs 2021-04-27 14:35:53.000000000 +0000 @@ -12,12 +12,14 @@ use crate::core::resolver::context::Context; use crate::core::resolver::errors::describe_path; use crate::core::resolver::types::{ConflictReason, DepInfo, FeaturesSet}; -use crate::core::resolver::{ActivateError, ActivateResult, ResolveOpts}; +use crate::core::resolver::{ + ActivateError, ActivateResult, CliFeatures, RequestedFeatures, ResolveOpts, +}; use crate::core::{Dependency, FeatureValue, PackageId, PackageIdSpec, Registry, Summary}; -use crate::core::{GitReference, SourceId}; -use crate::util::errors::{CargoResult, CargoResultExt}; +use crate::util::errors::CargoResult; use crate::util::interning::InternedString; -use crate::util::Config; + +use anyhow::Context as _; use log::debug; use 
std::cmp::Ordering; use std::collections::{BTreeSet, HashMap, HashSet}; @@ -40,10 +42,6 @@ >, /// all the cases we ended up using a supplied replacement used_replacements: HashMap, - /// Where to print warnings, if configured. - config: Option<&'a Config>, - /// Sources that we've already wared about possibly colliding in the future. - warned_git_collisions: HashSet, } impl<'a> RegistryQueryer<'a> { @@ -52,7 +50,6 @@ replacements: &'a [(PackageIdSpec, Dependency)], try_to_use: &'a HashSet, minimal_versions: bool, - config: Option<&'a Config>, ) -> Self { RegistryQueryer { registry, @@ -62,8 +59,6 @@ registry_cache: HashMap::new(), summary_cache: HashMap::new(), used_replacements: HashMap::new(), - config, - warned_git_collisions: HashSet::new(), } } @@ -75,44 +70,6 @@ self.used_replacements.get(&p) } - /// Issues a future-compatible warning targeted at removing reliance on - /// unifying behavior between these two dependency directives: - /// - /// ```toml - /// [dependencies] - /// a = { git = 'https://example.org/foo' } - /// a = { git = 'https://example.org/foo', branch = 'master } - /// ``` - /// - /// Historical versions of Cargo considered these equivalent but going - /// forward we'd like to fix this. 
For more details see the comments in - /// src/cargo/sources/git/utils.rs - fn warn_colliding_git_sources(&mut self, id: SourceId) -> CargoResult<()> { - let config = match self.config { - Some(config) => config, - None => return Ok(()), - }; - let prev = match self.warned_git_collisions.replace(id) { - Some(prev) => prev, - None => return Ok(()), - }; - match (id.git_reference(), prev.git_reference()) { - (Some(GitReference::DefaultBranch), Some(GitReference::Branch(b))) - | (Some(GitReference::Branch(b)), Some(GitReference::DefaultBranch)) - if b == "master" => {} - _ => return Ok(()), - } - - config.shell().warn(&format!( - "two git dependencies found for `{}` \ - where one uses `branch = \"master\"` and the other doesn't; \ - this will break in a future version of Cargo, so please \ - ensure the dependency forms are consistent", - id.url(), - ))?; - Ok(()) - } - /// Queries the `registry` to return a list of candidates for `dep`. /// /// This method is the location where overrides are taken into account. If @@ -120,7 +77,6 @@ /// applied by performing a second query for what the override should /// return. 
pub fn query(&mut self, dep: &Dependency) -> CargoResult>> { - self.warn_colliding_git_sources(dep.source_id())?; if let Some(out) = self.registry_cache.get(dep).cloned() { return Ok(out); } @@ -268,8 +224,8 @@ let mut deps = deps .into_iter() .map(|(dep, features)| { - let candidates = self.query(&dep).chain_err(|| { - anyhow::format_err!( + let candidates = self.query(&dep).with_context(|| { + format!( "failed to get `{}` as a dependency of {}", dep.package_name(), describe_path(&cx.parents.path_to_bottom(&candidate.package_id())), @@ -329,15 +285,6 @@ .unwrap_or(&default_dep) .clone(); base.extend(dep.features().iter()); - for feature in base.iter() { - if feature.contains('/') { - return Err(anyhow::format_err!( - "feature names may not contain slashes: `{}`", - feature - ) - .into()); - } - } ret.push((dep.clone(), Rc::new(base))); } @@ -365,30 +312,46 @@ ) -> ActivateResult> { let mut reqs = Requirements::new(s); - if opts.features.all_features { - for key in s.features().keys() { - if let Err(e) = reqs.require_feature(*key) { + let handle_default = |uses_default_features, reqs: &mut Requirements<'_>| { + if uses_default_features && s.features().contains_key("default") { + if let Err(e) = reqs.require_feature(InternedString::new("default")) { return Err(e.into_activate_error(parent, s)); } } - } else { - for &f in opts.features.features.iter() { - let fv = FeatureValue::new(f); - if fv.has_dep_prefix() { - return Err(ActivateError::Fatal(anyhow::format_err!( - "feature value `{}` is not allowed to use explicit `dep:` syntax", - fv - ))); - } - if let Err(e) = reqs.require_value(&fv) { - return Err(e.into_activate_error(parent, s)); + Ok(()) + }; + + match &opts.features { + RequestedFeatures::CliFeatures(CliFeatures { + features, + all_features, + uses_default_features, + }) => { + if *all_features { + for key in s.features().keys() { + if let Err(e) = reqs.require_feature(*key) { + return Err(e.into_activate_error(parent, s)); + } + } + } else { + for fv in 
features.iter() { + if let Err(e) = reqs.require_value(fv) { + return Err(e.into_activate_error(parent, s)); + } + } + handle_default(*uses_default_features, &mut reqs)?; } } - } - - if opts.features.uses_default_features && s.features().contains_key("default") { - if let Err(e) = reqs.require_feature(InternedString::new("default")) { - return Err(e.into_activate_error(parent, s)); + RequestedFeatures::DepFeatures { + features, + uses_default_features, + } => { + for feature in features.iter() { + if let Err(e) = reqs.require_feature(*feature) { + return Err(e.into_activate_error(parent, s)); + } + } + handle_default(*uses_default_features, &mut reqs)?; } } diff -Nru cargo-0.53.0/src/cargo/core/resolver/encode.rs cargo-0.54.0/src/cargo/core/resolver/encode.rs --- cargo-0.53.0/src/cargo/core/resolver/encode.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/cargo/core/resolver/encode.rs 2021-04-27 14:35:53.000000000 +0000 @@ -113,10 +113,10 @@ use super::{Resolve, ResolveVersion}; use crate::core::{Dependency, GitReference, Package, PackageId, SourceId, Workspace}; -use crate::util::errors::{CargoResult, CargoResultExt}; +use crate::util::errors::CargoResult; use crate::util::interning::InternedString; use crate::util::{internal, Graph}; -use anyhow::bail; +use anyhow::{bail, Context as _}; use log::debug; use serde::de; use serde::ser; @@ -333,7 +333,7 @@ let k = &k[prefix.len()..]; let enc_id: EncodablePackageId = k .parse() - .chain_err(|| internal("invalid encoding of checksum in lockfile"))?; + .with_context(|| internal("invalid encoding of checksum in lockfile"))?; let id = match lookup_id(&enc_id) { Some(id) => id, _ => continue, diff -Nru cargo-0.53.0/src/cargo/core/resolver/features.rs cargo-0.54.0/src/cargo/core/resolver/features.rs --- cargo-0.53.0/src/cargo/core/resolver/features.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/cargo/core/resolver/features.rs 2021-04-27 14:35:53.000000000 +0000 @@ -42,6 +42,7 @@ use 
crate::core::{FeatureValue, PackageId, PackageIdSpec, PackageSet, Workspace}; use crate::util::interning::InternedString; use crate::util::CargoResult; +use anyhow::bail; use std::collections::{BTreeMap, BTreeSet, HashMap, HashSet}; use std::rc::Rc; @@ -144,7 +145,7 @@ } "compare" => opts.compare = true, "ws" => unimplemented!(), - s => anyhow::bail!("-Zfeatures flag `{}` is not supported", s), + s => bail!("-Zfeatures flag `{}` is not supported", s), } } Ok(()) @@ -197,44 +198,93 @@ } /// Features flags requested for a package. +/// +/// This should be cheap and fast to clone, it is used in the resolver for +/// various caches. +/// +/// This is split into enum variants because the resolver needs to handle +/// features coming from different places (command-line and dependency +/// declarations), but those different places have different constraints on +/// which syntax is allowed. This helps ensure that every place dealing with +/// features is properly handling those syntax restrictions. #[derive(Debug, Clone, Eq, PartialEq, Hash)] -pub struct RequestedFeatures { - pub features: FeaturesSet, +pub enum RequestedFeatures { + /// Features requested on the command-line with flags. + CliFeatures(CliFeatures), + /// Features specified in a dependency declaration. + DepFeatures { + /// The `features` dependency field. + features: FeaturesSet, + /// The `default-features` dependency field. + uses_default_features: bool, + }, +} + +/// Features specified on the command-line. +#[derive(Debug, Clone, Eq, PartialEq, Hash)] +pub struct CliFeatures { + /// Features from the `--features` flag. + pub features: Rc>, + /// The `--all-features` flag. pub all_features: bool, + /// Inverse of `--no-default-features` flag. pub uses_default_features: bool, } -impl RequestedFeatures { - /// Creates a new RequestedFeatures from the given command-line flags. +impl CliFeatures { + /// Creates a new CliFeatures from the given command-line flags. 
pub fn from_command_line( features: &[String], all_features: bool, uses_default_features: bool, - ) -> RequestedFeatures { - RequestedFeatures { - features: Rc::new(RequestedFeatures::split_features(features)), + ) -> CargoResult { + let features = Rc::new(CliFeatures::split_features(features)); + // Some early validation to ensure correct syntax. + for feature in features.iter() { + match feature { + // Maybe call validate_feature_name here once it is an error? + FeatureValue::Feature(_) => {} + FeatureValue::Dep { .. } + | FeatureValue::DepFeature { + dep_prefix: true, .. + } => { + bail!( + "feature `{}` is not allowed to use explicit `dep:` syntax", + feature + ); + } + FeatureValue::DepFeature { dep_feature, .. } => { + if dep_feature.contains('/') { + bail!("multiple slashes in feature `{}` is not allowed", feature); + } + } + } + } + Ok(CliFeatures { + features, all_features, uses_default_features, - } + }) } - /// Creates a new RequestedFeatures with the given `all_features` setting. - pub fn new_all(all_features: bool) -> RequestedFeatures { - RequestedFeatures { + /// Creates a new CliFeatures with the given `all_features` setting. + pub fn new_all(all_features: bool) -> CliFeatures { + CliFeatures { features: Rc::new(BTreeSet::new()), all_features, uses_default_features: true, } } - fn split_features(features: &[String]) -> BTreeSet { + fn split_features(features: &[String]) -> BTreeSet { features .iter() .flat_map(|s| s.split_whitespace()) .flat_map(|s| s.split(',')) .filter(|s| !s.is_empty()) .map(InternedString::new) - .collect::>() + .map(FeatureValue::new) + .collect() } } @@ -296,7 +346,7 @@ if let Some(fs) = self.activated_features.get(&(pkg_id, is_build)) { Ok(fs.iter().cloned().collect()) } else { - anyhow::bail!("features did not find {:?} {:?}", pkg_id, is_build) + bail!("features did not find {:?} {:?}", pkg_id, is_build) } } } @@ -317,7 +367,7 @@ // The new resolver should never add features. 
assert_eq!(new_features.difference(&old_features).next(), None); let removed_features: BTreeSet<_> = - old_features.difference(&new_features).cloned().collect(); + old_features.difference(new_features).cloned().collect(); if removed_features.is_empty() { None } else { @@ -336,7 +386,7 @@ }; // The new resolver should never add dependencies. assert_eq!(new_deps.difference(&old_deps).next(), None); - let removed_deps: BTreeSet<_> = old_deps.difference(&new_deps).cloned().collect(); + let removed_deps: BTreeSet<_> = old_deps.difference(new_deps).cloned().collect(); if removed_deps.is_empty() { None } else { @@ -364,7 +414,7 @@ pub struct FeatureResolver<'a, 'cfg> { ws: &'a Workspace<'cfg>, - target_data: &'a RustcTargetData, + target_data: &'a RustcTargetData<'cfg>, /// The platforms to build for, requested by the user. requested_targets: &'a [CompileKind], resolve: &'a Resolve, @@ -402,10 +452,10 @@ /// with the result. pub fn resolve( ws: &Workspace<'cfg>, - target_data: &RustcTargetData, + target_data: &RustcTargetData<'cfg>, resolve: &Resolve, package_set: &'a PackageSet<'cfg>, - requested_features: &RequestedFeatures, + cli_features: &CliFeatures, specs: &[PackageIdSpec], requested_targets: &[CompileKind], opts: FeatureOpts, @@ -437,7 +487,7 @@ track_for_host, deferred_weak_dependencies: HashMap::new(), }; - r.do_resolve(specs, requested_features)?; + r.do_resolve(specs, cli_features)?; log::debug!("features={:#?}", r.activated_features); if r.opts.compare { r.compare(); @@ -455,11 +505,11 @@ fn do_resolve( &mut self, specs: &[PackageIdSpec], - requested_features: &RequestedFeatures, + cli_features: &CliFeatures, ) -> CargoResult<()> { - let member_features = self.ws.members_with_features(specs, requested_features)?; - for (member, requested_features) in &member_features { - let fvs = self.fvs_from_requested(member.package_id(), requested_features); + let member_features = self.ws.members_with_features(specs, cli_features)?; + for (member, cli_features) in 
&member_features { + let fvs = self.fvs_from_requested(member.package_id(), cli_features); let for_host = self.track_for_host && self.is_proc_macro(member.package_id()); self.activate_pkg(member.package_id(), for_host, &fvs)?; if for_host { @@ -725,24 +775,19 @@ fn fvs_from_requested( &self, pkg_id: PackageId, - requested_features: &RequestedFeatures, + cli_features: &CliFeatures, ) -> Vec { let summary = self.resolve.summary(pkg_id); let feature_map = summary.features(); - if requested_features.all_features { + if cli_features.all_features { feature_map .keys() .map(|k| FeatureValue::Feature(*k)) .collect() } else { - let mut result: Vec = requested_features - .features - .as_ref() - .iter() - .map(|f| FeatureValue::new(*f)) - .collect(); + let mut result: Vec = cli_features.features.iter().cloned().collect(); let default = InternedString::new("default"); - if requested_features.uses_default_features && feature_map.contains_key(&default) { + if cli_features.uses_default_features && feature_map.contains_key(&default) { result.push(FeatureValue::Feature(default)); } result diff -Nru cargo-0.53.0/src/cargo/core/resolver/mod.rs cargo-0.54.0/src/cargo/core/resolver/mod.rs --- cargo-0.53.0/src/cargo/core/resolver/mod.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/cargo/core/resolver/mod.rs 2021-04-27 14:35:53.000000000 +0000 @@ -69,7 +69,7 @@ pub use self::encode::Metadata; pub use self::encode::{EncodableDependency, EncodablePackageId, EncodableResolve}; pub use self::errors::{ActivateError, ActivateResult, ResolveError}; -pub use self::features::{ForceAllTargets, HasDevUnits}; +pub use self::features::{CliFeatures, ForceAllTargets, HasDevUnits}; pub use self::resolve::{Resolve, ResolveVersion}; pub use self::types::{ResolveBehavior, ResolveOpts}; @@ -133,8 +133,7 @@ Some(config) => config.cli_unstable().minimal_versions, None => false, }; - let mut registry = - RegistryQueryer::new(registry, replacements, try_to_use, minimal_versions, config); + let mut 
registry = RegistryQueryer::new(registry, replacements, try_to_use, minimal_versions); let cx = activate_deps_loop(cx, &mut registry, summaries, config)?; let mut cksums = HashMap::new(); @@ -193,7 +192,7 @@ // Activate all the initial summaries to kick off some work. for &(ref summary, ref opts) in summaries { debug!("initial activation: {}", summary.package_id()); - let res = activate(&mut cx, registry, None, summary.clone(), opts.clone()); + let res = activate(&mut cx, registry, None, summary.clone(), opts); match res { Ok(Some((frame, _))) => remaining_deps.push(frame), Ok(None) => (), @@ -379,9 +378,8 @@ let pid = candidate.package_id(); let opts = ResolveOpts { dev_deps: false, - features: RequestedFeatures { + features: RequestedFeatures::DepFeatures { features: Rc::clone(&features), - all_features: false, uses_default_features: dep.uses_default_features(), }, }; @@ -392,7 +390,7 @@ dep.package_name(), candidate.version() ); - let res = activate(&mut cx, registry, Some((&parent, &dep)), candidate, opts); + let res = activate(&mut cx, registry, Some((&parent, &dep)), candidate, &opts); let successfully_activated = match res { // Success! We've now activated our `candidate` in our context @@ -604,7 +602,7 @@ registry: &mut RegistryQueryer<'_>, parent: Option<(&Summary, &Dependency)>, candidate: Summary, - opts: ResolveOpts, + opts: &ResolveOpts, ) -> ActivateResult> { let candidate_pid = candidate.package_id(); cx.age += 1; @@ -626,7 +624,7 @@ } } - let activated = cx.flag_activated(&candidate, &opts, parent)?; + let activated = cx.flag_activated(&candidate, opts, parent)?; let candidate = match registry.replacement_summary(candidate_pid) { Some(replace) => { @@ -635,7 +633,7 @@ // does. TBH it basically cause panics in the test suite if // `parent` is passed through here and `[replace]` is otherwise // on life support so it's not critical to fix bugs anyway per se. - if cx.flag_activated(replace, &opts, None)? 
&& activated { + if cx.flag_activated(replace, opts, None)? && activated { return Ok(None); } trace!( @@ -656,7 +654,7 @@ let now = Instant::now(); let (used_features, deps) = - &*registry.build_deps(cx, parent.map(|p| p.0.package_id()), &candidate, &opts)?; + &*registry.build_deps(cx, parent.map(|p| p.0.package_id()), &candidate, opts)?; // Record what list of features is active for this package. if !used_features.is_empty() { diff -Nru cargo-0.53.0/src/cargo/core/resolver/resolve.rs cargo-0.54.0/src/cargo/core/resolver/resolve.rs --- cargo-0.53.0/src/cargo/core/resolver/resolve.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/cargo/core/resolver/resolve.rs 2021-04-27 14:35:53.000000000 +0000 @@ -409,6 +409,6 @@ /// file anyway so it takes the opportunity to bump the lock file version /// forward. fn default() -> ResolveVersion { - ResolveVersion::V2 + ResolveVersion::V3 } } diff -Nru cargo-0.53.0/src/cargo/core/resolver/types.rs cargo-0.54.0/src/cargo/core/resolver/types.rs --- cargo-0.53.0/src/cargo/core/resolver/types.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/cargo/core/resolver/types.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,4 +1,4 @@ -use super::features::RequestedFeatures; +use super::features::{CliFeatures, RequestedFeatures}; use crate::core::{Dependency, PackageId, Summary}; use crate::util::errors::CargoResult; use crate::util::interning::InternedString; @@ -133,6 +133,7 @@ /// Whether or not dev-dependencies should be included. /// /// This may be set to `false` by things like `cargo install` or `-Z avoid-dev-deps`. + /// It also gets set to `false` when activating dependencies in the resolver. pub dev_deps: bool, /// Set of features requested on the command-line. 
pub features: RequestedFeatures, @@ -143,7 +144,7 @@ pub fn everything() -> ResolveOpts { ResolveOpts { dev_deps: true, - features: RequestedFeatures::new_all(true), + features: RequestedFeatures::CliFeatures(CliFeatures::new_all(true)), } } diff -Nru cargo-0.53.0/src/cargo/core/source/source_id.rs cargo-0.54.0/src/cargo/core/source/source_id.rs --- cargo-0.53.0/src/cargo/core/source/source_id.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/cargo/core/source/source_id.rs 2021-04-27 14:35:53.000000000 +0000 @@ -42,11 +42,11 @@ /// The possible kinds of code source. Along with `SourceIdInner`, this fully defines the /// source. -#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] enum SourceKind { /// A git repository. Git(GitReference), - /// A local path.. + /// A local path. Path, /// A remote registry. Registry, @@ -394,45 +394,9 @@ // Sort first based on `kind`, deferring to the URL comparison below if // the kinds are equal. 
- match (&self.inner.kind, &other.inner.kind) { - (SourceKind::Path, SourceKind::Path) => {} - (SourceKind::Path, _) => return Ordering::Less, - (_, SourceKind::Path) => return Ordering::Greater, - - (SourceKind::Registry, SourceKind::Registry) => {} - (SourceKind::Registry, _) => return Ordering::Less, - (_, SourceKind::Registry) => return Ordering::Greater, - - (SourceKind::LocalRegistry, SourceKind::LocalRegistry) => {} - (SourceKind::LocalRegistry, _) => return Ordering::Less, - (_, SourceKind::LocalRegistry) => return Ordering::Greater, - - (SourceKind::Directory, SourceKind::Directory) => {} - (SourceKind::Directory, _) => return Ordering::Less, - (_, SourceKind::Directory) => return Ordering::Greater, - - (SourceKind::Git(a), SourceKind::Git(b)) => { - use GitReference::*; - let ord = match (a, b) { - (Tag(a), Tag(b)) => a.cmp(b), - (Tag(_), _) => Ordering::Less, - (_, Tag(_)) => Ordering::Greater, - - (Rev(a), Rev(b)) => a.cmp(b), - (Rev(_), _) => Ordering::Less, - (_, Rev(_)) => Ordering::Greater, - - // See module comments in src/cargo/sources/git/utils.rs - // for why `DefaultBranch` is treated specially here. - (Branch(a), DefaultBranch) => a.as_str().cmp("master"), - (DefaultBranch, Branch(b)) => "master".cmp(b), - (Branch(a), Branch(b)) => a.cmp(b), - (DefaultBranch, DefaultBranch) => Ordering::Equal, - }; - if ord != Ordering::Equal { - return ord; - } - } + match self.inner.kind.cmp(&other.inner.kind) { + Ordering::Equal => {} + other => return other, } // If the `kind` and the `url` are equal, then for git sources we also @@ -509,43 +473,9 @@ // The hash of SourceId is used in the name of some Cargo folders, so shouldn't // vary. `as_str` gives the serialisation of a url (which has a spec) and so // insulates against possible changes in how the url crate does hashing. 
-// -// Note that the semi-funky hashing here is done to handle `DefaultBranch` -// hashing the same as `"master"`, and also to hash the same as previous -// versions of Cargo while it's somewhat convenient to do so (that way all -// versions of Cargo use the same checkout). impl Hash for SourceId { fn hash(&self, into: &mut S) { - match &self.inner.kind { - SourceKind::Git(GitReference::Tag(a)) => { - 0usize.hash(into); - 0usize.hash(into); - a.hash(into); - } - SourceKind::Git(GitReference::Branch(a)) => { - 0usize.hash(into); - 1usize.hash(into); - a.hash(into); - } - // For now hash `DefaultBranch` the same way as `Branch("master")`, - // and for more details see module comments in - // src/cargo/sources/git/utils.rs for why `DefaultBranch` - SourceKind::Git(GitReference::DefaultBranch) => { - 0usize.hash(into); - 1usize.hash(into); - "master".hash(into); - } - SourceKind::Git(GitReference::Rev(a)) => { - 0usize.hash(into); - 2usize.hash(into); - a.hash(into); - } - - SourceKind::Path => 1usize.hash(into), - SourceKind::Registry => 2usize.hash(into), - SourceKind::LocalRegistry => 3usize.hash(into), - SourceKind::Directory => 4usize.hash(into), - } + self.inner.kind.hash(into); match self.inner.kind { SourceKind::Git(_) => self.inner.canonical_url.hash(into), _ => self.inner.url.as_str().hash(into), @@ -553,6 +483,110 @@ } } +// forward to `Ord` +impl PartialOrd for SourceKind { + fn partial_cmp(&self, other: &SourceKind) -> Option { + Some(self.cmp(other)) + } +} + +// Note that this is specifically not derived on `SourceKind` although the +// implementation here is very similar to what it might look like if it were +// otherwise derived. +// +// The reason for this is somewhat obtuse. First of all the hash value of +// `SourceKind` makes its way into `~/.cargo/registry/index/github.com-XXXX` +// which means that changes to the hash means that all Rust users need to +// redownload the crates.io index and all their crates. 
If possible we strive to +// not change this to make this redownloading behavior happen as little as +// possible. How is this connected to `Ord` you might ask? That's a good +// question! +// +// Since the beginning of time `SourceKind` has had `#[derive(Hash)]`. It for +// the longest time *also* derived the `Ord` and `PartialOrd` traits. In #8522, +// however, the implementation of `Ord` changed. This handwritten implementation +// forgot to sync itself with the originally derived implementation, namely +// placing git dependencies as sorted after all other dependencies instead of +// first as before. +// +// This regression in #8522 (Rust 1.47) went unnoticed. When we switched back +// to a derived implementation in #9133 (Rust 1.52 beta) we only then ironically +// saw an issue (#9334). In #9334 it was observed that stable Rust at the time +// (1.51) was sorting git dependencies last, whereas Rust 1.52 beta would sort +// git dependencies first. This is because the `PartialOrd` implementation in +// 1.51 used #8522, the buggy implementation, which put git deps last. In 1.52 +// it was (unknowingly) restored to the pre-1.47 behavior with git dependencies +// first. +// +// Because the breakage was only witnessed after the original breakage, this +// trait implementation is preserving the "broken" behavior. Put a different way: +// +// * Rust pre-1.47 sorted git deps first. +// * Rust 1.47 to Rust 1.51 sorted git deps last, a breaking change (#8522) that +// was never noticed. +// * Rust 1.52 restored the pre-1.47 behavior (#9133, without knowing it did +// so), and breakage was witnessed by actual users due to difference with +// 1.51. +// * Rust 1.52 (the source as it lives now) was fixed to match the 1.47-1.51 +// behavior (#9383), which is now considered intentionally breaking from the +// pre-1.47 behavior. +// +// Note that this was all discovered when Rust 1.53 was in nightly and 1.52 was +// in beta. 
#9133 was in both beta and nightly at the time of discovery. For +// 1.52 #9383 reverted #9133, meaning 1.52 is the same as 1.51. On nightly +// (1.53) #9397 was created to fix the regression introduced by #9133 relative +// to the current stable (1.51). +// +// That's all a long winded way of saying "it's weird that git deps hash first +// and are sorted last, but it's the way it is right now". The author of this +// comment chose to handwrite the `Ord` implementation instead of the `Hash` +// implementation, but it's only required that at most one of them is +// hand-written because the other can be derived. Perhaps one day in +// the future someone can figure out how to remove this behavior. +impl Ord for SourceKind { + fn cmp(&self, other: &SourceKind) -> Ordering { + match (self, other) { + (SourceKind::Path, SourceKind::Path) => Ordering::Equal, + (SourceKind::Path, _) => Ordering::Less, + (_, SourceKind::Path) => Ordering::Greater, + + (SourceKind::Registry, SourceKind::Registry) => Ordering::Equal, + (SourceKind::Registry, _) => Ordering::Less, + (_, SourceKind::Registry) => Ordering::Greater, + + (SourceKind::LocalRegistry, SourceKind::LocalRegistry) => Ordering::Equal, + (SourceKind::LocalRegistry, _) => Ordering::Less, + (_, SourceKind::LocalRegistry) => Ordering::Greater, + + (SourceKind::Directory, SourceKind::Directory) => Ordering::Equal, + (SourceKind::Directory, _) => Ordering::Less, + (_, SourceKind::Directory) => Ordering::Greater, + + (SourceKind::Git(a), SourceKind::Git(b)) => a.cmp(b), + } + } +} + +// This is a test that the hash of the `SourceId` for crates.io is a well-known +// value. +// +// Note that the hash value matches what the crates.io source id has hashed +// since long before Rust 1.30. We strive to keep this value the same across +// versions of Cargo because changing it means that users will need to +// redownload the index and all crates they use when using a new Cargo version. 
+// +// This isn't to say that this hash can *never* change, only that when changing +// this it should be explicitly done. If this hash changes accidentally and +// you're able to restore the hash to its original value, please do so! +// Otherwise please just leave a comment in your PR as to why the hash value is +// changing and why the old value can't be easily preserved. +#[test] +fn test_cratesio_hash() { + let config = Config::default().unwrap(); + let crates_io = SourceId::crates_io(&config).unwrap(); + assert_eq!(crate::util::hex::short_hash(&crates_io), "1ecc6299db9ec823"); +} + /// A `Display`able view into a `SourceId` that will write it as a url pub struct SourceIdAsUrl<'a> { inner: &'a SourceIdInner, diff -Nru cargo-0.53.0/src/cargo/core/summary.rs cargo-0.54.0/src/cargo/core/summary.rs --- cargo-0.53.0/src/cargo/core/summary.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/cargo/core/summary.rs 2021-04-27 14:35:53.000000000 +0000 @@ -250,6 +250,12 @@ feature ); } + if feature.contains('/') { + bail!( + "feature named `{}` is not allowed to contain slashes", + feature + ); + } validate_feature_name(config, pkg_id, feature)?; for fv in fvs { // Find data for the referenced dependency... @@ -316,7 +322,20 @@ ); } } - DepFeature { dep_name, weak, .. } => { + DepFeature { + dep_name, + dep_feature, + weak, + .. + } => { + // Early check for some unlikely syntax. + if dep_feature.contains('/') { + bail!( + "multiple slashes in feature `{}` (included by feature `{}`) are not allowed", + fv, + feature + ); + } // Validation of the feature name will be performed in the resolver. if !is_any_dep { bail!( @@ -362,7 +381,7 @@ } /// FeatureValue represents the types of dependencies a feature can have. -#[derive(Clone, Debug)] +#[derive(Clone, Debug, Ord, PartialOrd, Eq, PartialEq, Hash)] pub enum FeatureValue { /// A feature enabling another feature. 
Feature(InternedString), diff -Nru cargo-0.53.0/src/cargo/core/workspace.rs cargo-0.54.0/src/cargo/core/workspace.rs --- cargo-0.53.0/src/cargo/core/workspace.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/cargo/core/workspace.rs 2021-04-27 14:35:53.000000000 +0000 @@ -5,23 +5,24 @@ use std::rc::Rc; use std::slice; +use anyhow::{bail, Context as _}; use glob::glob; use log::debug; use url::Url; use crate::core::features::Features; use crate::core::registry::PackageRegistry; -use crate::core::resolver::features::RequestedFeatures; +use crate::core::resolver::features::CliFeatures; use crate::core::resolver::ResolveBehavior; -use crate::core::{Dependency, Edition, PackageId, PackageIdSpec}; +use crate::core::{Dependency, Edition, FeatureValue, PackageId, PackageIdSpec}; use crate::core::{EitherManifest, Package, SourceId, VirtualManifest}; use crate::ops; use crate::sources::{PathSource, CRATES_IO_INDEX, CRATES_IO_REGISTRY}; -use crate::util::errors::{CargoResult, CargoResultExt, ManifestError}; +use crate::util::errors::{CargoResult, ManifestError}; use crate::util::interning::InternedString; -use crate::util::paths; use crate::util::toml::{read_manifest, TomlDependency, TomlProfiles}; use crate::util::{config::ConfigRelativePath, Config, Filesystem, IntoUrl}; +use cargo_util::paths; /// The core abstraction in Cargo for working with a workspace of crates. /// @@ -152,7 +153,7 @@ ws.target_dir = config.target_dir()?; if manifest_path.is_relative() { - anyhow::bail!( + bail!( "manifest_path:{:?} is not an absolute path. Please provide an absolute path.", manifest_path ) @@ -385,7 +386,7 @@ .config .get_registry_index(url) .or_else(|_| url.into_url()) - .chain_err(|| { + .with_context(|| { format!("[patch] entry `{}` should be a URL or registry name", url) })?, }; @@ -403,7 +404,7 @@ /* platform */ None, // NOTE: Since we use ConfigRelativePath, this root isn't used as // any relative paths are resolved before they'd be joined with root. 
- &Path::new("unused-relative-path"), + Path::new("unused-relative-path"), self.unstable_features(), /* kind */ None, ) @@ -435,7 +436,7 @@ return Ok(from_manifest.clone()); } if from_manifest.is_empty() { - return Ok(from_config.clone()); + return Ok(from_config); } // We could just chain from_manifest and from_config, @@ -523,7 +524,7 @@ return Ok(Some(root_config.clone())); } - _ => anyhow::bail!( + _ => bail!( "root of a workspace inferred but wasn't a root: {}", root_path.display() ), @@ -644,7 +645,13 @@ }; for path in &members_paths { - self.find_path_deps(&path.join("Cargo.toml"), &root_manifest_path, false)?; + self.find_path_deps(&path.join("Cargo.toml"), &root_manifest_path, false) + .with_context(|| { + format!( + "failed to load manifest for workspace member `{}`", + path.display() + ) + })?; } if let Some(default) = default_members_paths { @@ -663,7 +670,7 @@ if exclude { continue; } - anyhow::bail!( + bail!( "package `{}` is listed in workspace’s default-members \ but is not a member.", path.display() @@ -718,14 +725,15 @@ self.member_ids.insert(pkg.package_id()); pkg.dependencies() .iter() - .map(|d| d.source_id()) - .filter(|d| d.is_path()) - .filter_map(|d| d.url().to_file_path().ok()) - .map(|p| p.join("Cargo.toml")) + .map(|d| (d.source_id(), d.package_name())) + .filter(|(s, _)| s.is_path()) + .filter_map(|(s, n)| s.url().to_file_path().ok().map(|p| (p, n))) + .map(|(p, n)| (p.join("Cargo.toml"), n)) .collect::>() }; - for candidate in candidates { - self.find_path_deps(&candidate, root_manifest, true) + for (path, name) in candidates { + self.find_path_deps(&path, root_manifest, true) + .with_context(|| format!("failed to load manifest for dependency `{}`", name)) .map_err(|err| ManifestError::new(err, manifest_path.clone()))?; } Ok(()) @@ -785,7 +793,7 @@ MaybePackage::Virtual(_) => continue, }; if let Some(prev) = names.insert(name, member) { - anyhow::bail!( + bail!( "two packages named `{}` in this workspace:\n\ - {}\n\ - {}", @@ -810,7 
+818,7 @@ .collect(); match roots.len() { 1 => Ok(()), - 0 => anyhow::bail!( + 0 => bail!( "`package.workspace` configuration points to a crate \ which is not configured with [workspace]: \n\ configuration at: {}\n\ @@ -819,7 +827,7 @@ self.root_manifest.as_ref().unwrap().display() ), _ => { - anyhow::bail!( + bail!( "multiple workspace roots found in the same workspace:\n{}", roots .iter() @@ -840,7 +848,7 @@ match root { Some(root) => { - anyhow::bail!( + bail!( "package `{}` is a member of the wrong workspace\n\ expected: {}\n\ actual: {}", @@ -850,7 +858,7 @@ ); } None => { - anyhow::bail!( + bail!( "workspace member `{}` is not hierarchically below \ the workspace root `{}`", member.display(), @@ -907,7 +915,7 @@ } } }; - anyhow::bail!( + bail!( "current package believes it's in a workspace when it's not:\n\ current: {}\n\ workspace: {}\n\n{}\n\ @@ -963,7 +971,7 @@ pub fn load(&self, manifest_path: &Path) -> CargoResult { match self.packages.maybe_get(manifest_path) { Some(&MaybePackage::Package(ref p)) => return Ok(p.clone()), - Some(&MaybePackage::Virtual(_)) => anyhow::bail!("cannot load workspace root"), + Some(&MaybePackage::Virtual(_)) => bail!("cannot load workspace root"), None => {} } @@ -1046,10 +1054,10 @@ pub fn members_with_features( &self, specs: &[PackageIdSpec], - requested_features: &RequestedFeatures, - ) -> CargoResult> { + cli_features: &CliFeatures, + ) -> CargoResult> { assert!( - !specs.is_empty() || requested_features.all_features, + !specs.is_empty() || cli_features.all_features, "no specs requires all_features" ); if specs.is_empty() { @@ -1057,13 +1065,13 @@ // all features enabled. 
return Ok(self .members() - .map(|m| (m, RequestedFeatures::new_all(true))) + .map(|m| (m, CliFeatures::new_all(true))) .collect()); } if self.allows_new_cli_feature_behavior() { - self.members_with_features_new(specs, requested_features) + self.members_with_features_new(specs, cli_features) } else { - Ok(self.members_with_features_old(specs, requested_features)) + Ok(self.members_with_features_old(specs, cli_features)) } } @@ -1072,17 +1080,17 @@ fn members_with_features_new( &self, specs: &[PackageIdSpec], - requested_features: &RequestedFeatures, - ) -> CargoResult> { + cli_features: &CliFeatures, + ) -> CargoResult> { // Keep track of which features matched *any* member, to produce an error // if any of them did not match anywhere. - let mut found: BTreeSet = BTreeSet::new(); + let mut found: BTreeSet = BTreeSet::new(); // Returns the requested features for the given member. // This filters out any named features that the member does not have. - let mut matching_features = |member: &Package| -> RequestedFeatures { - if requested_features.features.is_empty() || requested_features.all_features { - return requested_features.clone(); + let mut matching_features = |member: &Package| -> CliFeatures { + if cli_features.features.is_empty() || cli_features.all_features { + return cli_features.clone(); } // Only include features this member defines. let summary = member.summary(); @@ -1098,40 +1106,54 @@ .any(|dep| dep.is_optional() && dep.name_in_toml() == feature) }; - for feature in requested_features.features.iter() { - let mut split = feature.splitn(2, '/'); - let split = (split.next().unwrap(), split.next()); - if let (pkg, Some(pkg_feature)) = split { - let pkg = InternedString::new(pkg); - let pkg_feature = InternedString::new(pkg_feature); - if summary - .dependencies() - .iter() - .any(|dep| dep.name_in_toml() == pkg) - { - // pkg/feat for a dependency. - // Will rely on the dependency resolver to validate `feat`. 
- features.insert(*feature); - found.insert(*feature); - } else if pkg == member.name() && contains(pkg_feature) { - // member/feat where "feat" is a feature in member. - features.insert(pkg_feature); - found.insert(*feature); + for feature in cli_features.features.iter() { + match feature { + FeatureValue::Feature(f) => { + if contains(*f) { + // feature exists in this member. + features.insert(feature.clone()); + found.insert(feature.clone()); + } + } + // This should be enforced by CliFeatures. + FeatureValue::Dep { .. } + | FeatureValue::DepFeature { + dep_prefix: true, .. + } => panic!("unexpected dep: syntax {}", feature), + FeatureValue::DepFeature { + dep_name, + dep_feature, + dep_prefix: _, + weak: _, + } => { + if summary + .dependencies() + .iter() + .any(|dep| dep.name_in_toml() == *dep_name) + { + // pkg/feat for a dependency. + // Will rely on the dependency resolver to validate `dep_feature`. + features.insert(feature.clone()); + found.insert(feature.clone()); + } else if *dep_name == member.name() && contains(*dep_feature) { + // member/feat where "feat" is a feature in member. + // + // `weak` can be ignored here, because the member + // either is or isn't being built. + features.insert(FeatureValue::Feature(*dep_feature)); + found.insert(feature.clone()); + } } - } else if contains(*feature) { - // feature exists in this member. - features.insert(*feature); - found.insert(*feature); } } - RequestedFeatures { + CliFeatures { features: Rc::new(features), all_features: false, - uses_default_features: requested_features.uses_default_features, + uses_default_features: cli_features.uses_default_features, } }; - let members: Vec<(&Package, RequestedFeatures)> = self + let members: Vec<(&Package, CliFeatures)> = self .members() .filter(|m| specs.iter().any(|spec| spec.matches(m.package_id()))) .map(|m| (m, matching_features(m))) @@ -1139,27 +1161,28 @@ if members.is_empty() { // `cargo build -p foo`, where `foo` is not a member. 
// Do not allow any command-line flags (defaults only). - if !(requested_features.features.is_empty() - && !requested_features.all_features - && requested_features.uses_default_features) + if !(cli_features.features.is_empty() + && !cli_features.all_features + && cli_features.uses_default_features) { - anyhow::bail!("cannot specify features for packages outside of workspace"); + bail!("cannot specify features for packages outside of workspace"); } // Add all members from the workspace so we can ensure `-p nonmember` // is in the resolve graph. return Ok(self .members() - .map(|m| (m, RequestedFeatures::new_all(false))) + .map(|m| (m, CliFeatures::new_all(false))) .collect()); } - if *requested_features.features != found { - let missing: Vec<_> = requested_features + if *cli_features.features != found { + let mut missing: Vec<_> = cli_features .features .difference(&found) - .copied() + .map(|fv| fv.to_string()) .collect(); + missing.sort(); // TODO: typo suggestions would be good here. - anyhow::bail!( + bail!( "none of the selected packages contains these features: {}", missing.join(", ") ); @@ -1172,28 +1195,46 @@ fn members_with_features_old( &self, specs: &[PackageIdSpec], - requested_features: &RequestedFeatures, - ) -> Vec<(&Package, RequestedFeatures)> { + cli_features: &CliFeatures, + ) -> Vec<(&Package, CliFeatures)> { // Split off any features with the syntax `member-name/feature-name` into a map // so that those features can be applied directly to those workspace-members. - let mut member_specific_features: HashMap<&str, BTreeSet> = HashMap::new(); + let mut member_specific_features: HashMap> = + HashMap::new(); // Features for the member in the current directory. 
let mut cwd_features = BTreeSet::new(); - for feature in requested_features.features.iter() { - if let Some(index) = feature.find('/') { - let name = &feature[..index]; - let is_member = self.members().any(|member| member.name() == name); - if is_member && specs.iter().any(|spec| spec.name() == name) { - member_specific_features - .entry(name) - .or_default() - .insert(InternedString::new(&feature[index + 1..])); - } else { - cwd_features.insert(*feature); + for feature in cli_features.features.iter() { + match feature { + FeatureValue::Feature(_) => { + cwd_features.insert(feature.clone()); } - } else { - cwd_features.insert(*feature); - }; + // This should be enforced by CliFeatures. + FeatureValue::Dep { .. } + | FeatureValue::DepFeature { + dep_prefix: true, .. + } => panic!("unexpected dep: syntax {}", feature), + FeatureValue::DepFeature { + dep_name, + dep_feature, + dep_prefix: _, + weak: _, + } => { + // I think weak can be ignored here. + // * With `--features member?/feat -p member`, the ? doesn't + // really mean anything (either the member is built or it isn't). + // * With `--features nonmember?/feat`, cwd_features will + // handle processing it correctly. + let is_member = self.members().any(|member| member.name() == *dep_name); + if is_member && specs.iter().any(|spec| spec.name() == *dep_name) { + member_specific_features + .entry(*dep_name) + .or_default() + .insert(FeatureValue::Feature(*dep_feature)); + } else { + cwd_features.insert(feature.clone()); + } + } + } } let ms = self.members().filter_map(|member| { @@ -1202,10 +1243,10 @@ // The features passed on the command-line only apply to // the "current" package (determined by the cwd). 
Some(current) if member_id == current.package_id() => { - let feats = RequestedFeatures { + let feats = CliFeatures { features: Rc::new(cwd_features.clone()), - all_features: requested_features.all_features, - uses_default_features: requested_features.uses_default_features, + all_features: cli_features.all_features, + uses_default_features: cli_features.uses_default_features, }; Some((member, feats)) } @@ -1221,14 +1262,14 @@ // "current" package. As an extension, this allows // member-name/feature-name to set member-specific // features, which should be backwards-compatible. - let feats = RequestedFeatures { + let feats = CliFeatures { features: Rc::new( member_specific_features .remove(member.name().as_str()) .unwrap_or_default(), ), uses_default_features: true, - all_features: requested_features.all_features, + all_features: cli_features.all_features, }; Some((member, feats)) } else { @@ -1381,12 +1422,9 @@ Some(p) => p, None => return Ok(Vec::new()), }; - let res = - glob(path).chain_err(|| anyhow::format_err!("could not parse pattern `{}`", &path))?; + let res = glob(path).with_context(|| format!("could not parse pattern `{}`", &path))?; let res = res - .map(|p| { - p.chain_err(|| anyhow::format_err!("unable to match path to pattern `{}`", &path)) - }) + .map(|p| p.with_context(|| format!("unable to match path to pattern `{}`", &path))) .collect::, _>>()?; Ok(res) } diff -Nru cargo-0.53.0/src/cargo/lib.rs cargo-0.54.0/src/cargo/lib.rs --- cargo-0.53.0/src/cargo/lib.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/cargo/lib.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,34 +1,12 @@ -#![cfg_attr(test, deny(warnings))] -// While we're getting used to 2018: +// For various reasons, some idioms are still allow'ed, but we would like to +// test and enforce them. #![warn(rust_2018_idioms)] -// Clippy isn't enforced by CI (@alexcrichton isn't a fan). 
-#![allow(clippy::blacklisted_name)] // frequently used in tests -#![allow(clippy::cognitive_complexity)] // large project -#![allow(clippy::derive_hash_xor_eq)] // there's an intentional incoherence -#![allow(clippy::explicit_into_iter_loop)] // explicit loops are clearer -#![allow(clippy::explicit_iter_loop)] // explicit loops are clearer -#![allow(clippy::identity_op)] // used for vertical alignment -#![allow(clippy::implicit_hasher)] // large project -#![allow(clippy::large_enum_variant)] // large project -#![allow(clippy::new_without_default)] // explicit is maybe clearer -#![allow(clippy::redundant_closure)] // closures can be less verbose -#![allow(clippy::redundant_closure_call)] // closures over try catch blocks -#![allow(clippy::too_many_arguments)] // large project -#![allow(clippy::type_complexity)] // there's an exceptionally complex type -#![allow(clippy::wrong_self_convention)] // perhaps `Rc` should be special-cased in Clippy? -#![allow(clippy::write_with_newline)] // too pedantic -#![allow(clippy::inefficient_to_string)] // this causes suggestions that result in `(*s).to_string()` -#![allow(clippy::collapsible_if)] // too pedantic +#![cfg_attr(test, deny(warnings))] +// Due to some of the default clippy lints being somewhat subjective and not +// necessarily an improvement, we prefer to not use them at this time. +#![allow(clippy::all)] #![warn(clippy::needless_borrow)] -// Unit is now interned, and would probably be better as pass-by-copy, but -// doing so causes a lot of & and * shenanigans that makes the code arguably -// less clear and harder to read. 
-#![allow(clippy::trivially_copy_pass_by_ref)] -// exhaustively destructuring ensures future fields are handled -#![allow(clippy::unneeded_field_pattern)] -// false positives in target-specific code, for details see -// https://github.com/rust-lang/cargo/pull/7251#pullrequestreview-274914270 -#![allow(clippy::useless_conversion)] +#![warn(clippy::redundant_clone)] use crate::core::shell::Verbosity::Verbose; use crate::core::Shell; diff -Nru cargo-0.53.0/src/cargo/ops/cargo_clean.rs cargo-0.54.0/src/cargo/ops/cargo_clean.rs --- cargo-0.53.0/src/cargo/ops/cargo_clean.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/cargo/ops/cargo_clean.rs 2021-04-27 14:35:53.000000000 +0000 @@ -2,11 +2,13 @@ use crate::core::profiles::Profiles; use crate::core::{PackageIdSpec, TargetKind, Workspace}; use crate::ops; -use crate::util::errors::{CargoResult, CargoResultExt}; +use crate::util::errors::CargoResult; use crate::util::interning::InternedString; use crate::util::lev_distance; -use crate::util::paths; use crate::util::Config; + +use anyhow::Context as _; +use cargo_util::paths; use std::fs; use std::path::Path; @@ -222,14 +224,12 @@ config .shell() .verbose(|shell| shell.status("Removing", path.display()))?; - paths::remove_dir_all(path) - .chain_err(|| anyhow::format_err!("could not remove build directory"))?; + paths::remove_dir_all(path).with_context(|| "could not remove build directory")?; } else if m.is_ok() { config .shell() .verbose(|shell| shell.status("Removing", path.display()))?; - paths::remove_file(path) - .chain_err(|| anyhow::format_err!("failed to remove build artifact"))?; + paths::remove_file(path).with_context(|| "failed to remove build artifact")?; } Ok(()) } diff -Nru cargo-0.53.0/src/cargo/ops/cargo_compile.rs cargo-0.54.0/src/cargo/ops/cargo_compile.rs --- cargo-0.53.0/src/cargo/ops/cargo_compile.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/cargo/ops/cargo_compile.rs 2021-04-27 14:35:53.000000000 +0000 @@ -33,8 +33,8 @@ use 
crate::core::compiler::{CompileKind, CompileMode, CompileTarget, RustcTargetData, Unit}; use crate::core::compiler::{DefaultExecutor, Executor, UnitInterner}; use crate::core::profiles::{Profiles, UnitFor}; -use crate::core::resolver::features::{self, FeaturesFor, RequestedFeatures}; -use crate::core::resolver::{HasDevUnits, Resolve, ResolveOpts}; +use crate::core::resolver::features::{self, CliFeatures, FeaturesFor}; +use crate::core::resolver::{HasDevUnits, Resolve}; use crate::core::{FeatureValue, Package, PackageSet, Shell, Summary, Target}; use crate::core::{PackageId, PackageIdSpec, SourceId, TargetKind, Workspace}; use crate::drop_println; @@ -59,12 +59,8 @@ pub struct CompileOptions { /// Configuration information for a rustc build pub build_config: BuildConfig, - /// Extra features to build for the root package - pub features: Vec, - /// Flag whether all available features should be built for the root package - pub all_features: bool, - /// Flag if the default feature should be built for the root package - pub no_default_features: bool, + /// Feature flags requested by the user. + pub cli_features: CliFeatures, /// A set of packages to build. 
pub spec: Packages, /// Filter to apply to the root package to select which targets will be @@ -89,9 +85,7 @@ pub fn new(config: &Config, mode: CompileMode) -> CargoResult { Ok(CompileOptions { build_config: BuildConfig::new(config, None, &[], mode)?, - features: Vec::new(), - all_features: false, - no_default_features: false, + cli_features: CliFeatures::new_all(false), spec: ops::Packages::Packages(Vec::new()), filter: CompileFilter::Default { required_features_filterable: false, @@ -334,9 +328,7 @@ let CompileOptions { ref build_config, ref spec, - ref features, - all_features, - no_default_features, + ref cli_features, ref filter, ref target_rustdoc_args, ref target_rustc_args, @@ -372,11 +364,6 @@ let target_data = RustcTargetData::new(ws, &build_config.requested_kinds)?; let specs = spec.to_package_id_specs(ws)?; - let dev_deps = ws.require_optional_deps() || filter.need_dev_deps(build_config.mode); - let opts = ResolveOpts::new( - dev_deps, - RequestedFeatures::from_command_line(features, all_features, !no_default_features), - ); let has_dev_units = if filter.need_dev_deps(build_config.mode) { HasDevUnits::Yes } else { @@ -386,7 +373,7 @@ ws, &target_data, &build_config.requested_kinds, - &opts, + cli_features, &specs, has_dev_units, crate::core::resolver::features::ForceAllTargets::No, @@ -480,11 +467,17 @@ }) .collect(); + // Passing `build_config.requested_kinds` instead of + // `explicit_host_kinds` here so that `generate_targets` can do + // its own special handling of `CompileKind::Host`. It will + // internally replace the host kind by the `explicit_host_kind` + // before setting as a unit. let mut units = generate_targets( ws, &to_builds, filter, - &explicit_host_kinds, + &build_config.requested_kinds, + explicit_host_kind, build_config.mode, &resolve, &workspace_resolve, @@ -804,12 +797,13 @@ } pub fn is_all_targets(&self) -> bool { - match *self { + matches!( + *self, CompileFilter::Only { - all_targets: true, .. 
- } => true, - _ => false, - } + all_targets: true, + .. + } + ) } pub(crate) fn contains_glob_patterns(&self) -> bool { @@ -854,6 +848,7 @@ packages: &[&Package], filter: &CompileFilter, requested_kinds: &[CompileKind], + explicit_host_kind: CompileKind, mode: CompileMode, resolve: &Resolve, workspace_resolve: &Option, @@ -922,19 +917,40 @@ }; let is_local = pkg.package_id().source_id().is_path(); - let profile = profiles.get_profile( - pkg.package_id(), - ws.is_member(pkg), - is_local, - unit_for, - target_mode, - ); // No need to worry about build-dependencies, roots are never build dependencies. let features_for = FeaturesFor::from_for_host(target.proc_macro()); let features = resolved_features.activated_features(pkg.package_id(), features_for); - for kind in requested_kinds { + // If `--target` has not been specified, then the unit + // graph is built almost like if `--target $HOST` was + // specified. See `rebuild_unit_graph_shared` for more on + // why this is done. However, if the package has its own + // `package.target` key, then this gets used instead of + // `$HOST` + let explicit_kinds = if let Some(k) = pkg.manifest().forced_kind() { + vec![k] + } else { + requested_kinds + .iter() + .map(|kind| match kind { + CompileKind::Host => { + pkg.manifest().default_kind().unwrap_or(explicit_host_kind) + } + CompileKind::Target(t) => CompileKind::Target(*t), + }) + .collect() + }; + + for kind in explicit_kinds.iter() { + let profile = profiles.get_profile( + pkg.package_id(), + ws.is_member(pkg), + is_local, + unit_for, + target_mode, + *kind, + ); let unit = interner.intern( pkg, target, diff -Nru cargo-0.53.0/src/cargo/ops/cargo_config.rs cargo-0.54.0/src/cargo/ops/cargo_config.rs --- cargo-0.53.0/src/cargo/ops/cargo_config.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.54.0/src/cargo/ops/cargo_config.rs 2021-04-27 14:35:53.000000000 +0000 @@ -0,0 +1,308 @@ +//! Implementation of `cargo config` subcommand. 
+ +use crate::util::config::{Config, ConfigKey, ConfigValue as CV, Definition}; +use crate::util::errors::CargoResult; +use crate::{drop_eprintln, drop_println}; +use anyhow::{bail, format_err, Error}; +use serde_json::json; +use std::borrow::Cow; +use std::fmt; +use std::str::FromStr; + +pub enum ConfigFormat { + Toml, + Json, + JsonValue, +} + +impl ConfigFormat { + /// For clap. + pub const POSSIBLE_VALUES: &'static [&'static str] = &["toml", "json", "json-value"]; +} + +impl FromStr for ConfigFormat { + type Err = Error; + fn from_str(s: &str) -> CargoResult { + match s { + "toml" => Ok(ConfigFormat::Toml), + "json" => Ok(ConfigFormat::Json), + "json-value" => Ok(ConfigFormat::JsonValue), + f => bail!("unknown config format `{}`", f), + } + } +} + +impl fmt::Display for ConfigFormat { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match *self { + ConfigFormat::Toml => write!(f, "toml"), + ConfigFormat::Json => write!(f, "json"), + ConfigFormat::JsonValue => write!(f, "json-value"), + } + } +} + +/// Options for `cargo config get`. +pub struct GetOptions<'a> { + pub key: Option<&'a str>, + pub format: ConfigFormat, + pub show_origin: bool, + pub merged: bool, +} + +pub fn get(config: &Config, opts: &GetOptions<'_>) -> CargoResult<()> { + if opts.show_origin { + if !matches!(opts.format, ConfigFormat::Toml) { + bail!( + "the `{}` format does not support --show-origin, try the `toml` format instead", + opts.format + ); + } + } + let key = match opts.key { + Some(key) => ConfigKey::from_str(key), + None => ConfigKey::new(), + }; + if opts.merged { + let cv = config + .get_cv_with_env(&key)? 
+ .ok_or_else(|| format_err!("config value `{}` is not set", key))?; + match opts.format { + ConfigFormat::Toml => print_toml(config, opts, &key, &cv), + ConfigFormat::Json => print_json(config, &key, &cv, true), + ConfigFormat::JsonValue => print_json(config, &key, &cv, false), + } + if let Some(env) = maybe_env(config, &key, &cv) { + match opts.format { + ConfigFormat::Toml => print_toml_env(config, &env), + ConfigFormat::Json | ConfigFormat::JsonValue => print_json_env(config, &env), + } + } + } else { + match &opts.format { + ConfigFormat::Toml => print_toml_unmerged(config, opts, &key)?, + format => bail!( + "the `{}` format does not support --merged=no, try the `toml` format instead", + format + ), + } + } + Ok(()) +} + +/// Checks for environment variables that might be used. +fn maybe_env<'config>( + config: &'config Config, + key: &ConfigKey, + cv: &CV, +) -> Option> { + // Only fetching a table is unable to load env values. Leaf entries should + // work properly. + match cv { + CV::Table(_map, _def) => {} + _ => return None, + } + let mut env: Vec<_> = config + .env() + .iter() + .filter(|(env_key, _val)| env_key.starts_with(&format!("{}_", key.as_env_key()))) + .collect(); + env.sort_by_key(|x| x.0); + if env.is_empty() { + None + } else { + Some(env) + } +} + +fn print_toml(config: &Config, opts: &GetOptions<'_>, key: &ConfigKey, cv: &CV) { + let origin = |def: &Definition| -> String { + if !opts.show_origin { + return "".to_string(); + } + format!(" # {}", def) + }; + match cv { + CV::Boolean(val, def) => drop_println!(config, "{} = {}{}", key, val, origin(def)), + CV::Integer(val, def) => drop_println!(config, "{} = {}{}", key, val, origin(def)), + CV::String(val, def) => drop_println!( + config, + "{} = {}{}", + key, + toml::to_string(&val).unwrap(), + origin(def) + ), + CV::List(vals, _def) => { + if opts.show_origin { + drop_println!(config, "{} = [", key); + for (val, def) in vals { + drop_println!(config, " {}, # {}", 
toml::to_string(&val).unwrap(), def); + } + drop_println!(config, "]"); + } else { + let vals: Vec<&String> = vals.iter().map(|x| &x.0).collect(); + drop_println!(config, "{} = {}", key, toml::to_string(&vals).unwrap()); + } + } + CV::Table(table, _def) => { + let mut key_vals: Vec<_> = table.iter().collect(); + key_vals.sort_by(|a, b| a.0.cmp(b.0)); + for (table_key, val) in key_vals { + let mut subkey = key.clone(); + // push or push_sensitive shouldn't matter here, since this is + // not dealing with environment variables. + subkey.push(table_key); + print_toml(config, opts, &subkey, val); + } + } + } +} + +fn print_toml_env(config: &Config, env: &[(&String, &String)]) { + drop_println!( + config, + "# The following environment variables may affect the loaded values." + ); + for (env_key, env_value) in env { + let val = shell_escape::escape(Cow::Borrowed(env_value)); + drop_println!(config, "# {}={}", env_key, val); + } +} + +fn print_json_env(config: &Config, env: &[(&String, &String)]) { + drop_eprintln!( + config, + "note: The following environment variables may affect the loaded values." + ); + for (env_key, env_value) in env { + let val = shell_escape::escape(Cow::Borrowed(env_value)); + drop_eprintln!(config, "{}={}", env_key, val); + } +} + +fn print_json(config: &Config, key: &ConfigKey, cv: &CV, include_key: bool) { + let json_value = if key.is_root() || !include_key { + cv_to_json(cv) + } else { + let mut parts: Vec<_> = key.parts().collect(); + let last_part = parts.pop().unwrap(); + let mut root_table = json!({}); + // Create a JSON object with nested keys up to the value being displayed. + let mut table = &mut root_table; + for part in parts { + table[part] = json!({}); + table = table.get_mut(part).unwrap(); + } + table[last_part] = cv_to_json(cv); + root_table + }; + drop_println!(config, "{}", serde_json::to_string(&json_value).unwrap()); + + // Helper for recursively converting a CV to JSON. 
+ fn cv_to_json(cv: &CV) -> serde_json::Value { + match cv { + CV::Boolean(val, _def) => json!(val), + CV::Integer(val, _def) => json!(val), + CV::String(val, _def) => json!(val), + CV::List(vals, _def) => { + let jvals: Vec<_> = vals.iter().map(|(val, _def)| json!(val)).collect(); + json!(jvals) + } + CV::Table(map, _def) => { + let mut table = json!({}); + for (key, val) in map { + table[key] = cv_to_json(val); + } + table + } + } + } +} + +fn print_toml_unmerged(config: &Config, opts: &GetOptions<'_>, key: &ConfigKey) -> CargoResult<()> { + let print_table = |cv: &CV| { + drop_println!(config, "# {}", cv.definition()); + print_toml(config, opts, &ConfigKey::new(), cv); + drop_println!(config, ""); + }; + // This removes entries from the given CV so that all that remains is the + // given key. Returns false if no entries were found. + fn trim_cv(mut cv: &mut CV, key: &ConfigKey) -> CargoResult { + for (i, part) in key.parts().enumerate() { + match cv { + CV::Table(map, _def) => { + map.retain(|key, _value| key == part); + match map.get_mut(part) { + Some(val) => cv = val, + None => return Ok(false), + } + } + _ => { + let mut key_so_far = ConfigKey::new(); + for part in key.parts().take(i) { + key_so_far.push(part); + } + bail!( + "expected table for configuration key `{}`, \ + but found {} in {}", + key_so_far, + cv.desc(), + cv.definition() + ) + } + } + } + Ok(match cv { + CV::Table(map, _def) => !map.is_empty(), + _ => true, + }) + } + + let mut cli_args = config.cli_args_as_table()?; + if trim_cv(&mut cli_args, key)? { + print_table(&cli_args); + } + + // This slurps up some extra env vars that aren't technically part of the + // "config" (or are special-cased). I'm personally fine with just keeping + // them here, though it might be confusing. 
The vars I'm aware of: + // + // * CARGO + // * CARGO_HOME + // * CARGO_NAME + // * CARGO_EMAIL + // * CARGO_INCREMENTAL + // * CARGO_TARGET_DIR + // * CARGO_CACHE_RUSTC_INFO + // + // All of these except CARGO, CARGO_HOME, and CARGO_CACHE_RUSTC_INFO are + // actually part of the config, but they are special-cased in the code. + // + // TODO: It might be a good idea to teach the Config loader to support + // environment variable aliases so that these special cases are less + // special, and will just naturally get loaded as part of the config. + let mut env: Vec<_> = config + .env() + .iter() + .filter(|(env_key, _val)| env_key.starts_with(key.as_env_key())) + .collect(); + if !env.is_empty() { + env.sort_by_key(|x| x.0); + drop_println!(config, "# Environment variables"); + for (key, value) in env { + // Displaying this in "shell" syntax instead of TOML, since that + // somehow makes more sense to me. + let val = shell_escape::escape(Cow::Borrowed(value)); + drop_println!(config, "# {}={}", key, val); + } + drop_println!(config, ""); + } + + let unmerged = config.load_values_unmerged()?; + for mut cv in unmerged { + if trim_cv(&mut cv, key)? { + print_table(&cv); + } + } + Ok(()) +} diff -Nru cargo-0.53.0/src/cargo/ops/cargo_doc.rs cargo-0.54.0/src/cargo/ops/cargo_doc.rs --- cargo-0.53.0/src/cargo/ops/cargo_doc.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/cargo/ops/cargo_doc.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,5 +1,5 @@ use crate::core::compiler::RustcTargetData; -use crate::core::resolver::{features::RequestedFeatures, HasDevUnits, ResolveOpts}; +use crate::core::resolver::HasDevUnits; use crate::core::{Shell, Workspace}; use crate::ops; use crate::util::CargoResult; @@ -19,20 +19,12 @@ /// Main method for `cargo doc`. 
pub fn doc(ws: &Workspace<'_>, options: &DocOptions) -> CargoResult<()> { let specs = options.compile_opts.spec.to_package_id_specs(ws)?; - let opts = ResolveOpts::new( - /*dev_deps*/ true, - RequestedFeatures::from_command_line( - &options.compile_opts.features, - options.compile_opts.all_features, - !options.compile_opts.no_default_features, - ), - ); let target_data = RustcTargetData::new(ws, &options.compile_opts.build_config.requested_kinds)?; let ws_resolve = ops::resolve_ws_with_opts( ws, &target_data, &options.compile_opts.build_config.requested_kinds, - &opts, + &options.compile_opts.cli_features, &specs, HasDevUnits::No, crate::core::resolver::features::ForceAllTargets::No, diff -Nru cargo-0.53.0/src/cargo/ops/cargo_generate_lockfile.rs cargo-0.54.0/src/cargo/ops/cargo_generate_lockfile.rs --- cargo-0.53.0/src/cargo/ops/cargo_generate_lockfile.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/cargo/ops/cargo_generate_lockfile.rs 2021-04-27 14:35:53.000000000 +0000 @@ -4,7 +4,7 @@ use termcolor::Color::{self, Cyan, Green, Red}; use crate::core::registry::PackageRegistry; -use crate::core::resolver::ResolveOpts; +use crate::core::resolver::features::{CliFeatures, HasDevUnits}; use crate::core::{PackageId, PackageIdSpec}; use crate::core::{Resolve, SourceId, Workspace}; use crate::ops; @@ -25,7 +25,8 @@ let mut resolve = ops::resolve_with_previous( &mut registry, ws, - &ResolveOpts::everything(), + &CliFeatures::new_all(true), + HasDevUnits::Yes, None, None, &[], @@ -44,10 +45,6 @@ anyhow::bail!("you can't generate a lockfile for an empty workspace.") } - if opts.config.offline() { - anyhow::bail!("you can't update in the offline mode"); - } - // Updates often require a lot of modifications to the registry, so ensure // that we're synchronized against other Cargos. 
let _lock = ws.config().acquire_package_cache_lock()?; @@ -65,7 +62,8 @@ ops::resolve_with_previous( &mut registry, ws, - &ResolveOpts::everything(), + &CliFeatures::new_all(true), + HasDevUnits::Yes, None, None, &[], @@ -119,7 +117,8 @@ let mut resolve = ops::resolve_with_previous( &mut registry, ws, - &ResolveOpts::everything(), + &CliFeatures::new_all(true), + HasDevUnits::Yes, Some(&previous_resolve), Some(&to_avoid), &[], diff -Nru cargo-0.53.0/src/cargo/ops/cargo_install.rs cargo-0.54.0/src/cargo/ops/cargo_install.rs --- cargo-0.53.0/src/cargo/ops/cargo_install.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/cargo/ops/cargo_install.rs 2021-04-27 14:35:53.000000000 +0000 @@ -3,18 +3,19 @@ use std::sync::Arc; use std::{env, fs}; -use anyhow::{bail, format_err}; -use semver::VersionReq; -use tempfile::Builder as TempFileBuilder; - use crate::core::compiler::{CompileKind, DefaultExecutor, Executor, Freshness, UnitOutput}; use crate::core::{Dependency, Edition, Package, PackageId, Source, SourceId, Workspace}; use crate::ops::common_for_install_and_uninstall::*; use crate::sources::{GitSource, PathSource, SourceConfigMap}; -use crate::util::errors::{CargoResult, CargoResultExt}; -use crate::util::{paths, Config, Filesystem, Rustc, ToSemver}; +use crate::util::errors::CargoResult; +use crate::util::{Config, Filesystem, Rustc, ToSemver}; use crate::{drop_println, ops}; +use anyhow::{bail, format_err, Context as _}; +use cargo_util::paths; +use semver::VersionReq; +use tempfile::Builder as TempFileBuilder; + struct Transaction { bins: Vec, } @@ -350,13 +351,13 @@ check_yanked_install(&ws)?; let exec: Arc = Arc::new(DefaultExecutor); - let compile = ops::compile_ws(&ws, opts, &exec).chain_err(|| { + let compile = ops::compile_ws(&ws, opts, &exec).with_context(|| { if let Some(td) = td_opt.take() { // preserve the temporary directory, so the user can inspect it td.into_path(); } - format_err!( + format!( "failed to compile `{}`, intermediate artifacts can 
be \ found at `{}`", pkg, @@ -420,8 +421,8 @@ let src = staging_dir.path().join(bin); let dst = dst.join(bin); config.shell().status("Installing", dst.display())?; - fs::rename(&src, &dst).chain_err(|| { - format_err!("failed to move `{}` to `{}`", src.display(), dst.display()) + fs::rename(&src, &dst).with_context(|| { + format!("failed to move `{}` to `{}`", src.display(), dst.display()) })?; installed.bins.push(dst); successful_bins.insert(bin.to_string()); @@ -435,8 +436,8 @@ let src = staging_dir.path().join(bin); let dst = dst.join(bin); config.shell().status("Replacing", dst.display())?; - fs::rename(&src, &dst).chain_err(|| { - format_err!("failed to move `{}` to `{}`", src.display(), dst.display()) + fs::rename(&src, &dst).with_context(|| { + format!("failed to move `{}` to `{}`", src.display(), dst.display()) })?; successful_bins.insert(bin.to_string()); } @@ -463,7 +464,7 @@ } match tracker.save() { - Err(err) => replace_result.chain_err(|| err)?, + Err(err) => replace_result.with_context(|| err)?, Ok(_) => replace_result?, } } @@ -703,21 +704,19 @@ let all_self_names = exe_names(pkg, &filter); let mut to_remove: HashMap> = HashMap::new(); // For each package that we stomped on. - for other_pkg in duplicates.values() { + for other_pkg in duplicates.values().flatten() { // Only for packages with the same name. - if let Some(other_pkg) = other_pkg { - if other_pkg.name() == pkg.name() { - // Check what the old package had installed. - if let Some(installed) = tracker.installed_bins(*other_pkg) { - // If the old install has any names that no longer exist, - // add them to the list to remove. - for installed_name in installed { - if !all_self_names.contains(installed_name.as_str()) { - to_remove - .entry(*other_pkg) - .or_default() - .insert(installed_name.clone()); - } + if other_pkg.name() == pkg.name() { + // Check what the old package had installed. 
+ if let Some(installed) = tracker.installed_bins(*other_pkg) { + // If the old install has any names that no longer exist, + // add them to the list to remove. + for installed_name in installed { + if !all_self_names.contains(installed_name.as_str()) { + to_remove + .entry(*other_pkg) + .or_default() + .insert(installed_name.clone()); } } } @@ -738,7 +737,7 @@ ), )?; paths::remove_file(&full_path) - .chain_err(|| format!("failed to remove {:?}", full_path))?; + .with_context(|| format!("failed to remove {:?}", full_path))?; } } } diff -Nru cargo-0.53.0/src/cargo/ops/cargo_new.rs cargo-0.54.0/src/cargo/ops/cargo_new.rs --- cargo-0.53.0/src/cargo/ops/cargo_new.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/cargo/ops/cargo_new.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,13 +1,12 @@ use crate::core::{Edition, Shell, Workspace}; -use crate::util::errors::{CargoResult, CargoResultExt}; +use crate::util::errors::CargoResult; use crate::util::{existing_vcs_repo, FossilRepo, GitRepo, HgRepo, PijulRepo}; -use crate::util::{paths, restricted_names, Config}; -use git2::Config as GitConfig; -use git2::Repository as GitRepository; +use crate::util::{restricted_names, Config}; +use anyhow::Context as _; +use cargo_util::paths; use serde::de; use serde::Deserialize; use std::collections::BTreeMap; -use std::env; use std::fmt; use std::io::{BufRead, BufReader, ErrorKind}; use std::path::{Path, PathBuf}; @@ -128,8 +127,14 @@ #[derive(Deserialize)] struct CargoNewConfig { + #[deprecated = "cargo-new no longer supports adding the authors field"] + #[allow(dead_code)] name: Option, + + #[deprecated = "cargo-new no longer supports adding the authors field"] + #[allow(dead_code)] email: Option, + #[serde(rename = "vcs")] version_control: Option, } @@ -412,8 +417,8 @@ registry: opts.registry.as_deref(), }; - mk(config, &mkopts).chain_err(|| { - anyhow::format_err!( + mk(config, &mkopts).with_context(|| { + format!( "Failed to create package `{}` at `{}`", name, 
path.display() @@ -496,8 +501,8 @@ registry: opts.registry.as_deref(), }; - mk(config, &mkopts).chain_err(|| { - anyhow::format_err!( + mk(config, &mkopts).with_context(|| { + format!( "Failed to create package `{}` at `{}`", name, path.display() @@ -665,32 +670,6 @@ init_vcs(path, vcs, config)?; write_ignore_file(path, &ignore, vcs)?; - let (discovered_name, discovered_email) = discover_author(path); - - // "Name " or "Name" or "" or None if neither name nor email is obtained - // cfg takes priority over the discovered ones - let author_name = cfg.name.or(discovered_name); - let author_email = cfg.email.or(discovered_email); - - let author = match (author_name, author_email) { - (Some(name), Some(email)) => { - if email.is_empty() { - Some(name) - } else { - Some(format!("{} <{}>", name, email)) - } - } - (Some(name), None) => Some(name), - (None, Some(email)) => { - if email.is_empty() { - None - } else { - Some(format!("<{}>", email)) - } - } - (None, None) => None, - }; - let mut cargotoml_path_specifier = String::new(); // Calculate what `[lib]` and `[[bin]]`s we need to append to `Cargo.toml`. 
@@ -729,7 +708,6 @@ r#"[package] name = "{}" version = "0.1.0" -authors = [{}] edition = {} {} # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html @@ -737,10 +715,6 @@ [dependencies] {}"#, name, - match author { - Some(value) => format!("{}", toml::Value::String(value)), - None => format!(""), - }, match opts.edition { Some(edition) => toml::Value::String(edition.to_string()), None => toml::Value::String(Edition::LATEST_STABLE.to_string()), @@ -810,76 +784,3 @@ Ok(()) } - -fn get_environment_variable(variables: &[&str]) -> Option { - variables.iter().filter_map(|var| env::var(var).ok()).next() -} - -fn discover_author(path: &Path) -> (Option, Option) { - let git_config = find_git_config(path); - let git_config = git_config.as_ref(); - - let name_variables = [ - "CARGO_NAME", - "GIT_AUTHOR_NAME", - "GIT_COMMITTER_NAME", - "USER", - "USERNAME", - "NAME", - ]; - let name = get_environment_variable(&name_variables[0..3]) - .or_else(|| git_config.and_then(|g| g.get_string("user.name").ok())) - .or_else(|| get_environment_variable(&name_variables[3..])); - - let name = name.map(|namestr| namestr.trim().to_string()); - - let email_variables = [ - "CARGO_EMAIL", - "GIT_AUTHOR_EMAIL", - "GIT_COMMITTER_EMAIL", - "EMAIL", - ]; - let email = get_environment_variable(&email_variables[0..3]) - .or_else(|| git_config.and_then(|g| g.get_string("user.email").ok())) - .or_else(|| get_environment_variable(&email_variables[3..])); - - let email = email.map(|s| { - let mut s = s.trim(); - - // In some cases emails will already have <> remove them since they - // are already added when needed. 
- if s.starts_with('<') && s.ends_with('>') { - s = &s[1..s.len() - 1]; - } - - s.to_string() - }); - - (name, email) -} - -fn find_git_config(path: &Path) -> Option { - match env::var("__CARGO_TEST_ROOT") { - Ok(_) => find_tests_git_config(path), - Err(_) => find_real_git_config(path), - } -} - -fn find_tests_git_config(path: &Path) -> Option { - // Don't escape the test sandbox when looking for a git repository. - // NOTE: libgit2 has support to define the path ceiling in - // git_repository_discover, but the git2 bindings do not expose that. - for path in paths::ancestors(path, None) { - if let Ok(repo) = GitRepository::open(path) { - return Some(repo.config().expect("test repo should have valid config")); - } - } - GitConfig::open_default().ok() -} - -fn find_real_git_config(path: &Path) -> Option { - GitRepository::discover(path) - .and_then(|repo| repo.config()) - .or_else(|_| GitConfig::open_default()) - .ok() -} diff -Nru cargo-0.53.0/src/cargo/ops/cargo_output_metadata.rs cargo-0.54.0/src/cargo/ops/cargo_output_metadata.rs --- cargo-0.53.0/src/cargo/ops/cargo_output_metadata.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/cargo/ops/cargo_output_metadata.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,7 +1,7 @@ use crate::core::compiler::{CompileKind, RustcTargetData}; use crate::core::dependency::DepKind; use crate::core::package::SerializedPackage; -use crate::core::resolver::{features::RequestedFeatures, HasDevUnits, Resolve, ResolveOpts}; +use crate::core::resolver::{features::CliFeatures, HasDevUnits, Resolve}; use crate::core::{Dependency, Package, PackageId, Workspace}; use crate::ops::{self, Packages}; use crate::util::interning::InternedString; @@ -14,9 +14,7 @@ const VERSION: u32 = 1; pub struct OutputMetadataOptions { - pub features: Vec, - pub no_default_features: bool, - pub all_features: bool, + pub cli_features: CliFeatures, pub no_deps: bool, pub version: u32, pub filter_platforms: Vec, @@ -115,12 +113,6 @@ let target_data = 
RustcTargetData::new(ws, &requested_kinds)?; // Resolve entire workspace. let specs = Packages::All.to_package_id_specs(ws)?; - let requested_features = RequestedFeatures::from_command_line( - &metadata_opts.features, - metadata_opts.all_features, - !metadata_opts.no_default_features, - ); - let resolve_opts = ResolveOpts::new(/*dev_deps*/ true, requested_features); let force_all = if metadata_opts.filter_platforms.is_empty() { crate::core::resolver::features::ForceAllTargets::Yes } else { @@ -133,7 +125,7 @@ ws, &target_data, &requested_kinds, - &resolve_opts, + &metadata_opts.cli_features, &specs, HasDevUnits::Yes, force_all, @@ -179,7 +171,7 @@ pkg_id: PackageId, resolve: &Resolve, package_map: &BTreeMap, - target_data: &RustcTargetData, + target_data: &RustcTargetData<'_>, requested_kinds: &[CompileKind], ) { if node_map.contains_key(&pkg_id) { diff -Nru cargo-0.53.0/src/cargo/ops/cargo_package.rs cargo-0.54.0/src/cargo/ops/cargo_package.rs --- cargo-0.53.0/src/cargo/ops/cargo_package.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/cargo/ops/cargo_package.rs 2021-04-27 14:35:53.000000000 +0000 @@ -6,20 +6,21 @@ use std::rc::Rc; use std::sync::Arc; -use flate2::read::GzDecoder; -use flate2::{Compression, GzBuilder}; -use log::debug; -use tar::{Archive, Builder, EntryType, Header, HeaderMode}; - use crate::core::compiler::{BuildConfig, CompileMode, DefaultExecutor, Executor}; +use crate::core::resolver::CliFeatures; use crate::core::{Feature, Shell, Verbosity, Workspace}; use crate::core::{Package, PackageId, PackageSet, Resolve, Source, SourceId}; use crate::sources::PathSource; -use crate::util::errors::{CargoResult, CargoResultExt}; -use crate::util::paths; +use crate::util::errors::CargoResult; use crate::util::toml::TomlManifest; use crate::util::{self, restricted_names, Config, FileLock}; use crate::{drop_println, ops}; +use anyhow::Context as _; +use cargo_util::paths; +use flate2::read::GzDecoder; +use flate2::{Compression, GzBuilder}; +use 
log::debug; +use tar::{Archive, Builder, EntryType, Header, HeaderMode}; pub struct PackageOpts<'cfg> { pub config: &'cfg Config, @@ -29,9 +30,7 @@ pub verify: bool, pub jobs: Option, pub targets: Vec, - pub features: Vec, - pub all_features: bool, - pub no_default_features: bool, + pub cli_features: CliFeatures, } const VCS_INFO_FILE: &str = ".cargo_vcs_info.json"; @@ -124,17 +123,17 @@ .status("Packaging", pkg.package_id().to_string())?; dst.file().set_len(0)?; tar(ws, ar_files, dst.file(), &filename) - .chain_err(|| anyhow::format_err!("failed to prepare local package for uploading"))?; + .with_context(|| "failed to prepare local package for uploading")?; if opts.verify { dst.seek(SeekFrom::Start(0))?; - run_verify(ws, &dst, opts).chain_err(|| "failed to verify package tarball")? + run_verify(ws, &dst, opts).with_context(|| "failed to verify package tarball")? } dst.seek(SeekFrom::Start(0))?; { let src_path = dst.path(); let dst_path = dst.parent().join(&filename); fs::rename(&src_path, &dst_path) - .chain_err(|| "failed to move temporary tarball into final location")?; + .with_context(|| "failed to move temporary tarball into final location")?; } Ok(Some(dst)) } @@ -480,7 +479,7 @@ // Prepare the encoder and its header. let filename = Path::new(filename); let encoder = GzBuilder::new() - .filename(util::path2bytes(filename)?) + .filename(paths::path2bytes(filename)?) .write(dst, Compression::best()); // Put all package files into a compressed archive. 
@@ -503,16 +502,16 @@ let mut header = Header::new_gnu(); match contents { FileContents::OnDisk(disk_path) => { - let mut file = File::open(&disk_path).chain_err(|| { + let mut file = File::open(&disk_path).with_context(|| { format!("failed to open for archiving: `{}`", disk_path.display()) })?; - let metadata = file.metadata().chain_err(|| { + let metadata = file.metadata().with_context(|| { format!("could not learn metadata for: `{}`", disk_path.display()) })?; header.set_metadata_in_mode(&metadata, HeaderMode::Deterministic); header.set_cksum(); ar.append_data(&mut header, &ar_path, &mut file) - .chain_err(|| { + .with_context(|| { format!("could not archive source file `{}`", disk_path.display()) })?; } @@ -527,7 +526,7 @@ header.set_size(contents.len() as u64); header.set_cksum(); ar.append_data(&mut header, &ar_path, contents.as_bytes()) - .chain_err(|| format!("could not archive source file `{}`", rel_str))?; + .with_context(|| format!("could not archive source file `{}`", rel_str))?; } } } @@ -690,9 +689,7 @@ &ws, &ops::CompileOptions { build_config: BuildConfig::new(config, opts.jobs, &opts.targets, CompileMode::Build)?, - features: opts.features.clone(), - no_default_features: opts.no_default_features, - all_features: opts.all_features, + cli_features: opts.cli_features.clone(), spec: ops::Packages::Packages(Vec::new()), filter: ops::CompileFilter::Default { required_features_filterable: true, @@ -743,7 +740,7 @@ } Ok(result) } - let result = wrap(path).chain_err(|| format!("failed to verify output at {:?}", path))?; + let result = wrap(path).with_context(|| format!("failed to verify output at {:?}", path))?; Ok(result) } diff -Nru cargo-0.53.0/src/cargo/ops/cargo_read_manifest.rs cargo-0.54.0/src/cargo/ops/cargo_read_manifest.rs --- cargo-0.53.0/src/cargo/ops/cargo_read_manifest.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/cargo/ops/cargo_read_manifest.rs 2021-04-27 14:35:53.000000000 +0000 @@ -3,13 +3,13 @@ use std::io; use 
std::path::{Path, PathBuf}; -use log::{info, trace}; - use crate::core::{EitherManifest, Package, PackageId, SourceId}; use crate::util::errors::CargoResult; use crate::util::important_paths::find_project_manifest_exact; use crate::util::toml::read_manifest; -use crate::util::{self, Config}; +use crate::util::Config; +use cargo_util::paths; +use log::{info, trace}; pub fn read_package( path: &Path, @@ -192,7 +192,7 @@ // TODO: filesystem/symlink implications? if !source_id.is_registry() { for p in nested.iter() { - let path = util::normalize_path(&path.join(p)); + let path = paths::normalize_path(&path.join(p)); let result = read_nested_packages(&path, all_packages, source_id, config, visited, errors); // Ignore broken manifests found on git repositories. diff -Nru cargo-0.53.0/src/cargo/ops/cargo_test.rs cargo-0.54.0/src/cargo/ops/cargo_test.rs --- cargo-0.53.0/src/cargo/ops/cargo_test.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/cargo/ops/cargo_test.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,11 +1,11 @@ -use std::ffi::OsString; - use crate::core::compiler::{Compilation, CompileKind, Doctest, UnitOutput}; use crate::core::shell::Verbosity; use crate::core::{TargetKind, Workspace}; use crate::ops; use crate::util::errors::CargoResult; -use crate::util::{add_path_args, CargoTestError, Config, ProcessError, Test}; +use crate::util::{add_path_args, CargoTestError, Config, Test}; +use cargo_util::ProcessError; +use std::ffi::OsString; pub struct TestOptions { pub compile_opts: ops::CompileOptions, diff -Nru cargo-0.53.0/src/cargo/ops/cargo_uninstall.rs cargo-0.54.0/src/cargo/ops/cargo_uninstall.rs --- cargo-0.53.0/src/cargo/ops/cargo_uninstall.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/cargo/ops/cargo_uninstall.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,15 +1,14 @@ -use anyhow::bail; -use std::collections::BTreeSet; -use std::env; - use crate::core::PackageId; use crate::core::{PackageIdSpec, SourceId}; use 
crate::ops::common_for_install_and_uninstall::*; use crate::sources::PathSource; use crate::util::errors::CargoResult; -use crate::util::paths; use crate::util::Config; use crate::util::Filesystem; +use anyhow::bail; +use cargo_util::paths; +use std::collections::BTreeSet; +use std::env; pub fn uninstall( root: Option<&str>, diff -Nru cargo-0.53.0/src/cargo/ops/common_for_install_and_uninstall.rs cargo-0.54.0/src/cargo/ops/common_for_install_and_uninstall.rs --- cargo-0.53.0/src/cargo/ops/common_for_install_and_uninstall.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/cargo/ops/common_for_install_and_uninstall.rs 2021-04-27 14:35:53.000000000 +0000 @@ -3,15 +3,16 @@ use std::io::prelude::*; use std::io::SeekFrom; use std::path::{Path, PathBuf}; +use std::rc::Rc; -use anyhow::{bail, format_err}; +use anyhow::{bail, format_err, Context as _}; use serde::{Deserialize, Serialize}; use crate::core::compiler::Freshness; -use crate::core::{Dependency, Package, PackageId, Source, SourceId}; +use crate::core::{Dependency, FeatureValue, Package, PackageId, Source, SourceId}; use crate::ops::{self, CompileFilter, CompileOptions}; use crate::sources::PathSource; -use crate::util::errors::{CargoResult, CargoResultExt}; +use crate::util::errors::CargoResult; use crate::util::Config; use crate::util::{FileLock, Filesystem}; @@ -100,12 +101,11 @@ if contents.is_empty() { Ok(CrateListingV1::default()) } else { - Ok(toml::from_str(&contents) - .chain_err(|| format_err!("invalid TOML found for metadata"))?) + Ok(toml::from_str(&contents).with_context(|| "invalid TOML found for metadata")?) } })() - .chain_err(|| { - format_err!( + .with_context(|| { + format!( "failed to parse crate metadata at `{}`", v1_lock.path().to_string_lossy() ) @@ -118,13 +118,13 @@ CrateListingV2::default() } else { serde_json::from_str(&contents) - .chain_err(|| format_err!("invalid JSON found for metadata"))? + .with_context(|| "invalid JSON found for metadata")? 
}; v2.sync_v1(&v1); Ok(v2) })() - .chain_err(|| { - format_err!( + .with_context(|| { + format!( "failed to parse crate metadata at `{}`", v2_lock.path().to_string_lossy() ) @@ -277,15 +277,15 @@ /// Save tracking information to disk. pub fn save(&self) -> CargoResult<()> { - self.v1.save(&self.v1_lock).chain_err(|| { - format_err!( + self.v1.save(&self.v1_lock).with_context(|| { + format!( "failed to write crate metadata at `{}`", self.v1_lock.path().to_string_lossy() ) })?; - self.v2.save(&self.v2_lock).chain_err(|| { - format_err!( + self.v2.save(&self.v2_lock).with_context(|| { + format!( "failed to write crate metadata at `{}`", self.v2_lock.path().to_string_lossy() ) @@ -422,9 +422,9 @@ if let Some(info) = self.installs.get_mut(&pkg.package_id()) { info.bins.append(&mut bins.clone()); info.version_req = version_req; - info.features = feature_set(&opts.features); - info.all_features = opts.all_features; - info.no_default_features = opts.no_default_features; + info.features = feature_set(&opts.cli_features.features); + info.all_features = opts.cli_features.all_features; + info.no_default_features = !opts.cli_features.uses_default_features; info.profile = opts.build_config.requested_profile.to_string(); info.target = Some(target.to_string()); info.rustc = Some(rustc.to_string()); @@ -434,9 +434,9 @@ InstallInfo { version_req, bins: bins.clone(), - features: feature_set(&opts.features), - all_features: opts.all_features, - no_default_features: opts.no_default_features, + features: feature_set(&opts.cli_features.features), + all_features: opts.cli_features.all_features, + no_default_features: !opts.cli_features.uses_default_features, profile: opts.build_config.requested_profile.to_string(), target: Some(target.to_string()), rustc: Some(rustc.to_string()), @@ -489,9 +489,9 @@ /// /// This does not do Package/Source/Version checking. 
fn is_up_to_date(&self, opts: &CompileOptions, target: &str, exes: &BTreeSet) -> bool { - self.features == feature_set(&opts.features) - && self.all_features == opts.all_features - && self.no_default_features == opts.no_default_features + self.features == feature_set(&opts.cli_features.features) + && self.all_features == opts.cli_features.all_features + && self.no_default_features != opts.cli_features.uses_default_features && self.profile.as_str() == opts.build_config.requested_profile.as_str() && (self.target.is_none() || self.target.as_deref() == Some(target)) && &self.bins == exes @@ -641,9 +641,9 @@ } } -/// Helper to convert features Vec to a BTreeSet. -fn feature_set(features: &[String]) -> BTreeSet { - features.iter().cloned().collect() +/// Helper to convert features to a BTreeSet. +fn feature_set(features: &Rc>) -> BTreeSet { + features.iter().map(|s| s.to_string()).collect() } /// Helper to get the executable names from a filter. diff -Nru cargo-0.53.0/src/cargo/ops/fix.rs cargo-0.54.0/src/cargo/ops/fix.rs --- cargo-0.53.0/src/cargo/ops/fix.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/cargo/ops/fix.rs 2021-04-27 14:35:53.000000000 +0000 @@ -46,18 +46,19 @@ use std::str; use anyhow::{bail, Context, Error}; +use cargo_util::{paths, ProcessBuilder}; use log::{debug, trace, warn}; use rustfix::diagnostics::Diagnostic; use rustfix::{self, CodeFix}; use crate::core::compiler::RustcTargetData; -use crate::core::resolver::features::{FeatureOpts, FeatureResolver, RequestedFeatures}; -use crate::core::resolver::{HasDevUnits, ResolveBehavior, ResolveOpts}; +use crate::core::resolver::features::{FeatureOpts, FeatureResolver}; +use crate::core::resolver::{HasDevUnits, ResolveBehavior}; use crate::core::{Edition, MaybePackage, Workspace}; use crate::ops::{self, CompileOptions}; use crate::util::diagnostic_server::{Message, RustfixDiagnosticServer}; use crate::util::errors::CargoResult; -use crate::util::{self, paths, Config, ProcessBuilder}; +use 
crate::util::Config; use crate::util::{existing_vcs_repo, LockServer, LockServerClient}; use crate::{drop_eprint, drop_eprintln}; @@ -84,7 +85,7 @@ // Spin up our lock server, which our subprocesses will use to synchronize fixes. let lock_server = LockServer::new()?; - let mut wrapper = util::process(env::current_exe()?); + let mut wrapper = ProcessBuilder::new(env::current_exe()?); wrapper.env(FIX_ENV, lock_server.addr().to_string()); let _started = lock_server.start()?; @@ -227,14 +228,6 @@ // 2018 without `resolver` set must be V1 assert_eq!(ws.resolve_behavior(), ResolveBehavior::V1); let specs = opts.compile_opts.spec.to_package_id_specs(ws)?; - let resolve_opts = ResolveOpts::new( - /*dev_deps*/ true, - RequestedFeatures::from_command_line( - &opts.compile_opts.features, - opts.compile_opts.all_features, - !opts.compile_opts.no_default_features, - ), - ); let target_data = RustcTargetData::new(ws, &opts.compile_opts.build_config.requested_kinds)?; // HasDevUnits::No because that may uncover more differences. // This is not the same as what `cargo fix` is doing, since it is doing @@ -243,7 +236,7 @@ ws, &target_data, &opts.compile_opts.build_config.requested_kinds, - &resolve_opts, + &opts.compile_opts.cli_features, &specs, HasDevUnits::No, crate::core::resolver::features::ForceAllTargets::No, @@ -255,7 +248,7 @@ &target_data, &ws_resolve.targeted_resolve, &ws_resolve.pkg_set, - &resolve_opts.features, + &opts.compile_opts.cli_features, &specs, &opts.compile_opts.build_config.requested_kinds, feature_opts, @@ -322,7 +315,7 @@ let workspace_rustc = std::env::var("RUSTC_WORKSPACE_WRAPPER") .map(PathBuf::from) .ok(); - let rustc = util::process(&args.rustc).wrapped(workspace_rustc.as_ref()); + let rustc = ProcessBuilder::new(&args.rustc).wrapped(workspace_rustc.as_ref()); trace!("start rustfixing {:?}", args.file); let fixes = rustfix_crate(&lock_addr, &rustc, &args.file, &args, config)?; @@ -595,7 +588,7 @@ // Attempt to read the source code for this file. 
If this fails then // that'd be pretty surprising, so log a message and otherwise keep // going. - let code = match util::paths::read(file.as_ref()) { + let code = match paths::read(file.as_ref()) { Ok(s) => s, Err(e) => { warn!("failed to read `{}`: {}", file, e); diff -Nru cargo-0.53.0/src/cargo/ops/lockfile.rs cargo-0.54.0/src/cargo/ops/lockfile.rs --- cargo-0.53.0/src/cargo/ops/lockfile.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/cargo/ops/lockfile.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,10 +1,12 @@ use std::io::prelude::*; use crate::core::{resolver, Resolve, ResolveVersion, Workspace}; -use crate::util::errors::{CargoResult, CargoResultExt}; +use crate::util::errors::CargoResult; use crate::util::toml as cargo_toml; use crate::util::Filesystem; +use anyhow::Context as _; + pub fn load_pkg_lockfile(ws: &Workspace<'_>) -> CargoResult> { if !ws.root().join("Cargo.lock").exists() { return Ok(None); @@ -15,14 +17,14 @@ let mut s = String::new(); f.read_to_string(&mut s) - .chain_err(|| format!("failed to read file: {}", f.path().display()))?; + .with_context(|| format!("failed to read file: {}", f.path().display()))?; let resolve = (|| -> CargoResult> { let resolve: toml::Value = cargo_toml::parse(&s, f.path(), ws.config())?; let v: resolver::EncodableResolve = resolve.try_into()?; Ok(Some(v.into_resolve(&s, ws)?)) })() - .chain_err(|| format!("failed to parse lock file at: {}", f.path().display()))?; + .with_context(|| format!("failed to parse lock file at: {}", f.path().display()))?; Ok(resolve) } @@ -80,7 +82,7 @@ f.write_all(out.as_bytes())?; Ok(()) }) - .chain_err(|| format!("failed to write {}", ws.root().join("Cargo.lock").display()))?; + .with_context(|| format!("failed to write {}", ws.root().join("Cargo.lock").display()))?; Ok(()) } diff -Nru cargo-0.53.0/src/cargo/ops/mod.rs cargo-0.54.0/src/cargo/ops/mod.rs --- cargo-0.53.0/src/cargo/ops/mod.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/cargo/ops/mod.rs 2021-04-27 
14:35:53.000000000 +0000 @@ -31,6 +31,7 @@ mod cargo_clean; mod cargo_compile; +pub mod cargo_config; mod cargo_doc; mod cargo_fetch; mod cargo_generate_lockfile; diff -Nru cargo-0.53.0/src/cargo/ops/registry/auth.rs cargo-0.54.0/src/cargo/ops/registry/auth.rs --- cargo-0.53.0/src/cargo/ops/registry/auth.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/cargo/ops/registry/auth.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,9 +1,9 @@ //! Registry authentication support. use crate::sources::CRATES_IO_REGISTRY; -use crate::util::{config, process_error, CargoResult, CargoResultExt, Config}; -use anyhow::bail; -use anyhow::format_err; +use crate::util::{config, CargoResult, Config}; +use anyhow::{bail, format_err, Context as _}; +use cargo_util::ProcessError; use std::io::{Read, Write}; use std::path::PathBuf; use std::process::{Command, Stdio}; @@ -134,7 +134,7 @@ } Action::Erase => {} } - let mut child = cmd.spawn().chain_err(|| { + let mut child = cmd.spawn().with_context(|| { let verb = match action { Action::Get => "fetch", Action::Store(_) => "store", @@ -157,7 +157,7 @@ .as_mut() .unwrap() .read_to_string(&mut buffer) - .chain_err(|| { + .with_context(|| { format!( "failed to read token from registry credential process `{}`", exe.display() @@ -176,7 +176,7 @@ token = Some(buffer); } Action::Store(token) => { - writeln!(child.stdin.as_ref().unwrap(), "{}", token).chain_err(|| { + writeln!(child.stdin.as_ref().unwrap(), "{}", token).with_context(|| { format!( "failed to send token to registry credential process `{}`", exe.display() @@ -185,7 +185,7 @@ } Action::Erase => {} } - let status = child.wait().chain_err(|| { + let status = child.wait().with_context(|| { format!( "registry credential process `{}` exit failure", exe.display() @@ -197,7 +197,7 @@ Action::Store(_) => "failed to store token to registry", Action::Erase => "failed to erase token from registry", }; - return Err(process_error( + return Err(ProcessError::new( &format!( "registry credential 
process `{}` {} `{}`", exe.display(), diff -Nru cargo-0.53.0/src/cargo/ops/registry.rs cargo-0.54.0/src/cargo/ops/registry.rs --- cargo-0.53.0/src/cargo/ops/registry.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/cargo/ops/registry.rs 2021-04-27 14:35:53.000000000 +0000 @@ -7,7 +7,8 @@ use std::time::Duration; use std::{cmp, env}; -use anyhow::{bail, format_err}; +use anyhow::{bail, format_err, Context as _}; +use cargo_util::paths; use crates_io::{self, NewCrate, NewCrateDependency, Registry}; use curl::easy::{Easy, InfoType, SslOpt, SslVersion}; use log::{log, Level}; @@ -15,15 +16,16 @@ use crate::core::dependency::DepKind; use crate::core::manifest::ManifestMetadata; +use crate::core::resolver::CliFeatures; use crate::core::source::Source; use crate::core::{Package, SourceId, Workspace}; use crate::ops; use crate::sources::{RegistrySource, SourceConfigMap, CRATES_IO_REGISTRY}; use crate::util::config::{self, Config, SslVersionConfig, SslVersionConfigRange}; -use crate::util::errors::{CargoResult, CargoResultExt}; +use crate::util::errors::CargoResult; use crate::util::important_paths::find_root_manifest_for_wd; +use crate::util::validate_package_name; use crate::util::IntoUrl; -use crate::util::{paths, validate_package_name}; use crate::{drop_print, drop_println, version}; mod auth; @@ -51,9 +53,7 @@ pub targets: Vec, pub dry_run: bool, pub registry: Option, - pub features: Vec, - pub all_features: bool, - pub no_default_features: bool, + pub cli_features: CliFeatures, } pub fn publish(ws: &Workspace<'_>, opts: &PublishOpts<'_>) -> CargoResult<()> { @@ -111,9 +111,7 @@ allow_dirty: opts.allow_dirty, targets: opts.targets.clone(), jobs: opts.jobs, - features: opts.features.clone(), - all_features: opts.all_features, - no_default_features: opts.no_default_features, + cli_features: opts.cli_features.clone(), }, )? 
.unwrap(); @@ -258,7 +256,7 @@ .as_ref() .map(|readme| { paths::read(&pkg.root().join(readme)) - .chain_err(|| format!("failed to read `readme` file for package `{}`", pkg)) + .with_context(|| format!("failed to read `readme` file for package `{}`", pkg)) }) .transpose()?; if let Some(ref file) = *license_file { @@ -310,7 +308,7 @@ }, tarball, ) - .chain_err(|| format!("failed to publish to registry at {}", registry.host()))?; + .with_context(|| format!("failed to publish to registry at {}", registry.host()))?; if !warnings.invalid_categories.is_empty() { let msg = format!( @@ -454,7 +452,7 @@ let cfg = src.config(); let mut updated_cfg = || { src.update() - .chain_err(|| format!("failed to update {}", sid))?; + .with_context(|| format!("failed to update {}", sid))?; src.config() }; @@ -728,7 +726,7 @@ input .lock() .read_line(&mut line) - .chain_err(|| "failed to read stdin")?; + .with_context(|| "failed to read stdin")?; // Automatically remove `cargo login` from an inputted token to // allow direct pastes from `registry.host()`/me. 
line.replace("cargo login", "").trim().to_string() @@ -817,7 +815,7 @@ if let Some(ref v) = opts.to_add { let v = v.iter().map(|s| &s[..]).collect::>(); - let msg = registry.add_owners(&name, &v).chain_err(|| { + let msg = registry.add_owners(&name, &v).with_context(|| { format!( "failed to invite owners to crate `{}` on registry at {}", name, @@ -833,7 +831,7 @@ config .shell() .status("Owner", format!("removing {:?} from crate {}", v, name))?; - registry.remove_owners(&name, &v).chain_err(|| { + registry.remove_owners(&name, &v).with_context(|| { format!( "failed to remove owners from crate `{}` on registry at {}", name, @@ -843,7 +841,7 @@ } if opts.list { - let owners = registry.list_owners(&name).chain_err(|| { + let owners = registry.list_owners(&name).with_context(|| { format!( "failed to list owners of crate `{}` on registry at {}", name, @@ -891,7 +889,7 @@ config .shell() .status("Unyank", format!("{}:{}", name, version))?; - registry.unyank(&name, &version).chain_err(|| { + registry.unyank(&name, &version).with_context(|| { format!( "failed to undo a yank from the registry at {}", registry.host() @@ -903,7 +901,7 @@ .status("Yank", format!("{}:{}", name, version))?; registry .yank(&name, &version) - .chain_err(|| format!("failed to yank from the registry at {}", registry.host()))?; + .with_context(|| format!("failed to yank from the registry at {}", registry.host()))?; } Ok(()) @@ -949,7 +947,7 @@ } let (mut registry, _, source_id) = registry(config, None, index, reg, false, false)?; - let (crates, total_crates) = registry.search(query, limit).chain_err(|| { + let (crates, total_crates) = registry.search(query, limit).with_context(|| { format!( "failed to retrieve search results from the registry at {}", registry.host() diff -Nru cargo-0.53.0/src/cargo/ops/resolve.rs cargo-0.54.0/src/cargo/ops/resolve.rs --- cargo-0.53.0/src/cargo/ops/resolve.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/cargo/ops/resolve.rs 2021-04-27 14:35:53.000000000 
+0000 @@ -11,18 +11,21 @@ //! providing the most power and flexibility. use crate::core::compiler::{CompileKind, RustcTargetData}; -use crate::core::registry::PackageRegistry; +use crate::core::registry::{LockedPatchDependency, PackageRegistry}; use crate::core::resolver::features::{ - FeatureOpts, FeatureResolver, ForceAllTargets, ResolvedFeatures, + CliFeatures, FeatureOpts, FeatureResolver, ForceAllTargets, RequestedFeatures, ResolvedFeatures, }; -use crate::core::resolver::{self, HasDevUnits, Resolve, ResolveOpts}; +use crate::core::resolver::{self, HasDevUnits, Resolve, ResolveOpts, ResolveVersion}; use crate::core::summary::Summary; use crate::core::Feature; -use crate::core::{PackageId, PackageIdSpec, PackageSet, Source, SourceId, Workspace}; +use crate::core::{ + GitReference, PackageId, PackageIdSpec, PackageSet, Source, SourceId, Workspace, +}; use crate::ops; use crate::sources::PathSource; -use crate::util::errors::{CargoResult, CargoResultExt}; +use crate::util::errors::CargoResult; use crate::util::{profile, CanonicalUrl}; +use anyhow::Context as _; use log::{debug, trace}; use std::collections::HashSet; @@ -76,9 +79,9 @@ /// members. In this case, `opts.all_features` must be `true`. pub fn resolve_ws_with_opts<'cfg>( ws: &Workspace<'cfg>, - target_data: &RustcTargetData, + target_data: &RustcTargetData<'cfg>, requested_targets: &[CompileKind], - opts: &ResolveOpts, + cli_features: &CliFeatures, specs: &[PackageIdSpec], has_dev_units: HasDevUnits, force_all_targets: ForceAllTargets, @@ -120,7 +123,8 @@ let resolved_with_overrides = resolve_with_previous( &mut registry, ws, - opts, + cli_features, + has_dev_units, resolve.as_ref(), None, specs, @@ -130,7 +134,7 @@ let pkg_set = get_resolved_packages(&resolved_with_overrides, registry)?; let member_ids = ws - .members_with_features(specs, &opts.features)? + .members_with_features(specs, cli_features)? 
.into_iter() .map(|(p, _fts)| p.package_id()) .collect::>(); @@ -149,7 +153,7 @@ target_data, &resolved_with_overrides, &pkg_set, - &opts.features, + cli_features, specs, requested_targets, feature_opts, @@ -171,7 +175,8 @@ let mut resolve = resolve_with_previous( registry, ws, - &ResolveOpts::everything(), + &CliFeatures::new_all(true), + HasDevUnits::Yes, prev.as_ref(), None, &[], @@ -202,7 +207,8 @@ pub fn resolve_with_previous<'cfg>( registry: &mut PackageRegistry<'cfg>, ws: &Workspace<'cfg>, - opts: &ResolveOpts, + cli_features: &CliFeatures, + has_dev_units: HasDevUnits, previous: Option<&Resolve>, to_avoid: Option<&HashSet>, specs: &[PackageIdSpec], @@ -251,26 +257,91 @@ continue; } }; - let patches = patches - .iter() - .map(|dep| { - let unused = previous.unused_patches().iter().cloned(); - let candidates = previous.iter().chain(unused); - match candidates - .filter(pre_patch_keep) - .find(|&id| dep.matches_id(id)) - { - Some(id) => { - let mut locked_dep = dep.clone(); - locked_dep.lock_to(id); - (dep, Some((locked_dep, id))) + + // This is a list of pairs where the first element of the pair is + // the raw `Dependency` which matches what's listed in `Cargo.toml`. + // The second element is, if present, the "locked" version of + // the `Dependency` as well as the `PackageId` that it previously + // resolved to. This second element is calculated by looking at the + // previous resolve graph, which is primarily what's done here to + // build the `registrations` list. + let mut registrations = Vec::new(); + for dep in patches { + let candidates = || { + previous + .iter() + .chain(previous.unused_patches().iter().cloned()) + .filter(&pre_patch_keep) + }; + + let lock = match candidates().find(|id| dep.matches_id(*id)) { + // If we found an exactly matching candidate in our list of + // candidates, then that's the one to use. 
+ Some(package_id) => { + let mut locked_dep = dep.clone(); + locked_dep.lock_to(package_id); + Some(LockedPatchDependency { + dependency: locked_dep, + package_id, + alt_package_id: None, + }) + } + None => { + // If the candidate does not have a matching source id + // then we may still have a lock candidate. If we're + // loading a v2-encoded resolve graph and `dep` is a + // git dep with `branch = 'master'`, then this should + // also match candidates without `branch = 'master'` + // (which is now treated separately in Cargo). + // + // In this scenario we try to convert candidates located + // in the resolve graph to explicitly having the + // `master` branch (if they otherwise point to + // `DefaultBranch`). If this works and our `dep` + // matches that then this is something we'll lock to. + match candidates().find(|&id| { + match master_branch_git_source(id, previous) { + Some(id) => dep.matches_id(id), + None => false, + } + }) { + Some(id_using_default) => { + let id_using_master = id_using_default.with_source_id( + dep.source_id().with_precise( + id_using_default + .source_id() + .precise() + .map(|s| s.to_string()), + ), + ); + + let mut locked_dep = dep.clone(); + locked_dep.lock_to(id_using_master); + Some(LockedPatchDependency { + dependency: locked_dep, + package_id: id_using_master, + // Note that this is where the magic + // happens, where the resolve graph + // probably has locks pointing to + // DefaultBranch sources, and by including + // this here those will get transparently + // rewritten to Branch("master") which we + // have a lock entry for. + alt_package_id: Some(id_using_default), + }) + } + + // No locked candidate was found + None => None, } - None => (dep, None), } - }) - .collect::>(); + }; + + registrations.push((dep, lock)); + } + let canonical = CanonicalUrl::new(url)?; - for (orig_patch, unlock_id) in registry.patch(url, &patches)? { + for (orig_patch, unlock_id) in registry.patch(url, ®istrations)? 
{ // Avoid the locked patch ID. avoid_patch_ids.insert(unlock_id); // Also avoid the thing it is patching. @@ -285,12 +356,13 @@ let keep = |p: &PackageId| pre_patch_keep(p) && !avoid_patch_ids.contains(p); + let dev_deps = ws.require_optional_deps() || has_dev_units == HasDevUnits::Yes; // In the case where a previous instance of resolve is available, we // want to lock as many packages as possible to the previous version // without disturbing the graph structure. if let Some(r) = previous { trace!("previous: {:?}", r); - register_previous_locks(ws, registry, r, &keep); + register_previous_locks(ws, registry, r, &keep, dev_deps); } // Everything in the previous lock file we want to keep is prioritized // in dependency selection if it comes up, aka we want to have @@ -315,15 +387,15 @@ } let summaries: Vec<(Summary, ResolveOpts)> = ws - .members_with_features(specs, &opts.features)? + .members_with_features(specs, cli_features)? .into_iter() .map(|(member, features)| { let summary = registry.lock(member.summary().clone()); ( summary, ResolveOpts { - dev_deps: opts.dev_deps, - features, + dev_deps, + features: RequestedFeatures::CliFeatures(features), }, ) }) @@ -406,7 +478,7 @@ for (path, definition) in paths { let id = SourceId::for_path(&path)?; let mut source = PathSource::new_recursive(&path, id, ws.config()); - source.update().chain_err(|| { + source.update().with_context(|| { format!( "failed to update path override `{}` \ (defined in `{}`)", @@ -448,6 +520,7 @@ registry: &mut PackageRegistry<'_>, resolve: &Resolve, keep: &dyn Fn(&PackageId) -> bool, + dev_deps: bool, ) { let path_pkg = |id: SourceId| { if !id.is_path() { @@ -557,6 +630,11 @@ continue; } + // If dev-dependencies aren't being resolved, skip them. + if !dep.is_transitive() && !dev_deps { + continue; + } + // If this is a path dependency, then try to push it onto our // worklist. 
if let Some(pkg) = path_pkg(dep.source_id()) { @@ -599,7 +677,22 @@ .deps_not_replaced(node) .map(|p| p.0) .filter(keep) - .collect(); + .collect::>(); + + // In the v2 lockfile format and prior the `branch=master` dependency + // directive was serialized the same way as the no-branch-listed + // directive. Nowadays in Cargo, however, these two directives are + // considered distinct and are no longer represented the same way. To + // maintain compatibility with older lock files we register locked nodes + // for *both* the master branch and the default branch. + // + // Note that this is only applicable for loading older resolves now at + // this point. All new lock files are encoded as v3-or-later, so this is + // just compat for loading an old lock file successfully. + if let Some(node) = master_branch_git_source(node, resolve) { + registry.register_lock(node, deps.clone()); + } + registry.register_lock(node, deps); } @@ -614,3 +707,17 @@ } } } + +fn master_branch_git_source(id: PackageId, resolve: &Resolve) -> Option { + if resolve.version() <= ResolveVersion::V2 { + let source = id.source_id(); + if let Some(GitReference::DefaultBranch) = source.git_reference() { + let new_source = + SourceId::for_git(source.url(), GitReference::Branch("master".to_string())) + .unwrap() + .with_precise(source.precise().map(|s| s.to_string())); + return Some(id.with_source_id(new_source)); + } + } + None +} diff -Nru cargo-0.53.0/src/cargo/ops/tree/graph.rs cargo-0.54.0/src/cargo/ops/tree/graph.rs --- cargo-0.53.0/src/cargo/ops/tree/graph.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/cargo/ops/tree/graph.rs 2021-04-27 14:35:53.000000000 +0000 @@ -3,7 +3,7 @@ use super::TreeOptions; use crate::core::compiler::{CompileKind, RustcTargetData}; use crate::core::dependency::DepKind; -use crate::core::resolver::features::{FeaturesFor, RequestedFeatures, ResolvedFeatures}; +use crate::core::resolver::features::{CliFeatures, FeaturesFor, ResolvedFeatures}; use 
crate::core::resolver::Resolve; use crate::core::{FeatureMap, FeatureValue, Package, PackageId, PackageIdSpec, Workspace}; use crate::util::interning::InternedString; @@ -248,16 +248,16 @@ resolve: &Resolve, resolved_features: &ResolvedFeatures, specs: &[PackageIdSpec], - requested_features: &RequestedFeatures, - target_data: &RustcTargetData, + cli_features: &CliFeatures, + target_data: &RustcTargetData<'_>, requested_kinds: &[CompileKind], package_map: HashMap, opts: &TreeOptions, ) -> CargoResult> { let mut graph = Graph::new(package_map); - let mut members_with_features = ws.members_with_features(specs, requested_features)?; + let mut members_with_features = ws.members_with_features(specs, cli_features)?; members_with_features.sort_unstable_by_key(|e| e.0.package_id()); - for (member, requested_features) in members_with_features { + for (member, cli_features) in members_with_features { let member_id = member.package_id(); let features_for = FeaturesFor::from_for_host(member.proc_macro()); for kind in requested_kinds { @@ -273,7 +273,7 @@ ); if opts.graph_features { let fmap = resolve.summary(member_id).features(); - add_cli_features(&mut graph, member_index, &requested_features, fmap); + add_cli_features(&mut graph, member_index, &cli_features, fmap); } } } @@ -294,7 +294,7 @@ resolved_features: &ResolvedFeatures, package_id: PackageId, features_for: FeaturesFor, - target_data: &RustcTargetData, + target_data: &RustcTargetData<'_>, requested_kind: CompileKind, opts: &TreeOptions, ) -> usize { @@ -392,7 +392,7 @@ EdgeKind::Dep(dep.kind()), ); } - for feature in dep.features() { + for feature in dep.features().iter() { add_feature( graph, *feature, @@ -459,48 +459,66 @@ fn add_cli_features( graph: &mut Graph<'_>, package_index: usize, - requested_features: &RequestedFeatures, + cli_features: &CliFeatures, feature_map: &FeatureMap, ) { // NOTE: Recursive enabling of features will be handled by // add_internal_features. 
- // Create a list of feature names requested on the command-line. - let mut to_add: Vec = Vec::new(); - if requested_features.all_features { - to_add.extend(feature_map.keys().copied()); - // Add optional deps. - for (dep_name, deps) in &graph.dep_name_map[&package_index] { - if deps.iter().any(|(_idx, is_optional)| *is_optional) { - to_add.push(*dep_name); - } - } + // Create a set of feature names requested on the command-line. + let mut to_add: HashSet = HashSet::new(); + if cli_features.all_features { + to_add.extend(feature_map.keys().map(|feat| FeatureValue::Feature(*feat))); } else { - if requested_features.uses_default_features { - to_add.push(InternedString::new("default")); + if cli_features.uses_default_features { + to_add.insert(FeatureValue::Feature(InternedString::new("default"))); } - to_add.extend(requested_features.features.iter().copied()); + to_add.extend(cli_features.features.iter().cloned()); }; // Add each feature as a node, and mark as "from command-line" in graph.cli_features. - for name in to_add { - if name.contains('/') { - let mut parts = name.splitn(2, '/'); - let dep_name = InternedString::new(parts.next().unwrap()); - let feat_name = InternedString::new(parts.next().unwrap()); - for (dep_index, is_optional) in graph.dep_name_map[&package_index][&dep_name].clone() { - if is_optional { - // Activate the optional dep on self. - let index = - add_feature(graph, dep_name, None, package_index, EdgeKind::Feature); + for fv in to_add { + match fv { + FeatureValue::Feature(feature) => { + let index = add_feature(graph, feature, None, package_index, EdgeKind::Feature); + graph.cli_features.insert(index); + } + // This is enforced by CliFeatures. + FeatureValue::Dep { .. } => panic!("unexpected cli dep feature {}", fv), + FeatureValue::DepFeature { + dep_name, + dep_feature, + dep_prefix: _, + weak, + } => { + let dep_connections = match graph.dep_name_map[&package_index].get(&dep_name) { + // Clone to deal with immutable borrow of `graph`. 
:( + Some(dep_connections) => dep_connections.clone(), + None => { + // --features bar?/feat where `bar` is not activated should be ignored. + // If this wasn't weak, then this is a bug. + if weak { + continue; + } + panic!( + "missing dep graph connection for CLI feature `{}` for member {:?}\n\ + Please file a bug report at https://github.com/rust-lang/cargo/issues", + fv, + graph.nodes.get(package_index) + ); + } + }; + for (dep_index, is_optional) in dep_connections { + if is_optional { + // Activate the optional dep on self. + let index = + add_feature(graph, dep_name, None, package_index, EdgeKind::Feature); + graph.cli_features.insert(index); + } + let index = add_feature(graph, dep_feature, None, dep_index, EdgeKind::Feature); graph.cli_features.insert(index); } - let index = add_feature(graph, feat_name, None, dep_index, EdgeKind::Feature); - graph.cli_features.insert(index); } - } else { - let index = add_feature(graph, name, None, package_index, EdgeKind::Feature); - graph.cli_features.insert(index); } } } @@ -570,6 +588,10 @@ package_index, ); } + // Dependencies are already shown in the graph as dep edges. I'm + // uncertain whether or not this might be confusing in some cases + // (like feature `"somefeat" = ["dep:somedep"]`), so maybe in the + // future consider explicitly showing this? FeatureValue::Dep { .. 
} => {} FeatureValue::DepFeature { dep_name, diff -Nru cargo-0.53.0/src/cargo/ops/tree/mod.rs cargo-0.54.0/src/cargo/ops/tree/mod.rs --- cargo-0.53.0/src/cargo/ops/tree/mod.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/cargo/ops/tree/mod.rs 2021-04-27 14:35:53.000000000 +0000 @@ -3,9 +3,7 @@ use self::format::Pattern; use crate::core::compiler::{CompileKind, RustcTargetData}; use crate::core::dependency::DepKind; -use crate::core::resolver::{ - features::RequestedFeatures, ForceAllTargets, HasDevUnits, ResolveOpts, -}; +use crate::core::resolver::{features::CliFeatures, ForceAllTargets, HasDevUnits}; use crate::core::{Package, PackageId, PackageIdSpec, Workspace}; use crate::ops::{self, Packages}; use crate::util::{CargoResult, Config}; @@ -21,9 +19,7 @@ pub use {graph::EdgeKind, graph::Node}; pub struct TreeOptions { - pub features: Vec, - pub no_default_features: bool, - pub all_features: bool, + pub cli_features: CliFeatures, /// The packages to display the tree for. pub packages: Packages, /// The platform to filter for. 
@@ -138,12 +134,6 @@ let requested_kinds = CompileKind::from_requested_targets(ws.config(), &requested_targets)?; let target_data = RustcTargetData::new(ws, &requested_kinds)?; let specs = opts.packages.to_package_id_specs(ws)?; - let requested_features = RequestedFeatures::from_command_line( - &opts.features, - opts.all_features, - !opts.no_default_features, - ); - let resolve_opts = ResolveOpts::new(/*dev_deps*/ true, requested_features); let has_dev = if opts .edge_kinds .contains(&EdgeKind::Dep(DepKind::Development)) @@ -161,7 +151,7 @@ ws, &target_data, &requested_kinds, - &resolve_opts, + &opts.cli_features, &specs, has_dev, force_all, @@ -178,7 +168,7 @@ &ws_resolve.targeted_resolve, &ws_resolve.resolved_features, &specs, - &resolve_opts.features, + &opts.cli_features, &target_data, &requested_kinds, package_map, diff -Nru cargo-0.53.0/src/cargo/ops/vendor.rs cargo-0.54.0/src/cargo/ops/vendor.rs --- cargo-0.53.0/src/cargo/ops/vendor.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/cargo/ops/vendor.rs 2021-04-27 14:35:53.000000000 +0000 @@ -2,9 +2,9 @@ use crate::core::{GitReference, Workspace}; use crate::ops; use crate::sources::path::PathSource; -use crate::util::Sha256; -use crate::util::{paths, CargoResult, CargoResultExt, Config}; -use anyhow::bail; +use crate::util::{CargoResult, Config}; +use anyhow::{bail, Context as _}; +use cargo_util::{paths, Sha256}; use serde::Serialize; use std::collections::HashSet; use std::collections::{BTreeMap, BTreeSet, HashMap}; @@ -28,8 +28,7 @@ extra_workspaces.push(ws); } let workspaces = extra_workspaces.iter().chain(Some(ws)).collect::>(); - let vendor_config = - sync(config, &workspaces, opts).chain_err(|| "failed to sync".to_string())?; + let vendor_config = sync(config, &workspaces, opts).with_context(|| "failed to sync")?; if config.shell().verbosity() != Verbosity::Quiet { crate::drop_eprint!( @@ -104,11 +103,11 @@ // crate to work with. 
for ws in workspaces { let (packages, resolve) = - ops::resolve_ws(ws).chain_err(|| "failed to load pkg lockfile")?; + ops::resolve_ws(ws).with_context(|| "failed to load pkg lockfile")?; packages .get_many(resolve.iter()) - .chain_err(|| "failed to download packages")?; + .with_context(|| "failed to download packages")?; for pkg in resolve.iter() { // Don't delete actual source code! @@ -136,11 +135,11 @@ // tables about them. for ws in workspaces { let (packages, resolve) = - ops::resolve_ws(ws).chain_err(|| "failed to load pkg lockfile")?; + ops::resolve_ws(ws).with_context(|| "failed to load pkg lockfile")?; packages .get_many(resolve.iter()) - .chain_err(|| "failed to download packages")?; + .with_context(|| "failed to download packages")?; for pkg in resolve.iter() { // No need to vendor path crates since they're already in the @@ -152,7 +151,7 @@ pkg, packages .get_one(pkg) - .chain_err(|| "failed to fetch package")? + .with_context(|| "failed to fetch package")? .clone(), ); @@ -216,7 +215,7 @@ let paths = pathsource.list_files(pkg)?; let mut map = BTreeMap::new(); cp_sources(src, &paths, &dst, &mut map, &mut tmp_buf) - .chain_err(|| format!("failed to copy over vendored sources for: {}", id))?; + .with_context(|| format!("failed to copy over vendored sources for: {}", id))?; // Finally, emit the metadata about this package let json = serde_json::json!({ @@ -341,7 +340,7 @@ } fn copy_and_checksum(src_path: &Path, dst_path: &Path, buf: &mut [u8]) -> CargoResult { - let mut src = File::open(src_path).chain_err(|| format!("failed to open {:?}", src_path))?; + let mut src = File::open(src_path).with_context(|| format!("failed to open {:?}", src_path))?; let mut dst_opts = OpenOptions::new(); dst_opts.write(true).create(true).truncate(true); #[cfg(unix)] @@ -349,25 +348,25 @@ use std::os::unix::fs::{MetadataExt, OpenOptionsExt}; let src_metadata = src .metadata() - .chain_err(|| format!("failed to stat {:?}", src_path))?; + .with_context(|| format!("failed to 
stat {:?}", src_path))?; dst_opts.mode(src_metadata.mode()); } let mut dst = dst_opts .open(dst_path) - .chain_err(|| format!("failed to create {:?}", dst_path))?; + .with_context(|| format!("failed to create {:?}", dst_path))?; // Not going to bother setting mode on pre-existing files, since there // shouldn't be any under normal conditions. let mut cksum = Sha256::new(); loop { let n = src .read(buf) - .chain_err(|| format!("failed to read from {:?}", src_path))?; + .with_context(|| format!("failed to read from {:?}", src_path))?; if n == 0 { break Ok(cksum.finish_hex()); } let data = &buf[..n]; cksum.update(data); dst.write_all(data) - .chain_err(|| format!("failed to write to {:?}", dst_path))?; + .with_context(|| format!("failed to write to {:?}", dst_path))?; } } diff -Nru cargo-0.53.0/src/cargo/sources/config.rs cargo-0.54.0/src/cargo/sources/config.rs --- cargo-0.53.0/src/cargo/sources/config.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/cargo/sources/config.rs 2021-04-27 14:35:53.000000000 +0000 @@ -7,9 +7,9 @@ use crate::core::{GitReference, PackageId, Source, SourceId}; use crate::sources::{ReplacedSource, CRATES_IO_REGISTRY}; use crate::util::config::{self, ConfigRelativePath, OptValue}; -use crate::util::errors::{CargoResult, CargoResultExt}; +use crate::util::errors::CargoResult; use crate::util::{Config, IntoUrl}; -use anyhow::bail; +use anyhow::{bail, Context as _}; use log::debug; use std::collections::{HashMap, HashSet}; use url::Url; @@ -280,7 +280,7 @@ return Ok(()); fn url(val: &config::Value, key: &str) -> CargoResult { - let url = val.val.into_url().chain_err(|| { + let url = val.val.into_url().with_context(|| { format!( "configuration key `{}` specified an invalid \ URL (in {})", diff -Nru cargo-0.53.0/src/cargo/sources/directory.rs cargo-0.54.0/src/cargo/sources/directory.rs --- cargo-0.53.0/src/cargo/sources/directory.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/cargo/sources/directory.rs 2021-04-27 
14:35:53.000000000 +0000 @@ -2,14 +2,15 @@ use std::fmt::{self, Debug, Formatter}; use std::path::{Path, PathBuf}; -use serde::Deserialize; - use crate::core::source::MaybePackage; use crate::core::{Dependency, Package, PackageId, Source, SourceId, Summary}; use crate::sources::PathSource; -use crate::util::errors::{CargoResult, CargoResultExt}; -use crate::util::paths; -use crate::util::{Config, Sha256}; +use crate::util::errors::CargoResult; +use crate::util::Config; + +use anyhow::Context as _; +use cargo_util::{paths, Sha256}; +use serde::Deserialize; pub struct DirectorySource<'cfg> { source_id: SourceId, @@ -73,7 +74,7 @@ fn update(&mut self) -> CargoResult<()> { self.packages.clear(); - let entries = self.root.read_dir().chain_err(|| { + let entries = self.root.read_dir().with_context(|| { format!( "failed to read root of directory source: {}", self.root.display() @@ -117,7 +118,7 @@ let mut pkg = src.root_package()?; let cksum_file = path.join(".cargo-checksum.json"); - let cksum = paths::read(&path.join(cksum_file)).chain_err(|| { + let cksum = paths::read(&path.join(cksum_file)).with_context(|| { format!( "failed to load checksum `.cargo-checksum.json` \ of {} v{}", @@ -125,7 +126,7 @@ pkg.package_id().version() ) })?; - let cksum: Checksum = serde_json::from_str(&cksum).chain_err(|| { + let cksum: Checksum = serde_json::from_str(&cksum).with_context(|| { format!( "failed to decode `.cargo-checksum.json` of \ {} v{}", @@ -172,7 +173,7 @@ let file = pkg.root().join(file); let actual = Sha256::new() .update_path(&file) - .chain_err(|| format!("failed to calculate checksum of: {}", file.display()))? + .with_context(|| format!("failed to calculate checksum of: {}", file.display()))? 
.finish_hex(); if &*actual != cksum { anyhow::bail!( diff -Nru cargo-0.53.0/src/cargo/sources/git/source.rs cargo-0.54.0/src/cargo/sources/git/source.rs --- cargo-0.53.0/src/cargo/sources/git/source.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/cargo/sources/git/source.rs 2021-04-27 14:35:53.000000000 +0000 @@ -126,12 +126,10 @@ // database, then try to resolve our reference with the preexisting // repository. (None, Some(db)) if self.config.offline() => { - let rev = db - .resolve(&self.manifest_reference, None) - .with_context(|| { - "failed to lookup reference in preexisting repository, and \ + let rev = db.resolve(&self.manifest_reference).with_context(|| { + "failed to lookup reference in preexisting repository, and \ can't check for updates in offline mode (--offline)" - })?; + })?; (db, rev) } diff -Nru cargo-0.53.0/src/cargo/sources/git/utils.rs cargo-0.54.0/src/cargo/sources/git/utils.rs --- cargo-0.53.0/src/cargo/sources/git/utils.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/cargo/sources/git/utils.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,117 +1,11 @@ //! Utilities for handling git repositories, mainly around //! authentication/cloning. -//! -//! # `DefaultBranch` vs `Branch("master")` -//! -//! Long ago in a repository not so far away, an author (*cough* me *cough*) -//! didn't understand how branches work in Git. This led the author to -//! interpret these two dependency declarations the exact same way with the -//! former literally internally desugaring to the latter: -//! -//! ```toml -//! [dependencies] -//! foo = { git = "https://example.org/foo" } -//! foo = { git = "https://example.org/foo", branch = "master" } -//! ``` -//! -//! It turns out there's this things called `HEAD` in git remotes which points -//! to the "main branch" of a repository, and the main branch is not always -//! literally called master. What Cargo would like to do is to differentiate -//! 
these two dependency directives, with the first meaning "depend on `HEAD`". -//! -//! Unfortunately implementing this is a breaking change. This was first -//! attempted in #8364 but resulted in #8468 which has two independent bugs -//! listed on that issue. Despite this breakage we would still like to roll out -//! this change in Cargo, but we're now going to take it very slow and try to -//! break as few people as possible along the way. These comments are intended -//! to log the current progress and what wonkiness you might see within Cargo -//! when handling `DefaultBranch` vs `Branch("master")` -//! -//! ### Repositories with `master` and a default branch -//! -//! This is one of the most obvious sources of breakage. If our `foo` example -//! in above had two branches, one called `master` and another which was -//! actually the main branch, then Cargo's change will always be a breaking -//! change. This is because what's downloaded is an entirely different branch -//! if we change the meaning of the dependency directive. -//! -//! It's expected this is quite rare, but to handle this case nonetheless when -//! Cargo fetches from a git remote and the dependency specification is -//! `DefaultBranch` then it will issue a warning if the `HEAD` reference -//! doesn't match `master`. It's expected in this situation that authors will -//! fix builds locally by specifying `branch = 'master'`. -//! -//! ### Differences in `cargo vendor` configuration -//! -//! When executing `cargo vendor` it will print out configuration which can -//! then be used to configure Cargo to use the `vendor` directory. Historically -//! this configuration looked like: -//! -//! ```toml -//! [source."https://example.org/foo"] -//! git = "https://example.org/foo" -//! branch = "master" -//! replace-with = "vendored-sources" -//! ``` -//! -//! We would like to, however, transition this to not include the `branch = -//! "master"` unless the dependency directive actually mentions a branch. -//! 
Conveniently older Cargo implementations all interpret a missing `branch` -//! as `branch = "master"` so it's a backwards-compatible change to remove the -//! `branch = "master"` directive. As a result, `cargo vendor` will no longer -//! emit a `branch` if the git reference is `DefaultBranch` -//! -//! ### Differences in lock file formats -//! -//! Another issue pointed out in #8364 was that `Cargo.lock` files were no -//! longer compatible on stable and nightly with each other. The underlying -//! issue is that Cargo was serializing `branch = "master"` *differently* on -//! nightly than it was on stable. Historical implementations of Cargo would -//! encode `DefaultBranch` and `Branch("master")` the same way in `Cargo.lock`, -//! so when reading a lock file we have no way of differentiating between the -//! two. -//! -//! To handle this difference in encoding of `Cargo.lock` we'll be employing -//! the standard scheme to change `Cargo.lock`: -//! -//! * Add support in Cargo for a future format, don't turn it on. -//! * Wait a long time -//! * Turn on the future format -//! -//! Here the "future format" is `branch=master` shows up if you have a `branch` -//! in `Cargo.toml`, and otherwise nothing shows up in URLs. Due to the effect -//! on crate graph resolution, however, this flows into the next point.. -//! -//! ### Unification in the Cargo dependency graph -//! -//! Today dependencies with `branch = "master"` will unify with dependencies -//! that say nothing. (that's because the latter simply desugars). This means -//! the two `foo` directives above will resolve to the same dependency. -//! -//! The best idea I've got to fix this is to basically get everyone (if anyone) -//! to stop doing this today. The crate graph resolver will start to warn if it -//! detects that multiple `Cargo.toml` directives are detected and mixed. The -//! thinking is that when we turn on the new lock file format it'll also be -//! 
hard breaking change for any project which still has dependencies to -//! both the `master` branch and not. -//! -//! ### What we're doing today -//! -//! The general goal of Cargo today is to internally distinguish -//! `DefaultBranch` and `Branch("master")`, but for the time being they should -//! be functionally equivalent in terms of builds. The hope is that we'll let -//! all these warnings and such bake for a good long time, and eventually we'll -//! flip some switches if your build has no warnings it'll work before and -//! after. -//! -//! That's the dream at least, we'll see how this plays out. use crate::core::GitReference; -use crate::util::errors::{CargoResult, CargoResultExt}; -use crate::util::paths; -use crate::util::process_builder::process; +use crate::util::errors::CargoResult; use crate::util::{network, Config, IntoUrl, Progress}; -use anyhow::{anyhow, Context}; +use anyhow::{anyhow, Context as _}; +use cargo_util::{paths, ProcessBuilder}; use curl::easy::List; use git2::{self, ErrorClass, ObjectType}; use log::{debug, info}; @@ -182,7 +76,7 @@ } pub fn rev_for(&self, path: &Path, reference: &GitReference) -> CargoResult { - reference.resolve(&self.db_at(path)?.repo, None) + reference.resolve(&self.db_at(path)?.repo) } pub fn checkout( @@ -207,7 +101,7 @@ } } None => { - if let Ok(rev) = reference.resolve(&db.repo, Some((&self.url, cargo_config))) { + if let Ok(rev) = reference.resolve(&db.repo) { return Ok((db, rev)); } } @@ -226,7 +120,7 @@ .context(format!("failed to clone into: {}", into.display()))?; let rev = match locked_rev { Some(rev) => rev, - None => reference.resolve(&repo, Some((&self.url, cargo_config)))?, + None => reference.resolve(&repo)?, }; Ok(( @@ -295,21 +189,13 @@ self.repo.revparse_single(&oid.to_string()).is_ok() } - pub fn resolve( - &self, - r: &GitReference, - remote_and_config: Option<(&Url, &Config)>, - ) -> CargoResult { - r.resolve(&self.repo, remote_and_config) + pub fn resolve(&self, r: &GitReference) -> 
CargoResult { + r.resolve(&self.repo) } } impl GitReference { - pub fn resolve( - &self, - repo: &git2::Repository, - remote_and_config: Option<(&Url, &Config)>, - ) -> CargoResult { + pub fn resolve(&self, repo: &git2::Repository) -> CargoResult { let id = match self { // Note that we resolve the named tag here in sync with where it's // fetched into via `fetch` below. @@ -320,7 +206,7 @@ let obj = obj.peel(ObjectType::Commit)?; Ok(obj.id()) })() - .chain_err(|| format!("failed to find tag `{}`", s))?, + .with_context(|| format!("failed to find tag `{}`", s))?, // Resolve the remote name since that's all we're configuring in // `fetch` below. @@ -328,44 +214,17 @@ let name = format!("origin/{}", s); let b = repo .find_branch(&name, git2::BranchType::Remote) - .chain_err(|| format!("failed to find branch `{}`", s))?; + .with_context(|| format!("failed to find branch `{}`", s))?; b.get() .target() .ok_or_else(|| anyhow::format_err!("branch `{}` did not have a target", s))? } - // See the module docs for why we're using `master` here. + // We'll be using the HEAD commit GitReference::DefaultBranch => { - let master = repo - .find_branch("origin/master", git2::BranchType::Remote) - .chain_err(|| "failed to find branch `master`")?; - let master = master - .get() - .target() - .ok_or_else(|| anyhow::format_err!("branch `master` did not have a target"))?; - - if let Some((remote, config)) = remote_and_config { - let head_id = repo.refname_to_id("refs/remotes/origin/HEAD")?; - let head = repo.find_object(head_id, None)?; - let head = head.peel(ObjectType::Commit)?.id(); - - if head != master { - config.shell().warn(&format!( - "\ - fetching `master` branch from `{}` but the `HEAD` \ - reference for this repository is not the \ - `master` branch. 
This behavior will change \ - in Cargo in the future and your build may \ - break, so it's recommended to place \ - `branch = \"master\"` in Cargo.toml when \ - depending on this git repository to ensure \ - that your build will continue to work.\ - ", - remote, - ))?; - } - } - master + let head_id = repo.refname_to_id("refs/remotes/origin/HEAD")?; + let head = repo.find_object(head_id, None)?; + head.peel(ObjectType::Commit)?.id() } GitReference::Rev(s) => { @@ -490,7 +349,7 @@ info!("update submodules for: {:?}", repo.workdir().unwrap()); for mut child in repo.submodules()? { - update_submodule(repo, &mut child, cargo_config).chain_err(|| { + update_submodule(repo, &mut child, cargo_config).with_context(|| { format!( "failed to update submodule `{}`", child.name().unwrap_or("") @@ -543,7 +402,7 @@ cargo_config .shell() .status("Updating", format!("git submodule `{}`", url))?; - fetch(&mut repo, url, &reference, cargo_config).chain_err(|| { + fetch(&mut repo, url, &reference, cargo_config).with_context(|| { format!( "failed to fetch submodule `{}` from {}", child.name().unwrap_or(""), @@ -899,8 +758,6 @@ } GitReference::DefaultBranch => { - // See the module docs for why we're fetching `master` here. - refspecs.push(String::from("refs/heads/master:refs/remotes/origin/master")); refspecs.push(String::from("HEAD:refs/remotes/origin/HEAD")); } @@ -977,7 +834,7 @@ tags: bool, config: &Config, ) -> CargoResult<()> { - let mut cmd = process("git"); + let mut cmd = ProcessBuilder::new("git"); cmd.arg("fetch"); if tags { cmd.arg("--tags"); @@ -1166,10 +1023,7 @@ handle.useragent("cargo")?; let mut headers = List::new(); headers.append("Accept: application/vnd.github.3.sha")?; - headers.append(&format!( - "If-None-Match: \"{}\"", - reference.resolve(repo, None)? - ))?; + headers.append(&format!("If-None-Match: \"{}\"", reference.resolve(repo)?))?; handle.http_headers(headers)?; handle.perform()?; Ok(handle.response_code()? 
== 304) diff -Nru cargo-0.53.0/src/cargo/sources/path.rs cargo-0.54.0/src/cargo/sources/path.rs --- cargo-0.53.0/src/cargo/sources/path.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/cargo/sources/path.rs 2021-04-27 14:35:53.000000000 +0000 @@ -2,16 +2,17 @@ use std::fs; use std::path::{Path, PathBuf}; +use crate::core::source::MaybePackage; +use crate::core::{Dependency, Package, PackageId, Source, SourceId, Summary}; +use crate::ops; +use crate::util::{internal, CargoResult, Config}; +use anyhow::Context as _; +use cargo_util::paths; use filetime::FileTime; use ignore::gitignore::GitignoreBuilder; use ignore::Match; use log::{trace, warn}; -use crate::core::source::MaybePackage; -use crate::core::{Dependency, Package, PackageId, Source, SourceId, Summary}; -use crate::ops; -use crate::util::{internal, paths, CargoResult, CargoResultExt, Config}; - pub struct PathSource<'cfg> { source_id: SourceId, path: PathBuf, @@ -96,7 +97,7 @@ /// are relevant for building this package, but it also contains logic to /// use other methods like .gitignore to filter the list of files. pub fn list_files(&self, pkg: &Package) -> CargoResult> { - self._list_files(pkg).chain_err(|| { + self._list_files(pkg).with_context(|| { format!( "failed to determine list of files in {}", pkg.root().display() @@ -190,7 +191,7 @@ }; let index = repo .index() - .chain_err(|| format!("failed to open git index at {}", repo.path().display()))?; + .with_context(|| format!("failed to open git index at {}", repo.path().display()))?; let repo_root = repo.workdir().ok_or_else(|| { anyhow::format_err!( "did not expect repo at {} to be bare", @@ -411,7 +412,7 @@ // TODO: drop `collect` and sort after transition period and dropping warning tests. // See rust-lang/cargo#4268 and rust-lang/cargo#4270. let mut entries: Vec = fs::read_dir(path) - .chain_err(|| format!("cannot read {:?}", path))? + .with_context(|| format!("cannot read {:?}", path))? 
.map(|e| e.unwrap().path()) .collect(); entries.sort_unstable_by(|a, b| a.as_os_str().cmp(b.as_os_str())); @@ -436,7 +437,7 @@ let mut max = FileTime::zero(); let mut max_path = PathBuf::new(); - for file in self.list_files(pkg).chain_err(|| { + for file in self.list_files(pkg).with_context(|| { format!( "failed to determine the most recently modified file in {}", pkg.root().display() diff -Nru cargo-0.53.0/src/cargo/sources/registry/index.rs cargo-0.54.0/src/cargo/sources/registry/index.rs --- cargo-0.53.0/src/cargo/sources/registry/index.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/cargo/sources/registry/index.rs 2021-04-27 14:35:53.000000000 +0000 @@ -70,9 +70,9 @@ use crate::core::{PackageId, SourceId, Summary}; use crate::sources::registry::{RegistryData, RegistryPackage, INDEX_V_MAX}; use crate::util::interning::InternedString; -use crate::util::paths; use crate::util::{internal, CargoResult, Config, Filesystem, ToSemver}; use anyhow::bail; +use cargo_util::paths; use log::{debug, info}; use semver::{Version, VersionReq}; use std::collections::{HashMap, HashSet}; diff -Nru cargo-0.53.0/src/cargo/sources/registry/local.rs cargo-0.54.0/src/cargo/sources/registry/local.rs --- cargo-0.53.0/src/cargo/sources/registry/local.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/cargo/sources/registry/local.rs 2021-04-27 14:35:53.000000000 +0000 @@ -2,8 +2,8 @@ use crate::sources::registry::{MaybeLock, RegistryConfig, RegistryData}; use crate::util::errors::CargoResult; use crate::util::interning::InternedString; -use crate::util::paths; -use crate::util::{Config, Filesystem, Sha256}; +use crate::util::{Config, Filesystem}; +use cargo_util::{paths, Sha256}; use std::fs::File; use std::io::prelude::*; use std::io::SeekFrom; diff -Nru cargo-0.53.0/src/cargo/sources/registry/mod.rs cargo-0.54.0/src/cargo/sources/registry/mod.rs --- cargo-0.53.0/src/cargo/sources/registry/mod.rs 2021-04-21 00:43:41.000000000 +0000 +++ 
cargo-0.54.0/src/cargo/sources/registry/mod.rs 2021-04-27 14:35:53.000000000 +0000 @@ -165,6 +165,7 @@ use std::io::Write; use std::path::{Path, PathBuf}; +use anyhow::Context as _; use flate2::read::GzDecoder; use log::debug; use semver::{Version, VersionReq}; @@ -175,7 +176,6 @@ use crate::core::source::MaybePackage; use crate::core::{Package, PackageId, Source, SourceId, Summary}; use crate::sources::PathSource; -use crate::util::errors::CargoResultExt; use crate::util::hex; use crate::util::interning::InternedString; use crate::util::into_url::IntoUrl; @@ -600,10 +600,10 @@ let prefix = unpack_dir.file_name().unwrap(); let parent = unpack_dir.parent().unwrap(); for entry in tar.entries()? { - let mut entry = entry.chain_err(|| "failed to iterate over archive")?; + let mut entry = entry.with_context(|| "failed to iterate over archive")?; let entry_path = entry .path() - .chain_err(|| "failed to read entry path")? + .with_context(|| "failed to read entry path")? .into_owned(); // We're going to unpack this tarball into the global source @@ -623,7 +623,7 @@ // Unpacking failed let mut result = entry.unpack_in(parent).map_err(anyhow::Error::from); if cfg!(windows) && restricted_names::is_windows_reserved_path(&entry_path) { - result = result.chain_err(|| { + result = result.with_context(|| { format!( "`{}` appears to contain a reserved Windows path, \ it cannot be extracted on Windows", @@ -631,7 +631,8 @@ ) }); } - result.chain_err(|| format!("failed to unpack entry at `{}`", entry_path.display()))?; + result + .with_context(|| format!("failed to unpack entry at `{}`", entry_path.display()))?; } // The lock file is created after unpacking so we overwrite a lock file @@ -641,7 +642,7 @@ .read(true) .write(true) .open(&path) - .chain_err(|| format!("failed to open `{}`", path.display()))?; + .with_context(|| format!("failed to open `{}`", path.display()))?; // Write to the lock file to indicate that unpacking was successful. 
write!(ok, "ok")?; @@ -660,7 +661,7 @@ fn get_pkg(&mut self, package: PackageId, path: &File) -> CargoResult { let path = self .unpack_package(package, path) - .chain_err(|| format!("failed to unpack package `{}`", package))?; + .with_context(|| format!("failed to unpack package `{}`", package))?; let mut src = PathSource::new(&path, self.source_id, self.config); src.update()?; let mut pkg = match src.download(package)? { diff -Nru cargo-0.53.0/src/cargo/sources/registry/remote.rs cargo-0.54.0/src/cargo/sources/registry/remote.rs --- cargo-0.53.0/src/cargo/sources/registry/remote.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/cargo/sources/registry/remote.rs 2021-04-27 14:35:53.000000000 +0000 @@ -5,10 +5,11 @@ RegistryConfig, RegistryData, CRATE_TEMPLATE, LOWER_PREFIX_TEMPLATE, PREFIX_TEMPLATE, VERSION_TEMPLATE, }; -use crate::util::errors::{CargoResult, CargoResultExt}; +use crate::util::errors::CargoResult; use crate::util::interning::InternedString; -use crate::util::paths; -use crate::util::{Config, Filesystem, Sha256}; +use crate::util::{Config, Filesystem}; +use anyhow::Context as _; +use cargo_util::{paths, Sha256}; use lazycell::LazyCell; use log::{debug, trace}; use std::cell::{Cell, Ref, RefCell}; @@ -97,7 +98,7 @@ let mut opts = git2::RepositoryInitOptions::new(); opts.external_template(false); Ok(git2::Repository::init_opts(&path, &opts) - .chain_err(|| "failed to initialize index git repository")?) + .with_context(|| "failed to initialize index git repository")?) 
} } }) @@ -106,7 +107,7 @@ fn head(&self) -> CargoResult { if self.head.get().is_none() { let repo = self.repo()?; - let oid = self.index_git_ref.resolve(repo, None)?; + let oid = self.index_git_ref.resolve(repo)?; self.head.set(Some(oid)); } Ok(self.head.get().unwrap()) @@ -241,7 +242,7 @@ let url = self.source_id.url(); let repo = self.repo.borrow_mut().unwrap(); git::fetch(repo, url.as_str(), &self.index_git_ref, self.config) - .chain_err(|| format!("failed to fetch `{}`", url))?; + .with_context(|| format!("failed to fetch `{}`", url))?; self.config.updated_sources().insert(self.source_id); // Create a dummy file to record the mtime for when we updated the @@ -312,7 +313,7 @@ .read(true) .write(true) .open(&path) - .chain_err(|| format!("failed to open `{}`", path.display()))?; + .with_context(|| format!("failed to open `{}`", path.display()))?; let meta = dst.metadata()?; if meta.len() > 0 { return Ok(dst); diff -Nru cargo-0.53.0/src/cargo/sources/replaced.rs cargo-0.54.0/src/cargo/sources/replaced.rs --- cargo-0.53.0/src/cargo/sources/replaced.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/cargo/sources/replaced.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,6 +1,8 @@ use crate::core::source::MaybePackage; use crate::core::{Dependency, Package, PackageId, Source, SourceId, Summary}; -use crate::util::errors::{CargoResult, CargoResultExt}; +use crate::util::errors::CargoResult; + +use anyhow::Context as _; pub struct ReplacedSource<'cfg> { to_replace: SourceId, @@ -47,7 +49,7 @@ .query(&dep, &mut |summary| { f(summary.map_source(replace_with, to_replace)) }) - .chain_err(|| format!("failed to query replaced source {}", self.to_replace))?; + .with_context(|| format!("failed to query replaced source {}", self.to_replace))?; Ok(()) } @@ -59,14 +61,14 @@ .fuzzy_query(&dep, &mut |summary| { f(summary.map_source(replace_with, to_replace)) }) - .chain_err(|| format!("failed to query replaced source {}", self.to_replace))?; + .with_context(|| 
format!("failed to query replaced source {}", self.to_replace))?; Ok(()) } fn update(&mut self) -> CargoResult<()> { self.inner .update() - .chain_err(|| format!("failed to update replaced source {}", self.to_replace))?; + .with_context(|| format!("failed to update replaced source {}", self.to_replace))?; Ok(()) } @@ -75,7 +77,7 @@ let pkg = self .inner .download(id) - .chain_err(|| format!("failed to download replaced source {}", self.to_replace))?; + .with_context(|| format!("failed to download replaced source {}", self.to_replace))?; Ok(match pkg { MaybePackage::Ready(pkg) => { MaybePackage::Ready(pkg.map_source(self.replace_with, self.to_replace)) @@ -89,7 +91,7 @@ let pkg = self .inner .finish_download(id, data) - .chain_err(|| format!("failed to download replaced source {}", self.to_replace))?; + .with_context(|| format!("failed to download replaced source {}", self.to_replace))?; Ok(pkg.map_source(self.replace_with, self.to_replace)) } diff -Nru cargo-0.53.0/src/cargo/util/command_prelude.rs cargo-0.54.0/src/cargo/util/command_prelude.rs --- cargo-0.53.0/src/cargo/util/command_prelude.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/cargo/util/command_prelude.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,17 +1,19 @@ use crate::core::compiler::{BuildConfig, MessageFormat}; +use crate::core::resolver::CliFeatures; use crate::core::{Edition, Workspace}; use crate::ops::{CompileFilter, CompileOptions, NewOptions, Packages, VersionControl}; use crate::sources::CRATES_IO_REGISTRY; use crate::util::important_paths::find_root_manifest_for_wd; use crate::util::interning::InternedString; use crate::util::restricted_names::is_glob_pattern; -use crate::util::{paths, toml::TomlProfile, validate_package_name}; use crate::util::{ print_available_benches, print_available_binaries, print_available_examples, print_available_packages, print_available_tests, }; +use crate::util::{toml::TomlProfile, validate_package_name}; use crate::CargoResult; use anyhow::bail; +use 
cargo_util::paths; use clap::{self, SubCommand}; use std::ffi::{OsStr, OsString}; use std::path::PathBuf; @@ -486,7 +488,7 @@ // TODO: Tracking issue .fail_if_stable_opt("--future-incompat-report", 9241)?; - if !config.cli_unstable().enable_future_incompat_feature { + if !config.cli_unstable().future_incompat_report { anyhow::bail!( "Usage of `--future-incompat-report` requires `-Z future-incompat-report`" ) @@ -495,9 +497,7 @@ let opts = CompileOptions { build_config, - features: self._values_of("features"), - all_features: self._is_present("all-features"), - no_default_features: self._is_present("no-default-features"), + cli_features: self.cli_features()?, spec, filter: CompileFilter::from_raw_arguments( self._is_present("lib"), @@ -539,6 +539,14 @@ Ok(opts) } + fn cli_features(&self) -> CargoResult { + CliFeatures::from_command_line( + &self._values_of("features"), + self._is_present("all-features"), + !self._is_present("no-default-features"), + ) + } + fn compile_options_for_single_package( &self, config: &Config, diff -Nru cargo-0.53.0/src/cargo/util/config/de.rs cargo-0.54.0/src/cargo/util/config/de.rs --- cargo-0.53.0/src/cargo/util/config/de.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/cargo/util/config/de.rs 2021-04-27 14:35:53.000000000 +0000 @@ -40,62 +40,6 @@ }; } -impl<'config> Deserializer<'config> { - /// This is a helper for getting a CV from a file or env var. - /// - /// If this returns CV::List, then don't look at the value. Handling lists - /// is deferred to ConfigSeqAccess. - fn get_cv_with_env(&self) -> Result, ConfigError> { - // Determine if value comes from env, cli, or file, and merge env if - // possible. 
- let cv = self.config.get_cv(&self.key)?; - let env = self.config.env.get(self.key.as_env_key()); - let env_def = Definition::Environment(self.key.as_env_key().to_string()); - let use_env = match (&cv, env) { - (Some(cv), Some(_)) => env_def.is_higher_priority(cv.definition()), - (None, Some(_)) => true, - _ => false, - }; - - if !use_env { - return Ok(cv); - } - - // Future note: If you ever need to deserialize a non-self describing - // map type, this should implement a starts_with check (similar to how - // ConfigMapAccess does). - let env = env.unwrap(); - if env == "true" { - Ok(Some(CV::Boolean(true, env_def))) - } else if env == "false" { - Ok(Some(CV::Boolean(false, env_def))) - } else if let Ok(i) = env.parse::() { - Ok(Some(CV::Integer(i, env_def))) - } else if self.config.cli_unstable().advanced_env - && env.starts_with('[') - && env.ends_with(']') - { - // Parsing is deferred to ConfigSeqAccess. - Ok(Some(CV::List(Vec::new(), env_def))) - } else { - // Try to merge if possible. - match cv { - Some(CV::List(cv_list, _cv_def)) => { - // Merging is deferred to ConfigSeqAccess. - Ok(Some(CV::List(cv_list, env_def))) - } - _ => { - // Note: CV::Table merging is not implemented, as env - // vars do not support table values. In the future, we - // could check for `{}`, and interpret it as TOML if - // that seems useful. 
- Ok(Some(CV::String(env.to_string(), env_def))) - } - } - } - } -} - impl<'de, 'config> de::Deserializer<'de> for Deserializer<'config> { type Error = ConfigError; @@ -103,7 +47,7 @@ where V: de::Visitor<'de>, { - let cv = self.get_cv_with_env()?; + let cv = self.config.get_cv_with_env(&self.key)?; if let Some(cv) = cv { let res: (Result, Definition) = match cv { CV::Integer(i, def) => (visitor.visit_i64(i), def), diff -Nru cargo-0.53.0/src/cargo/util/config/key.rs cargo-0.54.0/src/cargo/util/config/key.rs --- cargo-0.53.0/src/cargo/util/config/key.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/cargo/util/config/key.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,3 +1,4 @@ +use std::borrow::Cow; use std::fmt; /// Key for a configuration variable. @@ -84,16 +85,32 @@ } /// Returns an iterator of the key parts as strings. - pub(super) fn parts(&self) -> impl Iterator { + pub(crate) fn parts(&self) -> impl Iterator { self.parts.iter().map(|p| p.0.as_ref()) } + + /// Returns whether or not this is a key for the root table. + pub fn is_root(&self) -> bool { + self.parts.is_empty() + } } impl fmt::Display for ConfigKey { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - // Note: This is not a perfect TOML representation. This really should - // check if the parts should be quoted. - let parts: Vec<&str> = self.parts().collect(); + let parts: Vec<_> = self.parts().map(|part| escape_key_part(part)).collect(); parts.join(".").fmt(f) } } + +fn escape_key_part<'a>(part: &'a str) -> Cow<'a, str> { + let ok = part.chars().all(|c| { + matches!(c, + 'a'..='z' | 'A'..='Z' | '0'..='9' | '-' | '_') + }); + if ok { + Cow::Borrowed(part) + } else { + // This is a bit messy, but toml doesn't expose a function to do this. 
+ Cow::Owned(toml::to_string(&toml::Value::String(part.to_string())).unwrap()) + } +} diff -Nru cargo-0.53.0/src/cargo/util/config/mod.rs cargo-0.54.0/src/cargo/util/config/mod.rs --- cargo-0.53.0/src/cargo/util/config/mod.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/cargo/util/config/mod.rs 2021-04-27 14:35:53.000000000 +0000 @@ -65,21 +65,21 @@ use std::sync::Once; use std::time::Instant; -use anyhow::{anyhow, bail, format_err}; -use curl::easy::Easy; -use lazycell::LazyCell; -use serde::Deserialize; -use url::Url; - use self::ConfigValue as CV; use crate::core::compiler::rustdoc::RustdocExternMap; use crate::core::shell::Verbosity; use crate::core::{features, CliUnstable, Shell, SourceId, Workspace}; use crate::ops; -use crate::util::errors::{CargoResult, CargoResultExt}; +use crate::util::errors::CargoResult; use crate::util::toml as cargo_toml; -use crate::util::{paths, validate_package_name}; +use crate::util::validate_package_name; use crate::util::{FileLock, Filesystem, IntoUrl, IntoUrlWithBase, Rustc}; +use anyhow::{anyhow, bail, format_err, Context as _}; +use cargo_util::paths; +use curl::easy::Easy; +use lazycell::LazyCell; +use serde::Deserialize; +use url::Url; mod de; use de::Deserializer; @@ -88,7 +88,7 @@ pub use value::{Definition, OptValue, Value}; mod key; -use key::ConfigKey; +pub use key::ConfigKey; mod path; pub use path::{ConfigRelativePath, PathAndArgs}; @@ -294,8 +294,8 @@ /// any config files from disk. Those will be loaded lazily as-needed. pub fn default() -> CargoResult { let shell = Shell::new(); - let cwd = - env::current_dir().chain_err(|| "couldn't get the current directory of the process")?; + let cwd = env::current_dir() + .with_context(|| "couldn't get the current directory of the process")?; let homedir = homedir(&cwd).ok_or_else(|| { anyhow!( "Cargo couldn't find your home directory. 
\ @@ -411,7 +411,7 @@ let exe = from_current_exe() .or_else(|_| from_argv()) - .chain_err(|| "couldn't get the path to cargo executable")?; + .with_context(|| "couldn't get the path to cargo executable")?; Ok(exe) }) .map(AsRef::as_ref) @@ -522,6 +522,14 @@ fn get_cv(&self, key: &ConfigKey) -> CargoResult> { log::trace!("get cv {:?}", key); let vals = self.values()?; + if key.is_root() { + // Returning the entire root table (for example `cargo config get` + // with no key). The definition here shouldn't matter. + return Ok(Some(CV::Table( + vals.clone(), + Definition::Path(PathBuf::new()), + ))); + } let mut parts = key.parts().enumerate(); let mut val = match vals.get(parts.next().unwrap().1) { Some(val) => val, @@ -539,12 +547,14 @@ | CV::String(_, def) | CV::List(_, def) | CV::Boolean(_, def) => { - let key_so_far: Vec<&str> = key.parts().take(i).collect(); + let mut key_so_far = ConfigKey::new(); + for part in key.parts().take(i) { + key_so_far.push(part); + } bail!( "expected table for configuration key `{}`, \ but found {} in {}", - // This join doesn't handle quoting properly. - key_so_far.join("."), + key_so_far, val.desc(), def ) @@ -554,11 +564,94 @@ Ok(Some(val.clone())) } + /// This is a helper for getting a CV from a file or env var. + pub(crate) fn get_cv_with_env(&self, key: &ConfigKey) -> CargoResult> { + // Determine if value comes from env, cli, or file, and merge env if + // possible. + let cv = self.get_cv(key)?; + if key.is_root() { + // Root table can't have env value. + return Ok(cv); + } + let env = self.env.get(key.as_env_key()); + let env_def = Definition::Environment(key.as_env_key().to_string()); + let use_env = match (&cv, env) { + // Lists are always merged. 
+ (Some(CV::List(..)), Some(_)) => true, + (Some(cv), Some(_)) => env_def.is_higher_priority(cv.definition()), + (None, Some(_)) => true, + _ => false, + }; + + if !use_env { + return Ok(cv); + } + + // Future note: If you ever need to deserialize a non-self describing + // map type, this should implement a starts_with check (similar to how + // ConfigMapAccess does). + let env = env.unwrap(); + if env == "true" { + Ok(Some(CV::Boolean(true, env_def))) + } else if env == "false" { + Ok(Some(CV::Boolean(false, env_def))) + } else if let Ok(i) = env.parse::() { + Ok(Some(CV::Integer(i, env_def))) + } else if self.cli_unstable().advanced_env && env.starts_with('[') && env.ends_with(']') { + match cv { + Some(CV::List(mut cv_list, cv_def)) => { + // Merge with config file. + self.get_env_list(key, &mut cv_list)?; + Ok(Some(CV::List(cv_list, cv_def))) + } + Some(cv) => { + // This can't assume StringList or UnmergedStringList. + // Return an error, which is the behavior of merging + // multiple config.toml files with the same scenario. + bail!( + "unable to merge array env for config `{}`\n\ + file: {:?}\n\ + env: {}", + key, + cv, + env + ); + } + None => { + let mut cv_list = Vec::new(); + self.get_env_list(key, &mut cv_list)?; + Ok(Some(CV::List(cv_list, env_def))) + } + } + } else { + // Try to merge if possible. + match cv { + Some(CV::List(mut cv_list, cv_def)) => { + // Merge with config file. + self.get_env_list(key, &mut cv_list)?; + Ok(Some(CV::List(cv_list, cv_def))) + } + _ => { + // Note: CV::Table merging is not implemented, as env + // vars do not support table values. In the future, we + // could check for `{}`, and interpret it as TOML if + // that seems useful. + Ok(Some(CV::String(env.to_string(), env_def))) + } + } + } + } + /// Helper primarily for testing. pub fn set_env(&mut self, env: HashMap) { self.env = env; } + /// Returns all environment variables. 
+ pub(crate) fn env(&self) -> &HashMap { + &self.env + } + fn get_env(&self, key: &ConfigKey) -> Result, ConfigError> where T: FromStr, @@ -804,6 +897,15 @@ self.cli_config = Some(cli_config.iter().map(|s| s.to_string()).collect()); self.merge_cli_args()?; } + if self.unstable_flags.config_include { + // If the config was already loaded (like when fetching the + // `[alias]` table), it was loaded with includes disabled because + // the `unstable_flags` hadn't been set up, yet. Any values + // fetched before this step will not process includes, but that + // should be fine (`[alias]` is one of the only things loaded + // before configure). This can be removed when stabilized. + self.reload_rooted_at(self.cwd.clone())?; + } let extra_verbose = verbose >= 2; let verbose = verbose != 0; @@ -903,6 +1005,39 @@ self.load_values_from(&self.cwd) } + pub(crate) fn load_values_unmerged(&self) -> CargoResult> { + let mut result = Vec::new(); + let mut seen = HashSet::new(); + let home = self.home_path.clone().into_path_unlocked(); + self.walk_tree(&self.cwd, &home, |path| { + let mut cv = self._load_file(path, &mut seen, false)?; + if self.cli_unstable().config_include { + self.load_unmerged_include(&mut cv, &mut seen, &mut result)?; + } + result.push(cv); + Ok(()) + }) + .with_context(|| "could not load Cargo configuration")?; + Ok(result) + } + + fn load_unmerged_include( + &self, + cv: &mut CV, + seen: &mut HashSet, + output: &mut Vec, + ) -> CargoResult<()> { + let includes = self.include_paths(cv, false)?; + for (path, abs_path, def) in includes { + let mut cv = self._load_file(&abs_path, seen, false).with_context(|| { + format!("failed to load config include `{}` from `{}`", path, def) + })?; + self.load_unmerged_include(&mut cv, seen, output)?; + output.push(cv); + } + Ok(()) + } + fn load_values_from(&self, path: &Path) -> CargoResult> { // This definition path is ignored, this is just a temporary container // representing the entire file. 
@@ -910,12 +1045,13 @@ let home = self.home_path.clone().into_path_unlocked(); self.walk_tree(path, &home, |path| { - let value = self.load_file(path)?; - cfg.merge(value, false) - .chain_err(|| format!("failed to merge configuration at `{}`", path.display()))?; + let value = self.load_file(path, true)?; + cfg.merge(value, false).with_context(|| { + format!("failed to merge configuration at `{}`", path.display()) + })?; Ok(()) }) - .chain_err(|| "could not load Cargo configuration")?; + .with_context(|| "could not load Cargo configuration")?; match cfg { CV::Table(map, _) => Ok(map), @@ -923,12 +1059,17 @@ } } - fn load_file(&self, path: &Path) -> CargoResult { + fn load_file(&self, path: &Path, includes: bool) -> CargoResult { let mut seen = HashSet::new(); - self._load_file(path, &mut seen) + self._load_file(path, &mut seen, includes) } - fn _load_file(&self, path: &Path, seen: &mut HashSet) -> CargoResult { + fn _load_file( + &self, + path: &Path, + seen: &mut HashSet, + includes: bool, + ) -> CargoResult { if !seen.insert(path.to_path_buf()) { bail!( "config `include` cycle detected with path `{}`", @@ -936,17 +1077,22 @@ ); } let contents = fs::read_to_string(path) - .chain_err(|| format!("failed to read configuration file `{}`", path.display()))?; - let toml = cargo_toml::parse(&contents, path, self) - .chain_err(|| format!("could not parse TOML configuration in `{}`", path.display()))?; - let value = CV::from_toml(Definition::Path(path.to_path_buf()), toml).chain_err(|| { - format!( - "failed to load TOML configuration from `{}`", - path.display() - ) + .with_context(|| format!("failed to read configuration file `{}`", path.display()))?; + let toml = cargo_toml::parse(&contents, path, self).with_context(|| { + format!("could not parse TOML configuration in `{}`", path.display()) })?; - let value = self.load_includes(value, seen)?; - Ok(value) + let value = + CV::from_toml(Definition::Path(path.to_path_buf()), toml).with_context(|| { + format!( + "failed to 
load TOML configuration from `{}`", + path.display() + ) + })?; + if includes { + self.load_includes(value, seen) + } else { + Ok(value) + } } /// Load any `include` files listed in the given `value`. @@ -956,49 +1102,72 @@ /// `seen` is used to check for cyclic includes. fn load_includes(&self, mut value: CV, seen: &mut HashSet) -> CargoResult { // Get the list of files to load. - let (includes, def) = match &mut value { - CV::Table(table, _def) => match table.remove("include") { - Some(CV::String(s, def)) => (vec![(s, def.clone())], def), - Some(CV::List(list, def)) => (list, def), - Some(other) => bail!( - "`include` expected a string or list, but found {} in `{}`", - other.desc(), - other.definition() - ), - None => { - return Ok(value); - } - }, - _ => unreachable!(), - }; + let includes = self.include_paths(&mut value, true)?; // Check unstable. if !self.cli_unstable().config_include { - self.shell().warn(format!("config `include` in `{}` ignored, the -Zconfig-include command-line flag is required", - def))?; return Ok(value); } // Accumulate all values here. let mut root = CV::Table(HashMap::new(), value.definition().clone()); - for (path, def) in includes { - let abs_path = match &def { - Definition::Path(p) => p.parent().unwrap().join(&path), - Definition::Environment(_) | Definition::Cli => self.cwd().join(&path), - }; - self._load_file(&abs_path, seen) + for (path, abs_path, def) in includes { + self._load_file(&abs_path, seen, true) .and_then(|include| root.merge(include, true)) - .chain_err(|| format!("failed to load config include `{}` from `{}`", path, def))?; + .with_context(|| { + format!("failed to load config include `{}` from `{}`", path, def) + })?; } root.merge(value, true)?; Ok(root) } - /// Add config arguments passed on the command line. - fn merge_cli_args(&mut self) -> CargoResult<()> { + /// Converts the `include` config value to a list of absolute paths. 
+ fn include_paths( + &self, + cv: &mut CV, + remove: bool, + ) -> CargoResult> { + let abs = |path: &String, def: &Definition| -> (String, PathBuf, Definition) { + let abs_path = match def { + Definition::Path(p) => p.parent().unwrap().join(&path), + Definition::Environment(_) | Definition::Cli => self.cwd().join(&path), + }; + (path.to_string(), abs_path, def.clone()) + }; + let table = match cv { + CV::Table(table, _def) => table, + _ => unreachable!(), + }; + let owned; + let include = if remove { + owned = table.remove("include"); + owned.as_ref() + } else { + table.get("include") + }; + let includes = match include { + Some(CV::String(s, def)) => { + vec![abs(s, def)] + } + Some(CV::List(list, _def)) => list.iter().map(|(s, def)| abs(s, def)).collect(), + Some(other) => bail!( + "`include` expected a string or list, but found {} in `{}`", + other.desc(), + other.definition() + ), + None => { + return Ok(Vec::new()); + } + }; + Ok(includes) + } + + /// Parses the CLI config args and returns them as a table. + pub(crate) fn cli_args_as_table(&self) -> CargoResult { + let mut loaded_args = CV::Table(HashMap::new(), Definition::Cli); let cli_args = match &self.cli_config { Some(cli_args) => cli_args, - None => return Ok(()), + None => return Ok(loaded_args), }; - let mut loaded_args = CV::Table(HashMap::new(), Definition::Cli); for arg in cli_args { let arg_as_path = self.cwd.join(arg); let tmp_table = if !arg.is_empty() && arg_as_path.exists() { @@ -1017,7 +1186,7 @@ // TODO: This should probably use a more narrow parser, reject // comments, blank lines, [headers], etc. 
let toml_v: toml::Value = toml::de::from_str(arg) - .chain_err(|| format!("failed to parse --config argument `{}`", arg))?; + .with_context(|| format!("failed to parse --config argument `{}`", arg))?; let toml_table = toml_v.as_table().unwrap(); if toml_table.len() != 1 { bail!( @@ -1027,29 +1196,34 @@ ); } CV::from_toml(Definition::Cli, toml_v) - .chain_err(|| format!("failed to convert --config argument `{}`", arg))? + .with_context(|| format!("failed to convert --config argument `{}`", arg))? }; let mut seen = HashSet::new(); let tmp_table = self .load_includes(tmp_table, &mut seen) - .chain_err(|| "failed to load --config include".to_string())?; + .with_context(|| "failed to load --config include".to_string())?; loaded_args .merge(tmp_table, true) - .chain_err(|| format!("failed to merge --config argument `{}`", arg))?; + .with_context(|| format!("failed to merge --config argument `{}`", arg))?; } - // Force values to be loaded. - let _ = self.values()?; - let values = self.values_mut()?; - let loaded_map = match loaded_args { + Ok(loaded_args) + } + + /// Add config arguments passed on the command line. + fn merge_cli_args(&mut self) -> CargoResult<()> { + let loaded_map = match self.cli_args_as_table()? { CV::Table(table, _def) => table, _ => unreachable!(), }; + // Force values to be loaded. + let _ = self.values()?; + let values = self.values_mut()?; for (key, value) in loaded_map.into_iter() { match values.entry(key) { Vacant(entry) => { entry.insert(value); } - Occupied(mut entry) => entry.get_mut().merge(value, true).chain_err(|| { + Occupied(mut entry) => entry.get_mut().merge(value, true).with_context(|| { format!( "failed to merge --config key `{}` into `{}`", entry.key(), @@ -1135,7 +1309,7 @@ pub fn get_registry_index(&self, registry: &str) -> CargoResult { validate_package_name(registry, "registry name", "")?; if let Some(index) = self.get_string(&format!("registries.{}.index", registry))? 
{ - self.resolve_registry_index(&index).chain_err(|| { + self.resolve_registry_index(&index).with_context(|| { format!( "invalid index URL for registry `{}` defined in {}", registry, index.definition @@ -1180,7 +1354,7 @@ None => return Ok(()), }; - let mut value = self.load_file(&credentials)?; + let mut value = self.load_file(&credentials, true)?; // Backwards compatibility for old `.cargo/credentials` layout. { let (value_map, def) = match value { @@ -1404,7 +1578,7 @@ return Ok(PackageCacheLock(self)); } - Err(e).chain_err(|| "failed to acquire package cache lock")?; + Err(e).with_context(|| "failed to acquire package cache lock")?; } } } @@ -1556,7 +1730,7 @@ val.into_iter() .map(|(key, value)| { let value = CV::from_toml(def.clone(), value) - .chain_err(|| format!("failed to parse key `{}`", key))?; + .with_context(|| format!("failed to parse key `{}`", key))?; Ok((key, value)) }) .collect::>()?, @@ -1602,7 +1776,7 @@ Occupied(mut entry) => { let new_def = value.definition().clone(); let entry = entry.get_mut(); - entry.merge(value, force).chain_err(|| { + entry.merge(value, force).with_context(|| { format!( "failed to merge key `{}` between \ {} and {}", @@ -1735,7 +1909,7 @@ }; let mut contents = String::new(); - file.read_to_string(&mut contents).chain_err(|| { + file.read_to_string(&mut contents).with_context(|| { format!( "failed to read configuration file `{}`", file.path().display() @@ -1804,10 +1978,10 @@ let contents = toml.to_string(); file.seek(SeekFrom::Start(0))?; file.write_all(contents.as_bytes()) - .chain_err(|| format!("failed to write to `{}`", file.path().display()))?; + .with_context(|| format!("failed to write to `{}`", file.path().display()))?; file.file().set_len(contents.len() as u64)?; set_permissions(file.file(), 0o600) - .chain_err(|| format!("failed to set permissions of `{}`", file.path().display()))?; + .with_context(|| format!("failed to set permissions of `{}`", file.path().display()))?; return Ok(()); diff -Nru 
cargo-0.53.0/src/cargo/util/diagnostic_server.rs cargo-0.54.0/src/cargo/util/diagnostic_server.rs --- cargo-0.53.0/src/cargo/util/diagnostic_server.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/cargo/util/diagnostic_server.rs 2021-04-27 14:35:53.000000000 +0000 @@ -10,12 +10,13 @@ use std::thread::{self, JoinHandle}; use anyhow::{Context, Error}; +use cargo_util::ProcessBuilder; use log::warn; use serde::{Deserialize, Serialize}; use crate::core::Edition; use crate::util::errors::CargoResult; -use crate::util::{Config, ProcessBuilder}; +use crate::util::Config; const DIAGNOSICS_SERVER_VAR: &str = "__CARGO_FIX_DIAGNOSTICS_SERVER"; const PLEASE_REPORT_THIS_BUG: &str = diff -Nru cargo-0.53.0/src/cargo/util/errors.rs cargo-0.54.0/src/cargo/util/errors.rs --- cargo-0.53.0/src/cargo/util/errors.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/cargo/util/errors.rs 2021-04-27 14:35:53.000000000 +0000 @@ -3,34 +3,12 @@ use crate::core::{TargetKind, Workspace}; use crate::ops::CompileOptions; use anyhow::Error; +use cargo_util::ProcessError; use std::fmt; use std::path::PathBuf; -use std::process::{ExitStatus, Output}; -use std::str; pub type CargoResult = anyhow::Result; -// TODO: should delete this trait and just use `with_context` instead -pub trait CargoResultExt { - fn chain_err(self, f: F) -> CargoResult - where - F: FnOnce() -> D, - D: fmt::Display + Send + Sync + 'static; -} - -impl CargoResultExt for Result -where - E: Into, -{ - fn chain_err(self, f: F) -> CargoResult - where - F: FnOnce() -> D, - D: fmt::Display + Send + Sync + 'static, - { - self.map_err(|e| e.into().context(f())) - } -} - #[derive(Debug)] pub struct HttpNot200 { pub code: u32, @@ -187,41 +165,6 @@ impl<'a> ::std::iter::FusedIterator for ManifestCauses<'a> {} // ============================================================================= -// Process errors -#[derive(Debug)] -pub struct ProcessError { - /// A detailed description to show to the user why the process 
failed. - pub desc: String, - - /// The exit status of the process. - /// - /// This can be `None` if the process failed to launch (like process not - /// found) or if the exit status wasn't a code but was instead something - /// like termination via a signal. - pub code: Option, - - /// The stdout from the process. - /// - /// This can be `None` if the process failed to launch, or the output was - /// not captured. - pub stdout: Option>, - - /// The stderr from the process. - /// - /// This can be `None` if the process failed to launch, or the output was - /// not captured. - pub stderr: Option>, -} - -impl fmt::Display for ProcessError { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - self.desc.fmt(f) - } -} - -impl std::error::Error for ProcessError {} - -// ============================================================================= // Cargo test errors. /// Error when testcases fail @@ -360,154 +303,6 @@ // ============================================================================= // Construction helpers -/// Creates a new process error. -/// -/// `status` can be `None` if the process did not launch. -/// `output` can be `None` if the process did not launch, or output was not captured. -pub fn process_error( - msg: &str, - status: Option, - output: Option<&Output>, -) -> ProcessError { - let exit = match status { - Some(s) => exit_status_to_string(s), - None => "never executed".to_string(), - }; - - process_error_raw( - msg, - status.and_then(|s| s.code()), - &exit, - output.map(|s| s.stdout.as_slice()), - output.map(|s| s.stderr.as_slice()), - ) -} - -pub fn process_error_raw( - msg: &str, - code: Option, - status: &str, - stdout: Option<&[u8]>, - stderr: Option<&[u8]>, -) -> ProcessError { - let mut desc = format!("{} ({})", msg, status); - - if let Some(out) = stdout { - match str::from_utf8(out) { - Ok(s) if !s.trim().is_empty() => { - desc.push_str("\n--- stdout\n"); - desc.push_str(s); - } - Ok(..) | Err(..) 
=> {} - } - } - if let Some(out) = stderr { - match str::from_utf8(out) { - Ok(s) if !s.trim().is_empty() => { - desc.push_str("\n--- stderr\n"); - desc.push_str(s); - } - Ok(..) | Err(..) => {} - } - } - - ProcessError { - desc, - code, - stdout: stdout.map(|s| s.to_vec()), - stderr: stderr.map(|s| s.to_vec()), - } -} - -pub fn exit_status_to_string(status: ExitStatus) -> String { - return status_to_string(status); - - #[cfg(unix)] - fn status_to_string(status: ExitStatus) -> String { - use std::os::unix::process::*; - - if let Some(signal) = status.signal() { - let name = match signal as libc::c_int { - libc::SIGABRT => ", SIGABRT: process abort signal", - libc::SIGALRM => ", SIGALRM: alarm clock", - libc::SIGFPE => ", SIGFPE: erroneous arithmetic operation", - libc::SIGHUP => ", SIGHUP: hangup", - libc::SIGILL => ", SIGILL: illegal instruction", - libc::SIGINT => ", SIGINT: terminal interrupt signal", - libc::SIGKILL => ", SIGKILL: kill", - libc::SIGPIPE => ", SIGPIPE: write on a pipe with no one to read", - libc::SIGQUIT => ", SIGQUIT: terminal quit signal", - libc::SIGSEGV => ", SIGSEGV: invalid memory reference", - libc::SIGTERM => ", SIGTERM: termination signal", - libc::SIGBUS => ", SIGBUS: access to undefined memory", - #[cfg(not(target_os = "haiku"))] - libc::SIGSYS => ", SIGSYS: bad system call", - libc::SIGTRAP => ", SIGTRAP: trace/breakpoint trap", - _ => "", - }; - format!("signal: {}{}", signal, name) - } else { - status.to_string() - } - } - - #[cfg(windows)] - fn status_to_string(status: ExitStatus) -> String { - use winapi::shared::minwindef::DWORD; - use winapi::um::winnt::*; - - let mut base = status.to_string(); - let extra = match status.code().unwrap() as DWORD { - STATUS_ACCESS_VIOLATION => "STATUS_ACCESS_VIOLATION", - STATUS_IN_PAGE_ERROR => "STATUS_IN_PAGE_ERROR", - STATUS_INVALID_HANDLE => "STATUS_INVALID_HANDLE", - STATUS_INVALID_PARAMETER => "STATUS_INVALID_PARAMETER", - STATUS_NO_MEMORY => "STATUS_NO_MEMORY", - 
STATUS_ILLEGAL_INSTRUCTION => "STATUS_ILLEGAL_INSTRUCTION", - STATUS_NONCONTINUABLE_EXCEPTION => "STATUS_NONCONTINUABLE_EXCEPTION", - STATUS_INVALID_DISPOSITION => "STATUS_INVALID_DISPOSITION", - STATUS_ARRAY_BOUNDS_EXCEEDED => "STATUS_ARRAY_BOUNDS_EXCEEDED", - STATUS_FLOAT_DENORMAL_OPERAND => "STATUS_FLOAT_DENORMAL_OPERAND", - STATUS_FLOAT_DIVIDE_BY_ZERO => "STATUS_FLOAT_DIVIDE_BY_ZERO", - STATUS_FLOAT_INEXACT_RESULT => "STATUS_FLOAT_INEXACT_RESULT", - STATUS_FLOAT_INVALID_OPERATION => "STATUS_FLOAT_INVALID_OPERATION", - STATUS_FLOAT_OVERFLOW => "STATUS_FLOAT_OVERFLOW", - STATUS_FLOAT_STACK_CHECK => "STATUS_FLOAT_STACK_CHECK", - STATUS_FLOAT_UNDERFLOW => "STATUS_FLOAT_UNDERFLOW", - STATUS_INTEGER_DIVIDE_BY_ZERO => "STATUS_INTEGER_DIVIDE_BY_ZERO", - STATUS_INTEGER_OVERFLOW => "STATUS_INTEGER_OVERFLOW", - STATUS_PRIVILEGED_INSTRUCTION => "STATUS_PRIVILEGED_INSTRUCTION", - STATUS_STACK_OVERFLOW => "STATUS_STACK_OVERFLOW", - STATUS_DLL_NOT_FOUND => "STATUS_DLL_NOT_FOUND", - STATUS_ORDINAL_NOT_FOUND => "STATUS_ORDINAL_NOT_FOUND", - STATUS_ENTRYPOINT_NOT_FOUND => "STATUS_ENTRYPOINT_NOT_FOUND", - STATUS_CONTROL_C_EXIT => "STATUS_CONTROL_C_EXIT", - STATUS_DLL_INIT_FAILED => "STATUS_DLL_INIT_FAILED", - STATUS_FLOAT_MULTIPLE_FAULTS => "STATUS_FLOAT_MULTIPLE_FAULTS", - STATUS_FLOAT_MULTIPLE_TRAPS => "STATUS_FLOAT_MULTIPLE_TRAPS", - STATUS_REG_NAT_CONSUMPTION => "STATUS_REG_NAT_CONSUMPTION", - STATUS_HEAP_CORRUPTION => "STATUS_HEAP_CORRUPTION", - STATUS_STACK_BUFFER_OVERRUN => "STATUS_STACK_BUFFER_OVERRUN", - STATUS_ASSERTION_FAILURE => "STATUS_ASSERTION_FAILURE", - _ => return base, - }; - base.push_str(", "); - base.push_str(extra); - base - } -} - -pub fn is_simple_exit_code(code: i32) -> bool { - // Typical unix exit codes are 0 to 127. - // Windows doesn't have anything "typical", and is a - // 32-bit number (which appears signed here, but is really - // unsigned). However, most of the interesting NTSTATUS - // codes are very large. 
This is just a rough - // approximation of which codes are "normal" and which - // ones are abnormal termination. - code >= 0 && code <= 127 -} - pub fn internal(error: S) -> anyhow::Error { InternalError::new(anyhow::format_err!("{}", error)).into() } diff -Nru cargo-0.53.0/src/cargo/util/flock.rs cargo-0.54.0/src/cargo/util/flock.rs --- cargo-0.53.0/src/cargo/util/flock.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/cargo/util/flock.rs 2021-04-27 14:35:53.000000000 +0000 @@ -3,12 +3,12 @@ use std::io::{Read, Seek, SeekFrom, Write}; use std::path::{Display, Path, PathBuf}; -use termcolor::Color::Cyan; - -use crate::util::errors::{CargoResult, CargoResultExt}; -use crate::util::paths; +use crate::util::errors::CargoResult; use crate::util::Config; +use anyhow::Context as _; +use cargo_util::paths; use sys::*; +use termcolor::Color::Cyan; #[derive(Debug)] pub struct FileLock { @@ -225,7 +225,7 @@ Err(anyhow::Error::from(e)) } }) - .chain_err(|| format!("failed to open: {}", path.display()))?; + .with_context(|| format!("failed to open: {}", path.display()))?; match state { State::Exclusive => { acquire(config, msg, &path, &|| try_lock_exclusive(&f), &|| { @@ -314,7 +314,7 @@ let msg = format!("waiting for file lock on {}", msg); config.shell().status_with_color("Blocking", &msg, Cyan)?; - lock_block().chain_err(|| format!("failed to lock file: {}", path.display()))?; + lock_block().with_context(|| format!("failed to lock file: {}", path.display()))?; return Ok(()); #[cfg(all(target_os = "linux", not(target_env = "musl")))] diff -Nru cargo-0.53.0/src/cargo/util/important_paths.rs cargo-0.54.0/src/cargo/util/important_paths.rs --- cargo-0.53.0/src/cargo/util/important_paths.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/cargo/util/important_paths.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,5 +1,5 @@ use crate::util::errors::CargoResult; -use crate::util::paths; +use cargo_util::paths; use std::path::{Path, PathBuf}; /// Finds the root 
`Cargo.toml`. diff -Nru cargo-0.53.0/src/cargo/util/mod.rs cargo-0.54.0/src/cargo/util/mod.rs --- cargo-0.53.0/src/cargo/util/mod.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/cargo/util/mod.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,12 +1,12 @@ +use std::fmt; use std::time::Duration; pub use self::canonical_url::CanonicalUrl; pub use self::config::{homedir, Config, ConfigValue}; pub use self::dependency_queue::DependencyQueue; pub use self::diagnostic_server::RustfixDiagnosticServer; -pub use self::errors::{exit_status_to_string, internal, process_error, process_error_raw}; -pub use self::errors::{CargoResult, CargoResultExt, CliResult, Test}; -pub use self::errors::{CargoTestError, CliError, ProcessError}; +pub use self::errors::{internal, CargoResult, CliResult, Test}; +pub use self::errors::{CargoTestError, CliError}; pub use self::flock::{FileLock, Filesystem}; pub use self::graph::Graph; pub use self::hasher::StableHasher; @@ -15,15 +15,10 @@ pub use self::into_url_with_base::IntoUrlWithBase; pub use self::lev_distance::{closest, closest_msg, lev_distance}; pub use self::lockserver::{LockServer, LockServerClient, LockServerStarted}; -pub use self::paths::{bytes2path, dylib_path, join_paths, path2bytes}; -pub use self::paths::{dylib_path_envvar, normalize_path}; -pub use self::process_builder::{process, ProcessBuilder}; pub use self::progress::{Progress, ProgressStyle}; pub use self::queue::Queue; -pub use self::read2::read2; pub use self::restricted_names::validate_package_name; pub use self::rustc::Rustc; -pub use self::sha256::Sha256; pub use self::to_semver::ToSemver; pub use self::vcs::{existing_vcs_repo, FossilRepo, GitRepo, HgRepo, PijulRepo}; pub use self::workspace::{ @@ -51,15 +46,11 @@ mod lockserver; pub mod machine_message; pub mod network; -pub mod paths; -pub mod process_builder; pub mod profile; mod progress; mod queue; -mod read2; pub mod restricted_names; pub mod rustc; -mod sha256; pub mod to_semver; pub mod toml; mod vcs; @@ 
-75,9 +66,30 @@ } } -/// Whether or not this running in a Continuous Integration environment. -pub fn is_ci() -> bool { - std::env::var("CI").is_ok() || std::env::var("TF_BUILD").is_ok() +pub fn iter_join_onto(mut w: W, iter: I, delim: &str) -> fmt::Result +where + W: fmt::Write, + I: IntoIterator, + T: std::fmt::Display, +{ + let mut it = iter.into_iter().peekable(); + while let Some(n) = it.next() { + write!(w, "{}", n)?; + if it.peek().is_some() { + write!(w, "{}", delim)?; + } + } + Ok(()) +} + +pub fn iter_join(iter: I, delim: &str) -> String +where + I: IntoIterator, + T: std::fmt::Display, +{ + let mut s = String::new(); + let _ = iter_join_onto(&mut s, iter, delim); + s } pub fn indented_lines(text: &str) -> String { diff -Nru cargo-0.53.0/src/cargo/util/paths.rs cargo-0.54.0/src/cargo/util/paths.rs --- cargo-0.53.0/src/cargo/util/paths.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/cargo/util/paths.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,634 +0,0 @@ -use std::env; -use std::ffi::{OsStr, OsString}; -use std::fs::{self, File, OpenOptions}; -use std::io; -use std::io::prelude::*; -use std::iter; -use std::path::{Component, Path, PathBuf}; - -use filetime::FileTime; -use tempfile::Builder as TempFileBuilder; - -use crate::util::errors::{CargoResult, CargoResultExt}; - -pub fn join_paths>(paths: &[T], env: &str) -> CargoResult { - env::join_paths(paths.iter()) - .chain_err(|| { - let paths = paths.iter().map(Path::new).collect::>(); - format!("failed to join path array: {:?}", paths) - }) - .chain_err(|| { - format!( - "failed to join search paths together\n\ - Does ${} have an unterminated quote character?", - env - ) - }) -} - -pub fn dylib_path_envvar() -> &'static str { - if cfg!(windows) { - "PATH" - } else if cfg!(target_os = "macos") { - // When loading and linking a dynamic library or bundle, dlopen - // searches in LD_LIBRARY_PATH, DYLD_LIBRARY_PATH, PWD, and - // DYLD_FALLBACK_LIBRARY_PATH. 
- // In the Mach-O format, a dynamic library has an "install path." - // Clients linking against the library record this path, and the - // dynamic linker, dyld, uses it to locate the library. - // dyld searches DYLD_LIBRARY_PATH *before* the install path. - // dyld searches DYLD_FALLBACK_LIBRARY_PATH only if it cannot - // find the library in the install path. - // Setting DYLD_LIBRARY_PATH can easily have unintended - // consequences. - // - // Also, DYLD_LIBRARY_PATH appears to have significant performance - // penalty starting in 10.13. Cargo's testsuite ran more than twice as - // slow with it on CI. - "DYLD_FALLBACK_LIBRARY_PATH" - } else { - "LD_LIBRARY_PATH" - } -} - -pub fn dylib_path() -> Vec { - match env::var_os(dylib_path_envvar()) { - Some(var) => env::split_paths(&var).collect(), - None => Vec::new(), - } -} - -pub fn normalize_path(path: &Path) -> PathBuf { - let mut components = path.components().peekable(); - let mut ret = if let Some(c @ Component::Prefix(..)) = components.peek().cloned() { - components.next(); - PathBuf::from(c.as_os_str()) - } else { - PathBuf::new() - }; - - for component in components { - match component { - Component::Prefix(..) => unreachable!(), - Component::RootDir => { - ret.push(component.as_os_str()); - } - Component::CurDir => {} - Component::ParentDir => { - ret.pop(); - } - Component::Normal(c) => { - ret.push(c); - } - } - } - ret -} - -pub fn resolve_executable(exec: &Path) -> CargoResult { - if exec.components().count() == 1 { - let paths = env::var_os("PATH").ok_or_else(|| anyhow::format_err!("no PATH"))?; - let candidates = env::split_paths(&paths).flat_map(|path| { - let candidate = path.join(&exec); - let with_exe = if env::consts::EXE_EXTENSION.is_empty() { - None - } else { - Some(candidate.with_extension(env::consts::EXE_EXTENSION)) - }; - iter::once(candidate).chain(with_exe) - }); - for candidate in candidates { - if candidate.is_file() { - // PATH may have a component like "." 
in it, so we still need to - // canonicalize. - return Ok(candidate.canonicalize()?); - } - } - - anyhow::bail!("no executable for `{}` found in PATH", exec.display()) - } else { - Ok(exec.canonicalize()?) - } -} - -pub fn read(path: &Path) -> CargoResult { - match String::from_utf8(read_bytes(path)?) { - Ok(s) => Ok(s), - Err(_) => anyhow::bail!("path at `{}` was not valid utf-8", path.display()), - } -} - -pub fn read_bytes(path: &Path) -> CargoResult> { - fs::read(path).chain_err(|| format!("failed to read `{}`", path.display())) -} - -pub fn write, C: AsRef<[u8]>>(path: P, contents: C) -> CargoResult<()> { - let path = path.as_ref(); - fs::write(path, contents.as_ref()).chain_err(|| format!("failed to write `{}`", path.display())) -} - -pub fn write_if_changed, C: AsRef<[u8]>>(path: P, contents: C) -> CargoResult<()> { - (|| -> CargoResult<()> { - let contents = contents.as_ref(); - let mut f = OpenOptions::new() - .read(true) - .write(true) - .create(true) - .open(&path)?; - let mut orig = Vec::new(); - f.read_to_end(&mut orig)?; - if orig != contents { - f.set_len(0)?; - f.seek(io::SeekFrom::Start(0))?; - f.write_all(contents)?; - } - Ok(()) - })() - .chain_err(|| format!("failed to write `{}`", path.as_ref().display()))?; - Ok(()) -} - -pub fn append(path: &Path, contents: &[u8]) -> CargoResult<()> { - (|| -> CargoResult<()> { - let mut f = OpenOptions::new() - .write(true) - .append(true) - .create(true) - .open(path)?; - - f.write_all(contents)?; - Ok(()) - })() - .chain_err(|| format!("failed to write `{}`", path.display()))?; - Ok(()) -} - -/// Creates a new file. -pub fn create>(path: P) -> CargoResult { - let path = path.as_ref(); - File::create(path).chain_err(|| format!("failed to create file `{}`", path.display())) -} - -/// Opens an existing file. 
-pub fn open>(path: P) -> CargoResult { - let path = path.as_ref(); - File::open(path).chain_err(|| format!("failed to open file `{}`", path.display())) -} - -pub fn mtime(path: &Path) -> CargoResult { - let meta = fs::metadata(path).chain_err(|| format!("failed to stat `{}`", path.display()))?; - Ok(FileTime::from_last_modification_time(&meta)) -} - -/// Returns the maximum mtime of the given path, recursing into -/// subdirectories, and following symlinks. -pub fn mtime_recursive(path: &Path) -> CargoResult { - let meta = fs::metadata(path).chain_err(|| format!("failed to stat `{}`", path.display()))?; - if !meta.is_dir() { - return Ok(FileTime::from_last_modification_time(&meta)); - } - let max_meta = walkdir::WalkDir::new(path) - .follow_links(true) - .into_iter() - .filter_map(|e| match e { - Ok(e) => Some(e), - Err(e) => { - // Ignore errors while walking. If Cargo can't access it, the - // build script probably can't access it, either. - log::debug!("failed to determine mtime while walking directory: {}", e); - None - } - }) - .filter_map(|e| { - if e.path_is_symlink() { - // Use the mtime of both the symlink and its target, to - // handle the case where the symlink is modified to a - // different target. - let sym_meta = match std::fs::symlink_metadata(e.path()) { - Ok(m) => m, - Err(err) => { - // I'm not sure when this is really possible (maybe a - // race with unlinking?). Regardless, if Cargo can't - // read it, the build script probably can't either. - log::debug!( - "failed to determine mtime while fetching symlink metdata of {}: {}", - e.path().display(), - err - ); - return None; - } - }; - let sym_mtime = FileTime::from_last_modification_time(&sym_meta); - // Walkdir follows symlinks. - match e.metadata() { - Ok(target_meta) => { - let target_mtime = FileTime::from_last_modification_time(&target_meta); - Some(sym_mtime.max(target_mtime)) - } - Err(err) => { - // Can't access the symlink target. 
If Cargo can't - // access it, the build script probably can't access - // it either. - log::debug!( - "failed to determine mtime of symlink target for {}: {}", - e.path().display(), - err - ); - Some(sym_mtime) - } - } - } else { - let meta = match e.metadata() { - Ok(m) => m, - Err(err) => { - // I'm not sure when this is really possible (maybe a - // race with unlinking?). Regardless, if Cargo can't - // read it, the build script probably can't either. - log::debug!( - "failed to determine mtime while fetching metadata of {}: {}", - e.path().display(), - err - ); - return None; - } - }; - Some(FileTime::from_last_modification_time(&meta)) - } - }) - .max() - // or_else handles the case where there are no files in the directory. - .unwrap_or_else(|| FileTime::from_last_modification_time(&meta)); - Ok(max_meta) -} - -/// Record the current time on the filesystem (using the filesystem's clock) -/// using a file at the given directory. Returns the current time. -pub fn set_invocation_time(path: &Path) -> CargoResult { - // note that if `FileTime::from_system_time(SystemTime::now());` is determined to be sufficient, - // then this can be removed. 
- let timestamp = path.join("invoked.timestamp"); - write( - ×tamp, - "This file has an mtime of when this was started.", - )?; - let ft = mtime(×tamp)?; - log::debug!("invocation time for {:?} is {}", path, ft); - Ok(ft) -} - -#[cfg(unix)] -pub fn path2bytes(path: &Path) -> CargoResult<&[u8]> { - use std::os::unix::prelude::*; - Ok(path.as_os_str().as_bytes()) -} -#[cfg(windows)] -pub fn path2bytes(path: &Path) -> CargoResult<&[u8]> { - match path.as_os_str().to_str() { - Some(s) => Ok(s.as_bytes()), - None => Err(anyhow::format_err!( - "invalid non-unicode path: {}", - path.display() - )), - } -} - -#[cfg(unix)] -pub fn bytes2path(bytes: &[u8]) -> CargoResult { - use std::os::unix::prelude::*; - Ok(PathBuf::from(OsStr::from_bytes(bytes))) -} -#[cfg(windows)] -pub fn bytes2path(bytes: &[u8]) -> CargoResult { - use std::str; - match str::from_utf8(bytes) { - Ok(s) => Ok(PathBuf::from(s)), - Err(..) => Err(anyhow::format_err!("invalid non-unicode path")), - } -} - -pub fn ancestors<'a>(path: &'a Path, stop_root_at: Option<&Path>) -> PathAncestors<'a> { - PathAncestors::new(path, stop_root_at) -} - -pub struct PathAncestors<'a> { - current: Option<&'a Path>, - stop_at: Option, -} - -impl<'a> PathAncestors<'a> { - fn new(path: &'a Path, stop_root_at: Option<&Path>) -> PathAncestors<'a> { - let stop_at = env::var("__CARGO_TEST_ROOT") - .ok() - .map(PathBuf::from) - .or_else(|| stop_root_at.map(|p| p.to_path_buf())); - PathAncestors { - current: Some(path), - //HACK: avoid reading `~/.cargo/config` when testing Cargo itself. 
- stop_at, - } - } -} - -impl<'a> Iterator for PathAncestors<'a> { - type Item = &'a Path; - - fn next(&mut self) -> Option<&'a Path> { - if let Some(path) = self.current { - self.current = path.parent(); - - if let Some(ref stop_at) = self.stop_at { - if path == stop_at { - self.current = None; - } - } - - Some(path) - } else { - None - } - } -} - -pub fn create_dir_all(p: impl AsRef) -> CargoResult<()> { - _create_dir_all(p.as_ref()) -} - -fn _create_dir_all(p: &Path) -> CargoResult<()> { - fs::create_dir_all(p).chain_err(|| format!("failed to create directory `{}`", p.display()))?; - Ok(()) -} - -pub fn remove_dir_all>(p: P) -> CargoResult<()> { - _remove_dir_all(p.as_ref()) -} - -fn _remove_dir_all(p: &Path) -> CargoResult<()> { - if p.symlink_metadata() - .chain_err(|| format!("could not get metadata for `{}` to remove", p.display()))? - .file_type() - .is_symlink() - { - return remove_file(p); - } - let entries = p - .read_dir() - .chain_err(|| format!("failed to read directory `{}`", p.display()))?; - for entry in entries { - let entry = entry?; - let path = entry.path(); - if entry.file_type()?.is_dir() { - remove_dir_all(&path)?; - } else { - remove_file(&path)?; - } - } - remove_dir(&p) -} - -pub fn remove_dir>(p: P) -> CargoResult<()> { - _remove_dir(p.as_ref()) -} - -fn _remove_dir(p: &Path) -> CargoResult<()> { - fs::remove_dir(p).chain_err(|| format!("failed to remove directory `{}`", p.display()))?; - Ok(()) -} - -pub fn remove_file>(p: P) -> CargoResult<()> { - _remove_file(p.as_ref()) -} - -fn _remove_file(p: &Path) -> CargoResult<()> { - let mut err = match fs::remove_file(p) { - Ok(()) => return Ok(()), - Err(e) => e, - }; - - if err.kind() == io::ErrorKind::PermissionDenied && set_not_readonly(p).unwrap_or(false) { - match fs::remove_file(p) { - Ok(()) => return Ok(()), - Err(e) => err = e, - } - } - - Err(err).chain_err(|| format!("failed to remove file `{}`", p.display()))?; - Ok(()) -} - -fn set_not_readonly(p: &Path) -> io::Result { - let 
mut perms = p.metadata()?.permissions(); - if !perms.readonly() { - return Ok(false); - } - perms.set_readonly(false); - fs::set_permissions(p, perms)?; - Ok(true) -} - -/// Hardlink (file) or symlink (dir) src to dst if possible, otherwise copy it. -/// -/// If the destination already exists, it is removed before linking. -pub fn link_or_copy(src: impl AsRef, dst: impl AsRef) -> CargoResult<()> { - let src = src.as_ref(); - let dst = dst.as_ref(); - _link_or_copy(src, dst) -} - -fn _link_or_copy(src: &Path, dst: &Path) -> CargoResult<()> { - log::debug!("linking {} to {}", src.display(), dst.display()); - if same_file::is_same_file(src, dst).unwrap_or(false) { - return Ok(()); - } - - // NB: we can't use dst.exists(), as if dst is a broken symlink, - // dst.exists() will return false. This is problematic, as we still need to - // unlink dst in this case. symlink_metadata(dst).is_ok() will tell us - // whether dst exists *without* following symlinks, which is what we want. - if fs::symlink_metadata(dst).is_ok() { - remove_file(&dst)?; - } - - let link_result = if src.is_dir() { - #[cfg(target_os = "redox")] - use std::os::redox::fs::symlink; - #[cfg(unix)] - use std::os::unix::fs::symlink; - #[cfg(windows)] - // FIXME: This should probably panic or have a copy fallback. Symlinks - // are not supported in all windows environments. Currently symlinking - // is only used for .dSYM directories on macos, but this shouldn't be - // accidentally relied upon. - use std::os::windows::fs::symlink_dir as symlink; - - let dst_dir = dst.parent().unwrap(); - let src = if src.starts_with(dst_dir) { - src.strip_prefix(dst_dir).unwrap() - } else { - src - }; - symlink(src, dst) - } else if env::var_os("__CARGO_COPY_DONT_LINK_DO_NOT_USE_THIS").is_some() { - // This is a work-around for a bug in macOS 10.15. When running on - // APFS, there seems to be a strange race condition with - // Gatekeeper where it will forcefully kill a process launched via - // `cargo run` with SIGKILL. 
Copying seems to avoid the problem. - // This shouldn't affect anyone except Cargo's test suite because - // it is very rare, and only seems to happen under heavy load and - // rapidly creating lots of executables and running them. - // See https://github.com/rust-lang/cargo/issues/7821 for the - // gory details. - fs::copy(src, dst).map(|_| ()) - } else { - fs::hard_link(src, dst) - }; - link_result - .or_else(|err| { - log::debug!("link failed {}. falling back to fs::copy", err); - fs::copy(src, dst).map(|_| ()) - }) - .chain_err(|| { - format!( - "failed to link or copy `{}` to `{}`", - src.display(), - dst.display() - ) - })?; - Ok(()) -} - -/// Copies a file from one location to another. -pub fn copy, Q: AsRef>(from: P, to: Q) -> CargoResult { - let from = from.as_ref(); - let to = to.as_ref(); - fs::copy(from, to) - .chain_err(|| format!("failed to copy `{}` to `{}`", from.display(), to.display())) -} - -/// Changes the filesystem mtime (and atime if possible) for the given file. -/// -/// This intentionally does not return an error, as this is sometimes not -/// supported on network filesystems. For the current uses in Cargo, this is a -/// "best effort" approach, and errors shouldn't be propagated. -pub fn set_file_time_no_err>(path: P, time: FileTime) { - let path = path.as_ref(); - match filetime::set_file_times(path, time, time) { - Ok(()) => log::debug!("set file mtime {} to {}", path.display(), time), - Err(e) => log::warn!( - "could not set mtime of {} to {}: {:?}", - path.display(), - time, - e - ), - } -} - -/// Strips `base` from `path`. -/// -/// This canonicalizes both paths before stripping. This is useful if the -/// paths are obtained in different ways, and one or the other may or may not -/// have been normalized in some way. -pub fn strip_prefix_canonical>( - path: P, - base: P, -) -> Result { - // Not all filesystems support canonicalize. Just ignore if it doesn't work. 
- let safe_canonicalize = |path: &Path| match path.canonicalize() { - Ok(p) => p, - Err(e) => { - log::warn!("cannot canonicalize {:?}: {:?}", path, e); - path.to_path_buf() - } - }; - let canon_path = safe_canonicalize(path.as_ref()); - let canon_base = safe_canonicalize(base.as_ref()); - canon_path.strip_prefix(canon_base).map(|p| p.to_path_buf()) -} - -/// Creates an excluded from cache directory atomically with its parents as needed. -/// -/// The atomicity only covers creating the leaf directory and exclusion from cache. Any missing -/// parent directories will not be created in an atomic manner. -/// -/// This function is idempotent and in addition to that it won't exclude ``p`` from cache if it -/// already exists. -pub fn create_dir_all_excluded_from_backups_atomic(p: impl AsRef) -> CargoResult<()> { - let path = p.as_ref(); - if path.is_dir() { - return Ok(()); - } - - let parent = path.parent().unwrap(); - let base = path.file_name().unwrap(); - create_dir_all(parent)?; - // We do this in two steps (first create a temporary directory and exlucde - // it from backups, then rename it to the desired name. If we created the - // directory directly where it should be and then excluded it from backups - // we would risk a situation where cargo is interrupted right after the directory - // creation but before the exclusion the the directory would remain non-excluded from - // backups because we only perform exclusion right after we created the directory - // ourselves. - // - // We need the tempdir created in parent instead of $TMP, because only then we can be - // easily sure that rename() will succeed (the new name needs to be on the same mount - // point as the old one). 
- let tempdir = TempFileBuilder::new().prefix(base).tempdir_in(parent)?; - exclude_from_backups(tempdir.path()); - // Previously std::fs::create_dir_all() (through paths::create_dir_all()) was used - // here to create the directory directly and fs::create_dir_all() explicitly treats - // the directory being created concurrently by another thread or process as success, - // hence the check below to follow the existing behavior. If we get an error at - // rename() and suddently the directory (which didn't exist a moment earlier) exists - // we can infer from it it's another cargo process doing work. - if let Err(e) = fs::rename(tempdir.path(), path) { - if !path.exists() { - return Err(anyhow::Error::from(e)); - } - } - Ok(()) -} - -/// Marks the directory as excluded from archives/backups. -/// -/// This is recommended to prevent derived/temporary files from bloating backups. There are two -/// mechanisms used to achieve this right now: -/// -/// * A dedicated resource property excluding from Time Machine backups on macOS -/// * CACHEDIR.TAG files supported by various tools in a platform-independent way -fn exclude_from_backups(path: &Path) { - exclude_from_time_machine(path); - let _ = std::fs::write( - path.join("CACHEDIR.TAG"), - "Signature: 8a477f597d28d172789f06886806bc55 -# This file is a cache directory tag created by cargo. -# For information about cache directory tags see https://bford.info/cachedir/ -", - ); - // Similarly to exclude_from_time_machine() we ignore errors here as it's an optional feature. 
-} - -#[cfg(not(target_os = "macos"))] -fn exclude_from_time_machine(_: &Path) {} - -#[cfg(target_os = "macos")] -/// Marks files or directories as excluded from Time Machine on macOS -fn exclude_from_time_machine(path: &Path) { - use core_foundation::base::TCFType; - use core_foundation::{number, string, url}; - use std::ptr; - - // For compatibility with 10.7 a string is used instead of global kCFURLIsExcludedFromBackupKey - let is_excluded_key: Result = "NSURLIsExcludedFromBackupKey".parse(); - let path = url::CFURL::from_path(path, false); - if let (Some(path), Ok(is_excluded_key)) = (path, is_excluded_key) { - unsafe { - url::CFURLSetResourcePropertyForKey( - path.as_concrete_TypeRef(), - is_excluded_key.as_concrete_TypeRef(), - number::kCFBooleanTrue as *const _, - ptr::null_mut(), - ); - } - } - // Errors are ignored, since it's an optional feature and failure - // doesn't prevent Cargo from working -} diff -Nru cargo-0.53.0/src/cargo/util/process_builder.rs cargo-0.54.0/src/cargo/util/process_builder.rs --- cargo-0.53.0/src/cargo/util/process_builder.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/cargo/util/process_builder.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,414 +0,0 @@ -use crate::util::{process_error, read2, CargoResult, CargoResultExt}; -use anyhow::bail; -use jobserver::Client; -use shell_escape::escape; -use std::collections::BTreeMap; -use std::env; -use std::ffi::{OsStr, OsString}; -use std::fmt; -use std::iter::once; -use std::path::Path; -use std::process::{Command, Output, Stdio}; - -/// A builder object for an external process, similar to `std::process::Command`. -#[derive(Clone, Debug)] -pub struct ProcessBuilder { - /// The program to execute. - program: OsString, - /// A list of arguments to pass to the program. - args: Vec, - /// Any environment variables that should be set for the program. - env: BTreeMap>, - /// The directory to run the program from. - cwd: Option, - /// The `make` jobserver. 
See the [jobserver crate][jobserver_docs] for - /// more information. - /// - /// [jobserver_docs]: https://docs.rs/jobserver/0.1.6/jobserver/ - jobserver: Option, - /// `true` to include environment variable in display. - display_env_vars: bool, -} - -impl fmt::Display for ProcessBuilder { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "`")?; - - if self.display_env_vars { - for (key, val) in self.env.iter() { - if let Some(val) = val { - let val = escape(val.to_string_lossy()); - if cfg!(windows) { - write!(f, "set {}={}&& ", key, val)?; - } else { - write!(f, "{}={} ", key, val)?; - } - } - } - } - - write!(f, "{}", self.program.to_string_lossy())?; - - for arg in &self.args { - write!(f, " {}", escape(arg.to_string_lossy()))?; - } - - write!(f, "`") - } -} - -impl ProcessBuilder { - /// (chainable) Sets the executable for the process. - pub fn program>(&mut self, program: T) -> &mut ProcessBuilder { - self.program = program.as_ref().to_os_string(); - self - } - - /// (chainable) Adds `arg` to the args list. - pub fn arg>(&mut self, arg: T) -> &mut ProcessBuilder { - self.args.push(arg.as_ref().to_os_string()); - self - } - - /// (chainable) Adds multiple `args` to the args list. - pub fn args>(&mut self, args: &[T]) -> &mut ProcessBuilder { - self.args - .extend(args.iter().map(|t| t.as_ref().to_os_string())); - self - } - - /// (chainable) Replaces the args list with the given `args`. - pub fn args_replace>(&mut self, args: &[T]) -> &mut ProcessBuilder { - self.args = args.iter().map(|t| t.as_ref().to_os_string()).collect(); - self - } - - /// (chainable) Sets the current working directory of the process. - pub fn cwd>(&mut self, path: T) -> &mut ProcessBuilder { - self.cwd = Some(path.as_ref().to_os_string()); - self - } - - /// (chainable) Sets an environment variable for the process. 
- pub fn env>(&mut self, key: &str, val: T) -> &mut ProcessBuilder { - self.env - .insert(key.to_string(), Some(val.as_ref().to_os_string())); - self - } - - /// (chainable) Unsets an environment variable for the process. - pub fn env_remove(&mut self, key: &str) -> &mut ProcessBuilder { - self.env.insert(key.to_string(), None); - self - } - - /// Gets the executable name. - pub fn get_program(&self) -> &OsString { - &self.program - } - - /// Gets the program arguments. - pub fn get_args(&self) -> &[OsString] { - &self.args - } - - /// Gets the current working directory for the process. - pub fn get_cwd(&self) -> Option<&Path> { - self.cwd.as_ref().map(Path::new) - } - - /// Gets an environment variable as the process will see it (will inherit from environment - /// unless explicitally unset). - pub fn get_env(&self, var: &str) -> Option { - self.env - .get(var) - .cloned() - .or_else(|| Some(env::var_os(var))) - .and_then(|s| s) - } - - /// Gets all environment variables explicitly set or unset for the process (not inherited - /// vars). - pub fn get_envs(&self) -> &BTreeMap> { - &self.env - } - - /// Sets the `make` jobserver. See the [jobserver crate][jobserver_docs] for - /// more information. - /// - /// [jobserver_docs]: https://docs.rs/jobserver/0.1.6/jobserver/ - pub fn inherit_jobserver(&mut self, jobserver: &Client) -> &mut Self { - self.jobserver = Some(jobserver.clone()); - self - } - - /// Enables environment variable display. - pub fn display_env_vars(&mut self) -> &mut Self { - self.display_env_vars = true; - self - } - - /// Runs the process, waiting for completion, and mapping non-success exit codes to an error. 
- pub fn exec(&self) -> CargoResult<()> { - let mut command = self.build_command(); - let exit = command.status().chain_err(|| { - process_error(&format!("could not execute process {}", self), None, None) - })?; - - if exit.success() { - Ok(()) - } else { - Err(process_error( - &format!("process didn't exit successfully: {}", self), - Some(exit), - None, - ) - .into()) - } - } - - /// Replaces the current process with the target process. - /// - /// On Unix, this executes the process using the Unix syscall `execvp`, which will block - /// this process, and will only return if there is an error. - /// - /// On Windows this isn't technically possible. Instead we emulate it to the best of our - /// ability. One aspect we fix here is that we specify a handler for the Ctrl-C handler. - /// In doing so (and by effectively ignoring it) we should emulate proxying Ctrl-C - /// handling to the application at hand, which will either terminate or handle it itself. - /// According to Microsoft's documentation at - /// . - /// the Ctrl-C signal is sent to all processes attached to a terminal, which should - /// include our child process. If the child terminates then we'll reap them in Cargo - /// pretty quickly, and if the child handles the signal then we won't terminate - /// (and we shouldn't!) until the process itself later exits. - pub fn exec_replace(&self) -> CargoResult<()> { - imp::exec_replace(self) - } - - /// Executes the process, returning the stdio output, or an error if non-zero exit status. 
- pub fn exec_with_output(&self) -> CargoResult { - let mut command = self.build_command(); - - let output = command.output().chain_err(|| { - process_error(&format!("could not execute process {}", self), None, None) - })?; - - if output.status.success() { - Ok(output) - } else { - Err(process_error( - &format!("process didn't exit successfully: {}", self), - Some(output.status), - Some(&output), - ) - .into()) - } - } - - /// Executes a command, passing each line of stdout and stderr to the supplied callbacks, which - /// can mutate the string data. - /// - /// If any invocations of these function return an error, it will be propagated. - /// - /// If `capture_output` is true, then all the output will also be buffered - /// and stored in the returned `Output` object. If it is false, no caching - /// is done, and the callbacks are solely responsible for handling the - /// output. - pub fn exec_with_streaming( - &self, - on_stdout_line: &mut dyn FnMut(&str) -> CargoResult<()>, - on_stderr_line: &mut dyn FnMut(&str) -> CargoResult<()>, - capture_output: bool, - ) -> CargoResult { - let mut stdout = Vec::new(); - let mut stderr = Vec::new(); - - let mut cmd = self.build_command(); - cmd.stdout(Stdio::piped()) - .stderr(Stdio::piped()) - .stdin(Stdio::null()); - - let mut callback_error = None; - let status = (|| { - let mut child = cmd.spawn()?; - let out = child.stdout.take().unwrap(); - let err = child.stderr.take().unwrap(); - read2(out, err, &mut |is_out, data, eof| { - let idx = if eof { - data.len() - } else { - match data.iter().rposition(|b| *b == b'\n') { - Some(i) => i + 1, - None => return, - } - }; - { - // scope for new_lines - let new_lines = if capture_output { - let dst = if is_out { &mut stdout } else { &mut stderr }; - let start = dst.len(); - let data = data.drain(..idx); - dst.extend(data); - &dst[start..] 
- } else { - &data[..idx] - }; - for line in String::from_utf8_lossy(new_lines).lines() { - if callback_error.is_some() { - break; - } - let callback_result = if is_out { - on_stdout_line(line) - } else { - on_stderr_line(line) - }; - if let Err(e) = callback_result { - callback_error = Some(e); - } - } - } - if !capture_output { - data.drain(..idx); - } - })?; - child.wait() - })() - .chain_err(|| process_error(&format!("could not execute process {}", self), None, None))?; - let output = Output { - status, - stdout, - stderr, - }; - - { - let to_print = if capture_output { Some(&output) } else { None }; - if let Some(e) = callback_error { - let cx = process_error( - &format!("failed to parse process output: {}", self), - Some(output.status), - to_print, - ); - bail!(anyhow::Error::new(cx).context(e)); - } else if !output.status.success() { - bail!(process_error( - &format!("process didn't exit successfully: {}", self), - Some(output.status), - to_print, - )); - } - } - - Ok(output) - } - - /// Converts `ProcessBuilder` into a `std::process::Command`, and handles the jobserver, if - /// present. - pub fn build_command(&self) -> Command { - let mut command = Command::new(&self.program); - if let Some(cwd) = self.get_cwd() { - command.current_dir(cwd); - } - for arg in &self.args { - command.arg(arg); - } - for (k, v) in &self.env { - match *v { - Some(ref v) => { - command.env(k, v); - } - None => { - command.env_remove(k); - } - } - } - if let Some(ref c) = self.jobserver { - c.configure(&mut command); - } - command - } - - /// Wraps an existing command with the provided wrapper, if it is present and valid. 
- /// - /// # Examples - /// - /// ```rust - /// use cargo::util::{ProcessBuilder, process}; - /// // Running this would execute `rustc` - /// let cmd: ProcessBuilder = process("rustc"); - /// - /// // Running this will execute `sccache rustc` - /// let cmd = cmd.wrapped(Some("sccache")); - /// ``` - pub fn wrapped(mut self, wrapper: Option>) -> Self { - let wrapper = if let Some(wrapper) = wrapper.as_ref() { - wrapper.as_ref() - } else { - return self; - }; - - if wrapper.is_empty() { - return self; - } - - let args = once(self.program).chain(self.args.into_iter()).collect(); - - self.program = wrapper.to_os_string(); - self.args = args; - - self - } -} - -/// A helper function to create a `ProcessBuilder`. -pub fn process>(cmd: T) -> ProcessBuilder { - ProcessBuilder { - program: cmd.as_ref().to_os_string(), - args: Vec::new(), - cwd: None, - env: BTreeMap::new(), - jobserver: None, - display_env_vars: false, - } -} - -#[cfg(unix)] -mod imp { - use crate::util::{process_error, ProcessBuilder}; - use crate::CargoResult; - use std::os::unix::process::CommandExt; - - pub fn exec_replace(process_builder: &ProcessBuilder) -> CargoResult<()> { - let mut command = process_builder.build_command(); - let error = command.exec(); - Err(anyhow::Error::from(error).context(process_error( - &format!("could not execute process {}", process_builder), - None, - None, - ))) - } -} - -#[cfg(windows)] -mod imp { - use crate::util::{process_error, ProcessBuilder}; - use crate::CargoResult; - use winapi::shared::minwindef::{BOOL, DWORD, FALSE, TRUE}; - use winapi::um::consoleapi::SetConsoleCtrlHandler; - - unsafe extern "system" fn ctrlc_handler(_: DWORD) -> BOOL { - // Do nothing; let the child process handle it. 
- TRUE - } - - pub fn exec_replace(process_builder: &ProcessBuilder) -> CargoResult<()> { - unsafe { - if SetConsoleCtrlHandler(Some(ctrlc_handler), TRUE) == FALSE { - return Err(process_error("Could not set Ctrl-C handler.", None, None).into()); - } - } - - // Just execute the process as normal. - process_builder.exec() - } -} diff -Nru cargo-0.53.0/src/cargo/util/progress.rs cargo-0.54.0/src/cargo/util/progress.rs --- cargo-0.53.0/src/cargo/util/progress.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/cargo/util/progress.rs 2021-04-27 14:35:53.000000000 +0000 @@ -4,8 +4,8 @@ use crate::core::shell::Verbosity; use crate::util::config::ProgressWhen; -use crate::util::{is_ci, CargoResult, Config}; - +use crate::util::{CargoResult, Config}; +use cargo_util::is_ci; use unicode_width::UnicodeWidthChar; pub struct Progress<'cfg> { diff -Nru cargo-0.53.0/src/cargo/util/read2.rs cargo-0.54.0/src/cargo/util/read2.rs --- cargo-0.53.0/src/cargo/util/read2.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/cargo/util/read2.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,178 +0,0 @@ -pub use self::imp::read2; - -#[cfg(unix)] -mod imp { - use std::io; - use std::io::prelude::*; - use std::mem; - use std::os::unix::prelude::*; - use std::process::{ChildStderr, ChildStdout}; - - pub fn read2( - mut out_pipe: ChildStdout, - mut err_pipe: ChildStderr, - data: &mut dyn FnMut(bool, &mut Vec, bool), - ) -> io::Result<()> { - unsafe { - libc::fcntl(out_pipe.as_raw_fd(), libc::F_SETFL, libc::O_NONBLOCK); - libc::fcntl(err_pipe.as_raw_fd(), libc::F_SETFL, libc::O_NONBLOCK); - } - - let mut out_done = false; - let mut err_done = false; - let mut out = Vec::new(); - let mut err = Vec::new(); - - let mut fds: [libc::pollfd; 2] = unsafe { mem::zeroed() }; - fds[0].fd = out_pipe.as_raw_fd(); - fds[0].events = libc::POLLIN; - fds[1].fd = err_pipe.as_raw_fd(); - fds[1].events = libc::POLLIN; - let mut nfds = 2; - let mut errfd = 1; - - while nfds > 0 { - // wait for either 
pipe to become readable using `select` - let r = unsafe { libc::poll(fds.as_mut_ptr(), nfds, -1) }; - if r == -1 { - let err = io::Error::last_os_error(); - if err.kind() == io::ErrorKind::Interrupted { - continue; - } - return Err(err); - } - - // Read as much as we can from each pipe, ignoring EWOULDBLOCK or - // EAGAIN. If we hit EOF, then this will happen because the underlying - // reader will return Ok(0), in which case we'll see `Ok` ourselves. In - // this case we flip the other fd back into blocking mode and read - // whatever's leftover on that file descriptor. - let handle = |res: io::Result<_>| match res { - Ok(_) => Ok(true), - Err(e) => { - if e.kind() == io::ErrorKind::WouldBlock { - Ok(false) - } else { - Err(e) - } - } - }; - if !err_done && fds[errfd].revents != 0 && handle(err_pipe.read_to_end(&mut err))? { - err_done = true; - nfds -= 1; - } - data(false, &mut err, err_done); - if !out_done && fds[0].revents != 0 && handle(out_pipe.read_to_end(&mut out))? { - out_done = true; - fds[0].fd = err_pipe.as_raw_fd(); - errfd = 0; - nfds -= 1; - } - data(true, &mut out, out_done); - } - Ok(()) - } -} - -#[cfg(windows)] -mod imp { - use std::io; - use std::os::windows::prelude::*; - use std::process::{ChildStderr, ChildStdout}; - use std::slice; - - use miow::iocp::{CompletionPort, CompletionStatus}; - use miow::pipe::NamedPipe; - use miow::Overlapped; - use winapi::shared::winerror::ERROR_BROKEN_PIPE; - - struct Pipe<'a> { - dst: &'a mut Vec, - overlapped: Overlapped, - pipe: NamedPipe, - done: bool, - } - - pub fn read2( - out_pipe: ChildStdout, - err_pipe: ChildStderr, - data: &mut dyn FnMut(bool, &mut Vec, bool), - ) -> io::Result<()> { - let mut out = Vec::new(); - let mut err = Vec::new(); - - let port = CompletionPort::new(1)?; - port.add_handle(0, &out_pipe)?; - port.add_handle(1, &err_pipe)?; - - unsafe { - let mut out_pipe = Pipe::new(out_pipe, &mut out); - let mut err_pipe = Pipe::new(err_pipe, &mut err); - - out_pipe.read()?; - 
err_pipe.read()?; - - let mut status = [CompletionStatus::zero(), CompletionStatus::zero()]; - - while !out_pipe.done || !err_pipe.done { - for status in port.get_many(&mut status, None)? { - if status.token() == 0 { - out_pipe.complete(status); - data(true, out_pipe.dst, out_pipe.done); - out_pipe.read()?; - } else { - err_pipe.complete(status); - data(false, err_pipe.dst, err_pipe.done); - err_pipe.read()?; - } - } - } - - Ok(()) - } - } - - impl<'a> Pipe<'a> { - unsafe fn new(p: P, dst: &'a mut Vec) -> Pipe<'a> { - Pipe { - dst, - pipe: NamedPipe::from_raw_handle(p.into_raw_handle()), - overlapped: Overlapped::zero(), - done: false, - } - } - - unsafe fn read(&mut self) -> io::Result<()> { - let dst = slice_to_end(self.dst); - match self.pipe.read_overlapped(dst, self.overlapped.raw()) { - Ok(_) => Ok(()), - Err(e) => { - if e.raw_os_error() == Some(ERROR_BROKEN_PIPE as i32) { - self.done = true; - Ok(()) - } else { - Err(e) - } - } - } - } - - unsafe fn complete(&mut self, status: &CompletionStatus) { - let prev = self.dst.len(); - self.dst.set_len(prev + status.bytes_transferred() as usize); - if status.bytes_transferred() == 0 { - self.done = true; - } - } - } - - unsafe fn slice_to_end(v: &mut Vec) -> &mut [u8] { - if v.capacity() == 0 { - v.reserve(16); - } - if v.capacity() == v.len() { - v.reserve(1); - } - slice::from_raw_parts_mut(v.as_mut_ptr().add(v.len()), v.capacity() - v.len()) - } -} diff -Nru cargo-0.53.0/src/cargo/util/rustc.rs cargo-0.54.0/src/cargo/util/rustc.rs --- cargo-0.53.0/src/cargo/util/rustc.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/cargo/util/rustc.rs 2021-04-27 14:35:53.000000000 +0000 @@ -4,12 +4,13 @@ use std::path::{Path, PathBuf}; use std::sync::Mutex; +use anyhow::Context as _; +use cargo_util::{paths, ProcessBuilder, ProcessError}; use log::{debug, info, warn}; use serde::{Deserialize, Serialize}; use crate::util::interning::InternedString; -use crate::util::paths; -use crate::util::{self, profile, 
CargoResult, CargoResultExt, ProcessBuilder, StableHasher}; +use crate::util::{profile, CargoResult, StableHasher}; /// Information on the `rustc` executable #[derive(Debug)] @@ -45,9 +46,15 @@ ) -> CargoResult { let _p = profile::start("Rustc::new"); - let mut cache = Cache::load(&path, rustup_rustc, cache_location); + let mut cache = Cache::load( + wrapper.as_deref(), + workspace_wrapper.as_deref(), + &path, + rustup_rustc, + cache_location, + ); - let mut cmd = util::process(&path); + let mut cmd = ProcessBuilder::new(&path); cmd.arg("-vV"); let verbose_version = cache.cached_output(&cmd, 0)?.0; @@ -66,7 +73,7 @@ }; let host = InternedString::new(extract("host: ")?); - let version = semver::Version::parse(extract("release: ")?).chain_err(|| { + let version = semver::Version::parse(extract("release: ")?).with_context(|| { format!( "rustc version does not appear to be a valid semver version, from:\n{}", verbose_version @@ -86,18 +93,18 @@ /// Gets a process builder set up to use the found rustc version, with a wrapper if `Some`. pub fn process(&self) -> ProcessBuilder { - util::process(self.path.as_path()).wrapped(self.wrapper.as_ref()) + ProcessBuilder::new(self.path.as_path()).wrapped(self.wrapper.as_ref()) } /// Gets a process builder set up to use the found rustc version, with a wrapper if `Some`. pub fn workspace_process(&self) -> ProcessBuilder { - util::process(self.path.as_path()) + ProcessBuilder::new(self.path.as_path()) .wrapped(self.workspace_wrapper.as_ref()) .wrapped(self.wrapper.as_ref()) } pub fn process_no_wrapper(&self) -> ProcessBuilder { - util::process(&self.path) + ProcessBuilder::new(&self.path) } /// Gets the output for the given command. 
@@ -154,8 +161,17 @@ } impl Cache { - fn load(rustc: &Path, rustup_rustc: &Path, cache_location: Option) -> Cache { - match (cache_location, rustc_fingerprint(rustc, rustup_rustc)) { + fn load( + wrapper: Option<&Path>, + workspace_wrapper: Option<&Path>, + rustc: &Path, + rustup_rustc: &Path, + cache_location: Option, + ) -> Cache { + match ( + cache_location, + rustc_fingerprint(wrapper, workspace_wrapper, rustc, rustup_rustc), + ) { (Some(cache_location), Ok(rustc_fingerprint)) => { let empty = CacheData { rustc_fingerprint, @@ -218,13 +234,13 @@ let output = cmd .build_command() .output() - .chain_err(|| format!("could not execute process {} (never executed)", cmd))?; + .with_context(|| format!("could not execute process {} (never executed)", cmd))?; let stdout = String::from_utf8(output.stdout) .map_err(|e| anyhow::anyhow!("{}: {:?}", e, e.as_bytes())) - .chain_err(|| anyhow::anyhow!("`{}` didn't return utf8 output", cmd))?; + .with_context(|| format!("`{}` didn't return utf8 output", cmd))?; let stderr = String::from_utf8(output.stderr) .map_err(|e| anyhow::anyhow!("{}: {:?}", e, e.as_bytes())) - .chain_err(|| anyhow::anyhow!("`{}` didn't return utf8 output", cmd))?; + .with_context(|| format!("`{}` didn't return utf8 output", cmd))?; self.data.outputs.insert( key, Output { @@ -232,7 +248,7 @@ status: if output.status.success() { String::new() } else { - util::exit_status_to_string(output.status) + cargo_util::exit_status_to_string(output.status) }, code: output.status.code(), stdout, @@ -245,7 +261,7 @@ if output.success { Ok((output.stdout.clone(), output.stderr.clone())) } else { - Err(util::process_error_raw( + Err(ProcessError::new_raw( &format!("process didn't exit successfully: {}", cmd), output.code, &output.status, @@ -272,13 +288,29 @@ } } -fn rustc_fingerprint(path: &Path, rustup_rustc: &Path) -> CargoResult { +fn rustc_fingerprint( + wrapper: Option<&Path>, + workspace_wrapper: Option<&Path>, + rustc: &Path, + rustup_rustc: &Path, +) -> 
CargoResult { let mut hasher = StableHasher::new(); - let path = paths::resolve_executable(path)?; - path.hash(&mut hasher); - - paths::mtime(&path)?.hash(&mut hasher); + let hash_exe = |hasher: &mut _, path| -> CargoResult<()> { + let path = paths::resolve_executable(path)?; + path.hash(hasher); + + paths::mtime(&path)?.hash(hasher); + Ok(()) + }; + + hash_exe(&mut hasher, rustc)?; + if let Some(wrapper) = wrapper { + hash_exe(&mut hasher, wrapper)?; + } + if let Some(workspace_wrapper) = workspace_wrapper { + hash_exe(&mut hasher, workspace_wrapper)?; + } // Rustup can change the effective compiler without touching // the `rustc` binary, so we try to account for this here. @@ -291,7 +323,7 @@ // // If we don't see rustup env vars, but it looks like the compiler // is managed by rustup, we conservatively bail out. - let maybe_rustup = rustup_rustc == path; + let maybe_rustup = rustup_rustc == rustc; match ( maybe_rustup, env::var("RUSTUP_HOME"), diff -Nru cargo-0.53.0/src/cargo/util/sha256.rs cargo-0.54.0/src/cargo/util/sha256.rs --- cargo-0.53.0/src/cargo/util/sha256.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/cargo/util/sha256.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,55 +0,0 @@ -use crate::util::{paths, CargoResult, CargoResultExt}; -use crypto_hash::{Algorithm, Hasher}; -use std::fs::File; -use std::io::{self, Read, Write}; -use std::path::Path; - -pub struct Sha256(Hasher); - -impl Sha256 { - pub fn new() -> Sha256 { - let hasher = Hasher::new(Algorithm::SHA256); - Sha256(hasher) - } - - pub fn update(&mut self, bytes: &[u8]) -> &mut Sha256 { - let _ = self.0.write_all(bytes); - self - } - - pub fn update_file(&mut self, mut file: &File) -> io::Result<&mut Sha256> { - let mut buf = [0; 64 * 1024]; - loop { - let n = file.read(&mut buf)?; - if n == 0 { - break Ok(self); - } - self.update(&buf[..n]); - } - } - - pub fn update_path>(&mut self, path: P) -> CargoResult<&mut Sha256> { - let path = path.as_ref(); - let file = paths::open(path)?; - 
self.update_file(&file) - .chain_err(|| format!("failed to read `{}`", path.display()))?; - Ok(self) - } - - pub fn finish(&mut self) -> [u8; 32] { - let mut ret = [0u8; 32]; - let data = self.0.finish(); - ret.copy_from_slice(&data[..]); - ret - } - - pub fn finish_hex(&mut self) -> String { - hex::encode(self.finish()) - } -} - -impl Default for Sha256 { - fn default() -> Self { - Self::new() - } -} diff -Nru cargo-0.53.0/src/cargo/util/toml/mod.rs cargo-0.54.0/src/cargo/util/toml/mod.rs --- cargo-0.53.0/src/cargo/util/toml/mod.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/cargo/util/toml/mod.rs 2021-04-27 14:35:53.000000000 +0000 @@ -5,8 +5,9 @@ use std::rc::Rc; use std::str; -use anyhow::{anyhow, bail}; +use anyhow::{anyhow, bail, Context as _}; use cargo_platform::Platform; +use cargo_util::paths; use log::{debug, trace}; use semver::{self, VersionReq}; use serde::de; @@ -14,6 +15,7 @@ use serde::{Deserialize, Serialize}; use url::Url; +use crate::core::compiler::{CompileKind, CompileTarget}; use crate::core::dependency::DepKind; use crate::core::manifest::{ManifestMetadata, TargetSourcePath, Warnings}; use crate::core::resolver::ResolveBehavior; @@ -21,11 +23,9 @@ use crate::core::{Edition, EitherManifest, Feature, Features, VirtualManifest, Workspace}; use crate::core::{GitReference, PackageIdSpec, SourceId, WorkspaceConfig, WorkspaceRootConfig}; use crate::sources::{CRATES_IO_INDEX, CRATES_IO_REGISTRY}; -use crate::util::errors::{CargoResult, CargoResultExt, ManifestError}; +use crate::util::errors::{CargoResult, ManifestError}; use crate::util::interning::InternedString; -use crate::util::{ - self, config::ConfigRelativePath, paths, validate_package_name, Config, IntoUrl, -}; +use crate::util::{self, config::ConfigRelativePath, validate_package_name, Config, IntoUrl}; mod targets; use self::targets::targets; @@ -51,7 +51,7 @@ let contents = paths::read(path).map_err(|err| ManifestError::new(err, path.into()))?; do_read_manifest(&contents, 
path, source_id, config) - .chain_err(|| format!("failed to parse manifest at `{}`", path.display())) + .with_context(|| format!("failed to parse manifest at `{}`", path.display())) .map_err(|err| ManifestError::new(err, path.into())) } @@ -419,53 +419,13 @@ } } -#[derive(Clone, Debug, Serialize, Eq, PartialEq)] -#[serde(untagged)] +#[derive(Clone, Debug, Deserialize, Serialize, Eq, PartialEq)] +#[serde(untagged, expecting = "expected a boolean or an integer")] pub enum U32OrBool { U32(u32), Bool(bool), } -impl<'de> de::Deserialize<'de> for U32OrBool { - fn deserialize(deserializer: D) -> Result - where - D: de::Deserializer<'de>, - { - struct Visitor; - - impl<'de> de::Visitor<'de> for Visitor { - type Value = U32OrBool; - - fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { - formatter.write_str("a boolean or an integer") - } - - fn visit_bool(self, b: bool) -> Result - where - E: de::Error, - { - Ok(U32OrBool::Bool(b)) - } - - fn visit_i64(self, u: i64) -> Result - where - E: de::Error, - { - Ok(U32OrBool::U32(u as u32)) - } - - fn visit_u64(self, u: u64) -> Result - where - E: de::Error, - { - Ok(U32OrBool::U32(u as u32)) - } - } - - deserializer.deserialize_any(Visitor) - } -} - #[derive(Deserialize, Serialize, Clone, Debug, Default, Eq, PartialEq)] #[serde(default, rename_all = "kebab-case")] pub struct TomlProfile { @@ -770,46 +730,13 @@ } } -#[derive(Clone, Debug, Serialize, Eq, PartialEq)] -#[serde(untagged)] +#[derive(Clone, Debug, Deserialize, Serialize, Eq, PartialEq)] +#[serde(untagged, expecting = "expected a boolean or a string")] pub enum StringOrBool { String(String), Bool(bool), } -impl<'de> de::Deserialize<'de> for StringOrBool { - fn deserialize(deserializer: D) -> Result - where - D: de::Deserializer<'de>, - { - struct Visitor; - - impl<'de> de::Visitor<'de> for Visitor { - type Value = StringOrBool; - - fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { - formatter.write_str("a boolean or a string") 
- } - - fn visit_bool(self, b: bool) -> Result - where - E: de::Error, - { - Ok(StringOrBool::Bool(b)) - } - - fn visit_str(self, s: &str) -> Result - where - E: de::Error, - { - Ok(StringOrBool::String(s.to_string())) - } - } - - deserializer.deserialize_any(Visitor) - } -} - #[derive(PartialEq, Clone, Debug, Serialize)] #[serde(untagged)] pub enum VecStringOrBool { @@ -867,6 +794,10 @@ authors: Option>, build: Option, metabuild: Option, + #[serde(rename = "default-target")] + default_target: Option, + #[serde(rename = "forced-target")] + forced_target: Option, links: Option, exclude: Option>, include: Option>, @@ -1102,10 +1033,10 @@ let edition = if let Some(ref edition) = project.edition { features .require(Feature::edition()) - .chain_err(|| "editions are unstable")?; + .with_context(|| "editions are unstable")?; edition .parse() - .chain_err(|| "failed to parse the `edition` key")? + .with_context(|| "failed to parse the `edition` key")? } else { Edition::Edition2015 }; @@ -1387,9 +1318,24 @@ } } + let default_kind = project + .default_target + .as_ref() + .map(|t| CompileTarget::new(&*t)) + .transpose()? + .map(CompileKind::Target); + let forced_kind = project + .forced_target + .as_ref() + .map(|t| CompileTarget::new(&*t)) + .transpose()? 
+ .map(CompileKind::Target); + let custom_metadata = project.metadata.clone(); let mut manifest = Manifest::new( summary, + default_kind, + forced_kind, targets, exclude, include, @@ -1544,7 +1490,7 @@ } let mut replace = Vec::new(); for (spec, replacement) in self.replace.iter().flatten() { - let mut spec = PackageIdSpec::parse(spec).chain_err(|| { + let mut spec = PackageIdSpec::parse(spec).with_context(|| { format!( "replacements must specify a valid semver \ version to replace, but `{}` does not", @@ -1588,7 +1534,7 @@ .config .get_registry_index(url) .or_else(|_| url.into_url()) - .chain_err(|| { + .with_context(|| { format!("[patch] entry `{}` should be a URL or registry name", url) })?, }; @@ -1779,6 +1725,35 @@ } } + // Early detection of potentially misused feature syntax + // instead of generating a "feature not found" error. + if let Some(features) = &self.features { + for feature in features { + if feature.contains('/') { + bail!( + "feature `{}` in dependency `{}` is not allowed to contain slashes\n\ + If you want to enable features of a transitive dependency, \ + the direct dependency needs to re-export those features from \ + the `[features]` table.", + feature, + name_in_toml + ); + } + if feature.starts_with("dep:") { + bail!( + "feature `{}` in dependency `{}` is not allowed to use explicit \ + `dep:` syntax\n\ + If you want to enable an optional dependency, specify the name \ + of the optional dependency without the `dep:` prefix, or specify \ + a feature from the dependency's `[features]` table that enables \ + the optional dependency.", + feature, + name_in_toml + ); + } + } + } + let new_source_id = match ( self.git.as_ref(), self.path.as_ref(), @@ -1853,7 +1828,7 @@ // built from. if cx.source_id.is_path() { let path = cx.root.join(path); - let path = util::normalize_path(&path); + let path = paths::normalize_path(&path); SourceId::for_path(&path)? 
} else { cx.source_id diff -Nru cargo-0.53.0/src/cargo/util/toml/targets.rs cargo-0.54.0/src/cargo/util/toml/targets.rs --- cargo-0.53.0/src/cargo/util/toml/targets.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/cargo/util/toml/targets.rs 2021-04-27 14:35:53.000000000 +0000 @@ -20,9 +20,11 @@ }; use crate::core::compiler::CrateType; use crate::core::{Edition, Feature, Features, Target}; -use crate::util::errors::{CargoResult, CargoResultExt}; +use crate::util::errors::CargoResult; use crate::util::restricted_names; +use anyhow::Context as _; + pub fn targets( features: &Features, manifest: &TomlManifest, @@ -787,11 +789,11 @@ if let Some(edition) = toml.edition.clone() { features .require(Feature::edition()) - .chain_err(|| "editions are unstable")?; + .with_context(|| "editions are unstable")?; target.set_edition( edition .parse() - .chain_err(|| "failed to parse the `edition` key")?, + .with_context(|| "failed to parse the `edition` key")?, ); } Ok(()) diff -Nru cargo-0.53.0/src/cargo/util/vcs.rs cargo-0.54.0/src/cargo/util/vcs.rs --- cargo-0.53.0/src/cargo/util/vcs.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/cargo/util/vcs.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,5 +1,6 @@ -use crate::util::paths; -use crate::util::{process, CargoResult}; +use crate::util::CargoResult; +use cargo_util::paths; +use cargo_util::ProcessBuilder; use std::path::Path; // Check if we are in an existing repo. 
We define that to be true if either: @@ -41,11 +42,15 @@ impl HgRepo { pub fn init(path: &Path, cwd: &Path) -> CargoResult { - process("hg").cwd(cwd).arg("init").arg(path).exec()?; + ProcessBuilder::new("hg") + .cwd(cwd) + .arg("init") + .arg(path) + .exec()?; Ok(HgRepo) } pub fn discover(path: &Path, cwd: &Path) -> CargoResult { - process("hg") + ProcessBuilder::new("hg") .cwd(cwd) .arg("--cwd") .arg(path) @@ -57,7 +62,11 @@ impl PijulRepo { pub fn init(path: &Path, cwd: &Path) -> CargoResult { - process("pijul").cwd(cwd).arg("init").arg(path).exec()?; + ProcessBuilder::new("pijul") + .cwd(cwd) + .arg("init") + .arg(path) + .exec()?; Ok(PijulRepo) } } @@ -73,28 +82,28 @@ db_path.push(db_fname); // then create the fossil DB in that location - process("fossil") + ProcessBuilder::new("fossil") .cwd(cwd) .arg("init") .arg(&db_path) .exec()?; // open it in that new directory - process("fossil") + ProcessBuilder::new("fossil") .cwd(&path) .arg("open") .arg(db_fname) .exec()?; // set `target` as ignoreable and cleanable - process("fossil") + ProcessBuilder::new("fossil") .cwd(cwd) .arg("settings") .arg("ignore-glob") .arg("target") .exec()?; - process("fossil") + ProcessBuilder::new("fossil") .cwd(cwd) .arg("settings") .arg("clean-glob") diff -Nru cargo-0.53.0/src/cargo/util/workspace.rs cargo-0.54.0/src/cargo/util/workspace.rs --- cargo-0.53.0/src/cargo/util/workspace.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/cargo/util/workspace.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,10 +1,10 @@ -use super::ProcessBuilder; use crate::core::compiler::Unit; use crate::core::manifest::TargetSourcePath; use crate::core::{Target, Workspace}; use crate::ops::CompileOptions; use crate::util::CargoResult; use anyhow::bail; +use cargo_util::ProcessBuilder; use std::fmt::Write; use std::path::PathBuf; diff -Nru cargo-0.53.0/src/doc/contrib/src/index.md cargo-0.54.0/src/doc/contrib/src/index.md --- cargo-0.53.0/src/doc/contrib/src/index.md 2021-04-21 00:43:41.000000000 
+0000 +++ cargo-0.54.0/src/doc/contrib/src/index.md 2021-04-27 14:35:53.000000000 +0000 @@ -8,7 +8,7 @@ issues], [improving the documentation], [fixing bugs], and working on [small] and [large features]. -If you have a general question about Cargo or it's internals, feel free to ask +If you have a general question about Cargo or its internals, feel free to ask on [Zulip]. This guide assumes you have some familiarity with Rust, and how to use Cargo, diff -Nru cargo-0.53.0/src/doc/man/cargo-build.md cargo-0.54.0/src/doc/man/cargo-build.md --- cargo-0.53.0/src/doc/man/cargo-build.md 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/doc/man/cargo-build.md 2021-04-27 14:35:53.000000000 +0000 @@ -48,7 +48,7 @@ This option is unstable and available only on the [nightly channel](https://doc.rust-lang.org/book/appendix-07-nightly-rust.html) and requires the `-Z unstable-options` flag to enable. -See https://github.com/rust-lang/cargo/issues/6790 for more information. +See for more information. {{/option}} {{/options}} diff -Nru cargo-0.53.0/src/doc/man/cargo-init.md cargo-0.54.0/src/doc/man/cargo-init.md --- cargo-0.53.0/src/doc/man/cargo-init.md 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/doc/man/cargo-init.md 2021-04-27 14:35:53.000000000 +0000 @@ -20,8 +20,6 @@ If the directory is not already in a VCS repository, then a new repository is created (see `--vcs` below). -{{> description-new-authors }} - See {{man "cargo-new" 1}} for a similar command which will create a new package in a new directory. diff -Nru cargo-0.53.0/src/doc/man/cargo-new.md cargo-0.54.0/src/doc/man/cargo-new.md --- cargo-0.53.0/src/doc/man/cargo-new.md 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/doc/man/cargo-new.md 2021-04-27 14:35:53.000000000 +0000 @@ -15,8 +15,6 @@ and a VCS ignore file. If the directory is not already in a VCS repository, then a new repository is created (see `--vcs` below). 
-{{> description-new-authors }} - See {{man "cargo-init" 1}} for a similar command which will create a new manifest in an existing directory. diff -Nru cargo-0.53.0/src/doc/man/generated_txt/cargo-build.txt cargo-0.54.0/src/doc/man/generated_txt/cargo-build.txt --- cargo-0.53.0/src/doc/man/generated_txt/cargo-build.txt 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/doc/man/generated_txt/cargo-build.txt 2021-04-27 14:35:53.000000000 +0000 @@ -160,7 +160,8 @@ This option is unstable and available only on the nightly channel and requires the -Z unstable-options flag to enable. See - https://github.com/rust-lang/cargo/issues/6790 for more information. + for more + information. Display Options -v, --verbose diff -Nru cargo-0.53.0/src/doc/man/generated_txt/cargo-init.txt cargo-0.54.0/src/doc/man/generated_txt/cargo-init.txt --- cargo-0.53.0/src/doc/man/generated_txt/cargo-init.txt 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/doc/man/generated_txt/cargo-init.txt 2021-04-27 14:35:53.000000000 +0000 @@ -17,44 +17,6 @@ If the directory is not already in a VCS repository, then a new repository is created (see --vcs below). - The "authors" field in the manifest is determined from the environment - or configuration settings. 
A name is required and is determined from - (first match wins): - - o cargo-new.name Cargo config value - - o CARGO_NAME environment variable - - o GIT_AUTHOR_NAME environment variable - - o GIT_COMMITTER_NAME environment variable - - o user.name git configuration value - - o USER environment variable - - o USERNAME environment variable - - o NAME environment variable - - The email address is optional and is determined from: - - o cargo-new.email Cargo config value - - o CARGO_EMAIL environment variable - - o GIT_AUTHOR_EMAIL environment variable - - o GIT_COMMITTER_EMAIL environment variable - - o user.email git configuration value - - o EMAIL environment variable - - See the reference - for more - information about configuration files. - See cargo-new(1) for a similar command which will create a new package in a new directory. diff -Nru cargo-0.53.0/src/doc/man/generated_txt/cargo-new.txt cargo-0.54.0/src/doc/man/generated_txt/cargo-new.txt --- cargo-0.53.0/src/doc/man/generated_txt/cargo-new.txt 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/doc/man/generated_txt/cargo-new.txt 2021-04-27 14:35:53.000000000 +0000 @@ -12,44 +12,6 @@ source file, and a VCS ignore file. If the directory is not already in a VCS repository, then a new repository is created (see --vcs below). - The "authors" field in the manifest is determined from the environment - or configuration settings. 
A name is required and is determined from - (first match wins): - - o cargo-new.name Cargo config value - - o CARGO_NAME environment variable - - o GIT_AUTHOR_NAME environment variable - - o GIT_COMMITTER_NAME environment variable - - o user.name git configuration value - - o USER environment variable - - o USERNAME environment variable - - o NAME environment variable - - The email address is optional and is determined from: - - o cargo-new.email Cargo config value - - o CARGO_EMAIL environment variable - - o GIT_AUTHOR_EMAIL environment variable - - o GIT_COMMITTER_EMAIL environment variable - - o user.email git configuration value - - o EMAIL environment variable - - See the reference - for more - information about configuration files. - See cargo-init(1) for a similar command which will create a new manifest in an existing directory. diff -Nru cargo-0.53.0/src/doc/man/includes/description-new-authors.md cargo-0.54.0/src/doc/man/includes/description-new-authors.md --- cargo-0.53.0/src/doc/man/includes/description-new-authors.md 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/doc/man/includes/description-new-authors.md 1970-01-01 00:00:00.000000000 +0000 @@ -1,24 +0,0 @@ -The "authors" field in the manifest is determined from the environment or -configuration settings. 
A name is required and is determined from (first match -wins): - -- `cargo-new.name` Cargo config value -- `CARGO_NAME` environment variable -- `GIT_AUTHOR_NAME` environment variable -- `GIT_COMMITTER_NAME` environment variable -- `user.name` git configuration value -- `USER` environment variable -- `USERNAME` environment variable -- `NAME` environment variable - -The email address is optional and is determined from: - -- `cargo-new.email` Cargo config value -- `CARGO_EMAIL` environment variable -- `GIT_AUTHOR_EMAIL` environment variable -- `GIT_COMMITTER_EMAIL` environment variable -- `user.email` git configuration value -- `EMAIL` environment variable - -See [the reference](../reference/config.html) for more information about -configuration files. diff -Nru cargo-0.53.0/src/doc/src/commands/cargo-build.md cargo-0.54.0/src/doc/src/commands/cargo-build.md --- cargo-0.53.0/src/doc/src/commands/cargo-build.md 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/doc/src/commands/cargo-build.md 2021-04-27 14:35:53.000000000 +0000 @@ -206,7 +206,7 @@

This option is unstable and available only on the nightly channel and requires the -Z unstable-options flag to enable. -See https://github.com/rust-lang/cargo/issues/6790 for more information. +See https://github.com/rust-lang/cargo/issues/6790 for more information. diff -Nru cargo-0.53.0/src/doc/src/commands/cargo-init.md cargo-0.54.0/src/doc/src/commands/cargo-init.md --- cargo-0.53.0/src/doc/src/commands/cargo-init.md 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/doc/src/commands/cargo-init.md 2021-04-27 14:35:53.000000000 +0000 @@ -20,32 +20,6 @@ If the directory is not already in a VCS repository, then a new repository is created (see `--vcs` below). -The "authors" field in the manifest is determined from the environment or -configuration settings. A name is required and is determined from (first match -wins): - -- `cargo-new.name` Cargo config value -- `CARGO_NAME` environment variable -- `GIT_AUTHOR_NAME` environment variable -- `GIT_COMMITTER_NAME` environment variable -- `user.name` git configuration value -- `USER` environment variable -- `USERNAME` environment variable -- `NAME` environment variable - -The email address is optional and is determined from: - -- `cargo-new.email` Cargo config value -- `CARGO_EMAIL` environment variable -- `GIT_AUTHOR_EMAIL` environment variable -- `GIT_COMMITTER_EMAIL` environment variable -- `user.email` git configuration value -- `EMAIL` environment variable - -See [the reference](../reference/config.html) for more information about -configuration files. - - See [cargo-new(1)](cargo-new.html) for a similar command which will create a new package in a new directory. diff -Nru cargo-0.53.0/src/doc/src/commands/cargo-new.md cargo-0.54.0/src/doc/src/commands/cargo-new.md --- cargo-0.53.0/src/doc/src/commands/cargo-new.md 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/doc/src/commands/cargo-new.md 2021-04-27 14:35:53.000000000 +0000 @@ -15,32 +15,6 @@ and a VCS ignore file. 
If the directory is not already in a VCS repository, then a new repository is created (see `--vcs` below). -The "authors" field in the manifest is determined from the environment or -configuration settings. A name is required and is determined from (first match -wins): - -- `cargo-new.name` Cargo config value -- `CARGO_NAME` environment variable -- `GIT_AUTHOR_NAME` environment variable -- `GIT_COMMITTER_NAME` environment variable -- `user.name` git configuration value -- `USER` environment variable -- `USERNAME` environment variable -- `NAME` environment variable - -The email address is optional and is determined from: - -- `cargo-new.email` Cargo config value -- `CARGO_EMAIL` environment variable -- `GIT_AUTHOR_EMAIL` environment variable -- `GIT_COMMITTER_EMAIL` environment variable -- `user.email` git configuration value -- `EMAIL` environment variable - -See [the reference](../reference/config.html) for more information about -configuration files. - - See [cargo-init(1)](cargo-init.html) for a similar command which will create a new manifest in an existing directory. diff -Nru cargo-0.53.0/src/doc/src/faq.md cargo-0.54.0/src/doc/src/faq.md --- cargo-0.53.0/src/doc/src/faq.md 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/doc/src/faq.md 2021-04-27 14:35:53.000000000 +0000 @@ -257,4 +257,4 @@ If after trying to debug your issue, however, you're still running into problems then feel free to [open an -issue](https://github.com/rust-lang/cargo/issuses/new)! +issue](https://github.com/rust-lang/cargo/issues/new)! 
diff -Nru cargo-0.53.0/src/doc/src/getting-started/first-steps.md cargo-0.54.0/src/doc/src/getting-started/first-steps.md --- cargo-0.53.0/src/doc/src/getting-started/first-steps.md 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/doc/src/getting-started/first-steps.md 2021-04-27 14:35:53.000000000 +0000 @@ -33,7 +33,6 @@ [package] name = "hello_world" version = "0.1.0" -authors = ["Your Name "] edition = "2018" [dependencies] diff -Nru cargo-0.53.0/src/doc/src/guide/cargo-toml-vs-cargo-lock.md cargo-0.54.0/src/doc/src/guide/cargo-toml-vs-cargo-lock.md --- cargo-0.53.0/src/doc/src/guide/cargo-toml-vs-cargo-lock.md 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/doc/src/guide/cargo-toml-vs-cargo-lock.md 2021-04-27 14:35:53.000000000 +0000 @@ -27,7 +27,6 @@ [package] name = "hello_world" version = "0.1.0" -authors = ["Your Name "] [dependencies] rand = { git = "https://github.com/rust-lang-nursery/rand.git" } @@ -63,7 +62,6 @@ [package] name = "hello_world" version = "0.1.0" -authors = ["Your Name "] [dependencies] rand = { git = "https://github.com/rust-lang-nursery/rand.git" } diff -Nru cargo-0.53.0/src/doc/src/guide/creating-a-new-project.md cargo-0.54.0/src/doc/src/guide/creating-a-new-project.md --- cargo-0.53.0/src/doc/src/guide/creating-a-new-project.md 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/doc/src/guide/creating-a-new-project.md 2021-04-27 14:35:53.000000000 +0000 @@ -29,7 +29,6 @@ [package] name = "hello_world" version = "0.1.0" -authors = ["Your Name "] edition = "2018" [dependencies] diff -Nru cargo-0.53.0/src/doc/src/guide/dependencies.md cargo-0.54.0/src/doc/src/guide/dependencies.md --- cargo-0.53.0/src/doc/src/guide/dependencies.md 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/doc/src/guide/dependencies.md 2021-04-27 14:35:53.000000000 +0000 @@ -35,7 +35,6 @@ [package] name = "hello_world" version = "0.1.0" -authors = ["Your Name "] edition = "2018" [dependencies] diff -Nru 
cargo-0.53.0/src/doc/src/reference/cargo-targets.md cargo-0.54.0/src/doc/src/reference/cargo-targets.md --- cargo-0.53.0/src/doc/src/reference/cargo-targets.md 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/doc/src/reference/cargo-targets.md 2021-04-27 14:35:53.000000000 +0000 @@ -27,7 +27,7 @@ ### Binaries -Binary targets are executables programs that can be run after being compiled. +Binary targets are executable programs that can be run after being compiled. The default binary filename is `src/main.rs`, which defaults to the name of the package. Additional binaries are stored in the [`src/bin/` directory][package layout]. The settings for each binary can be [customized] diff -Nru cargo-0.53.0/src/doc/src/reference/config.md cargo-0.54.0/src/doc/src/reference/config.md --- cargo-0.53.0/src/doc/src/reference/config.md 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/doc/src/reference/config.md 2021-04-27 14:35:53.000000000 +0000 @@ -68,8 +68,6 @@ pipelining = true # rustc pipelining [cargo-new] -name = "Your Name" # name to use in `authors` field -email = "you@example.com" # email address to use in `authors` field vcs = "none" # VCS to use ('git', 'hg', 'pijul', 'fossil', 'none') [http] @@ -96,6 +94,7 @@ [profile.] # Modify profile settings via config. opt-level = 0 # Optimization level. debug = true # Include debug info. +split-debuginfo = '...' # Debug info splitting behavior. debug-assertions = true # Enables debug assertions. overflow-checks = true # Enables runtime integer overflow checks. lto = false # Sets link-time optimization. @@ -402,25 +401,12 @@ The `[cargo-new]` table defines defaults for the [`cargo new`] command. ##### `cargo-new.name` -* Type: string -* Default: from environment -* Environment: `CARGO_NAME` or `CARGO_CARGO_NEW_NAME` -Defines the name to use in the `authors` field when creating a new -`Cargo.toml` file. 
If not specified in the config, Cargo searches the -environment or your `git` configuration as described in the [`cargo new`] -documentation. +This option is deprecated and unused. ##### `cargo-new.email` -* Type: string -* Default: from environment -* Environment: `CARGO_EMAIL` or `CARGO_CARGO_NEW_EMAIL` -Defines the email address used in the `authors` field when creating a new -`Cargo.toml` file. If not specified in the config, Cargo searches the -environment or your `git` configuration as described in the [`cargo new`] -documentation. The `email` value may be set to an empty string to prevent -Cargo from placing an address in the authors field. +This option is deprecated and unused. ##### `cargo-new.vcs` * Type: string @@ -618,6 +604,13 @@ See [debug](profiles.md#debug). +##### `profile..split-debuginfo` +* Type: string +* Default: See profile docs. +* Environment: `CARGO_PROFILE__SPLIT_DEBUGINFO` + +See [split-debuginfo](profiles.md#split-debuginfo). + ##### `profile..debug-assertions` * Type: boolean * Default: See profile docs. diff -Nru cargo-0.53.0/src/doc/src/reference/environment-variables.md cargo-0.54.0/src/doc/src/reference/environment-variables.md --- cargo-0.53.0/src/doc/src/reference/environment-variables.md 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/doc/src/reference/environment-variables.md 2021-04-27 14:35:53.000000000 +0000 @@ -46,8 +46,6 @@ will otherwise be used. See also [`build.incremental`] config value. * `CARGO_CACHE_RUSTC_INFO` — If this is set to 0 then Cargo will not try to cache compiler version information. -* `CARGO_NAME` — The author name to use for [`cargo new`]. -* `CARGO_EMAIL` — The author email to use for [`cargo new`]. * `HTTPS_PROXY` or `https_proxy` or `http_proxy` — The HTTP proxy to use, see [`http.proxy`] for more detail. * `HTTP_TIMEOUT` — The HTTP timeout in seconds, see [`http.timeout`] for more @@ -78,8 +76,6 @@ * `CARGO_BUILD_INCREMENTAL` — Incremental compilation, see [`build.incremental`]. 
* `CARGO_BUILD_DEP_INFO_BASEDIR` — Dep-info relative directory, see [`build.dep-info-basedir`]. * `CARGO_BUILD_PIPELINING` — Whether or not to use `rustc` pipelining, see [`build.pipelining`]. -* `CARGO_CARGO_NEW_NAME` — The author name to use with [`cargo new`], see [`cargo-new.name`]. -* `CARGO_CARGO_NEW_EMAIL` — The author email to use with [`cargo new`], see [`cargo-new.email`]. * `CARGO_CARGO_NEW_VCS` — The default source control system with [`cargo new`], see [`cargo-new.vcs`]. * `CARGO_HTTP_DEBUG` — Enables HTTP debugging, see [`http.debug`]. * `CARGO_HTTP_PROXY` — Enables HTTP proxy, see [`http.proxy`]. @@ -334,6 +330,7 @@ changed by editing `.cargo/config.toml`; see the documentation about [cargo configuration][cargo-config] for more information. +* `CARGO_PKG_` - The package information variables, with the same names and values as are [provided during crate building][variables set for crates]. [unix-like platforms]: ../../reference/conditional-compilation.html#unix-and-windows [windows-like platforms]: ../../reference/conditional-compilation.html#unix-and-windows @@ -350,6 +347,7 @@ [jobserver]: https://www.gnu.org/software/make/manual/html_node/Job-Slots.html [cargo-config]: config.md [Target Triple]: ../appendix/glossary.md#target +[variables set for crates]: #environment-variables-cargo-sets-for-crates ### Environment variables Cargo sets for 3rd party subcommands diff -Nru cargo-0.53.0/src/doc/src/reference/features.md cargo-0.54.0/src/doc/src/reference/features.md --- cargo-0.53.0/src/doc/src/reference/features.md 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/doc/src/reference/features.md 2021-04-27 14:35:53.000000000 +0000 @@ -369,6 +369,9 @@ # This command is allowed with resolver = "2", regardless of which directory # you are in. 
cargo build -p foo -p bar --features foo-feat,bar-feat + +# This explicit equivalent works with any resolver version: +cargo build -p foo -p bar --features foo/foo-feat,bar/bar-feat ``` Additionally, with `resolver = "1"`, the `--no-default-features` flag only diff -Nru cargo-0.53.0/src/doc/src/reference/manifest.md cargo-0.54.0/src/doc/src/reference/manifest.md --- cargo-0.53.0/src/doc/src/reference/manifest.md 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/doc/src/reference/manifest.md 2021-04-27 14:35:53.000000000 +0000 @@ -107,14 +107,19 @@ #### The `authors` field -The `authors` field lists people or organizations that are considered the -"authors" of the package. The exact meaning is open to interpretation — it may -list the original or primary authors, current maintainers, or owners of the -package. These names will be listed on the crate's page on -[crates.io]. An optional email address may be included within angled -brackets at the end of each author. +The optional `authors` field lists people or organizations that are considered +the "authors" of the package. The exact meaning is open to interpretation — it +may list the original or primary authors, current maintainers, or owners of the +package. An optional email address may be included within angled brackets at +the end of each author entry. -> **Note**: [crates.io] requires at least one author to be listed. +This field is only surfaced in package metadata and in the `CARGO_PKG_AUTHORS` +environment variable within `build.rs`. It is not displayed in the [crates.io] +user interface. + +> **Warning**: Package manifests cannot be changed once published, so this +> field cannot be changed or removed in already-published versions of a +> package. 
#### The `edition` field diff -Nru cargo-0.53.0/src/doc/src/reference/overriding-dependencies.md cargo-0.54.0/src/doc/src/reference/overriding-dependencies.md --- cargo-0.53.0/src/doc/src/reference/overriding-dependencies.md 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/doc/src/reference/overriding-dependencies.md 2021-04-27 14:35:53.000000000 +0000 @@ -49,7 +49,6 @@ [package] name = "my-library" version = "0.1.0" -authors = ["..."] [dependencies] uuid = "1.0" @@ -131,7 +130,6 @@ [package] name = "my-library" version = "0.1.0" -authors = ["..."] [dependencies] uuid = "1.0.1" @@ -157,7 +155,6 @@ [package] name = "my-binary" version = "0.1.0" -authors = ["..."] [dependencies] my-library = { git = 'https://example.com/git/my-library' } @@ -212,7 +209,6 @@ [package] name = "my-binary" version = "0.1.0" -authors = ["..."] [dependencies] my-library = { git = 'https://example.com/git/my-library' } diff -Nru cargo-0.53.0/src/doc/src/reference/profiles.md cargo-0.54.0/src/doc/src/reference/profiles.md --- cargo-0.53.0/src/doc/src/reference/profiles.md 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/doc/src/reference/profiles.md 2021-04-27 14:35:53.000000000 +0000 @@ -83,10 +83,12 @@ executable itself or adjacent to it. This option is a string and acceptable values are the same as those the -[compiler accepts][`-C split-debuginfo` flag]. See that documentation for the -default behavior, which is platform-specific. Some options are only available -on the [nightly channel]. The default may change in the future once more -testing has been performed, and support for DWARF is stabilized. +[compiler accepts][`-C split-debuginfo` flag]. The default value for this option +is `unpacked` on macOS for profiles that have debug information otherwise +enabled. Otherwise the default for this option is [documented with rustc][`-C +split-debuginfo` flag] and is platform-specific. Some options are only +available on the [nightly channel]. 
The Cargo default may change in the future +once more testing has been performed, and support for DWARF is stabilized. [nightly channel]: ../../book/appendix-07-nightly-rust.html [`-C split-debuginfo` flag]: ../../rustc/codegen-options/index.html#split-debuginfo @@ -167,7 +169,7 @@ [`panic-abort-tests`] unstable flag which enables `abort` behavior. Additionally, when using the `abort` strategy and building a test, all of the -dependencies will also be forced to built with the `unwind` strategy. +dependencies will also be forced to build with the `unwind` strategy. [`-C panic` flag]: ../../rustc/codegen-options/index.html#panic [`panic-abort-tests`]: unstable.md#panic-abort-tests diff -Nru cargo-0.53.0/src/doc/src/reference/publishing.md cargo-0.54.0/src/doc/src/reference/publishing.md --- cargo-0.53.0/src/doc/src/reference/publishing.md 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/doc/src/reference/publishing.md 2021-04-27 14:35:53.000000000 +0000 @@ -34,7 +34,6 @@ ensure your crate can be discovered more easily! Before publishing, make sure you have filled out the following fields: -- [`authors`] - [`license` or `license-file`] - [`description`] - [`homepage`] @@ -258,7 +257,6 @@ [RFC 1105]: https://github.com/rust-lang/rfcs/blob/master/text/1105-api-evolution.md [Rust API Guidelines]: https://rust-lang.github.io/api-guidelines/ -[`authors`]: manifest.md#the-authors-field [`cargo login`]: ../commands/cargo-login.md [`cargo package`]: ../commands/cargo-package.md [`cargo publish`]: ../commands/cargo-publish.md diff -Nru cargo-0.53.0/src/doc/src/reference/registries.md cargo-0.54.0/src/doc/src/reference/registries.md --- cargo-0.53.0/src/doc/src/reference/registries.md 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/doc/src/reference/registries.md 2021-04-27 14:35:53.000000000 +0000 @@ -178,7 +178,7 @@ `CrateTwo` have `{prefix}` values of `cr/at` and `Cr/at`; these are distinct on Unix machines but alias to the same directory on Windows. 
Using directories with normalized case avoids aliasing, but on case-sensitive filesystems it's -harder to suport older versions of Cargo that lack `{prefix}`/`{lowerprefix}`. +harder to support older versions of Cargo that lack `{prefix}`/`{lowerprefix}`. For example, nginx rewrite rules can easily construct `{prefix}` but can't perform case-conversion to construct `{lowerprefix}`. @@ -385,7 +385,7 @@ "extras": ["rand/simd_support"] }, // List of strings of the authors. - // May be empty. crates.io requires at least one entry. + // May be empty. "authors": ["Alice "], // Description field from the manifest. // May be null. crates.io requires at least some content. diff -Nru cargo-0.53.0/src/doc/src/reference/semver.md cargo-0.54.0/src/doc/src/reference/semver.md --- cargo-0.53.0/src/doc/src/reference/semver.md 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/doc/src/reference/semver.md 2021-04-27 14:35:53.000000000 +0000 @@ -235,7 +235,7 @@ Mitigation strategies: * Do not add new fields to all-public field structs. * Mark structs as [`#[non_exhaustive]`][non_exhaustive] when first introducing - an struct to prevent users from using struct literal syntax, and instead + a struct to prevent users from using struct literal syntax, and instead provide a constructor method and/or [Default] implementation. @@ -270,7 +270,7 @@ Mitigation strategies: * Do not add new new fields to all-public field structs. * Mark structs as [`#[non_exhaustive]`][non_exhaustive] when first introducing - an struct to prevent users from using struct literal syntax, and instead + a struct to prevent users from using struct literal syntax, and instead provide a constructor method and/or [Default] implementation. 
diff -Nru cargo-0.53.0/src/doc/src/reference/unstable.md cargo-0.54.0/src/doc/src/reference/unstable.md --- cargo-0.53.0/src/doc/src/reference/unstable.md 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/doc/src/reference/unstable.md 2021-04-27 14:35:53.000000000 +0000 @@ -58,6 +58,32 @@ [nightly channel]: ../../book/appendix-07-nightly-rust.html [stabilized]: https://doc.crates.io/contrib/process/unstable.html#stabilization +### allow-features + +This permanently-unstable flag makes it so that only a listed set of +unstable features can be used. Specifically, if you pass +`-Zallow-features=foo,bar`, you'll continue to be able to pass `-Zfoo` +and `-Zbar` to `cargo`, but you will be unable to pass `-Zbaz`. You can +pass an empty string (`-Zallow-features=`) to disallow all unstable +features. + +`-Zallow-features` also restricts which unstable features can be passed +to the `cargo-features` entry in `Cargo.toml`. If, for example, you want +to allow + +```toml +cargo-features = ["test-dummy-unstable"] +``` + +where `test-dummy-unstable` is unstable, that features would also be +disallowed by `-Zallow-features=`, and allowed with +`-Zallow-features=test-dummy-unstable`. + +The list of features passed to cargo's `-Zallow-features` is also passed +to any Rust tools that cargo ends up calling (like `rustc` or +`rustdoc`). Thus, if you run `cargo -Zallow-features=`, no unstable +Cargo _or_ Rust features can be used. + ### extra-link-arg * Original Pull Request: [#7811](https://github.com/rust-lang/cargo/pull/7811) @@ -869,6 +895,26 @@ dependency. However, unlike the normal `serde/std` syntax, it will not enable the optional dependency `serde` unless something else has included it. +### per-package-target + +The `per-package-target` feature adds two keys to the manifest: +`package.default-target` and `package.forced-target`. The first makes +the package be compiled by default (ie. when no `--target` argument is +passed) for some target. 
The second one makes the package always be +compiled for the target. + +Example: + +```toml +[package] +forced-target = "wasm32-unknown-unknown" +``` + +In this example, the crate is always built for +`wasm32-unknown-unknown`, for instance because it is going to be used +as a plugin for a main program that runs on the host (or provided on +the command line) target. + ### credential-process * Tracking Issue: [#8933](https://github.com/rust-lang/cargo/issues/8933) * RFC: [#2730](https://github.com/rust-lang/rfcs/pull/2730) @@ -1091,38 +1137,6 @@ [edition]: ../../edition-guide/index.html [resolver version]: resolver.md#resolver-versions - - ### future incompat report * RFC: [#2834](https://github.com/rust-lang/rfcs/blob/master/text/2834-cargo-report-future-incompat.md) * rustc Tracking Issue: [#71249](https://github.com/rust-lang/rust/issues/71249) @@ -1181,3 +1195,52 @@ Relative `path` dependencies in such a `[patch]` section are resolved relative to the configuration file they appear in. + +## `cargo config` + +* Original Issue: [#2362](https://github.com/rust-lang/cargo/issues/2362) +* Tracking Issue: [#9301](https://github.com/rust-lang/cargo/issues/9301) + +The `cargo config` subcommand provides a way to display the configuration +files that cargo loads. It currently includes the `get` subcommand which +can take an optional config value to display. + +```console +cargo +nightly -Zunstable-options config get build.rustflags +``` + +If no config value is included, it will display all config values. See the +`--help` output for more options available. + + + diff -Nru cargo-0.53.0/src/etc/man/cargo-build.1 cargo-0.54.0/src/etc/man/cargo-build.1 --- cargo-0.53.0/src/etc/man/cargo-build.1 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/etc/man/cargo-build.1 2021-04-27 14:35:53.000000000 +0000 @@ -192,7 +192,7 @@ This option is unstable and available only on the \fInightly channel\fR and requires the \fB\-Z unstable\-options\fR flag to enable. 
-See https://github.com/rust\-lang/cargo/issues/6790 for more information. +See for more information. .RE .SS "Display Options" .sp diff -Nru cargo-0.53.0/src/etc/man/cargo-init.1 cargo-0.54.0/src/etc/man/cargo-init.1 --- cargo-0.53.0/src/etc/man/cargo-init.1 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/etc/man/cargo-init.1 2021-04-27 14:35:53.000000000 +0000 @@ -18,71 +18,6 @@ If the directory is not already in a VCS repository, then a new repository is created (see \fB\-\-vcs\fR below). .sp -The "authors" field in the manifest is determined from the environment or -configuration settings. A name is required and is determined from (first match -wins): -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBcargo\-new.name\fR Cargo config value -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBCARGO_NAME\fR environment variable -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBGIT_AUTHOR_NAME\fR environment variable -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBGIT_COMMITTER_NAME\fR environment variable -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBuser.name\fR git configuration value -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBUSER\fR environment variable -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBUSERNAME\fR environment variable -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBNAME\fR environment variable -.RE -.sp -The email address is optional and is determined from: -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBcargo\-new.email\fR Cargo config value -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBCARGO_EMAIL\fR environment variable -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBGIT_AUTHOR_EMAIL\fR environment variable -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBGIT_COMMITTER_EMAIL\fR environment variable -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBuser.email\fR git configuration value -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBEMAIL\fR environment variable -.RE -.sp -See \fIthe reference\fR for more information about -configuration files. -.sp See \fBcargo\-new\fR(1) for a similar command which will create a new package in a new directory. 
.SH "OPTIONS" diff -Nru cargo-0.53.0/src/etc/man/cargo-new.1 cargo-0.54.0/src/etc/man/cargo-new.1 --- cargo-0.53.0/src/etc/man/cargo-new.1 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/src/etc/man/cargo-new.1 2021-04-27 14:35:53.000000000 +0000 @@ -13,71 +13,6 @@ and a VCS ignore file. If the directory is not already in a VCS repository, then a new repository is created (see \fB\-\-vcs\fR below). .sp -The "authors" field in the manifest is determined from the environment or -configuration settings. A name is required and is determined from (first match -wins): -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBcargo\-new.name\fR Cargo config value -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBCARGO_NAME\fR environment variable -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBGIT_AUTHOR_NAME\fR environment variable -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBGIT_COMMITTER_NAME\fR environment variable -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBuser.name\fR git configuration value -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBUSER\fR environment variable -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBUSERNAME\fR environment variable -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBNAME\fR environment variable -.RE -.sp -The email address is optional and is determined from: -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBcargo\-new.email\fR Cargo config value -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBCARGO_EMAIL\fR environment variable -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBGIT_AUTHOR_EMAIL\fR environment variable -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBGIT_COMMITTER_EMAIL\fR environment variable -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBuser.email\fR git configuration value -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBEMAIL\fR environment variable -.RE -.sp -See \fIthe reference\fR for more information about -configuration files. -.sp See \fBcargo\-init\fR(1) for a similar command which will create a new manifest in an existing directory. 
.SH "OPTIONS" diff -Nru cargo-0.53.0/tests/build-std/main.rs cargo-0.54.0/tests/build-std/main.rs --- cargo-0.53.0/tests/build-std/main.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/tests/build-std/main.rs 2021-04-27 14:35:53.000000000 +0000 @@ -105,7 +105,16 @@ .build(); p.cargo("check").build_std().target_host().run(); - p.cargo("build").build_std().target_host().run(); + p.cargo("build") + .build_std() + .target_host() + // Importantly, this should not say [UPDATING] + // There have been multiple bugs where every build triggers and update. + .with_stderr( + "[COMPILING] foo v0.0.1 [..]\n\ + [FINISHED] dev [..]", + ) + .run(); p.cargo("run").build_std().target_host().run(); p.cargo("test").build_std().target_host().run(); diff -Nru cargo-0.53.0/tests/internal.rs cargo-0.54.0/tests/internal.rs --- cargo-0.53.0/tests/internal.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/tests/internal.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,10 +1,13 @@ //! Tests for internal code checks. + +#![allow(clippy::all)] + use std::fs; #[test] fn check_forbidden_code() { // Do not use certain macros, functions, etc. - if !cargo::util::is_ci() { + if !cargo_util::is_ci() { // Only check these on CI, otherwise it could be annoying. 
use std::io::Write; writeln!( diff -Nru cargo-0.53.0/tests/testsuite/bad_config.rs cargo-0.54.0/tests/testsuite/bad_config.rs --- cargo-0.53.0/tests/testsuite/bad_config.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/tests/testsuite/bad_config.rs 2021-04-27 14:35:53.000000000 +0000 @@ -94,7 +94,7 @@ ".cargo/config", r#" [cargo-new] - name = false + vcs = false "#, ) .build(); @@ -105,7 +105,7 @@ [ERROR] Failed to create package `foo` at `[..]` Caused by: - error in [..]config: `cargo-new.name` expected a string, but found a boolean + error in [..]config: `cargo-new.vcs` expected a string, but found a boolean ", ) .run(); @@ -1309,7 +1309,7 @@ error: failed to parse manifest at `[..]` Caused by: - invalid type: string \"a\", expected a boolean or an integer for [..] + expected a boolean or an integer for [..] ", ) .run(); @@ -1338,7 +1338,7 @@ error: failed to parse manifest at `[..]` Caused by: - invalid type: integer `3`, expected a boolean or a string for key [..] + expected a boolean or a string for key [..] 
", ) .run(); @@ -1391,16 +1391,16 @@ .with_stderr( "\ [ERROR] error in [..]/foo/.cargo/config: \ -could not load config key `target.cfg(not(target_os = \"none\")).runner` +could not load config key `target.\"cfg(not(target_os = /\"none/\"))\".runner` Caused by: error in [..]/foo/.cargo/config: \ - could not load config key `target.cfg(not(target_os = \"none\")).runner` + could not load config key `target.\"cfg(not(target_os = /\"none/\"))\".runner` Caused by: - invalid configuration for key `target.cfg(not(target_os = \"none\")).runner` + invalid configuration for key `target.\"cfg(not(target_os = /\"none/\"))\".runner` expected a string or array of strings, but found a boolean for \ - `target.cfg(not(target_os = \"none\")).runner` in [..]/foo/.cargo/config + `target.\"cfg(not(target_os = /\"none/\"))\".runner` in [..]/foo/.cargo/config ", ) .run(); diff -Nru cargo-0.53.0/tests/testsuite/build.rs cargo-0.54.0/tests/testsuite/build.rs --- cargo-0.53.0/tests/testsuite/build.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/tests/testsuite/build.rs 2021-04-27 14:35:53.000000000 +0000 @@ -4,7 +4,6 @@ core::compiler::CompileMode, core::{Shell, Workspace}, ops::CompileOptions, - util::paths::dylib_path_envvar, Config, }; use cargo_test_support::paths::{root, CargoPathExt}; @@ -14,6 +13,7 @@ lines_match_unordered, main_file, paths, process, project, rustc_host, sleep_ms, symlink_supported, t, Execs, ProjectBuilder, }; +use cargo_util::paths::dylib_path_envvar; use std::env; use std::fs; use std::io::Read; @@ -5377,7 +5377,7 @@ .build(); p.cargo("generate-lockfile").run(); - cargo::util::paths::append(&p.root().join("Cargo.toml"), b"c = \"*\"").unwrap(); + cargo_util::paths::append(&p.root().join("Cargo.toml"), b"c = \"*\"").unwrap(); p.cargo("check").run(); p.cargo("check").run(); } diff -Nru cargo-0.53.0/tests/testsuite/build_script.rs cargo-0.54.0/tests/testsuite/build_script.rs --- cargo-0.53.0/tests/testsuite/build_script.rs 2021-04-21 00:43:41.000000000 +0000 
+++ cargo-0.54.0/tests/testsuite/build_script.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,15 +1,14 @@ //! Tests for build.rs scripts. -use std::env; -use std::fs; -use std::io; -use std::thread; - -use cargo::util::paths::remove_dir_all; use cargo_test_support::paths::CargoPathExt; use cargo_test_support::registry::Package; use cargo_test_support::{basic_manifest, cross_compile, is_coarse_mtime, project}; use cargo_test_support::{rustc_host, sleep_ms, slow_cpu_multiplier, symlink_supported}; +use cargo_util::paths::remove_dir_all; +use std::env; +use std::fs; +use std::io; +use std::thread; #[cargo_test] fn custom_build_script_failed() { @@ -3986,7 +3985,7 @@ .file("secrets/stuff", "") .build(); let path = p.root().join("secrets"); - fs::set_permissions(&path, fs::Permissions::from_mode(0)).unwrap(); + fs::set_permissions(&path, fs::Permissions::from_mode(0o0)).unwrap(); // The last "Caused by" is a string from libc such as the following: // Permission denied (os error 13) p.cargo("build") diff -Nru cargo-0.53.0/tests/testsuite/cargo_command.rs cargo-0.54.0/tests/testsuite/cargo_command.rs --- cargo-0.53.0/tests/testsuite/cargo_command.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/tests/testsuite/cargo_command.rs 2021-04-27 14:35:53.000000000 +0000 @@ -254,21 +254,18 @@ &my_home.join("config"), r#" [cargo-new] - name = "foo" - email = "bar" - git = false + vcs = "none" "#, ) .unwrap(); - cargo_process("new foo") - .env("USER", "foo") - .env("CARGO_HOME", &my_home) - .run(); + cargo_process("new foo").env("CARGO_HOME", &my_home).run(); - let toml = paths::root().join("foo/Cargo.toml"); - let contents = fs::read_to_string(&toml).unwrap(); - assert!(contents.contains(r#"authors = ["foo "]"#)); + assert!(!paths::root().join("foo/.git").is_dir()); + + cargo_process("new foo2").run(); + + assert!(paths::root().join("foo2/.git").is_dir()); } #[cargo_test] diff -Nru cargo-0.53.0/tests/testsuite/cargo_config.rs cargo-0.54.0/tests/testsuite/cargo_config.rs --- 
cargo-0.53.0/tests/testsuite/cargo_config.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.54.0/tests/testsuite/cargo_config.rs 2021-04-27 14:35:53.000000000 +0000 @@ -0,0 +1,522 @@ +//! Tests for the `cargo config` command. + +use super::config::write_config_at; +use cargo_test_support::paths; +use std::fs; +use std::path::PathBuf; + +fn cargo_process(s: &str) -> cargo_test_support::Execs { + let mut p = cargo_test_support::cargo_process(s); + // Clear out some of the environment added by the default cargo_process so + // the tests don't need to deal with it. + p.env_remove("CARGO_PROFILE_DEV_SPLIT_DEBUGINFO") + .env_remove("CARGO_PROFILE_TEST_SPLIT_DEBUGINFO") + .env_remove("CARGO_PROFILE_RELEASE_SPLIT_DEBUGINFO") + .env_remove("CARGO_PROFILE_BENCH_SPLIT_DEBUGINFO") + .env_remove("CARGO_INCREMENTAL"); + p +} + +#[cargo_test] +fn gated() { + cargo_process("config get") + .masquerade_as_nightly_cargo() + .with_status(101) + .with_stderr("\ +error: the `cargo config` command is unstable, pass `-Z unstable-options` to enable it +See https://github.com/rust-lang/cargo/issues/9301 for more information about the `cargo config` command. +") + .run(); +} + +fn common_setup() -> PathBuf { + write_config_at( + paths::home().join(".cargo/config.toml"), + " + [alias] + foo = \"abc --xyz\" + [build] + jobs = 99 + rustflags = [\"--flag-global\"] + [profile.dev] + opt-level = 3 + [profile.dev.package.foo] + opt-level = 1 + [target.'cfg(target_os = \"linux\")'] + runner = \"runme\" + + # How unknown keys are handled. + [extra-table] + somekey = \"somevalue\" + ", + ); + let sub_folder = paths::root().join("foo/.cargo"); + write_config_at( + sub_folder.join("config.toml"), + " + [alias] + sub-example = [\"sub\", \"example\"] + [build] + rustflags = [\"--flag-directory\"] + ", + ); + sub_folder +} + +#[cargo_test] +fn get_toml() { + // Notes: + // - The "extra-table" is shown without a warning. 
I'm not sure how that + // should be handled, since displaying warnings could cause problems + // with ingesting the output. + // - Environment variables aren't loaded. :( + let sub_folder = common_setup(); + cargo_process("config get -Zunstable-options") + .cwd(&sub_folder.parent().unwrap()) + .masquerade_as_nightly_cargo() + .env("CARGO_ALIAS_BAR", "cat dog") + .env("CARGO_BUILD_JOBS", "100") + // The weird forward slash in the linux line is due to testsuite normalization. + .with_stdout( + "\ +alias.foo = \"abc --xyz\" +alias.sub-example = [\"sub\", \"example\"] +build.jobs = 99 +build.rustflags = [\"--flag-directory\", \"--flag-global\"] +extra-table.somekey = \"somevalue\" +profile.dev.opt-level = 3 +profile.dev.package.foo.opt-level = 1 +target.\"cfg(target_os = /\"linux/\")\".runner = \"runme\" +# The following environment variables may affect the loaded values. +# CARGO_ALIAS_BAR=[..]cat dog[..] +# CARGO_BUILD_JOBS=100 +# CARGO_HOME=[ROOT]/home/.cargo +", + ) + .with_stderr("") + .run(); + + // Env keys work if they are specific. + cargo_process("config get build.jobs -Zunstable-options") + .cwd(&sub_folder.parent().unwrap()) + .masquerade_as_nightly_cargo() + .env("CARGO_BUILD_JOBS", "100") + .with_stdout("build.jobs = 100") + .with_stderr("") + .run(); + + // Array value. + cargo_process("config get build.rustflags -Zunstable-options") + .cwd(&sub_folder.parent().unwrap()) + .masquerade_as_nightly_cargo() + .with_stdout("build.rustflags = [\"--flag-directory\", \"--flag-global\"]") + .with_stderr("") + .run(); + + // Sub-table + cargo_process("config get profile -Zunstable-options") + .cwd(&sub_folder.parent().unwrap()) + .masquerade_as_nightly_cargo() + .with_stdout( + "\ +profile.dev.opt-level = 3 +profile.dev.package.foo.opt-level = 1 +", + ) + .with_stderr("") + .run(); + + // Specific profile entry. 
+ cargo_process("config get profile.dev.opt-level -Zunstable-options") + .cwd(&sub_folder.parent().unwrap()) + .masquerade_as_nightly_cargo() + .with_stdout("profile.dev.opt-level = 3") + .with_stderr("") + .run(); + + // A key that isn't set. + cargo_process("config get build.rustc -Zunstable-options") + .cwd(&sub_folder.parent().unwrap()) + .masquerade_as_nightly_cargo() + .with_status(101) + .with_stdout("") + .with_stderr("error: config value `build.rustc` is not set") + .run(); + + // A key that is not part of Cargo's config schema. + cargo_process("config get not.set -Zunstable-options") + .cwd(&sub_folder.parent().unwrap()) + .masquerade_as_nightly_cargo() + .with_status(101) + .with_stdout("") + .with_stderr("error: config value `not.set` is not set") + .run(); +} + +#[cargo_test] +fn get_json() { + // Notes: + // - This does not show env vars at all. :( + let all_json = r#" + { + "alias": { + "foo": "abc --xyz", + "sub-example": [ + "sub", + "example" + ] + }, + "build": { + "jobs": 99, + "rustflags": [ + "--flag-directory", + "--flag-global" + ] + }, + "extra-table": { + "somekey": "somevalue" + }, + "profile": { + "dev": { + "opt-level": 3, + "package": { + "foo": { + "opt-level": 1 + } + } + } + }, + "target": { + "cfg(target_os = \"linux\")": { + "runner": "runme" + } + } + } + "#; + let sub_folder = common_setup(); + cargo_process("config get --format=json -Zunstable-options") + .cwd(&sub_folder.parent().unwrap()) + .masquerade_as_nightly_cargo() + .env("CARGO_ALIAS_BAR", "cat dog") + .env("CARGO_BUILD_JOBS", "100") + .with_json(all_json) + .with_stderr( + "\ +note: The following environment variables may affect the loaded values. +CARGO_ALIAS_BAR=[..]cat dog[..] 
+CARGO_BUILD_JOBS=100 +CARGO_HOME=[ROOT]/home/.cargo +", + ) + .run(); + + // json-value is the same for the entire root table + cargo_process("config get --format=json-value -Zunstable-options") + .cwd(&sub_folder.parent().unwrap()) + .masquerade_as_nightly_cargo() + .with_json(all_json) + .with_stderr( + "\ +note: The following environment variables may affect the loaded values. +CARGO_HOME=[ROOT]/home/.cargo +", + ) + .run(); + + cargo_process("config get --format=json build.jobs -Zunstable-options") + .cwd(&sub_folder.parent().unwrap()) + .masquerade_as_nightly_cargo() + .with_json( + r#" + {"build": {"jobs": 99}} + "#, + ) + .with_stderr("") + .run(); + + cargo_process("config get --format=json-value build.jobs -Zunstable-options") + .cwd(&sub_folder.parent().unwrap()) + .masquerade_as_nightly_cargo() + .with_stdout("99") + .with_stderr("") + .run(); +} + +#[cargo_test] +fn show_origin_toml() { + let sub_folder = common_setup(); + cargo_process("config get --show-origin -Zunstable-options") + .cwd(&sub_folder.parent().unwrap()) + .masquerade_as_nightly_cargo() + .with_stdout( + "\ +alias.foo = \"abc --xyz\" # [ROOT]/home/.cargo/config.toml +alias.sub-example = [ + \"sub\", # [ROOT]/foo/.cargo/config.toml + \"example\", # [ROOT]/foo/.cargo/config.toml +] +build.jobs = 99 # [ROOT]/home/.cargo/config.toml +build.rustflags = [ + \"--flag-directory\", # [ROOT]/foo/.cargo/config.toml + \"--flag-global\", # [ROOT]/home/.cargo/config.toml +] +extra-table.somekey = \"somevalue\" # [ROOT]/home/.cargo/config.toml +profile.dev.opt-level = 3 # [ROOT]/home/.cargo/config.toml +profile.dev.package.foo.opt-level = 1 # [ROOT]/home/.cargo/config.toml +target.\"cfg(target_os = /\"linux/\")\".runner = \"runme\" # [ROOT]/home/.cargo/config.toml +# The following environment variables may affect the loaded values. 
+# CARGO_HOME=[ROOT]/home/.cargo +", + ) + .with_stderr("") + .run(); + + cargo_process("config get --show-origin build.rustflags -Zunstable-options") + .cwd(&sub_folder.parent().unwrap()) + .masquerade_as_nightly_cargo() + .env("CARGO_BUILD_RUSTFLAGS", "env1 env2") + .with_stdout( + "\ +build.rustflags = [ + \"--flag-directory\", # [ROOT]/foo/.cargo/config.toml + \"--flag-global\", # [ROOT]/home/.cargo/config.toml + \"env1\", # environment variable `CARGO_BUILD_RUSTFLAGS` + \"env2\", # environment variable `CARGO_BUILD_RUSTFLAGS` +] +", + ) + .with_stderr("") + .run(); +} + +#[cargo_test] +fn show_origin_toml_cli() { + let sub_folder = common_setup(); + cargo_process("config get --show-origin build.jobs -Zunstable-options --config build.jobs=123") + .cwd(&sub_folder.parent().unwrap()) + .masquerade_as_nightly_cargo() + .env("CARGO_BUILD_JOBS", "1") + .with_stdout("build.jobs = 123 # --config cli option") + .with_stderr("") + .run(); + + cargo_process("config get --show-origin build.rustflags -Zunstable-options --config") + .arg("build.rustflags=[\"cli1\",\"cli2\"]") + .cwd(&sub_folder.parent().unwrap()) + .masquerade_as_nightly_cargo() + .env("CARGO_BUILD_RUSTFLAGS", "env1 env2") + .with_stdout( + "\ +build.rustflags = [ + \"--flag-directory\", # [ROOT]/foo/.cargo/config.toml + \"--flag-global\", # [ROOT]/home/.cargo/config.toml + \"cli1\", # --config cli option + \"cli2\", # --config cli option + \"env1\", # environment variable `CARGO_BUILD_RUSTFLAGS` + \"env2\", # environment variable `CARGO_BUILD_RUSTFLAGS` +] +", + ) + .with_stderr("") + .run(); +} + +#[cargo_test] +fn show_origin_json() { + let sub_folder = common_setup(); + cargo_process("config get --show-origin --format=json -Zunstable-options") + .cwd(&sub_folder.parent().unwrap()) + .masquerade_as_nightly_cargo() + .with_status(101) + .with_stderr("error: the `json` format does not support --show-origin, try the `toml` format instead") + .run(); +} + +#[cargo_test] +fn unmerged_toml() { + let sub_folder 
= common_setup(); + cargo_process("config get --merged=no -Zunstable-options") + .cwd(&sub_folder.parent().unwrap()) + .masquerade_as_nightly_cargo() + .env("CARGO_ALIAS_BAR", "cat dog") + .env("CARGO_BUILD_JOBS", "100") + .with_stdout( + "\ +# Environment variables +# CARGO=[..] +# CARGO_ALIAS_BAR=[..]cat dog[..] +# CARGO_BUILD_JOBS=100 +# CARGO_HOME=[ROOT]/home/.cargo + +# [ROOT]/foo/.cargo/config.toml +alias.sub-example = [\"sub\", \"example\"] +build.rustflags = [\"--flag-directory\"] + +# [ROOT]/home/.cargo/config.toml +alias.foo = \"abc --xyz\" +build.jobs = 99 +build.rustflags = [\"--flag-global\"] +extra-table.somekey = \"somevalue\" +profile.dev.opt-level = 3 +profile.dev.package.foo.opt-level = 1 +target.\"cfg(target_os = /\"linux/\")\".runner = \"runme\" + +", + ) + .with_stderr("") + .run(); + + cargo_process("config get --merged=no build.rustflags -Zunstable-options") + .cwd(&sub_folder.parent().unwrap()) + .masquerade_as_nightly_cargo() + .env("CARGO_BUILD_RUSTFLAGS", "env1 env2") + .with_stdout( + "\ +# Environment variables +# CARGO_BUILD_RUSTFLAGS=[..]env1 env2[..] 
+ +# [ROOT]/foo/.cargo/config.toml +build.rustflags = [\"--flag-directory\"] + +# [ROOT]/home/.cargo/config.toml +build.rustflags = [\"--flag-global\"] + +", + ) + .with_stderr("") + .run(); + + cargo_process("config get --merged=no does.not.exist -Zunstable-options") + .cwd(&sub_folder.parent().unwrap()) + .masquerade_as_nightly_cargo() + .with_stderr("") + .with_stderr("") + .run(); + + cargo_process("config get --merged=no build.rustflags.extra -Zunstable-options") + .cwd(&sub_folder.parent().unwrap()) + .masquerade_as_nightly_cargo() + .with_status(101) + .with_stderr( + "error: expected table for configuration key `build.rustflags`, \ + but found array in [ROOT]/foo/.cargo/config.toml", + ) + .run(); +} + +#[cargo_test] +fn unmerged_toml_cli() { + let sub_folder = common_setup(); + cargo_process("config get --merged=no build.rustflags -Zunstable-options --config") + .arg("build.rustflags=[\"cli1\",\"cli2\"]") + .cwd(&sub_folder.parent().unwrap()) + .masquerade_as_nightly_cargo() + .env("CARGO_BUILD_RUSTFLAGS", "env1 env2") + .with_stdout( + "\ +# --config cli option +build.rustflags = [\"cli1\", \"cli2\"] + +# Environment variables +# CARGO_BUILD_RUSTFLAGS=[..]env1 env2[..] 
+ +# [ROOT]/foo/.cargo/config.toml +build.rustflags = [\"--flag-directory\"] + +# [ROOT]/home/.cargo/config.toml +build.rustflags = [\"--flag-global\"] + +", + ) + .with_stderr("") + .run(); +} + +#[cargo_test] +fn unmerged_json() { + let sub_folder = common_setup(); + cargo_process("config get --merged=no --format=json -Zunstable-options") + .cwd(&sub_folder.parent().unwrap()) + .masquerade_as_nightly_cargo() + .with_status(101) + .with_stderr( + "error: the `json` format does not support --merged=no, try the `toml` format instead", + ) + .run(); +} + +#[cargo_test] +fn includes() { + let sub_folder = common_setup(); + fs::write( + sub_folder.join("config.toml"), + " + include = 'other.toml' + [build] + rustflags = [\"--flag-directory\"] + ", + ) + .unwrap(); + fs::write( + sub_folder.join("other.toml"), + " + [build] + rustflags = [\"--flag-other\"] + ", + ) + .unwrap(); + + cargo_process("config get build.rustflags -Zunstable-options -Zconfig-include") + .cwd(&sub_folder.parent().unwrap()) + .masquerade_as_nightly_cargo() + .with_stdout(r#"build.rustflags = ["--flag-other", "--flag-directory", "--flag-global"]"#) + .with_stderr("") + .run(); + + cargo_process( + "config get build.rustflags --show-origin=yes -Zunstable-options -Zconfig-include", + ) + .cwd(&sub_folder.parent().unwrap()) + .masquerade_as_nightly_cargo() + .with_stdout( + "\ +build.rustflags = [ + \"--flag-other\", # [ROOT]/foo/.cargo/other.toml + \"--flag-directory\", # [ROOT]/foo/.cargo/config.toml + \"--flag-global\", # [ROOT]/home/.cargo/config.toml +] +", + ) + .with_stderr("") + .run(); + + cargo_process("config get --merged=no -Zunstable-options -Zconfig-include") + .cwd(&sub_folder.parent().unwrap()) + .masquerade_as_nightly_cargo() + .with_stdout( + "\ +# Environment variables +# CARGO=[..] 
+# CARGO_HOME=[ROOT]/home/.cargo + +# [ROOT]/foo/.cargo/other.toml +build.rustflags = [\"--flag-other\"] + +# [ROOT]/foo/.cargo/config.toml +build.rustflags = [\"--flag-directory\"] +include = \"other.toml\" + +# [ROOT]/home/.cargo/config.toml +alias.foo = \"abc --xyz\" +build.jobs = 99 +build.rustflags = [\"--flag-global\"] +extra-table.somekey = \"somevalue\" +profile.dev.opt-level = 3 +profile.dev.package.foo.opt-level = 1 +target.\"cfg(target_os = /\"linux/\")\".runner = \"runme\" + +", + ) + .with_stderr("") + .run(); +} diff -Nru cargo-0.53.0/tests/testsuite/cargo_features.rs cargo-0.54.0/tests/testsuite/cargo_features.rs --- cargo-0.53.0/tests/testsuite/cargo_features.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/tests/testsuite/cargo_features.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,6 +1,6 @@ //! Tests for `cargo-features` definitions. -use cargo_test_support::{project, registry}; +use cargo_test_support::{is_nightly, project, registry}; #[cargo_test] fn feature_required() { @@ -111,6 +111,194 @@ ", ) .run(); +} + +#[cargo_test] +fn allow_features() { + if !is_nightly() { + // -Zallow-features on rustc is nightly only + eprintln!("skipping test allow_features without nightly rustc"); + return; + } + + let p = project() + .file( + "Cargo.toml", + r#" + cargo-features = ["test-dummy-unstable"] + + [package] + name = "a" + version = "0.0.1" + authors = [] + im-a-teapot = true + "#, + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("-Zallow-features=test-dummy-unstable build") + .masquerade_as_nightly_cargo() + .with_stderr( + "\ +[COMPILING] a [..] +[FINISHED] [..] 
+", + ) + .run(); + + p.cargo("-Zallow-features=test-dummy-unstable,print-im-a-teapot -Zprint-im-a-teapot build") + .masquerade_as_nightly_cargo() + .with_stdout("im-a-teapot = true") + .run(); + + p.cargo("-Zallow-features=test-dummy-unstable -Zprint-im-a-teapot build") + .masquerade_as_nightly_cargo() + .with_status(101) + .with_stderr( + "\ +error: the feature `print-im-a-teapot` is not in the list of allowed features: [test-dummy-unstable] +", + ) + .run(); + + p.cargo("-Zallow-features= build") + .masquerade_as_nightly_cargo() + .with_status(101) + .with_stderr( + "\ +error: failed to parse manifest at `[..]` + +Caused by: + the feature `test-dummy-unstable` is not in the list of allowed features: [] +", + ) + .run(); +} + +#[cargo_test] +fn allow_features_to_rustc() { + if !is_nightly() { + // -Zallow-features on rustc is nightly only + eprintln!("skipping test allow_features_to_rustc without nightly rustc"); + return; + } + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + "#, + ) + .file( + "src/lib.rs", + r#" + #![feature(test_2018_feature)] + "#, + ) + .build(); + + p.cargo("-Zallow-features= build") + .masquerade_as_nightly_cargo() + .with_status(101) + .with_stderr_contains("[..]E0725[..]") + .run(); + + p.cargo("-Zallow-features=test_2018_feature build") + .masquerade_as_nightly_cargo() + .with_stderr( + "\ +[COMPILING] a [..] +[FINISHED] [..] 
+", + ) + .run(); +} + +#[cargo_test] +fn allow_features_in_cfg() { + if !is_nightly() { + // -Zallow-features on rustc is nightly only + eprintln!("skipping test allow_features_in_cfg without nightly rustc"); + return; + } + + let p = project() + .file( + "Cargo.toml", + r#" + cargo-features = ["test-dummy-unstable"] + + [package] + name = "a" + version = "0.0.1" + authors = [] + im-a-teapot = true + "#, + ) + .file( + ".cargo/config.toml", + r#" + [unstable] + allow-features = ["test-dummy-unstable", "print-im-a-teapot"] + "#, + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("build") + .masquerade_as_nightly_cargo() + .with_stderr( + "\ +[COMPILING] a [..] +[FINISHED] [..] +", + ) + .run(); + + p.cargo("-Zprint-im-a-teapot build") + .masquerade_as_nightly_cargo() + .with_stdout("im-a-teapot = true") + .with_stderr("[FINISHED] [..]") + .run(); + + p.cargo("-Zunstable-options build") + .masquerade_as_nightly_cargo() + .with_status(101) + .with_stderr( + "\ +error: the feature `unstable-options` is not in the list of allowed features: [print-im-a-teapot, test-dummy-unstable] +", + ) + .run(); + + // -Zallow-features overrides .cargo/config + p.cargo("-Zallow-features=test-dummy-unstable -Zprint-im-a-teapot build") + .masquerade_as_nightly_cargo() + .with_status(101) + .with_stderr( + "\ +error: the feature `print-im-a-teapot` is not in the list of allowed features: [test-dummy-unstable] +", + ) + .run(); + + p.cargo("-Zallow-features= build") + .masquerade_as_nightly_cargo() + .with_status(101) + .with_stderr( + "\ +error: failed to parse manifest at `[..]` + +Caused by: + the feature `test-dummy-unstable` is not in the list of allowed features: [] +", + ) + .run(); } #[cargo_test] diff -Nru cargo-0.53.0/tests/testsuite/config_include.rs cargo-0.54.0/tests/testsuite/config_include.rs --- cargo-0.53.0/tests/testsuite/config_include.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/tests/testsuite/config_include.rs 2021-04-27 14:35:53.000000000 +0000 @@ 
-1,22 +1,23 @@ //! Tests for `include` config field. -use super::config::{ - assert_error, assert_match, read_output, write_config, write_config_at, ConfigBuilder, -}; -use cargo_test_support::{no_such_file_err_msg, paths}; +use super::config::{assert_error, write_config, write_config_at, ConfigBuilder}; +use cargo_test_support::{no_such_file_err_msg, paths, project}; use std::fs; #[cargo_test] fn gated() { // Requires -Z flag. write_config("include='other'"); + write_config_at( + ".cargo/other", + " + othervalue = 1 + ", + ); let config = ConfigBuilder::new().build(); - let output = read_output(config); - let expected = "\ -warning: config `include` in `[..]/.cargo/config` ignored, \ -the -Zconfig-include command-line flag is required -"; - assert_match(expected, &output); + assert_eq!(config.get::>("othervalue").unwrap(), None); + let config = ConfigBuilder::new().unstable_flag("config-include").build(); + assert_eq!(config.get::("othervalue").unwrap(), 1); } #[cargo_test] @@ -44,6 +45,45 @@ } #[cargo_test] +fn works_with_cli() { + write_config_at( + ".cargo/config.toml", + " + include = 'other.toml' + [build] + rustflags = ['-W', 'unused'] + ", + ); + write_config_at( + ".cargo/other.toml", + " + [build] + rustflags = ['-W', 'unsafe-code'] + ", + ); + let p = project().file("src/lib.rs", "").build(); + p.cargo("build -v") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 [..] +[RUNNING] `rustc [..]-W unused` +[FINISHED] [..] +", + ) + .run(); + p.cargo("build -v -Z config-include") + .masquerade_as_nightly_cargo() + .with_stderr( + "\ +[COMPILING] foo v0.0.1 [..] +[RUNNING] `rustc [..]-W unsafe-code -W unused` +[FINISHED] [..] +", + ) + .run(); +} + +#[cargo_test] fn left_to_right() { // How it merges multiple includes. write_config_at( @@ -77,9 +117,11 @@ fn missing_file() { // Error when there's a missing file. 
write_config("include='missing'"); - let config = ConfigBuilder::new().unstable_flag("config-include").build(); + let config = ConfigBuilder::new() + .unstable_flag("config-include") + .build_err(); assert_error( - config.get::("whatever").unwrap_err(), + config.unwrap_err(), &format!( "\ could not load Cargo configuration @@ -103,9 +145,11 @@ write_config_at(".cargo/config", "include='one'"); write_config_at(".cargo/one", "include='two'"); write_config_at(".cargo/two", "include='config'"); - let config = ConfigBuilder::new().unstable_flag("config-include").build(); + let config = ConfigBuilder::new() + .unstable_flag("config-include") + .build_err(); assert_error( - config.get::("whatever").unwrap_err(), + config.unwrap_err(), "\ could not load Cargo configuration @@ -147,9 +191,11 @@ fn bad_format() { // Not a valid format. write_config("include = 1"); - let config = ConfigBuilder::new().unstable_flag("config-include").build(); + let config = ConfigBuilder::new() + .unstable_flag("config-include") + .build_err(); assert_error( - config.get::("whatever").unwrap_err(), + config.unwrap_err(), "\ could not load Cargo configuration diff -Nru cargo-0.53.0/tests/testsuite/corrupt_git.rs cargo-0.54.0/tests/testsuite/corrupt_git.rs --- cargo-0.53.0/tests/testsuite/corrupt_git.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/tests/testsuite/corrupt_git.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,11 +1,10 @@ //! Tests for corrupt git repos. 
-use std::fs; -use std::path::{Path, PathBuf}; - -use cargo::util::paths as cargopaths; use cargo_test_support::paths; use cargo_test_support::{basic_manifest, git, project}; +use cargo_util::paths as cargopaths; +use std::fs; +use std::path::{Path, PathBuf}; #[cargo_test] fn deleting_database_files() { diff -Nru cargo-0.53.0/tests/testsuite/credential_process.rs cargo-0.54.0/tests/testsuite/credential_process.rs --- cargo-0.53.0/tests/testsuite/credential_process.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/tests/testsuite/credential_process.rs 2021-04-27 14:35:53.000000000 +0000 @@ -209,7 +209,7 @@ fn basic_unsupported() { // Non-action commands don't support login/logout. registry::RegistryBuilder::new().add_tokens(false).build(); - cargo::util::paths::append( + cargo_util::paths::append( &paths::home().join(".cargo/config"), br#" [registry] @@ -271,7 +271,7 @@ .build(); cred_proj.cargo("build").run(); - cargo::util::paths::append( + cargo_util::paths::append( &paths::home().join(".cargo/config"), format!( r#" @@ -323,7 +323,7 @@ .build(); cred_proj.cargo("build").run(); - cargo::util::paths::append( + cargo_util::paths::append( &paths::home().join(".cargo/config"), format!( r#" @@ -390,7 +390,7 @@ fn libexec_path() { // cargo: prefixed names use the sysroot registry::RegistryBuilder::new().add_tokens(false).build(); - cargo::util::paths::append( + cargo_util::paths::append( &paths::home().join(".cargo/config"), br#" [registry] @@ -428,7 +428,7 @@ .build(); cred_proj.cargo("build").run(); - cargo::util::paths::append( + cargo_util::paths::append( &paths::home().join(".cargo/config"), format!( r#" diff -Nru cargo-0.53.0/tests/testsuite/cross_compile.rs cargo-0.54.0/tests/testsuite/cross_compile.rs --- cargo-0.53.0/tests/testsuite/cross_compile.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/tests/testsuite/cross_compile.rs 2021-04-27 14:35:53.000000000 +0000 @@ -153,6 +153,209 @@ } } +/// Always take care of setting these so that +/// 
`cross_compile::alternate()` is the actually-picked target +fn per_crate_target_test( + default_target: Option<&'static str>, + forced_target: Option<&'static str>, + arg_target: Option<&'static str>, +) { + if cross_compile::disabled() { + return; + } + + let p = project() + .file( + "Cargo.toml", + &format!( + r#" + cargo-features = ["per-package-target"] + + [package] + name = "foo" + version = "0.0.0" + authors = [] + build = "build.rs" + {} + {} + "#, + default_target + .map(|t| format!(r#"default-target = "{}""#, t)) + .unwrap_or(String::new()), + forced_target + .map(|t| format!(r#"forced-target = "{}""#, t)) + .unwrap_or(String::new()), + ), + ) + .file( + "build.rs", + &format!( + r#" + fn main() {{ + assert_eq!(std::env::var("TARGET").unwrap(), "{}"); + }} + "#, + cross_compile::alternate() + ), + ) + .file( + "src/main.rs", + &format!( + r#" + use std::env; + fn main() {{ + assert_eq!(env::consts::ARCH, "{}"); + }} + "#, + cross_compile::alternate_arch() + ), + ) + .build(); + + let mut cmd = p.cargo("build -v"); + if let Some(t) = arg_target { + cmd.arg("--target").arg(&t); + } + cmd.masquerade_as_nightly_cargo().run(); + assert!(p.target_bin(cross_compile::alternate(), "foo").is_file()); + + if cross_compile::can_run_on_host() { + p.process(&p.target_bin(cross_compile::alternate(), "foo")) + .run(); + } +} + +#[cargo_test] +fn per_crate_default_target_is_default() { + per_crate_target_test(Some(cross_compile::alternate()), None, None); +} + +#[cargo_test] +fn per_crate_default_target_gets_overridden() { + per_crate_target_test( + Some(cross_compile::unused()), + None, + Some(cross_compile::alternate()), + ); +} + +#[cargo_test] +fn per_crate_forced_target_is_default() { + per_crate_target_test(None, Some(cross_compile::alternate()), None); +} + +#[cargo_test] +fn per_crate_forced_target_does_not_get_overridden() { + per_crate_target_test( + None, + Some(cross_compile::alternate()), + Some(cross_compile::unused()), + ); +} + +#[cargo_test] +fn 
workspace_with_multiple_targets() { + if cross_compile::disabled() { + return; + } + + let p = project() + .file( + "Cargo.toml", + r#" + [workspace] + members = ["native", "cross"] + "#, + ) + .file( + "native/Cargo.toml", + r#" + cargo-features = ["per-package-target"] + + [package] + name = "native" + version = "0.0.0" + authors = [] + build = "build.rs" + "#, + ) + .file( + "native/build.rs", + &format!( + r#" + fn main() {{ + assert_eq!(std::env::var("TARGET").unwrap(), "{}"); + }} + "#, + cross_compile::native() + ), + ) + .file( + "native/src/main.rs", + &format!( + r#" + use std::env; + fn main() {{ + assert_eq!(env::consts::ARCH, "{}"); + }} + "#, + cross_compile::native_arch() + ), + ) + .file( + "cross/Cargo.toml", + &format!( + r#" + cargo-features = ["per-package-target"] + + [package] + name = "cross" + version = "0.0.0" + authors = [] + build = "build.rs" + default-target = "{}" + "#, + cross_compile::alternate(), + ), + ) + .file( + "cross/build.rs", + &format!( + r#" + fn main() {{ + assert_eq!(std::env::var("TARGET").unwrap(), "{}"); + }} + "#, + cross_compile::alternate() + ), + ) + .file( + "cross/src/main.rs", + &format!( + r#" + use std::env; + fn main() {{ + assert_eq!(env::consts::ARCH, "{}"); + }} + "#, + cross_compile::alternate_arch() + ), + ) + .build(); + + let mut cmd = p.cargo("build -v"); + cmd.masquerade_as_nightly_cargo().run(); + + assert!(p.bin("native").is_file()); + assert!(p.target_bin(cross_compile::alternate(), "cross").is_file()); + + p.process(&p.bin("native")).run(); + if cross_compile::can_run_on_host() { + p.process(&p.target_bin(cross_compile::alternate(), "cross")) + .run(); + } +} + #[cargo_test] fn linker() { if cross_compile::disabled() { diff -Nru cargo-0.53.0/tests/testsuite/doc.rs cargo-0.54.0/tests/testsuite/doc.rs --- cargo-0.53.0/tests/testsuite/doc.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/tests/testsuite/doc.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,9 +1,10 @@ //! 
Tests for the `cargo doc` command. +use cargo::core::compiler::RustDocFingerprint; use cargo_test_support::paths::CargoPathExt; use cargo_test_support::registry::Package; use cargo_test_support::{basic_lib_manifest, basic_manifest, git, project}; -use cargo_test_support::{is_nightly, rustc_host}; +use cargo_test_support::{is_nightly, rustc_host, symlink_supported}; use std::fs; use std::str; @@ -862,10 +863,44 @@ r#"#[cfg(feature = "bar")] pub fn bar() {}"#, ) .build(); - p.cargo("doc --features foo").run(); + p.cargo("doc --features foo") + .with_stderr( + "\ +[COMPILING] bar v0.0.1 [..] +[DOCUMENTING] bar v0.0.1 [..] +[DOCUMENTING] foo v0.0.1 [..] +[FINISHED] [..] +", + ) + .run(); assert!(p.root().join("target/doc").is_dir()); assert!(p.root().join("target/doc/foo/fn.foo.html").is_file()); assert!(p.root().join("target/doc/bar/fn.bar.html").is_file()); + // Check that turning the feature off will remove the files. + p.cargo("doc") + .with_stderr( + "\ +[COMPILING] bar v0.0.1 [..] +[DOCUMENTING] bar v0.0.1 [..] +[DOCUMENTING] foo v0.0.1 [..] +[FINISHED] [..] +", + ) + .run(); + assert!(!p.root().join("target/doc/foo/fn.foo.html").is_file()); + assert!(!p.root().join("target/doc/bar/fn.bar.html").is_file()); + // And switching back will rebuild and bring them back. + p.cargo("doc --features foo") + .with_stderr( + "\ +[DOCUMENTING] bar v0.0.1 [..] +[DOCUMENTING] foo v0.0.1 [..] +[FINISHED] [..] 
+", + ) + .run(); + assert!(p.root().join("target/doc/foo/fn.foo.html").is_file()); + assert!(p.root().join("target/doc/bar/fn.bar.html").is_file()); } #[cargo_test] @@ -1715,3 +1750,234 @@ ) .run(); } + +#[cargo_test] +fn doc_fingerprint_is_versioning_consistent() { + // Random rustc verbose version + let old_rustc_verbose_version = format!( + "\ +rustc 1.41.1 (f3e1a954d 2020-02-24) +binary: rustc +commit-hash: f3e1a954d2ead4e2fc197c7da7d71e6c61bad196 +commit-date: 2020-02-24 +host: {} +release: 1.41.1 +LLVM version: 9.0 +", + rustc_host() + ); + + // Create the dummy project. + let dummy_project = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "1.2.4" + authors = [] + "#, + ) + .file("src/lib.rs", "//! These are the docs!") + .build(); + + dummy_project.cargo("doc").run(); + + let fingerprint: RustDocFingerprint = + serde_json::from_str(&dummy_project.read_file("target/.rustdoc_fingerprint.json")) + .expect("JSON Serde fail"); + + // Check that the fingerprint contains the actual rustc version + // which has been used to compile the docs. + let output = std::process::Command::new("rustc") + .arg("-vV") + .output() + .expect("Failed to get actual rustc verbose version"); + assert_eq!( + fingerprint.rustc_vv, + (String::from_utf8_lossy(&output.stdout).as_ref()) + ); + + // As the test shows above. Now we have generated the `doc/` folder and inside + // the rustdoc fingerprint file is located with the correct rustc version. + // So we will remove it and create a new fingerprint with an old rustc version + // inside it. We will also place a bogus file inside of the `doc/` folder to ensure + // it gets removed as we expect on the next doc compilation. 
+ dummy_project.change_file( + "target/.rustdoc_fingerprint.json", + &old_rustc_verbose_version, + ); + + fs::write( + dummy_project.build_dir().join("doc/bogus_file"), + String::from("This is a bogus file and should be removed!"), + ) + .expect("Error writing test bogus file"); + + // Now if we trigger another compilation, since the fingerprint contains an old version + // of rustc, cargo should remove the entire `/doc` folder (including the fingerprint) + // and generating another one with the actual version. + // It should also remove the bogus file we created above. + dummy_project.cargo("doc").run(); + + assert!(!dummy_project.build_dir().join("doc/bogus_file").exists()); + + let fingerprint: RustDocFingerprint = + serde_json::from_str(&dummy_project.read_file("target/.rustdoc_fingerprint.json")) + .expect("JSON Serde fail"); + + // Check that the fingerprint contains the actual rustc version + // which has been used to compile the docs. + assert_eq!( + fingerprint.rustc_vv, + (String::from_utf8_lossy(&output.stdout).as_ref()) + ); +} + +#[cargo_test] +fn doc_fingerprint_respects_target_paths() { + // Random rustc verbose version + let old_rustc_verbose_version = format!( + "\ +rustc 1.41.1 (f3e1a954d 2020-02-24) +binary: rustc +commit-hash: f3e1a954d2ead4e2fc197c7da7d71e6c61bad196 +commit-date: 2020-02-24 +host: {} +release: 1.41.1 +LLVM version: 9.0 +", + rustc_host() + ); + + // Create the dummy project. + let dummy_project = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "1.2.4" + authors = [] + "#, + ) + .file("src/lib.rs", "//! These are the docs!") + .build(); + + dummy_project.cargo("doc --target").arg(rustc_host()).run(); + + let fingerprint: RustDocFingerprint = + serde_json::from_str(&dummy_project.read_file("target/.rustdoc_fingerprint.json")) + .expect("JSON Serde fail"); + + // Check that the fingerprint contains the actual rustc version + // which has been used to compile the docs. 
+ let output = std::process::Command::new("rustc") + .arg("-vV") + .output() + .expect("Failed to get actual rustc verbose version"); + assert_eq!( + fingerprint.rustc_vv, + (String::from_utf8_lossy(&output.stdout).as_ref()) + ); + + // As the test shows above. Now we have generated the `doc/` folder and inside + // the rustdoc fingerprint file is located with the correct rustc version. + // So we will remove it and create a new fingerprint with an old rustc version + // inside it. We will also place a bogus file inside of the `doc/` folder to ensure + // it gets removed as we expect on the next doc compilation. + dummy_project.change_file( + "target/.rustdoc_fingerprint.json", + &old_rustc_verbose_version, + ); + + fs::write( + dummy_project + .build_dir() + .join(rustc_host()) + .join("doc/bogus_file"), + String::from("This is a bogus file and should be removed!"), + ) + .expect("Error writing test bogus file"); + + // Now if we trigger another compilation, since the fingerprint contains an old version + // of rustc, cargo should remove the entire `/doc` folder (including the fingerprint) + // and generating another one with the actual version. + // It should also remove the bogus file we created above. + dummy_project.cargo("doc --target").arg(rustc_host()).run(); + + assert!(!dummy_project + .build_dir() + .join(rustc_host()) + .join("doc/bogus_file") + .exists()); + + let fingerprint: RustDocFingerprint = + serde_json::from_str(&dummy_project.read_file("target/.rustdoc_fingerprint.json")) + .expect("JSON Serde fail"); + + // Check that the fingerprint contains the actual rustc version + // which has been used to compile the docs. + assert_eq!( + fingerprint.rustc_vv, + (String::from_utf8_lossy(&output.stdout).as_ref()) + ); +} + +#[cargo_test] +fn doc_fingerprint_unusual_behavior() { + // Checks for some unusual circumstances with clearing the doc directory. 
+ if !symlink_supported() { + return; + } + let p = project().file("src/lib.rs", "").build(); + p.build_dir().mkdir_p(); + let real_doc = p.root().join("doc"); + real_doc.mkdir_p(); + let build_doc = p.build_dir().join("doc"); + p.symlink(&real_doc, &build_doc); + fs::write(real_doc.join("somefile"), "test").unwrap(); + fs::write(real_doc.join(".hidden"), "test").unwrap(); + p.cargo("doc").run(); + // Make sure for the first run, it does not delete any files and does not + // break the symlink. + assert!(build_doc.join("somefile").exists()); + assert!(real_doc.join("somefile").exists()); + assert!(real_doc.join(".hidden").exists()); + assert!(real_doc.join("foo/index.html").exists()); + // Pretend that the last build was generated by an older version. + p.change_file( + "target/.rustdoc_fingerprint.json", + "{\"rustc_vv\": \"I am old\"}", + ); + // Change file to trigger a new build. + p.change_file("src/lib.rs", "// changed"); + p.cargo("doc") + .with_stderr( + "[DOCUMENTING] foo [..]\n\ + [FINISHED] [..]", + ) + .run(); + // This will delete somefile, but not .hidden. + assert!(!real_doc.join("somefile").exists()); + assert!(real_doc.join(".hidden").exists()); + assert!(real_doc.join("foo/index.html").exists()); + // And also check the -Z flag behavior. + p.change_file( + "target/.rustdoc_fingerprint.json", + "{\"rustc_vv\": \"I am old\"}", + ); + // Change file to trigger a new build. + p.change_file("src/lib.rs", "// changed2"); + fs::write(real_doc.join("somefile"), "test").unwrap(); + p.cargo("doc -Z skip-rustdoc-fingerprint") + .masquerade_as_nightly_cargo() + .with_stderr( + "[DOCUMENTING] foo [..]\n\ + [FINISHED] [..]", + ) + .run(); + // Should not have deleted anything. 
+ assert!(build_doc.join("somefile").exists()); + assert!(real_doc.join("somefile").exists()); +} diff -Nru cargo-0.53.0/tests/testsuite/features2.rs cargo-0.54.0/tests/testsuite/features2.rs --- cargo-0.53.0/tests/testsuite/features2.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/tests/testsuite/features2.rs 2021-04-27 14:35:53.000000000 +0000 @@ -201,7 +201,7 @@ Package::new("hostdep", "1.0.0").publish(); Package::new("pm", "1.0.0") .proc_macro(true) - .target_dep("hostdep", "1.0", &rustc_host()) + .target_dep("hostdep", "1.0", rustc_host()) .file("src/lib.rs", "extern crate hostdep;") .publish(); let p = project() @@ -1203,7 +1203,7 @@ Package::new("targetdep", "1.0.0").publish(); Package::new("hostdep", "1.0.0") // Check that "for_host" is sticky. - .target_dep("somedep", "1.0", &rustc_host()) + .target_dep("somedep", "1.0", rustc_host()) .feature("feat1", &[]) .file( "src/lib.rs", diff -Nru cargo-0.53.0/tests/testsuite/features_namespaced.rs cargo-0.54.0/tests/testsuite/features_namespaced.rs --- cargo-0.53.0/tests/testsuite/features_namespaced.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/tests/testsuite/features_namespaced.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,5 +1,6 @@ //! Tests for namespaced features. +use super::features2::switch_to_resolver_2; use cargo_test_support::registry::{Dependency, Package}; use cargo_test_support::{project, publish}; @@ -633,8 +634,11 @@ .with_status(101) .with_stderr( "\ -[UPDATING] [..] -[ERROR] feature value `dep:baz` is not allowed to use explicit `dep:` syntax +error: failed to parse manifest at `[CWD]/Cargo.toml` + +Caused by: + feature `dep:baz` in dependency `bar` is not allowed to use explicit `dep:` syntax + If you want to enable [..] ", ) .run(); @@ -664,8 +668,18 @@ .with_status(101) .with_stderr( "\ -[UPDATING] [..] 
-[ERROR] feature value `dep:bar` is not allowed to use explicit `dep:` syntax +[ERROR] feature `dep:bar` is not allowed to use explicit `dep:` syntax +", + ) + .run(); + + switch_to_resolver_2(&p); + p.cargo("check -Z namespaced-features --features dep:bar") + .masquerade_as_nightly_cargo() + .with_status(101) + .with_stderr( + "\ +[ERROR] feature `dep:bar` is not allowed to use explicit `dep:` syntax ", ) .run(); @@ -996,6 +1010,57 @@ ", ) .run(); +} + +#[cargo_test] +fn tree_no_implicit() { + // tree without an implicit feature + Package::new("bar", "1.0.0").publish(); + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + + [dependencies] + bar = { version = "1.0", optional=true } + + [features] + a = ["dep:bar"] + "#, + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("tree -e features -Z namespaced-features") + .masquerade_as_nightly_cargo() + .with_stdout("foo v0.1.0 ([ROOT]/foo)") + .run(); + + p.cargo("tree -e features --all-features -Z namespaced-features") + .masquerade_as_nightly_cargo() + .with_stdout( + "\ +foo v0.1.0 ([ROOT]/foo) +└── bar feature \"default\" + └── bar v1.0.0 +", + ) + .run(); + + p.cargo("tree -e features -i bar --all-features -Z namespaced-features") + .masquerade_as_nightly_cargo() + .with_stdout( + "\ +bar v1.0.0 +└── bar feature \"default\" + └── foo v0.1.0 ([ROOT]/foo) + └── foo feature \"a\" (command-line) +", + ) + .run(); } #[cargo_test] diff -Nru cargo-0.53.0/tests/testsuite/features.rs cargo-0.54.0/tests/testsuite/features.rs --- cargo-0.53.0/tests/testsuite/features.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/tests/testsuite/features.rs 2021-04-27 14:35:53.000000000 +0000 @@ -269,7 +269,15 @@ p.cargo("build --features foo") .with_status(101) - .with_stderr("[ERROR] feature names may not contain slashes: `foo/bar`") + .with_stderr( + "\ +error: failed to parse manifest at `[CWD]/Cargo.toml` + +Caused by: + feature `foo/bar` in dependency `bar` is not allowed to 
contain slashes + If you want to enable features [..] +", + ) .run(); } @@ -409,7 +417,14 @@ .build(); p.cargo("build") .with_status(101) - .with_stderr("[ERROR] feature names may not contain slashes: `bar/qux`") + .with_stderr( + "\ +error: failed to parse manifest at `[CWD]/Cargo.toml` + +Caused by: + multiple slashes in feature `derived/bar/qux` (included by feature `default`) are not allowed +", + ) .run(); } @@ -1192,7 +1207,7 @@ // Hierarchical feature specification should still be disallowed p.cargo("build --features derived/bar/some-feat") .with_status(101) - .with_stderr("[ERROR] feature names may not contain slashes: `bar/some-feat`") + .with_stderr("[ERROR] multiple slashes in feature `derived/bar/some-feat` is not allowed") .run(); } @@ -1906,7 +1921,7 @@ } #[cargo_test] -fn invalid_feature_names() { +fn invalid_feature_names_warning() { // Warnings for more restricted feature syntax. let p = project() .file( @@ -1929,7 +1944,6 @@ "+foo" = [] "-foo" = [] ".foo" = [] - "foo/bar" = [] "foo:bar" = [] "foo?" = [] "?foo" = [] @@ -1961,9 +1975,6 @@ [WARNING] invalid character `¼` in feature `a¼` in package foo v0.1.0 ([ROOT]/foo), characters must be Unicode XID characters, `+`, or `.` (numbers, `+`, `-`, `_`, `.`, or most letters) This was previously accepted but is being phased out; it will become a hard error in a future release. For more information, see issue #8813 , and please leave a comment if this will be a problem for your project. -[WARNING] invalid character `/` in feature `foo/bar` in package foo v0.1.0 ([ROOT]/foo), characters must be Unicode XID characters, `+`, or `.` (numbers, `+`, `-`, `_`, `.`, or most letters) -This was previously accepted but is being phased out; it will become a hard error in a future release. -For more information, see issue #8813 , and please leave a comment if this will be a problem for your project. 
[WARNING] invalid character `:` in feature `foo:bar` in package foo v0.1.0 ([ROOT]/foo), characters must be Unicode XID characters, `+`, or `.` (numbers, `+`, `-`, `_`, `.`, or most letters) This was previously accepted but is being phased out; it will become a hard error in a future release. For more information, see issue #8813 , and please leave a comment if this will be a problem for your project. @@ -1994,9 +2005,6 @@ [WARNING] invalid character `¼` in feature `a¼` in package foo v0.1.0 ([ROOT]/foo), characters must be Unicode XID characters, `+`, or `.` (numbers, `+`, `-`, `_`, `.`, or most letters) This was previously accepted but is being phased out; it will become a hard error in a future release. For more information, see issue #8813 , and please leave a comment if this will be a problem for your project. -[WARNING] invalid character `/` in feature `foo/bar` in package foo v0.1.0 ([ROOT]/foo), characters must be Unicode XID characters, `+`, or `.` (numbers, `+`, `-`, `_`, `.`, or most letters) -This was previously accepted but is being phased out; it will become a hard error in a future release. -For more information, see issue #8813 , and please leave a comment if this will be a problem for your project. [WARNING] invalid character `:` in feature `foo:bar` in package foo v0.1.0 ([ROOT]/foo), characters must be Unicode XID characters, `+`, or `.` (numbers, `+`, `-`, `_`, `.`, or most letters) This was previously accepted but is being phased out; it will become a hard error in a future release. For more information, see issue #8813 , and please leave a comment if this will be a problem for your project. @@ -2017,3 +2025,34 @@ ") .run(); } + +#[cargo_test] +fn invalid_feature_names_error() { + // Errors for more restricted feature syntax. 
+ let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + + [features] + "foo/bar" = [] + "#, + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("check") + .with_status(101) + .with_stderr( + "\ +error: failed to parse manifest at `[CWD]/Cargo.toml` + +Caused by: + feature named `foo/bar` is not allowed to contain slashes +", + ) + .run(); +} diff -Nru cargo-0.53.0/tests/testsuite/freshness.rs cargo-0.54.0/tests/testsuite/freshness.rs --- cargo-0.53.0/tests/testsuite/freshness.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/tests/testsuite/freshness.rs 2021-04-27 14:35:53.000000000 +0000 @@ -2581,3 +2581,63 @@ p.cargo("build").run(); p.cargo("build").with_stderr("[FINISHED] [..]").run(); } + +#[cargo_test] +fn cargo_env_changes() { + // Checks that changes to the env var CARGO in the dep-info file triggers + // a rebuild. + let p = project() + .file("Cargo.toml", &basic_manifest("foo", "1.0.0")) + .file( + "src/main.rs", + r#" + fn main() { + println!("{:?}", env!("CARGO")); + } + "#, + ) + .build(); + + let cargo_exe = cargo_test_support::cargo_exe(); + let other_cargo_path = p.root().join(cargo_exe.file_name().unwrap()); + std::fs::hard_link(&cargo_exe, &other_cargo_path).unwrap(); + let other_cargo = || { + let mut pb = cargo_test_support::process(&other_cargo_path); + pb.cwd(p.root()); + cargo_test_support::execs().with_process_builder(pb) + }; + + p.cargo("check").run(); + other_cargo() + .arg("check") + .arg("-v") + .with_stderr( + "\ +[CHECKING] foo [..] +[RUNNING] `rustc [..] +[FINISHED] [..] +", + ) + .run(); + + // And just to confirm that without using env! it doesn't rebuild. + p.change_file("src/main.rs", "fn main() {}"); + p.cargo("check") + .with_stderr( + "\ +[CHECKING] foo [..] +[FINISHED] [..] +", + ) + .run(); + other_cargo() + .arg("check") + .arg("-v") + .with_stderr( + "\ +[FRESH] foo [..] +[FINISHED] [..] 
+", + ) + .run(); +} diff -Nru cargo-0.53.0/tests/testsuite/future_incompat_report.rs cargo-0.54.0/tests/testsuite/future_incompat_report.rs --- cargo-0.53.0/tests/testsuite/future_incompat_report.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/tests/testsuite/future_incompat_report.rs 2021-04-27 14:35:53.000000000 +0000 @@ -52,6 +52,23 @@ } #[cargo_test] +fn test_zero_future_incompat() { + if !is_nightly() { + return; + } + + let p = project() + .file("Cargo.toml", &basic_manifest("foo", "0.0.0")) + .file("src/main.rs", "fn main() {}") + .build(); + + p.cargo("build --future-incompat-report -Z unstable-options -Z future-incompat-report") + .masquerade_as_nightly_cargo() + .with_stderr_contains("note: 0 dependencies had future-incompat warnings") + .run(); +} + +#[cargo_test] fn test_single_crate() { if !is_nightly() { return; @@ -147,7 +164,7 @@ let stderr = std::str::from_utf8(&output.stderr).unwrap(); // Find '--id ' in the output - let mut iter = stderr.split(" "); + let mut iter = stderr.split(' '); iter.find(|w| *w == "--id").unwrap(); let id = iter .next() diff -Nru cargo-0.53.0/tests/testsuite/git.rs cargo-0.54.0/tests/testsuite/git.rs --- cargo-0.53.0/tests/testsuite/git.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/tests/testsuite/git.rs 2021-04-27 14:35:53.000000000 +0000 @@ -2805,7 +2805,7 @@ // Then create a commit on the new `main` branch so `master` and `main` // differ. 
- git_project.change_file("src/lib.rs", ""); + git_project.change_file("src/lib.rs", "pub fn bar() {}"); git::add(&repo); git::commit(&repo); @@ -2817,14 +2817,13 @@ [project] name = "foo" version = "0.5.0" - [dependencies] dep1 = {{ git = '{}' }} "#, git_project.url() ), ) - .file("src/lib.rs", "pub fn foo() { dep1::foo() }") + .file("src/lib.rs", "pub fn foo() { dep1::bar() }") .build(); project @@ -2832,14 +2831,6 @@ .with_stderr( "\ [UPDATING] git repository `[..]` -warning: fetching `master` branch from `[..]` but the `HEAD` \ - reference for this repository is not the \ - `master` branch. This behavior will change \ - in Cargo in the future and your build may \ - break, so it's recommended to place \ - `branch = \"master\"` in Cargo.toml when \ - depending on this git repository to ensure \ - that your build will continue to work. [COMPILING] dep1 v0.5.0 ([..]) [COMPILING] foo v0.5.0 ([..]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]", @@ -2977,7 +2968,6 @@ [project] name = "foo" version = "0.5.0" - [dependencies] dep1 = {{ git = '{}', branch = 'master' }} a = {{ path = 'a' }} @@ -2993,7 +2983,6 @@ [project] name = "a" version = "0.5.0" - [dependencies] dep1 = {{ git = '{}' }} "#, @@ -3003,15 +2992,16 @@ .file("a/src/lib.rs", "") .build(); + // This'll download the git repository twice, one with HEAD and once with + // the master branch. Then it'll compile 4 crates, the 2 git deps, then + // the two local deps. project .cargo("build") .with_stderr( "\ [UPDATING] [..] -warning: two git dependencies found for `[..]` where one uses `branch = \"master\"` \ -and the other doesn't; this will break in a future version of Cargo, so please \ -ensure the dependency forms are consistent -warning: [..] +[UPDATING] [..] +[COMPILING] [..] [COMPILING] [..] [COMPILING] [..] [COMPILING] [..] 
diff -Nru cargo-0.53.0/tests/testsuite/help.rs cargo-0.54.0/tests/testsuite/help.rs --- cargo-0.53.0/tests/testsuite/help.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/tests/testsuite/help.rs 2021-04-27 14:35:53.000000000 +0000 @@ -48,7 +48,9 @@ // Test that the output of `cargo -Z help` shows a different help screen with // all the `-Z` flags. cargo_process("-Z help") - .with_stdout_contains(" -Z unstable-options -- Allow the usage of unstable options") + .with_stdout_contains( + " -Z allow-features[..]-- Allow *only* the listed unstable features", + ) .run(); } diff -Nru cargo-0.53.0/tests/testsuite/init.rs cargo-0.54.0/tests/testsuite/init.rs --- cargo-0.53.0/tests/testsuite/init.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/tests/testsuite/init.rs 2021-04-27 14:35:53.000000000 +0000 @@ -26,7 +26,6 @@ #[cargo_test] fn simple_lib() { cargo_process("init --lib --vcs none --edition 2015") - .env("USER", "foo") .with_stderr("[CREATED] library package") .run(); @@ -42,7 +41,6 @@ let path = paths::root().join("foo"); fs::create_dir(&path).unwrap(); cargo_process("init --bin --vcs none --edition 2015") - .env("USER", "foo") .cwd(&path) .with_stderr("[CREATED] binary (application) package") .run(); @@ -66,9 +64,7 @@ ) .unwrap(); - cargo_process("init --lib foo --edition 2015") - .env("USER", "foo") - .run(); + cargo_process("init --lib foo --edition 2015").run(); assert!(paths::root().is_dir()); assert!(paths::root().join("foo/Cargo.toml").is_file()); @@ -99,9 +95,7 @@ fs::create_dir_all(paths::root().join("foo")).unwrap(); fs::write(paths::root().join("foo/.gitignore"), "**/some.file").unwrap(); - cargo_process("init --lib foo --edition 2015") - .env("USER", "foo") - .run(); + cargo_process("init --lib foo --edition 2015").run(); let fp = paths::root().join("foo/.gitignore"); let contents = fs::read_to_string(&fp).unwrap(); @@ -118,7 +112,6 @@ #[cargo_test] fn both_lib_and_bin() { cargo_process("init --lib --bin") - .env("USER", "foo") 
.with_status(101) .with_stderr("[ERROR] can't specify both lib and binary outputs") .run(); @@ -139,15 +132,9 @@ fs::write(&sourcefile_path, content).unwrap(); if explicit { - cargo_process("init --bin --vcs none") - .env("USER", "foo") - .cwd(&path) - .run(); + cargo_process("init --bin --vcs none").cwd(&path).run(); } else { - cargo_process("init --vcs none") - .env("USER", "foo") - .cwd(&path) - .run(); + cargo_process("init --vcs none").cwd(&path).run(); } assert!(paths::root().join("foo/Cargo.toml").is_file()); @@ -200,7 +187,6 @@ fs::write(path2, r#" fn qqq () { println!("Hello, world 3!"); }"#).unwrap(); cargo_process("init --vcs none") - .env("USER", "foo") .cwd(&path) .with_status(101) .with_stderr( @@ -224,7 +210,6 @@ fs::write(path2, r#"fn main () { println!("Hello, world 3!"); }"#).unwrap(); cargo_process("init --lib --vcs none") - .env("USER", "foo") .cwd(&path) .with_status(101) .with_stderr( @@ -249,10 +234,7 @@ let content = "pub fn qqq() {}"; fs::write(&sourcefile_path, content).unwrap(); - cargo_process("init --vcs none") - .env("USER", "foo") - .cwd(&path) - .run(); + cargo_process("init --vcs none").cwd(&path).run(); assert!(paths::root().join("foo/Cargo.toml").is_file()); assert!(!paths::root().join("foo/src/main.rs").is_file()); @@ -274,9 +256,7 @@ #[cargo_test] fn simple_git() { - cargo_process("init --lib --vcs git") - .env("USER", "foo") - .run(); + cargo_process("init --lib --vcs git").run(); assert!(paths::root().join("Cargo.toml").is_file()); assert!(paths::root().join("src/lib.rs").is_file()); @@ -286,7 +266,7 @@ #[cargo_test] fn auto_git() { - cargo_process("init --lib").env("USER", "foo").run(); + cargo_process("init --lib").run(); assert!(paths::root().join("Cargo.toml").is_file()); assert!(paths::root().join("src/lib.rs").is_file()); @@ -300,7 +280,6 @@ fs::create_dir_all(&foo).unwrap(); cargo_process("init") .cwd(foo.clone()) - .env("USER", "foo") .with_status(101) .with_stderr( "\ @@ -328,7 +307,6 @@ 
fs::create_dir_all(&test).unwrap(); cargo_process("init") .cwd(test.clone()) - .env("USER", "foo") .with_status(101) .with_stderr( "\ @@ -354,7 +332,7 @@ fn git_autodetect() { fs::create_dir(&paths::root().join(".git")).unwrap(); - cargo_process("init --lib").env("USER", "foo").run(); + cargo_process("init --lib").run(); assert!(paths::root().join("Cargo.toml").is_file()); assert!(paths::root().join("src/lib.rs").is_file()); @@ -366,7 +344,7 @@ fn mercurial_autodetect() { fs::create_dir(&paths::root().join(".hg")).unwrap(); - cargo_process("init --lib").env("USER", "foo").run(); + cargo_process("init --lib").run(); assert!(paths::root().join("Cargo.toml").is_file()); assert!(paths::root().join("src/lib.rs").is_file()); @@ -380,7 +358,7 @@ fs::write(&paths::root().join(".gitignore"), "qqqqqq\n").unwrap(); - cargo_process("init --lib").env("USER", "foo").run(); + cargo_process("init --lib").run(); assert!(paths::root().join("Cargo.toml").is_file()); assert!(paths::root().join("src/lib.rs").is_file()); @@ -397,7 +375,7 @@ fs::write(&paths::root().join(".gitignore"), "first").unwrap(); - cargo_process("init --lib").env("USER", "foo").run(); + cargo_process("init --lib").run(); assert!(paths::root().join(".gitignore").is_file()); @@ -409,7 +387,7 @@ fn gitignore_no_newline_in_new() { fs::create_dir(&paths::root().join(".git")).unwrap(); - cargo_process("init --lib").env("USER", "foo").run(); + cargo_process("init --lib").run(); assert!(paths::root().join(".gitignore").is_file()); @@ -423,7 +401,7 @@ fs::write(&paths::root().join(".hgignore"), "first").unwrap(); - cargo_process("init --lib").env("USER", "foo").run(); + cargo_process("init --lib").run(); assert!(paths::root().join(".hgignore").is_file()); @@ -435,7 +413,7 @@ fn mercurial_no_newline_in_new() { fs::create_dir(&paths::root().join(".hg")).unwrap(); - cargo_process("init --lib").env("USER", "foo").run(); + cargo_process("init --lib").run(); assert!(paths::root().join(".hgignore").is_file()); @@ -445,9 +423,7 
@@ #[cargo_test] fn terminating_newline_in_new_git_ignore() { - cargo_process("init --vcs git --lib") - .env("USER", "foo") - .run(); + cargo_process("init --vcs git --lib").run(); let content = fs::read_to_string(&paths::root().join(".gitignore")).unwrap(); @@ -461,9 +437,7 @@ if !mercurial_available() { return; } - cargo_process("init --vcs hg --lib") - .env("USER", "foo") - .run(); + cargo_process("init --vcs hg --lib").run(); let content = fs::read_to_string(&paths::root().join(".hgignore")).unwrap(); @@ -477,7 +451,7 @@ fs::create_dir(&paths::root().join(".git")).unwrap(); fs::write(&paths::root().join(".gitignore"), b"first").unwrap(); - cargo_process("init --lib").env("USER", "foo").run(); + cargo_process("init --lib").run(); let content = fs::read_to_string(&paths::root().join(".gitignore")).unwrap(); @@ -491,7 +465,7 @@ fs::create_dir(&paths::root().join(".hg")).unwrap(); fs::write(&paths::root().join(".hgignore"), b"first").unwrap(); - cargo_process("init --lib").env("USER", "foo").run(); + cargo_process("init --lib").run(); let content = fs::read_to_string(&paths::root().join(".hgignore")).unwrap(); @@ -504,9 +478,7 @@ fn cargo_lock_gitignored_if_lib1() { fs::create_dir(&paths::root().join(".git")).unwrap(); - cargo_process("init --lib --vcs git") - .env("USER", "foo") - .run(); + cargo_process("init --lib --vcs git").run(); assert!(paths::root().join(".gitignore").is_file()); @@ -520,7 +492,7 @@ fs::write(&paths::root().join("lib.rs"), "").unwrap(); - cargo_process("init --vcs git").env("USER", "foo").run(); + cargo_process("init --vcs git").run(); assert!(paths::root().join(".gitignore").is_file()); @@ -532,9 +504,7 @@ fn cargo_lock_not_gitignored_if_bin1() { fs::create_dir(&paths::root().join(".git")).unwrap(); - cargo_process("init --vcs git --bin") - .env("USER", "foo") - .run(); + cargo_process("init --vcs git --bin").run(); assert!(paths::root().join(".gitignore").is_file()); @@ -548,7 +518,7 @@ fs::write(&paths::root().join("main.rs"), 
"").unwrap(); - cargo_process("init --vcs git").env("USER", "foo").run(); + cargo_process("init --vcs git").run(); assert!(paths::root().join(".gitignore").is_file()); @@ -558,9 +528,7 @@ #[cargo_test] fn with_argument() { - cargo_process("init foo --vcs none") - .env("USER", "foo") - .run(); + cargo_process("init foo --vcs none").run(); assert!(paths::root().join("foo/Cargo.toml").is_file()); } @@ -595,7 +563,6 @@ fs::write(&paths::root().join("rustfmt.toml"), "tab_spaces = 2").unwrap(); cargo_process("init --lib") - .env("USER", "foo") .with_stderr("[CREATED] library package") .run(); @@ -615,7 +582,6 @@ #[cargo_test] fn ignores_failure_to_format_source() { cargo_process("init --lib") - .env("USER", "foo") .env("PATH", "") // pretend that `rustfmt` is missing .with_stderr("[CREATED] library package") .run(); diff -Nru cargo-0.53.0/tests/testsuite/lockfile_compat.rs cargo-0.54.0/tests/testsuite/lockfile_compat.rs --- cargo-0.53.0/tests/testsuite/lockfile_compat.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/tests/testsuite/lockfile_compat.rs 2021-04-27 14:35:53.000000000 +0000 @@ -26,6 +26,8 @@ let expected_lockfile = r#"# This file is automatically @generated by Cargo. # It is not intended for manual editing. +version = 3 + [[package]] name = "bar" version = "0.1.0" @@ -170,6 +172,8 @@ assert_lockfiles_eq( r#"# This file is automatically @generated by Cargo. # It is not intended for manual editing. +version = 3 + [[package]] name = "bar" version = "0.1.0" @@ -408,6 +412,8 @@ let expected = "\ # This file is automatically @generated by Cargo.\n# It is not intended for manual editing. +version = 3 + [[package]] name = \"bar\" version = \"0.1.0\" @@ -468,6 +474,8 @@ assert_lockfiles_eq( r#"# [..] # [..] 
+version = 3 + [[package]] name = "bar" version = "0.1.0" @@ -763,3 +771,75 @@ p.cargo("build --locked").run(); } + +#[cargo_test] +fn same_name_version_different_sources() { + let cksum = Package::new("foo", "0.1.0").publish(); + let (git_project, repo) = git::new_repo("dep1", |project| { + project + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + "#, + ) + .file("src/lib.rs", "") + }); + let head_id = repo.head().unwrap().target().unwrap(); + + // Lockfile was generated with Rust 1.51 + let lockfile = format!( + r#"# This file is automatically @generated by Cargo. +# It is not intended for manual editing. +version = 3 + +[[package]] +name = "foo" +version = "0.1.0" +dependencies = [ + "foo 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "foo 0.1.0 (git+{url})", +] + +[[package]] +name = "foo" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "{cksum}" + +[[package]] +name = "foo" +version = "0.1.0" +source = "git+{url}#{sha}" +"#, + sha = head_id, + url = git_project.url(), + cksum = cksum + ); + + let p = project() + .file( + "Cargo.toml", + &format!( + r#" + [project] + name = "foo" + version = "0.1.0" + + [dependencies] + foo = "0.1.0" + foo2 = {{ git = '{}', package = 'foo' }} + "#, + git_project.url(), + ), + ) + .file("src/lib.rs", "") + .file("Cargo.lock", &lockfile) + .build(); + + p.cargo("build").run(); + + assert_eq!(p.read_file("Cargo.lock"), lockfile); +} diff -Nru cargo-0.53.0/tests/testsuite/main.rs cargo-0.54.0/tests/testsuite/main.rs --- cargo-0.53.0/tests/testsuite/main.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/tests/testsuite/main.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,12 +1,9 @@ -#![warn(rust_2018_idioms)] // while we're getting used to 2018 -#![cfg_attr(feature = "deny-warnings", deny(warnings))] -#![allow(clippy::blacklisted_name)] -#![allow(clippy::explicit_iter_loop)] -#![allow(clippy::redundant_closure)] 
-#![allow(clippy::blocks_in_if_conditions)] // clippy doesn't agree with rustfmt 😂 -#![allow(clippy::inefficient_to_string)] // this causes suggestions that result in `(*s).to_string()` +// See src/cargo/lib.rs for notes on these lint settings. +#![warn(rust_2018_idioms)] +#![allow(clippy::all)] #![warn(clippy::needless_borrow)] #![warn(clippy::redundant_clone)] +#![cfg_attr(feature = "deny-warnings", deny(warnings))] #[macro_use] extern crate cargo_test_macro; @@ -24,6 +21,7 @@ mod cache_messages; mod cargo_alias_config; mod cargo_command; +mod cargo_config; mod cargo_env_config; mod cargo_features; mod cargo_targets; diff -Nru cargo-0.53.0/tests/testsuite/message_format.rs cargo-0.54.0/tests/testsuite/message_format.rs --- cargo-0.53.0/tests/testsuite/message_format.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/tests/testsuite/message_format.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,6 +1,6 @@ //! Tests for --message-format flag. -use cargo_test_support::{basic_lib_manifest, basic_manifest, is_nightly, project}; +use cargo_test_support::{basic_lib_manifest, basic_manifest, project}; #[cargo_test] fn cannot_specify_two() { @@ -112,11 +112,6 @@ #[cargo_test] fn cargo_renders_doctests() { - if !is_nightly() { - // --error-format=short support added in 1.51 - return; - } - let p = project() .file("Cargo.toml", &basic_lib_manifest("foo")) .file( diff -Nru cargo-0.53.0/tests/testsuite/metadata.rs cargo-0.54.0/tests/testsuite/metadata.rs --- cargo-0.53.0/tests/testsuite/metadata.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/tests/testsuite/metadata.rs 2021-04-27 14:35:53.000000000 +0000 @@ -2358,7 +2358,7 @@ } "# .replace("$ALT_TRIPLE", alt_target) - .replace("$HOST_TRIPLE", &host_target) + .replace("$HOST_TRIPLE", host_target) .replace("$FOO_DEPS", &foo_deps.to_string()); // We're going to be checking that we don't download excessively, @@ -2483,7 +2483,7 @@ } "# .replace("$ALT_TRIPLE", alt_target) - .replace("$HOST_TRIPLE", &host_target) + 
.replace("$HOST_TRIPLE", host_target) .replace("$ALT_DEP", alt_dep) .replace("$CFG_DEP", cfg_dep) .replace("$HOST_DEP", host_dep) @@ -2648,7 +2648,7 @@ "metadata": null } "# - .replace("$HOST_TRIPLE", &host_target) + .replace("$HOST_TRIPLE", host_target) .replace("$HOST_DEP", host_dep) .replace("$NORMAL_DEP", normal_dep) .replace("$FOO", &foo), @@ -2749,7 +2749,7 @@ "metadata": null } "# - .replace("$HOST_TRIPLE", &host_target) + .replace("$HOST_TRIPLE", host_target) .replace("$CFG_DEP", cfg_dep) .replace("$HOST_DEP", host_dep) .replace("$NORMAL_DEP", normal_dep) diff -Nru cargo-0.53.0/tests/testsuite/multitarget.rs cargo-0.54.0/tests/testsuite/multitarget.rs --- cargo-0.53.0/tests/testsuite/multitarget.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/tests/testsuite/multitarget.rs 2021-04-27 14:35:53.000000000 +0000 @@ -36,7 +36,7 @@ .run(); assert!(p.target_bin(t1, "foo").is_file()); - assert!(p.target_bin(&t2, "foo").is_file()); + assert!(p.target_bin(t2, "foo").is_file()); } #[cargo_test] @@ -140,5 +140,5 @@ .masquerade_as_nightly_cargo() .run(); - assert!(p.target_bin(&t, "foo").is_file()); + assert!(p.target_bin(t, "foo").is_file()); } diff -Nru cargo-0.53.0/tests/testsuite/new.rs cargo-0.54.0/tests/testsuite/new.rs --- cargo-0.53.0/tests/testsuite/new.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/tests/testsuite/new.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,7 +1,7 @@ //! Tests for the `cargo new` command. 
-use cargo_test_support::paths::{self, CargoPathExt}; -use cargo_test_support::{cargo_process, git_process}; +use cargo_test_support::cargo_process; +use cargo_test_support::paths; use std::env; use std::fs::{self, File}; @@ -15,7 +15,6 @@ #[cargo_test] fn simple_lib() { cargo_process("new --lib foo --vcs none --edition 2015") - .env("USER", "foo") .with_stderr("[CREATED] library `foo` package") .run(); @@ -44,7 +43,6 @@ #[cargo_test] fn simple_bin() { cargo_process("new --bin foo --edition 2015") - .env("USER", "foo") .with_stderr("[CREATED] binary (application) `foo` package") .run(); @@ -61,7 +59,6 @@ #[cargo_test] fn both_lib_and_bin() { cargo_process("new --lib --bin foo") - .env("USER", "foo") .with_status(101) .with_stderr("[ERROR] can't specify both lib and binary outputs") .run(); @@ -69,9 +66,7 @@ #[cargo_test] fn simple_git() { - cargo_process("new --lib foo --edition 2015") - .env("USER", "foo") - .run(); + cargo_process("new --lib foo --edition 2015").run(); assert!(paths::root().is_dir()); assert!(paths::root().join("foo/Cargo.toml").is_file()); @@ -169,7 +164,6 @@ .run(); cargo_process("new --lib incremental") - .env("USER", "foo") .with_stderr( "\ [WARNING] the name `incremental` will not support binary executables with that name, \ @@ -205,7 +199,6 @@ #[cargo_test] fn std_name() { cargo_process("new core") - .env("USER", "foo") .with_stderr( "\ [WARNING] the name `core` is part of Rust's standard library @@ -227,256 +220,6 @@ } #[cargo_test] -fn finds_author_user() { - create_empty_gitconfig(); - cargo_process("new foo").env("USER", "foo").run(); - - let toml = paths::root().join("foo/Cargo.toml"); - let contents = fs::read_to_string(&toml).unwrap(); - assert!(contents.contains(r#"authors = ["foo"]"#)); -} - -#[cargo_test] -fn author_without_user_or_email() { - create_empty_gitconfig(); - cargo_process("new foo") - .env_remove("USER") - .env_remove("USERNAME") - .env_remove("NAME") - .env_remove("EMAIL") - .run(); - - let toml = 
paths::root().join("foo/Cargo.toml"); - let contents = fs::read_to_string(&toml).unwrap(); - assert!(contents.contains(r#"authors = []"#)); -} - -#[cargo_test] -fn finds_author_email_only() { - create_empty_gitconfig(); - cargo_process("new foo") - .env_remove("USER") - .env_remove("USERNAME") - .env_remove("NAME") - .env_remove("EMAIL") - .env("EMAIL", "baz") - .run(); - - let toml = paths::root().join("foo/Cargo.toml"); - let contents = fs::read_to_string(&toml).unwrap(); - assert!(contents.contains(r#"authors = [""]"#)); -} - -#[cargo_test] -fn finds_author_user_escaped() { - create_empty_gitconfig(); - cargo_process("new foo").env("USER", "foo \"bar\"").run(); - - let toml = paths::root().join("foo/Cargo.toml"); - let contents = fs::read_to_string(&toml).unwrap(); - assert!(contents.contains(r#"authors = ["foo \"bar\""]"#)); -} - -#[cargo_test] -fn finds_author_username() { - create_empty_gitconfig(); - cargo_process("new foo") - .env_remove("USER") - .env("USERNAME", "foo") - .run(); - - let toml = paths::root().join("foo/Cargo.toml"); - let contents = fs::read_to_string(&toml).unwrap(); - assert!(contents.contains(r#"authors = ["foo"]"#)); -} - -#[cargo_test] -fn finds_author_name() { - create_empty_gitconfig(); - cargo_process("new foo") - .env_remove("USERNAME") - .env("NAME", "foo") - .run(); - - let toml = paths::root().join("foo/Cargo.toml"); - let contents = fs::read_to_string(&toml).unwrap(); - assert!(contents.contains(r#"authors = ["foo"]"#)); -} - -#[cargo_test] -fn finds_author_priority() { - cargo_process("new foo") - .env("USER", "bar2") - .env("EMAIL", "baz2") - .env("CARGO_NAME", "bar") - .env("CARGO_EMAIL", "baz") - .run(); - - let toml = paths::root().join("foo/Cargo.toml"); - let contents = fs::read_to_string(&toml).unwrap(); - assert!(contents.contains(r#"authors = ["bar "]"#)); -} - -#[cargo_test] -fn finds_author_email() { - create_empty_gitconfig(); - cargo_process("new foo") - .env("USER", "bar") - .env("EMAIL", "baz") - .run(); - - let 
toml = paths::root().join("foo/Cargo.toml"); - let contents = fs::read_to_string(&toml).unwrap(); - assert!(contents.contains(r#"authors = ["bar "]"#)); -} - -#[cargo_test] -fn finds_author_git() { - git_process("config --global user.name bar").exec().unwrap(); - git_process("config --global user.email baz") - .exec() - .unwrap(); - cargo_process("new foo").env("USER", "foo").run(); - - let toml = paths::root().join("foo/Cargo.toml"); - let contents = fs::read_to_string(&toml).unwrap(); - assert!(contents.contains(r#"authors = ["bar "]"#)); -} - -#[cargo_test] -fn finds_local_author_git() { - git_process("init").exec_with_output().unwrap(); - git_process("config --global user.name foo").exec().unwrap(); - git_process("config --global user.email foo@bar") - .exec() - .unwrap(); - - // Set local git user config - git_process("config user.name bar").exec().unwrap(); - git_process("config user.email baz").exec().unwrap(); - cargo_process("init").env("USER", "foo").run(); - - let toml = paths::root().join("Cargo.toml"); - let contents = fs::read_to_string(&toml).unwrap(); - assert!(contents.contains(r#"authors = ["bar "]"#)); -} - -#[cargo_test] -fn finds_git_author() { - cargo_process("new foo") - .env("GIT_AUTHOR_NAME", "foo") - .env("GIT_AUTHOR_EMAIL", "gitfoo") - .run(); - - let toml = paths::root().join("foo/Cargo.toml"); - let contents = fs::read_to_string(&toml).unwrap(); - assert!( - contents.contains(r#"authors = ["foo "]"#), - "{}", - contents - ); -} - -#[cargo_test] -fn finds_git_author_in_included_config() { - let included_gitconfig = paths::root().join("foo").join(".gitconfig"); - included_gitconfig.parent().unwrap().mkdir_p(); - fs::write( - &included_gitconfig, - r#" - [user] - name = foo - email = bar - "#, - ) - .unwrap(); - - let gitconfig = paths::home().join(".gitconfig"); - fs::write( - &gitconfig, - format!( - r#" - [includeIf "gitdir/i:{}"] - path = {} - "#, - included_gitconfig - .parent() - .unwrap() - .join("") - .display() - .to_string() - 
.replace("\\", "/"), - included_gitconfig.display().to_string().replace("\\", "/"), - ) - .as_bytes(), - ) - .unwrap(); - - cargo_process("new foo/bar").run(); - let toml = paths::root().join("foo/bar/Cargo.toml"); - let contents = fs::read_to_string(&toml).unwrap(); - assert!( - contents.contains(r#"authors = ["foo "]"#), - "{}", - contents - ); -} - -#[cargo_test] -fn finds_git_committer() { - create_empty_gitconfig(); - cargo_process("new foo") - .env_remove("USER") - .env("GIT_COMMITTER_NAME", "foo") - .env("GIT_COMMITTER_EMAIL", "gitfoo") - .run(); - - let toml = paths::root().join("foo/Cargo.toml"); - let contents = fs::read_to_string(&toml).unwrap(); - assert!(contents.contains(r#"authors = ["foo "]"#)); -} - -#[cargo_test] -fn author_prefers_cargo() { - git_process("config --global user.name foo").exec().unwrap(); - git_process("config --global user.email bar") - .exec() - .unwrap(); - let root = paths::root(); - fs::create_dir(&root.join(".cargo")).unwrap(); - fs::write( - &root.join(".cargo/config"), - r#" - [cargo-new] - name = "new-foo" - email = "new-bar" - vcs = "none" - "#, - ) - .unwrap(); - - cargo_process("new foo").env("USER", "foo").run(); - - let toml = paths::root().join("foo/Cargo.toml"); - let contents = fs::read_to_string(&toml).unwrap(); - assert!(contents.contains(r#"authors = ["new-foo "]"#)); - assert!(!root.join("foo/.gitignore").exists()); -} - -#[cargo_test] -fn strip_angle_bracket_author_email() { - create_empty_gitconfig(); - cargo_process("new foo") - .env("USER", "bar") - .env("EMAIL", "") - .run(); - - let toml = paths::root().join("foo/Cargo.toml"); - let contents = fs::read_to_string(&toml).unwrap(); - assert!(contents.contains(r#"authors = ["bar "]"#)); -} - -#[cargo_test] fn git_prefers_command_line() { let root = paths::root(); fs::create_dir(&root.join(".cargo")).unwrap(); @@ -491,22 +234,23 @@ ) .unwrap(); - cargo_process("new foo --vcs git").env("USER", "foo").run(); + cargo_process("new foo --vcs git").run(); 
assert!(paths::root().join("foo/.gitignore").exists()); + assert!(!fs::read_to_string(paths::root().join("foo/Cargo.toml")) + .unwrap() + .contains("authors =")); } #[cargo_test] fn subpackage_no_git() { - cargo_process("new foo").env("USER", "foo").run(); + cargo_process("new foo").run(); assert!(paths::root().join("foo/.git").is_dir()); assert!(paths::root().join("foo/.gitignore").is_file()); let subpackage = paths::root().join("foo").join("components"); fs::create_dir(&subpackage).unwrap(); - cargo_process("new foo/components/subcomponent") - .env("USER", "foo") - .run(); + cargo_process("new foo/components/subcomponent").run(); assert!(!paths::root() .join("foo/components/subcomponent/.git") @@ -518,7 +262,7 @@ #[cargo_test] fn subpackage_git_with_gitignore() { - cargo_process("new foo").env("USER", "foo").run(); + cargo_process("new foo").run(); assert!(paths::root().join("foo/.git").is_dir()); assert!(paths::root().join("foo/.gitignore").is_file()); @@ -528,9 +272,7 @@ let subpackage = paths::root().join("foo/components"); fs::create_dir(&subpackage).unwrap(); - cargo_process("new foo/components/subcomponent") - .env("USER", "foo") - .run(); + cargo_process("new foo/components/subcomponent").run(); assert!(paths::root() .join("foo/components/subcomponent/.git") @@ -542,13 +284,11 @@ #[cargo_test] fn subpackage_git_with_vcs_arg() { - cargo_process("new foo").env("USER", "foo").run(); + cargo_process("new foo").run(); let subpackage = paths::root().join("foo").join("components"); fs::create_dir(&subpackage).unwrap(); - cargo_process("new foo/components/subcomponent --vcs git") - .env("USER", "foo") - .run(); + cargo_process("new foo/components/subcomponent --vcs git").run(); assert!(paths::root() .join("foo/components/subcomponent/.git") @@ -593,32 +333,27 @@ #[cargo_test] fn explicit_project_name() { cargo_process("new --lib foo --name bar") - .env("USER", "foo") .with_stderr("[CREATED] library `bar` package") .run(); } #[cargo_test] fn new_with_edition_2015() 
{ - cargo_process("new --edition 2015 foo") - .env("USER", "foo") - .run(); + cargo_process("new --edition 2015 foo").run(); let manifest = fs::read_to_string(paths::root().join("foo/Cargo.toml")).unwrap(); assert!(manifest.contains("edition = \"2015\"")); } #[cargo_test] fn new_with_edition_2018() { - cargo_process("new --edition 2018 foo") - .env("USER", "foo") - .run(); + cargo_process("new --edition 2018 foo").run(); let manifest = fs::read_to_string(paths::root().join("foo/Cargo.toml")).unwrap(); assert!(manifest.contains("edition = \"2018\"")); } #[cargo_test] fn new_default_edition() { - cargo_process("new foo").env("USER", "foo").run(); + cargo_process("new foo").run(); let manifest = fs::read_to_string(paths::root().join("foo/Cargo.toml")).unwrap(); assert!(manifest.contains("edition = \"2018\"")); } @@ -626,26 +361,14 @@ #[cargo_test] fn new_with_bad_edition() { cargo_process("new --edition something_else foo") - .env("USER", "foo") .with_stderr_contains("error: 'something_else' isn't a valid value[..]") .with_status(1) .run(); } #[cargo_test] -fn new_with_blank_email() { - cargo_process("new foo") - .env("CARGO_NAME", "Sen") - .env("CARGO_EMAIL", "") - .run(); - - let contents = fs::read_to_string(paths::root().join("foo/Cargo.toml")).unwrap(); - assert!(contents.contains(r#"authors = ["Sen"]"#), "{}", contents); -} - -#[cargo_test] fn new_with_reference_link() { - cargo_process("new foo").env("USER", "foo").run(); + cargo_process("new foo").run(); let contents = fs::read_to_string(paths::root().join("foo/Cargo.toml")).unwrap(); assert!(contents.contains("# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html")) @@ -653,7 +376,7 @@ #[cargo_test] fn lockfile_constant_during_new() { - cargo_process("new foo").env("USER", "foo").run(); + cargo_process("new foo").run(); cargo_process("build").cwd(&paths::root().join("foo")).run(); let before = fs::read_to_string(paths::root().join("foo/Cargo.lock")).unwrap(); @@ 
-666,7 +389,6 @@ fn restricted_windows_name() { if cfg!(windows) { cargo_process("new nul") - .env("USER", "foo") .with_status(101) .with_stderr( "\ @@ -677,7 +399,6 @@ .run(); } else { cargo_process("new nul") - .env("USER", "foo") .with_stderr( "\ [WARNING] the name `nul` is a reserved Windows filename @@ -692,7 +413,6 @@ #[cargo_test] fn non_ascii_name() { cargo_process("new Привет") - .env("USER", "foo") .with_stderr( "\ [WARNING] the name `Привет` contains non-ASCII characters @@ -707,7 +427,6 @@ fn non_ascii_name_invalid() { // These are alphanumeric characters, but not Unicode XID. cargo_process("new ⒶⒷⒸ") - .env("USER", "foo") .with_status(101) .with_stderr( "\ @@ -728,7 +447,6 @@ .run(); cargo_process("new a¼") - .env("USER", "foo") .with_status(101) .with_stderr( "\ @@ -753,7 +471,7 @@ fn git_default_branch() { // Check for init.defaultBranch support. create_empty_gitconfig(); - cargo_process("new foo").env("USER", "foo").run(); + cargo_process("new foo").run(); let repo = git2::Repository::open(paths::root().join("foo")).unwrap(); let head = repo.find_reference("HEAD").unwrap(); assert_eq!(head.symbolic_target().unwrap(), "refs/heads/master"); @@ -766,7 +484,7 @@ "#, ) .unwrap(); - cargo_process("new bar").env("USER", "foo").run(); + cargo_process("new bar").run(); let repo = git2::Repository::open(paths::root().join("bar")).unwrap(); let head = repo.find_reference("HEAD").unwrap(); assert_eq!(head.symbolic_target().unwrap(), "refs/heads/hello"); diff -Nru cargo-0.53.0/tests/testsuite/offline.rs cargo-0.54.0/tests/testsuite/offline.rs --- cargo-0.53.0/tests/testsuite/offline.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/tests/testsuite/offline.rs 2021-04-27 14:35:53.000000000 +0000 @@ -332,7 +332,7 @@ } #[cargo_test] -fn update_offline() { +fn update_offline_not_cached() { let p = project() .file( "Cargo.toml", @@ -350,7 +350,15 @@ .build(); p.cargo("update --offline") .with_status(101) - .with_stderr("error: you can't update in the offline 
mode[..]") + .with_stderr( + "\ +[ERROR] no matching package named `bar` found +location searched: registry `[..]` +required by package `foo v0.0.1 ([..]/foo)` +As a reminder, you're using offline mode (--offline) which can sometimes cause \ +surprising resolution failures, if this error is too confusing you may wish to \ +retry without the offline flag.", + ) .run(); } @@ -562,3 +570,118 @@ p.cargo("check --offline").run(); } + +#[cargo_test] +fn update_offline_cached() { + // Cache a few versions to update against + let p = project().file("src/lib.rs", "").build(); + let versions = ["1.2.3", "1.2.5", "1.2.9"]; + for vers in versions.iter() { + Package::new("present_dep", vers) + .file("Cargo.toml", &basic_manifest("present_dep", vers)) + .file( + "src/lib.rs", + format!(r#"pub fn get_version()->&'static str {{ "{}" }}"#, vers).as_str(), + ) + .publish(); + // make package cached + p.change_file( + "Cargo.toml", + format!( + r#" + [project] + name = "foo" + version = "0.1.0" + + [dependencies] + present_dep = "={}" + "#, + vers + ) + .as_str(), + ); + p.cargo("build").run(); + } + + let p2 = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + + [dependencies] + present_dep = "1.2" + "#, + ) + .file( + "src/main.rs", + "\ +extern crate present_dep; +fn main(){ + println!(\"{}\", present_dep::get_version()); +}", + ) + .build(); + + p2.cargo("build --offline") + .with_stderr( + "\ +[COMPILING] present_dep v1.2.9 +[COMPILING] foo v0.1.0 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", + ) + .run(); + p2.rename_run("foo", "with_1_2_9") + .with_stdout("1.2.9") + .run(); + // updates happen without updating the index + p2.cargo("update -p present_dep --precise 1.2.3 --offline") + .with_status(0) + .with_stderr( + "\ +[UPDATING] present_dep v1.2.9 -> v1.2.3 +", + ) + .run(); + + p2.cargo("build --offline") + .with_stderr( + "\ +[COMPILING] present_dep v1.2.3 +[COMPILING] foo v0.1.0 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); + p2.rename_run("foo", "with_1_2_3") + .with_stdout("1.2.3") + .run(); + + // Offline update should only print package details and not index updating + p2.cargo("update --offline") + .with_status(0) + .with_stderr( + "\ +[UPDATING] present_dep v1.2.3 -> v1.2.9 +", + ) + .run(); + + // No v1.2.8 loaded into the cache so expect failure. + p2.cargo("update -p present_dep --precise 1.2.8 --offline") + .with_status(101) + .with_stderr( + "\ +[ERROR] no matching package named `present_dep` found +location searched: registry `[..]` +required by package `foo v0.1.0 ([..]/foo)` +As a reminder, you're using offline mode (--offline) which can sometimes cause \ +surprising resolution failures, if this error is too confusing you may wish to \ +retry without the offline flag. +", + ) + .run(); +} diff -Nru cargo-0.53.0/tests/testsuite/old_cargos.rs cargo-0.54.0/tests/testsuite/old_cargos.rs --- cargo-0.53.0/tests/testsuite/old_cargos.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/tests/testsuite/old_cargos.rs 2021-04-27 14:35:53.000000000 +0000 @@ -10,11 +10,11 @@ //! cargo test --test testsuite -- old_cargos --nocapture --ignored //! 
``` -use cargo::util::{ProcessBuilder, ProcessError}; use cargo::CargoResult; use cargo_test_support::paths::CargoPathExt; use cargo_test_support::registry::{self, Dependency, Package}; use cargo_test_support::{cargo_exe, execs, paths, process, project, rustc_host}; +use cargo_util::{ProcessBuilder, ProcessError}; use semver::Version; use std::fs; @@ -68,7 +68,7 @@ format!("nightly-{}", host), ]; - let output = cargo::util::process("rustup") + let output = ProcessBuilder::new("rustup") .args(&["toolchain", "list"]) .exec_with_output() .expect("rustup should be installed"); @@ -586,4 +586,61 @@ ", ) .run(); +} + +#[cargo_test] +#[ignore] +fn avoids_split_debuginfo_collision() { + // Checks for a bug where .o files were being incorrectly shared between + // different toolchains using incremental and split-debuginfo on macOS. + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + + [profile.dev] + split-debuginfo = "unpacked" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + execs() + .with_process_builder(tc_process("cargo", "stable")) + .arg("build") + .env("CARGO_INCREMENTAL", "1") + .cwd(p.root()) + .with_stderr( + "\ +[COMPILING] foo v0.1.0 [..] +[FINISHED] [..] +", + ) + .run(); + + p.cargo("build") + .env("CARGO_INCREMENTAL", "1") + .with_stderr( + "\ +[COMPILING] foo v0.1.0 [..] +[FINISHED] [..] +", + ) + .run(); + + execs() + .with_process_builder(tc_process("cargo", "stable")) + .arg("build") + .env("CARGO_INCREMENTAL", "1") + .cwd(p.root()) + .with_stderr( + "\ +[COMPILING] foo v0.1.0 [..] +[FINISHED] [..] +", + ) + .run(); } diff -Nru cargo-0.53.0/tests/testsuite/package_features.rs cargo-0.54.0/tests/testsuite/package_features.rs --- cargo-0.53.0/tests/testsuite/package_features.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/tests/testsuite/package_features.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,8 +1,9 @@ //! Tests for feature selection on the command-line. 
use super::features2::switch_to_resolver_2; -use cargo_test_support::registry::Package; +use cargo_test_support::registry::{Dependency, Package}; use cargo_test_support::{basic_manifest, project}; +use std::fmt::Write; #[cargo_test] fn virtual_no_default_features() { @@ -460,32 +461,162 @@ } #[cargo_test] -fn resolver1_non_member_optional_feature() { - // --features x/y for an optional dependency `x` with the v1 resolver. +fn non_member_feature() { + // --features for a non-member + Package::new("jazz", "1.0.0").publish(); Package::new("bar", "1.0.0") - .feature("feat1", &[]) - .file( - "src/lib.rs", - r#" - #[cfg(not(feature = "feat1"))] - compile_error!("feat1 should be activated"); - "#, - ) + .add_dep(Dependency::new("jazz", "1.0").optional(true)) .publish(); - let p = project() - .file( - "Cargo.toml", + let make_toml = |resolver, optional| { + let mut s = String::new(); + write!( + s, r#" [package] name = "foo" version = "0.1.0" + resolver = "{}" [dependencies] - bar = { version="1.0", optional=true } "#, + resolver ) + .unwrap(); + if optional { + s.push_str(r#"bar = { version = "1.0", optional = true } "#); + } else { + s.push_str(r#"bar = "1.0""#) + } + s.push('\n'); + s + }; + let p = project() + .file("Cargo.toml", &make_toml("1", false)) .file("src/lib.rs", "") .build(); + p.cargo("fetch").run(); + ///////////////////////// V1 non-optional + eprintln!("V1 non-optional"); + p.cargo("check -p bar") + .with_stderr( + "\ +[CHECKING] bar v1.0.0 +[FINISHED] [..] +", + ) + .run(); + // TODO: This should not be allowed (future warning?) + p.cargo("check --features bar/jazz") + .with_stderr( + "\ +[DOWNLOADING] crates ... +[DOWNLOADED] jazz v1.0.0 [..] +[CHECKING] jazz v1.0.0 +[CHECKING] bar v1.0.0 +[CHECKING] foo v0.1.0 [..] +[FINISHED] [..] +", + ) + .run(); + // TODO: This should not be allowed (future warning?) + p.cargo("check -p bar --features bar/jazz -v") + .with_stderr( + "\ +[FRESH] jazz v1.0.0 +[FRESH] bar v1.0.0 +[FINISHED] [..] 
+", + ) + .run(); + + ///////////////////////// V1 optional + eprintln!("V1 optional"); + p.change_file("Cargo.toml", &make_toml("1", true)); + + // This error isn't great, but is probably unlikely to be common in + // practice, so I'm not going to put much effort into improving it. + p.cargo("check -p bar") + .with_status(101) + .with_stderr( + "\ +error: package ID specification `bar` did not match any packages + +Did you mean `foo`? +", + ) + .run(); + + p.cargo("check -p bar --features bar -v") + .with_stderr( + "\ +[FRESH] bar v1.0.0 +[FINISHED] [..] +", + ) + .run(); + + // TODO: This should not be allowed (future warning?) + p.cargo("check -p bar --features bar/jazz -v") + .with_stderr( + "\ +[FRESH] jazz v1.0.0 +[FRESH] bar v1.0.0 +[FINISHED] [..] +", + ) + .run(); + + ///////////////////////// V2 non-optional + eprintln!("V2 non-optional"); + p.change_file("Cargo.toml", &make_toml("2", false)); + // TODO: This should not be allowed (future warning?) + p.cargo("check --features bar/jazz -v") + .with_stderr( + "\ +[FRESH] jazz v1.0.0 +[FRESH] bar v1.0.0 +[FRESH] foo v0.1.0 [..] +[FINISHED] [..] +", + ) + .run(); + p.cargo("check -p bar -v") + .with_stderr( + "\ +[FRESH] bar v1.0.0 +[FINISHED] [..] +", + ) + .run(); + p.cargo("check -p bar --features bar/jazz") + .with_status(101) + .with_stderr("error: cannot specify features for packages outside of workspace") + .run(); + + ///////////////////////// V2 optional + eprintln!("V2 optional"); + p.change_file("Cargo.toml", &make_toml("2", true)); + p.cargo("check -p bar") + .with_status(101) + .with_stderr( + "\ +error: package ID specification `bar` did not match any packages - p.cargo("check -p bar --features bar/feat1").run(); +Did you mean `foo`? +", + ) + .run(); + // New --features behavior does not look at cwd. 
+ p.cargo("check -p bar --features bar") + .with_status(101) + .with_stderr("error: cannot specify features for packages outside of workspace") + .run(); + p.cargo("check -p bar --features bar/jazz") + .with_status(101) + .with_stderr("error: cannot specify features for packages outside of workspace") + .run(); + p.cargo("check -p bar --features foo/bar") + .with_status(101) + .with_stderr("error: cannot specify features for packages outside of workspace") + .run(); } diff -Nru cargo-0.53.0/tests/testsuite/patch.rs cargo-0.54.0/tests/testsuite/patch.rs --- cargo-0.53.0/tests/testsuite/patch.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/tests/testsuite/patch.rs 2021-04-27 14:35:53.000000000 +0000 @@ -2320,3 +2320,80 @@ ) .run(); } + +#[cargo_test] +fn old_git_patch() { + // Example where an old lockfile with an explicit branch="master" in Cargo.toml. + Package::new("bar", "1.0.0").publish(); + let (bar, bar_repo) = git::new_repo("bar", |p| { + p.file("Cargo.toml", &basic_manifest("bar", "1.0.0")) + .file("src/lib.rs", "") + }); + + let bar_oid = bar_repo.head().unwrap().target().unwrap(); + + let p = project() + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "foo" + version = "0.1.0" + + [dependencies] + bar = "1.0" + + [patch.crates-io] + bar = {{ git = "{}", branch = "master" }} + "#, + bar.url() + ), + ) + .file( + "Cargo.lock", + &format!( + r#" +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. +[[package]] +name = "bar" +version = "1.0.0" +source = "git+{}#{}" + +[[package]] +name = "foo" +version = "0.1.0" +dependencies = [ + "bar", +] + "#, + bar.url(), + bar_oid + ), + ) + .file("src/lib.rs", "") + .build(); + + bar.change_file("Cargo.toml", &basic_manifest("bar", "2.0.0")); + git::add(&bar_repo); + git::commit(&bar_repo); + + // This *should* keep the old lock. + p.cargo("tree") + // .env("CARGO_LOG", "trace") + .with_stderr( + "\ +[UPDATING] [..] 
+", + ) + // .with_status(1) + .with_stdout(format!( + "\ +foo v0.1.0 [..] +└── bar v1.0.0 (file:///[..]branch=master#{}) +", + &bar_oid.to_string()[..8] + )) + .run(); +} diff -Nru cargo-0.53.0/tests/testsuite/profile_config.rs cargo-0.54.0/tests/testsuite/profile_config.rs --- cargo-0.53.0/tests/testsuite/profile_config.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/tests/testsuite/profile_config.rs 2021-04-27 14:35:53.000000000 +0000 @@ -342,7 +342,7 @@ // foo -> middle -> bar -> dev // middle exists in Cargo.toml, the others in .cargo/config use super::config::ConfigBuilder; - use cargo::core::compiler::CompileMode; + use cargo::core::compiler::{CompileKind, CompileMode}; use cargo::core::profiles::{Profiles, UnitFor}; use cargo::core::{PackageId, Workspace}; use cargo::util::interning::InternedString; @@ -403,7 +403,8 @@ // normal package let mode = CompileMode::Build; - let p = profiles.get_profile(a_pkg, true, true, UnitFor::new_normal(), mode); + let kind = CompileKind::Host; + let p = profiles.get_profile(a_pkg, true, true, UnitFor::new_normal(), mode, kind); assert_eq!(p.name, "foo"); assert_eq!(p.codegen_units, Some(2)); // "foo" from config assert_eq!(p.opt_level, "1"); // "middle" from manifest @@ -412,7 +413,7 @@ assert_eq!(p.overflow_checks, true); // "dev" built-in (ignore package override) // build-override - let bo = profiles.get_profile(a_pkg, true, true, UnitFor::new_host(false), mode); + let bo = profiles.get_profile(a_pkg, true, true, UnitFor::new_host(false), mode, kind); assert_eq!(bo.name, "foo"); assert_eq!(bo.codegen_units, Some(6)); // "foo" build override from config assert_eq!(bo.opt_level, "0"); // default to zero @@ -421,7 +422,7 @@ assert_eq!(bo.overflow_checks, true); // SAME as normal // package overrides - let po = profiles.get_profile(dep_pkg, false, true, UnitFor::new_normal(), mode); + let po = profiles.get_profile(dep_pkg, false, true, UnitFor::new_normal(), mode, kind); assert_eq!(po.name, "foo"); 
assert_eq!(po.codegen_units, Some(7)); // "foo" package override from config assert_eq!(po.opt_level, "1"); // SAME as normal diff -Nru cargo-0.53.0/tests/testsuite/registry.rs cargo-0.54.0/tests/testsuite/registry.rs --- cargo-0.53.0/tests/testsuite/registry.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/tests/testsuite/registry.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,11 +1,12 @@ //! Tests for normal registry dependencies. -use cargo::{core::SourceId, util::paths::remove_dir_all}; +use cargo::core::SourceId; use cargo_test_support::paths::{self, CargoPathExt}; use cargo_test_support::registry::{self, registry_path, Dependency, Package}; use cargo_test_support::{basic_manifest, project}; use cargo_test_support::{cargo_process, registry::registry_url}; use cargo_test_support::{git, install::cargo_home, t}; +use cargo_util::paths::remove_dir_all; use std::fs::{self, File}; use std::path::Path; diff -Nru cargo-0.53.0/tests/testsuite/run.rs cargo-0.54.0/tests/testsuite/run.rs --- cargo-0.53.0/tests/testsuite/run.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/tests/testsuite/run.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,7 +1,7 @@ //! Tests for the `cargo run` command. -use cargo::util::paths::dylib_path_envvar; use cargo_test_support::{basic_bin_manifest, basic_lib_manifest, project, Project}; +use cargo_util::paths::dylib_path_envvar; #[cargo_test] fn simple() { diff -Nru cargo-0.53.0/tests/testsuite/rustc_info_cache.rs cargo-0.54.0/tests/testsuite/rustc_info_cache.rs --- cargo-0.53.0/tests/testsuite/rustc_info_cache.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/tests/testsuite/rustc_info_cache.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,40 +1,40 @@ //! Tests for the cache file for the rustc version info. -use cargo_test_support::paths::CargoPathExt; +use cargo_test_support::{basic_bin_manifest, paths::CargoPathExt}; use cargo_test_support::{basic_manifest, project}; use std::env; +const MISS: &str = "[..] 
rustc info cache miss[..]"; +const HIT: &str = "[..]rustc info cache hit[..]"; +const UPDATE: &str = "[..]updated rustc info cache[..]"; + #[cargo_test] fn rustc_info_cache() { let p = project() .file("src/main.rs", r#"fn main() { println!("hello"); }"#) .build(); - let miss = "[..] rustc info cache miss[..]"; - let hit = "[..]rustc info cache hit[..]"; - let update = "[..]updated rustc info cache[..]"; - p.cargo("build") .env("CARGO_LOG", "cargo::util::rustc=debug") .with_stderr_contains("[..]failed to read rustc info cache[..]") - .with_stderr_contains(miss) - .with_stderr_does_not_contain(hit) - .with_stderr_contains(update) + .with_stderr_contains(MISS) + .with_stderr_does_not_contain(HIT) + .with_stderr_contains(UPDATE) .run(); p.cargo("build") .env("CARGO_LOG", "cargo::util::rustc=debug") .with_stderr_contains("[..]reusing existing rustc info cache[..]") - .with_stderr_contains(hit) - .with_stderr_does_not_contain(miss) - .with_stderr_does_not_contain(update) + .with_stderr_contains(HIT) + .with_stderr_does_not_contain(MISS) + .with_stderr_does_not_contain(UPDATE) .run(); p.cargo("build") .env("CARGO_LOG", "cargo::util::rustc=debug") .env("CARGO_CACHE_RUSTC_INFO", "0") .with_stderr_contains("[..]rustc info cache disabled[..]") - .with_stderr_does_not_contain(update) + .with_stderr_does_not_contain(UPDATE) .run(); let other_rustc = { @@ -68,18 +68,18 @@ .env("CARGO_LOG", "cargo::util::rustc=debug") .env("RUSTC", other_rustc.display().to_string()) .with_stderr_contains("[..]different compiler, creating new rustc info cache[..]") - .with_stderr_contains(miss) - .with_stderr_does_not_contain(hit) - .with_stderr_contains(update) + .with_stderr_contains(MISS) + .with_stderr_does_not_contain(HIT) + .with_stderr_contains(UPDATE) .run(); p.cargo("build") .env("CARGO_LOG", "cargo::util::rustc=debug") .env("RUSTC", other_rustc.display().to_string()) .with_stderr_contains("[..]reusing existing rustc info cache[..]") - .with_stderr_contains(hit) - 
.with_stderr_does_not_contain(miss) - .with_stderr_does_not_contain(update) + .with_stderr_contains(HIT) + .with_stderr_does_not_contain(MISS) + .with_stderr_does_not_contain(UPDATE) .run(); other_rustc.move_into_the_future(); @@ -88,17 +88,99 @@ .env("CARGO_LOG", "cargo::util::rustc=debug") .env("RUSTC", other_rustc.display().to_string()) .with_stderr_contains("[..]different compiler, creating new rustc info cache[..]") - .with_stderr_contains(miss) - .with_stderr_does_not_contain(hit) - .with_stderr_contains(update) + .with_stderr_contains(MISS) + .with_stderr_does_not_contain(HIT) + .with_stderr_contains(UPDATE) .run(); p.cargo("build") .env("CARGO_LOG", "cargo::util::rustc=debug") .env("RUSTC", other_rustc.display().to_string()) .with_stderr_contains("[..]reusing existing rustc info cache[..]") - .with_stderr_contains(hit) - .with_stderr_does_not_contain(miss) - .with_stderr_does_not_contain(update) + .with_stderr_contains(HIT) + .with_stderr_does_not_contain(MISS) + .with_stderr_does_not_contain(UPDATE) .run(); } + +#[cargo_test] +fn rustc_info_cache_with_wrappers() { + let wrapper_project = project() + .at("wrapper") + .file("Cargo.toml", &basic_bin_manifest("wrapper")) + .file("src/main.rs", r#"fn main() { }"#) + .build(); + let wrapper = wrapper_project.bin("wrapper"); + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "test" + version = "0.0.0" + authors = [] + [workspace] + "#, + ) + .file("src/main.rs", r#"fn main() { println!("hello"); }"#) + .build(); + + for &wrapper_env in ["RUSTC_WRAPPER", "RUSTC_WORKSPACE_WRAPPER"].iter() { + p.cargo("clean").with_status(0).run(); + wrapper_project.change_file( + "src/main.rs", + r#" + fn main() { + let mut args = std::env::args_os(); + let _me = args.next().unwrap(); + let rustc = args.next().unwrap(); + let status = std::process::Command::new(rustc).args(args).status().unwrap(); + std::process::exit(if status.success() { 0 } else { 1 }) + } + "#, + ); + 
wrapper_project.cargo("build").with_status(0).run(); + + p.cargo("build") + .env("CARGO_LOG", "cargo::util::rustc=debug") + .env(wrapper_env, &wrapper) + .with_stderr_contains("[..]failed to read rustc info cache[..]") + .with_stderr_contains(MISS) + .with_stderr_contains(UPDATE) + .with_stderr_does_not_contain(HIT) + .with_status(0) + .run(); + p.cargo("build") + .env("CARGO_LOG", "cargo::util::rustc=debug") + .env(wrapper_env, &wrapper) + .with_stderr_contains("[..]reusing existing rustc info cache[..]") + .with_stderr_contains(HIT) + .with_stderr_does_not_contain(UPDATE) + .with_stderr_does_not_contain(MISS) + .with_status(0) + .run(); + + wrapper_project.change_file("src/main.rs", r#"fn main() { panic!() }"#); + wrapper_project.cargo("build").with_status(0).run(); + + p.cargo("build") + .env("CARGO_LOG", "cargo::util::rustc=debug") + .env(wrapper_env, &wrapper) + .with_stderr_contains("[..]different compiler, creating new rustc info cache[..]") + .with_stderr_contains(MISS) + .with_stderr_contains(UPDATE) + .with_stderr_does_not_contain(HIT) + .with_status(101) + .run(); + p.cargo("build") + .env("CARGO_LOG", "cargo::util::rustc=debug") + .env(wrapper_env, &wrapper) + .with_stderr_contains("[..]reusing existing rustc info cache[..]") + .with_stderr_contains(HIT) + .with_stderr_does_not_contain(UPDATE) + .with_stderr_does_not_contain(MISS) + .with_status(101) + .run(); + } +} diff -Nru cargo-0.53.0/tests/testsuite/rustdoc_extern_html.rs cargo-0.54.0/tests/testsuite/rustdoc_extern_html.rs --- cargo-0.53.0/tests/testsuite/rustdoc_extern_html.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/tests/testsuite/rustdoc_extern_html.rs 2021-04-27 14:35:53.000000000 +0000 @@ -69,7 +69,7 @@ // For local developers, skip this test if docs aren't installed. 
let docs = std::path::Path::new(&paths::sysroot()).join("share/doc/rust/html"); if !docs.exists() { - if cargo::util::is_ci() { + if cargo_util::is_ci() { panic!("std docs are not installed, check that the rust-docs component is installed"); } else { eprintln!( diff -Nru cargo-0.53.0/tests/testsuite/tree.rs cargo-0.54.0/tests/testsuite/tree.rs --- cargo-0.53.0/tests/testsuite/tree.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/tests/testsuite/tree.rs 2021-04-27 14:35:53.000000000 +0000 @@ -367,7 +367,7 @@ Package::new("build_target_dep", "1.0.0").publish(); Package::new("build_host_dep", "1.0.0") .target_dep("targetdep", "1.0", alternate()) - .target_dep("hostdep", "1.0", &rustc_host()) + .target_dep("hostdep", "1.0", rustc_host()) .publish(); Package::new("pm_target", "1.0.0") .proc_macro(true) diff -Nru cargo-0.53.0/tests/testsuite/update.rs cargo-0.54.0/tests/testsuite/update.rs --- cargo-0.53.0/tests/testsuite/update.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/tests/testsuite/update.rs 2021-04-27 14:35:53.000000000 +0000 @@ -584,8 +584,7 @@ let mut lines = lockfile.lines().collect::>(); lines.insert(2, "# some other comment"); let mut lockfile = lines.join("\n"); - lockfile.push_str("\n\n"); // .lines/.join loses the last newline - // >>>>>>> parent of 7dd9872c1... Change git dependencies to use `HEAD` by default + lockfile.push('\n'); // .lines/.join loses the last newline println!("saving Cargo.lock contents:\n{}", lockfile); p.change_file("Cargo.lock", &lockfile); diff -Nru cargo-0.53.0/tests/testsuite/weak_dep_features.rs cargo-0.54.0/tests/testsuite/weak_dep_features.rs --- cargo-0.53.0/tests/testsuite/weak_dep_features.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/tests/testsuite/weak_dep_features.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,5 +1,7 @@ //! Tests for weak-dep-features. 
+use super::features2::switch_to_resolver_2; +use cargo_test_support::paths::CargoPathExt; use cargo_test_support::registry::{Dependency, Package}; use cargo_test_support::{project, publish}; use std::fmt::Write; @@ -272,6 +274,7 @@ .file("src/lib.rs", "") .build(); + // Does not build bar. p.cargo("check --features bar?/feat -Z weak-dep-features") .masquerade_as_nightly_cargo() .with_stderr( @@ -285,6 +288,33 @@ ) .run(); + // Builds bar. + p.cargo("check --features bar?/feat,bar -Z weak-dep-features") + .masquerade_as_nightly_cargo() + .with_stderr( + "\ +[CHECKING] bar v1.0.0 +[CHECKING] foo v0.1.0 [..] +[FINISHED] [..] +", + ) + .run(); + + eprintln!("check V2 resolver"); + switch_to_resolver_2(&p); + p.build_dir().rm_rf(); + // Does not build bar. + p.cargo("check --features bar?/feat -Z weak-dep-features") + .masquerade_as_nightly_cargo() + .with_stderr( + "\ +[CHECKING] foo v0.1.0 [..] +[FINISHED] [..] +", + ) + .run(); + + // Builds bar. p.cargo("check --features bar?/feat,bar -Z weak-dep-features") .masquerade_as_nightly_cargo() .with_stderr( @@ -564,6 +594,32 @@ ", ) .run(); + + p.cargo("tree -Z weak-dep-features -e features --features bar?/feat") + .masquerade_as_nightly_cargo() + .with_stdout("foo v0.1.0 ([ROOT]/foo)") + .run(); + + // This is a little strange in that it produces no output. + // Maybe `cargo tree` should print a note about why? 
+ p.cargo("tree -Z weak-dep-features -e features -i bar --features bar?/feat") + .masquerade_as_nightly_cargo() + .with_stdout("") + .run(); + + p.cargo("tree -Z weak-dep-features -e features -i bar --features bar?/feat,bar") + .masquerade_as_nightly_cargo() + .with_stdout( + "\ +bar v1.0.0 +├── bar feature \"default\" +│ └── foo v0.1.0 ([ROOT]/foo) +│ ├── foo feature \"bar\" (command-line) +│ └── foo feature \"default\" (command-line) +└── bar feature \"feat\" (command-line) +", + ) + .run(); } #[cargo_test] diff -Nru cargo-0.53.0/tests/testsuite/workspaces.rs cargo-0.54.0/tests/testsuite/workspaces.rs --- cargo-0.53.0/tests/testsuite/workspaces.rs 2021-04-21 00:43:41.000000000 +0000 +++ cargo-0.54.0/tests/testsuite/workspaces.rs 2021-04-27 14:35:53.000000000 +0000 @@ -408,7 +408,10 @@ .with_status(101) .with_stderr( "\ -error: failed to read `[..]Cargo.toml` +[ERROR] failed to load manifest for workspace member `[..]/foo` + +Caused by: + failed to read `[..]foo/foo/Cargo.toml` Caused by: [..] @@ -1031,7 +1034,6 @@ let p = p.build(); p.cargo("new --lib bar") - .env("USER", "foo") .with_stderr( "\ warning: compiling this new package may not work due to invalid workspace configuration @@ -1053,7 +1055,6 @@ fn new_warning_with_corrupt_ws() { let p = project().file("Cargo.toml", "asdf").build(); p.cargo("new bar") - .env("USER", "foo") .with_stderr( "\ [WARNING] compiling this new package may not work due to invalid workspace configuration @@ -1871,7 +1872,10 @@ .with_status(101) .with_stderr( "\ -error: failed to read `[..]Cargo.toml` +[ERROR] failed to load manifest for workspace member `[..]/crates/bar` + +Caused by: + failed to read `[..]foo/crates/bar/Cargo.toml` Caused by: [..] @@ -2315,6 +2319,55 @@ Caused by: [..] +", + ) + .run(); +} + +#[cargo_test] +fn member_dep_missing() { + // Make sure errors are not suppressed with -q. 
+ let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + + [workspace] + members = ["bar"] + "#, + ) + .file("src/main.rs", "fn main() {}") + .file( + "bar/Cargo.toml", + r#" + [project] + name = "bar" + version = "0.1.0" + + [dependencies] + baz = { path = "baz" } + "#, + ) + .file("bar/src/main.rs", "fn main() {}") + .build(); + + p.cargo("build -q") + .with_status(101) + .with_stderr( + "\ +[ERROR] failed to load manifest for workspace member `[..]/bar` + +Caused by: + failed to load manifest for dependency `baz` + +Caused by: + failed to read `[..]foo/bar/baz/Cargo.toml` + +Caused by: + [..] ", ) .run(); diff -Nru cargo-0.53.0/vendor/bstr/.pc/.quilt_patches cargo-0.54.0/vendor/bstr/.pc/.quilt_patches --- cargo-0.53.0/vendor/bstr/.pc/.quilt_patches 2021-10-21 23:54:55.000000000 +0000 +++ cargo-0.54.0/vendor/bstr/.pc/.quilt_patches 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -debian/patches diff -Nru cargo-0.53.0/vendor/bstr/.pc/.quilt_series cargo-0.54.0/vendor/bstr/.pc/.quilt_series --- cargo-0.53.0/vendor/bstr/.pc/.quilt_series 2021-10-21 23:54:55.000000000 +0000 +++ cargo-0.54.0/vendor/bstr/.pc/.quilt_series 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -series diff -Nru cargo-0.53.0/vendor/bstr/.pc/.version cargo-0.54.0/vendor/bstr/.pc/.version --- cargo-0.53.0/vendor/bstr/.pc/.version 2021-10-21 23:54:55.000000000 +0000 +++ cargo-0.54.0/vendor/bstr/.pc/.version 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -2 diff -Nru cargo-0.53.0/vendor/cc/.pc/.quilt_patches cargo-0.54.0/vendor/cc/.pc/.quilt_patches --- cargo-0.53.0/vendor/cc/.pc/.quilt_patches 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.54.0/vendor/cc/.pc/.quilt_patches 2021-10-26 21:08:52.000000000 +0000 @@ -0,0 +1 @@ +debian/patches diff -Nru cargo-0.53.0/vendor/cc/.pc/.quilt_series cargo-0.54.0/vendor/cc/.pc/.quilt_series --- cargo-0.53.0/vendor/cc/.pc/.quilt_series 1970-01-01 00:00:00.000000000 +0000 +++ 
cargo-0.54.0/vendor/cc/.pc/.quilt_series 2021-10-26 21:08:52.000000000 +0000 @@ -0,0 +1 @@ +series diff -Nru cargo-0.53.0/vendor/cc/.pc/.version cargo-0.54.0/vendor/cc/.pc/.version --- cargo-0.53.0/vendor/cc/.pc/.version 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.54.0/vendor/cc/.pc/.version 2021-10-26 21:08:52.000000000 +0000 @@ -0,0 +1 @@ +2 diff -Nru cargo-0.53.0/vendor/env_logger/.pc/.quilt_patches cargo-0.54.0/vendor/env_logger/.pc/.quilt_patches --- cargo-0.53.0/vendor/env_logger/.pc/.quilt_patches 2021-10-21 23:54:55.000000000 +0000 +++ cargo-0.54.0/vendor/env_logger/.pc/.quilt_patches 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -debian/patches diff -Nru cargo-0.53.0/vendor/env_logger/.pc/.quilt_series cargo-0.54.0/vendor/env_logger/.pc/.quilt_series --- cargo-0.53.0/vendor/env_logger/.pc/.quilt_series 2021-10-21 23:54:55.000000000 +0000 +++ cargo-0.54.0/vendor/env_logger/.pc/.quilt_series 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -series diff -Nru cargo-0.53.0/vendor/env_logger/.pc/.version cargo-0.54.0/vendor/env_logger/.pc/.version --- cargo-0.53.0/vendor/env_logger/.pc/.version 2021-10-21 23:54:55.000000000 +0000 +++ cargo-0.54.0/vendor/env_logger/.pc/.version 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -2 diff -Nru cargo-0.53.0/vendor/ignore/.pc/.quilt_patches cargo-0.54.0/vendor/ignore/.pc/.quilt_patches --- cargo-0.53.0/vendor/ignore/.pc/.quilt_patches 2021-10-21 23:54:55.000000000 +0000 +++ cargo-0.54.0/vendor/ignore/.pc/.quilt_patches 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -debian/patches diff -Nru cargo-0.53.0/vendor/ignore/.pc/.quilt_series cargo-0.54.0/vendor/ignore/.pc/.quilt_series --- cargo-0.53.0/vendor/ignore/.pc/.quilt_series 2021-10-21 23:54:55.000000000 +0000 +++ cargo-0.54.0/vendor/ignore/.pc/.quilt_series 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -series diff -Nru cargo-0.53.0/vendor/ignore/.pc/.version cargo-0.54.0/vendor/ignore/.pc/.version --- cargo-0.53.0/vendor/ignore/.pc/.version 
2021-10-21 23:54:55.000000000 +0000 +++ cargo-0.54.0/vendor/ignore/.pc/.version 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -2 diff -Nru cargo-0.53.0/vendor/libc/.cargo-checksum.json cargo-0.54.0/vendor/libc/.cargo-checksum.json --- cargo-0.53.0/vendor/libc/.cargo-checksum.json 2021-10-21 23:54:55.000000000 +0000 +++ cargo-0.54.0/vendor/libc/.cargo-checksum.json 2021-10-26 21:08:52.000000000 +0000 @@ -1 +1 @@ -{"files":{},"package":"7b2f96d100e1cf1929e7719b7edb3b90ab5298072638fccd77be9ce942ecdfce"} \ No newline at end of file +{"files":{},"package":"869d572136620d55835903746bcb5cdc54cb2851fd0aeec53220b4bb65ef3013"} \ No newline at end of file diff -Nru cargo-0.53.0/vendor/libc/Cargo.toml cargo-0.54.0/vendor/libc/Cargo.toml --- cargo-0.53.0/vendor/libc/Cargo.toml 2021-10-21 23:54:55.000000000 +0000 +++ cargo-0.54.0/vendor/libc/Cargo.toml 2021-10-26 21:08:52.000000000 +0000 @@ -11,7 +11,7 @@ [package] name = "libc" -version = "0.2.104" +version = "0.2.105" authors = ["The Rust Project Developers"] build = "build.rs" exclude = ["/ci/*", "/.github/*", "/.cirrus.yml", "/triagebot.toml"] diff -Nru cargo-0.53.0/vendor/libc/src/unix/bsd/apple/mod.rs cargo-0.54.0/vendor/libc/src/unix/bsd/apple/mod.rs --- cargo-0.53.0/vendor/libc/src/unix/bsd/apple/mod.rs 2021-10-21 23:54:55.000000000 +0000 +++ cargo-0.54.0/vendor/libc/src/unix/bsd/apple/mod.rs 2021-10-26 21:08:52.000000000 +0000 @@ -52,7 +52,7 @@ pub type host_info64_t = *mut integer_t; pub type processor_flavor_t = ::c_int; pub type thread_flavor_t = natural_t; -pub type thread_inspect_t = mach_port_t; +pub type thread_inspect_t = ::mach_port_t; pub type policy_t = ::c_int; pub type mach_vm_address_t = u64; pub type mach_vm_offset_t = u64; @@ -90,7 +90,7 @@ pub type thread_extended_info_t = *mut thread_extended_info; pub type thread_extended_info_data_t = thread_extended_info; -pub type thread_t = mach_port_t; +pub type thread_t = ::mach_port_t; pub type thread_policy_flavor_t = natural_t; pub type thread_policy_t = 
*mut integer_t; pub type thread_latency_qos_t = integer_t; @@ -120,7 +120,7 @@ pub type vm_statistics64_t = *mut vm_statistics64; pub type vm_statistics64_data_t = vm_statistics64; -pub type task_t = mach_port_t; +pub type task_t = ::mach_port_t; pub type sysdir_search_path_enumeration_state = ::c_uint; @@ -4302,6 +4302,9 @@ pub const PROC_PIDTHREADINFO: ::c_int = 5; pub const PROC_PIDVNODEPATHINFO: ::c_int = 9; pub const PROC_PIDPATHINFO_MAXSIZE: ::c_int = 4096; +pub const PROC_CSM_ALL: ::c_uint = 0x0001; +pub const PROC_CSM_NOSMT: ::c_uint = 0x0002; +pub const PROC_CSM_TECS: ::c_uint = 0x0004; pub const MAXCOMLEN: usize = 16; pub const MAXTHREADNAMESIZE: usize = 64; @@ -4873,6 +4876,9 @@ thread: ::pthread_t, key: ::pthread_key_t, ) -> *mut ::c_void; + pub fn pthread_jit_write_protect_np(enabled: ::c_int); + pub fn pthread_jit_write_protect_supported_np() -> ::c_int; + pub fn pthread_cpu_number_np(cpu_number_out: *mut ::size_t) -> ::c_int; pub fn thread_policy_set( thread: thread_t, @@ -5246,6 +5252,12 @@ pub fn proc_kmsgbuf(buffer: *mut ::c_void, buffersize: u32) -> ::c_int; pub fn proc_libversion(major: *mut ::c_int, mintor: *mut ::c_int) -> ::c_int; pub fn proc_pid_rusage(pid: ::c_int, flavor: ::c_int, buffer: *mut rusage_info_t) -> ::c_int; + + // Available from Big Sur + pub fn proc_set_no_smt() -> ::c_int; + pub fn proc_setthread_no_smt() -> ::c_int; + pub fn proc_set_csm(flags: u32) -> ::c_int; + pub fn proc_setthread_csm(flags: u32) -> ::c_int; /// # Notes /// /// `id` is of type [`uuid_t`]. 
@@ -5293,11 +5305,14 @@ out_processor_infoCnt: *mut mach_msg_type_number_t, ) -> ::kern_return_t; - pub static mut mach_task_self_: mach_port_t; - pub fn task_for_pid(host: mach_port_t, pid: ::pid_t, task: *mut mach_port_t) - -> ::kern_return_t; + pub static mut mach_task_self_: ::mach_port_t; + pub fn task_for_pid( + host: ::mach_port_t, + pid: ::pid_t, + task: *mut ::mach_port_t, + ) -> ::kern_return_t; pub fn task_info( - host: mach_port_t, + host: ::mach_port_t, flavor: task_flavor_t, task_info_out: task_info_t, task_info_count: *mut mach_msg_type_number_t, @@ -5322,7 +5337,7 @@ pub static vm_page_size: vm_size_t; } -pub unsafe fn mach_task_self() -> mach_port_t { +pub unsafe fn mach_task_self() -> ::mach_port_t { mach_task_self_ } diff -Nru cargo-0.53.0/vendor/libc/src/unix/bsd/freebsdlike/dragonfly/mod.rs cargo-0.54.0/vendor/libc/src/unix/bsd/freebsdlike/dragonfly/mod.rs --- cargo-0.53.0/vendor/libc/src/unix/bsd/freebsdlike/dragonfly/mod.rs 2021-10-21 23:54:55.000000000 +0000 +++ cargo-0.54.0/vendor/libc/src/unix/bsd/freebsdlike/dragonfly/mod.rs 2021-10-26 21:08:52.000000000 +0000 @@ -715,6 +715,9 @@ #[deprecated(since = "0.2.64", note = "Not stable across OS versions")] pub const RLIM_NLIMITS: ::rlim_t = 12; +#[deprecated(since = "0.2.105", note = "Only exists on FreeBSD, not DragonFly BSD")] +pub const XU_NGROUPS: ::c_int = 16; + pub const Q_GETQUOTA: ::c_int = 0x300; pub const Q_SETQUOTA: ::c_int = 0x400; diff -Nru cargo-0.53.0/vendor/libc/src/unix/bsd/freebsdlike/freebsd/freebsd11/mod.rs cargo-0.54.0/vendor/libc/src/unix/bsd/freebsdlike/freebsd/freebsd11/mod.rs --- cargo-0.53.0/vendor/libc/src/unix/bsd/freebsdlike/freebsd/freebsd11/mod.rs 2021-10-21 23:54:55.000000000 +0000 +++ cargo-0.54.0/vendor/libc/src/unix/bsd/freebsdlike/freebsd/freebsd11/mod.rs 2021-10-26 21:08:52.000000000 +0000 @@ -191,6 +191,7 @@ pub const ELAST: ::c_int = 96; pub const RAND_MAX: ::c_int = 0x7fff_fffd; +pub const KI_NSPARE_PTR: usize = 6; extern "C" { // Return type ::c_int was 
removed in FreeBSD 12 diff -Nru cargo-0.53.0/vendor/libc/src/unix/bsd/freebsdlike/freebsd/freebsd12/mod.rs cargo-0.54.0/vendor/libc/src/unix/bsd/freebsdlike/freebsd/freebsd12/mod.rs --- cargo-0.53.0/vendor/libc/src/unix/bsd/freebsdlike/freebsd/freebsd12/mod.rs 2021-10-21 23:54:55.000000000 +0000 +++ cargo-0.54.0/vendor/libc/src/unix/bsd/freebsdlike/freebsd/freebsd12/mod.rs 2021-10-26 21:08:52.000000000 +0000 @@ -26,6 +26,16 @@ pub udata: *mut ::c_void, pub ext: [u64; 4], } + + pub struct kvm_page { + pub version: ::c_uint, + pub paddr: ::c_ulong, + pub kmap_vaddr: ::c_ulong, + pub dmap_vaddr: ::c_ulong, + pub prot: ::vm_prot_t, + pub offset: ::u_long, + pub len: ::size_t, + } } s_no_extra_traits! { @@ -190,28 +200,13 @@ } } -pub const F_ADD_SEALS: ::c_int = 19; -pub const F_GET_SEALS: ::c_int = 20; -pub const F_SEAL_SEAL: ::c_int = 0x0001; -pub const F_SEAL_SHRINK: ::c_int = 0x0002; -pub const F_SEAL_GROW: ::c_int = 0x0004; -pub const F_SEAL_WRITE: ::c_int = 0x0008; - -pub const GRND_NONBLOCK: ::c_uint = 0x1; -pub const GRND_RANDOM: ::c_uint = 0x2; - pub const RAND_MAX: ::c_int = 0x7fff_fffd; - -pub const PROC_ASLR_CTL: ::c_int = 13; -pub const PROC_ASLR_STATUS: ::c_int = 14; - -pub const PROC_PROCCTL_MD_MIN: ::c_int = 0x10000000; - -pub const SO_DOMAIN: ::c_int = 0x1019; - -pub const EINTEGRITY: ::c_int = 97; pub const ELAST: ::c_int = 97; +/// max length of devicename +pub const SPECNAMELEN: ::c_int = 63; +pub const KI_NSPARE_PTR: usize = 6; + extern "C" { pub fn setgrent(); pub fn mprotect(addr: *mut ::c_void, len: ::size_t, prot: ::c_int) -> ::c_int; diff -Nru cargo-0.53.0/vendor/libc/src/unix/bsd/freebsdlike/freebsd/freebsd13/mod.rs cargo-0.54.0/vendor/libc/src/unix/bsd/freebsdlike/freebsd/freebsd13/mod.rs --- cargo-0.53.0/vendor/libc/src/unix/bsd/freebsdlike/freebsd/freebsd13/mod.rs 2021-10-21 23:54:55.000000000 +0000 +++ cargo-0.54.0/vendor/libc/src/unix/bsd/freebsdlike/freebsd/freebsd13/mod.rs 2021-10-26 21:08:52.000000000 +0000 @@ -4,6 +4,9 @@ pub type 
dev_t = u64; pub type ino_t = ::c_ulong; pub type shmatt_t = ::c_uint; +pub type kpaddr_t = u64; +pub type kssize_t = i64; +pub type domainset_t = __c_anonymous_domainset; s! { pub struct shmid_ds { @@ -37,6 +40,20 @@ pub sc_ngroups: ::c_int, pub sc_groups: [::gid_t; 1], } + + pub struct kvm_page { + pub kp_version: ::u_int, + pub kp_paddr: ::kpaddr_t, + pub kp_kmap_vaddr: ::kvaddr_t, + pub kp_dmap_vaddr: ::kvaddr_t, + pub kp_prot: ::vm_prot_t, + pub kp_offset: ::off_t, + pub kp_len: ::size_t, + } + + pub struct __c_anonymous_domainset { + _priv: [::uintptr_t; 4], + } } s_no_extra_traits! { @@ -201,32 +218,21 @@ } } -pub const F_ADD_SEALS: ::c_int = 19; -pub const F_GET_SEALS: ::c_int = 20; -pub const F_SEAL_SEAL: ::c_int = 0x0001; -pub const F_SEAL_SHRINK: ::c_int = 0x0002; -pub const F_SEAL_GROW: ::c_int = 0x0004; -pub const F_SEAL_WRITE: ::c_int = 0x0008; - -pub const GRND_NONBLOCK: ::c_uint = 0x1; -pub const GRND_RANDOM: ::c_uint = 0x2; - pub const RAND_MAX: ::c_int = 0x7fff_ffff; - -pub const SO_DOMAIN: ::c_int = 0x1019; - -pub const EINTEGRITY: ::c_int = 97; pub const ELAST: ::c_int = 97; -pub const GRND_INSECURE: ::c_uint = 0x4; -pub const PROC_ASLR_CTL: ::c_int = 13; -pub const PROC_ASLR_STATUS: ::c_int = 14; -pub const PROC_PROTMAX_CTL: ::c_int = 15; -pub const PROC_PROTMAX_STATUS: ::c_int = 16; -pub const PROC_PROCCTL_MD_MIN: ::c_int = 0x10000000; +pub const KF_TYPE_EVENTFD: ::c_int = 13; -pub const LOCAL_CREDS_PERSISTENT: ::c_int = 3; -pub const SCM_CREDS2: ::c_int = 0x08; +/// max length of devicename +pub const SPECNAMELEN: ::c_int = 255; +pub const KI_NSPARE_PTR: usize = 5; + +/// domainset policies +pub const DOMAINSET_POLICY_INVALID: ::c_int = 0; +pub const DOMAINSET_POLICY_ROUNDROBIN: ::c_int = 1; +pub const DOMAINSET_POLICY_FIRSTTOUCH: ::c_int = 2; +pub const DOMAINSET_POLICY_PREFER: ::c_int = 3; +pub const DOMAINSET_POLICY_INTERLEAVE: ::c_int = 4; f! 
{ pub fn SOCKCRED2SIZE(ngrps: usize) -> usize { @@ -267,6 +273,28 @@ pub fn setproctitle_fast(fmt: *const ::c_char, ...); pub fn timingsafe_bcmp(a: *const ::c_void, b: *const ::c_void, len: ::size_t) -> ::c_int; pub fn timingsafe_memcmp(a: *const ::c_void, b: *const ::c_void, len: ::size_t) -> ::c_int; + + pub fn cpuset_getdomain( + level: ::cpulevel_t, + which: ::cpuwhich_t, + id: ::id_t, + setsize: ::size_t, + mask: *mut ::domainset_t, + policy: *mut ::c_int, + ) -> ::c_int; + pub fn cpuset_setdomain( + level: ::cpulevel_t, + which: ::cpuwhich_t, + id: ::id_t, + setsize: ::size_t, + mask: *const ::domainset_t, + policy: ::c_int, + ) -> ::c_int; +} + +#[link(name = "kvm")] +extern "C" { + pub fn kvm_kerndisp(kd: *mut ::kvm_t) -> ::kssize_t; } cfg_if! { diff -Nru cargo-0.53.0/vendor/libc/src/unix/bsd/freebsdlike/freebsd/mod.rs cargo-0.54.0/vendor/libc/src/unix/bsd/freebsdlike/freebsd/mod.rs --- cargo-0.53.0/vendor/libc/src/unix/bsd/freebsdlike/freebsd/mod.rs 2021-10-21 23:54:55.000000000 +0000 +++ cargo-0.54.0/vendor/libc/src/unix/bsd/freebsdlike/freebsd/mod.rs 2021-10-26 21:08:52.000000000 +0000 @@ -1,7 +1,13 @@ pub type fflags_t = u32; pub type clock_t = i32; -pub type lwpid_t = i32; +pub type vm_prot_t = u_char; +pub type kvaddr_t = u64; +pub type segsz_t = isize; +pub type __fixpt_t = u32; +pub type fixpt_t = __fixpt_t; +pub type __lwpid_t = i32; +pub type lwpid_t = __lwpid_t; pub type blksize_t = i32; pub type clockid_t = ::c_int; pub type sem_t = _sem; @@ -26,6 +32,14 @@ pub type pthread_barrier_t = *mut __c_anonymous_pthread_barrier; pub type uuid_t = ::uuid; +pub type u_int = ::c_uint; +pub type u_char = ::c_uchar; +pub type u_long = ::c_ulong; +pub type u_short = ::c_ushort; + +// It's an alias over "struct __kvm_t". However, its fields aren't supposed to be used directly, +// making the type definition system dependent. Better not bind it exactly. +pub type kvm_t = ::c_void; s! 
{ pub struct aiocb { @@ -218,10 +232,6 @@ pub kve_path: [[::c_char; 32]; 32], } - pub struct kinfo_proc { - __pad0: [[::uintptr_t; 17]; 8], - } - pub struct filestat { fs_type: ::c_int, fs_flags: ::c_int, @@ -259,6 +269,164 @@ pub struct __c_anonymous__timer { _priv: [::c_int; 3], } + + /// Used to hold a copy of the command line, if it had a sane length. + pub struct pargs { + /// Reference count. + pub ar_ref: u_int, + /// Length. + pub ar_length: u_int, + /// Arguments. + pub ar_args: [::c_uchar; 1], + } + + pub struct priority { + /// Scheduling class. + pub pri_class: u_char, + /// Normal priority level. + pub pri_level: u_char, + /// Priority before propagation. + pub pri_native: u_char, + /// User priority based on p_cpu and p_nice. + pub pri_user: u_char, + } + + pub struct kinfo_proc { + pub ki_structsize: ::c_int, + pub ki_layout: ::c_int, + pub ki_args: *mut pargs, + // This is normally "struct proc". + pub ki_paddr: *mut ::c_void, + // This is normally "struct user". + pub ki_addr: *mut ::c_void, + // This is normally "struct vnode". + pub ki_tracep: *mut ::c_void, + // This is normally "struct vnode". + pub ki_textvp: *mut ::c_void, + // This is normally "struct filedesc". + pub ki_fd: *mut ::c_void, + // This is normally "struct vmspace". 
+ pub ki_vmspace: *mut ::c_void, + #[cfg(freebsd13)] + pub ki_wchan: *const ::c_void, + #[cfg(not(freebsd13))] + pub ki_wchan: *mut ::c_void, + pub ki_pid: ::pid_t, + pub ki_ppid: ::pid_t, + pub ki_pgid: ::pid_t, + pub ki_tpgid: ::pid_t, + pub ki_sid: ::pid_t, + pub ki_tsid: ::pid_t, + pub ki_jobc: ::c_short, + pub ki_spare_short1: ::c_short, + #[cfg(any(freebsd12, freebsd13))] + pub ki_tdev_freebsd11: u32, + #[cfg(freebsd11)] + pub ki_tdev: ::dev_t, + pub ki_siglist: ::sigset_t, + pub ki_sigmask: ::sigset_t, + pub ki_sigignore: ::sigset_t, + pub ki_sigcatch: ::sigset_t, + pub ki_uid: ::uid_t, + pub ki_ruid: ::uid_t, + pub ki_svuid: ::uid_t, + pub ki_rgid: ::gid_t, + pub ki_svgid: ::gid_t, + pub ki_ngroups: ::c_short, + pub ki_spare_short2: ::c_short, + pub ki_groups: [::gid_t; ::KI_NGROUPS], + pub ki_size: ::vm_size_t, + pub ki_rssize: segsz_t, + pub ki_swrss: segsz_t, + pub ki_tsize: segsz_t, + pub ki_dsize: segsz_t, + pub ki_ssize: segsz_t, + pub ki_xstat: ::u_short, + pub ki_acflag: ::u_short, + pub ki_pctcpu: fixpt_t, + pub ki_estcpu: u_int, + pub ki_slptime: u_int, + pub ki_swtime: u_int, + pub ki_cow: u_int, + pub ki_runtime: u64, + pub ki_start: ::timeval, + pub ki_childtime: ::timeval, + pub ki_flag: ::c_long, + pub ki_kiflag: ::c_long, + pub ki_traceflag: ::c_int, + pub ki_stat: ::c_char, + pub ki_nice: i8, // signed char + pub ki_lock: ::c_char, + pub ki_rqindex: ::c_char, + pub ki_oncpu_old: ::c_uchar, + pub ki_lastcpu_old: ::c_uchar, + pub ki_tdname: [::c_char; TDNAMLEN + 1], + pub ki_wmesg: [::c_char; ::WMESGLEN + 1], + pub ki_login: [::c_char; ::LOGNAMELEN + 1], + pub ki_lockname: [::c_char; ::LOCKNAMELEN + 1], + pub ki_comm: [::c_char; ::COMMLEN + 1], + pub ki_emul: [::c_char; ::KI_EMULNAMELEN + 1], + pub ki_loginclass: [::c_char; ::LOGINCLASSLEN + 1], + pub ki_moretdname: [::c_char; ::MAXCOMLEN - ::TDNAMLEN + 1], + pub ki_sparestrings: [[::c_char; 23]; 2], // little hack to allow PartialEq + pub ki_spareints: [::c_int; ::KI_NSPARE_INT], + 
#[cfg(freebsd13)] + pub ki_tdev: u64, + #[cfg(freebsd12)] + pub ki_tdev: ::dev_t, + pub ki_oncpu: ::c_int, + pub ki_lastcpu: ::c_int, + pub ki_tracer: ::c_int, + pub ki_flag2: ::c_int, + pub ki_fibnum: ::c_int, + pub ki_cr_flags: u_int, + pub ki_jid: ::c_int, + pub ki_numthreads: ::c_int, + pub ki_tid: lwpid_t, + pub ki_pri: priority, + pub ki_rusage: ::rusage, + pub ki_rusage_ch: ::rusage, + // This is normally "struct pcb". + pub ki_pcb: *mut ::c_void, + pub ki_kstack: *mut ::c_void, + pub ki_udata: *mut ::c_void, + // This is normally "struct thread". + pub ki_tdaddr: *mut ::c_void, + // This is normally "struct pwddesc". + #[cfg(freebsd13)] + pub ki_pd: *mut ::c_void, + pub ki_spareptrs: [*mut ::c_void; ::KI_NSPARE_PTR], + pub ki_sparelongs: [::c_long; ::KI_NSPARE_LONG], + pub ki_sflag: ::c_long, + pub ki_tdflags: ::c_long, + } + + pub struct kvm_swap { + pub ksw_devname: [::c_char; 32], + pub ksw_used: u_int, + pub ksw_total: u_int, + pub ksw_flags: ::c_int, + pub ksw_reserved1: u_int, + pub ksw_reserved2: u_int, + } + + pub struct nlist { + /// symbol name (in memory) + pub n_name: *const ::c_char, + /// type defines + pub n_type: ::c_uchar, + /// "type" and binding information + pub n_other: ::c_char, + /// used by stab entries + pub n_desc: ::c_short, + pub n_value: ::c_ulong, + } + + pub struct kvm_nlist { + pub n_name: *const ::c_char, + pub n_type: ::c_uchar, + pub n_value: ::kvaddr_t, + } } s_no_extra_traits! 
{ @@ -609,6 +777,8 @@ pub const PTHREAD_STACK_MIN: ::size_t = MINSIGSTKSZ; pub const PTHREAD_MUTEX_ADAPTIVE_NP: ::c_int = 4; +pub const PTHREAD_MUTEX_STALLED: ::c_int = 0; +pub const PTHREAD_MUTEX_ROBUST: ::c_int = 1; pub const SIGSTKSZ: ::size_t = MINSIGSTKSZ + 32768; pub const SF_NODISKIO: ::c_int = 0x00000001; pub const SF_MNOWAIT: ::c_int = 0x00000002; @@ -626,6 +796,7 @@ pub const ECAPMODE: ::c_int = 94; pub const ENOTRECOVERABLE: ::c_int = 95; pub const EOWNERDEAD: ::c_int = 96; +pub const EINTEGRITY: ::c_int = 97; pub const RLIMIT_NPTS: ::c_int = 11; pub const RLIMIT_SWAP: ::c_int = 12; pub const RLIMIT_KQUEUES: ::c_int = 13; @@ -640,6 +811,8 @@ pub const NI_DGRAM: ::c_int = 0x00000010; pub const NI_NUMERICSCOPE: ::c_int = 0x00000020; +pub const XU_NGROUPS: ::c_int = 16; + pub const Q_GETQUOTA: ::c_int = 0x700; pub const Q_SETQUOTA: ::c_int = 0x800; @@ -714,7 +887,6 @@ pub const NOTE_NSECONDS: u32 = 0x00000008; pub const MADV_PROTECT: ::c_int = 10; -pub const RUSAGE_THREAD: ::c_int = 1; #[doc(hidden)] #[deprecated( @@ -901,6 +1073,8 @@ pub const MNT_EXPUBLIC: ::c_int = 0x20000000; pub const MNT_NONBUSY: ::c_int = 0x04000000; +pub const SCM_CREDS2: ::c_int = 0x08; + pub const SO_BINTIME: ::c_int = 0x2000; pub const SO_NO_OFFLOAD: ::c_int = 0x4000; pub const SO_NO_DDP: ::c_int = 0x8000; @@ -914,9 +1088,11 @@ pub const SO_USER_COOKIE: ::c_int = 0x1015; pub const SO_PROTOCOL: ::c_int = 0x1016; pub const SO_PROTOTYPE: ::c_int = SO_PROTOCOL; +pub const SO_DOMAIN: ::c_int = 0x1019; pub const SO_VENDOR: ::c_int = 0x80000000; pub const LOCAL_CREDS: ::c_int = 2; +pub const LOCAL_CREDS_PERSISTENT: ::c_int = 3; pub const LOCAL_CONNWAIT: ::c_int = 4; pub const LOCAL_VENDOR: ::c_int = SO_VENDOR; @@ -965,8 +1141,13 @@ pub const PROC_TRAPCAP_STATUS: ::c_int = 10; pub const PROC_PDEATHSIG_CTL: ::c_int = 11; pub const PROC_PDEATHSIG_STATUS: ::c_int = 12; +pub const PROC_ASLR_CTL: ::c_int = 13; +pub const PROC_ASLR_STATUS: ::c_int = 14; +pub const PROC_PROTMAX_CTL: ::c_int = 
15; +pub const PROC_PROTMAX_STATUS: ::c_int = 16; pub const PROC_STACKGAP_CTL: ::c_int = 17; pub const PROC_STACKGAP_STATUS: ::c_int = 18; +pub const PROC_PROCCTL_MD_MIN: ::c_int = 0x10000000; pub const AF_SLOW: ::c_int = 33; pub const AF_SCLUSTER: ::c_int = 34; @@ -1433,14 +1614,28 @@ pub const UF_HIDDEN: ::c_ulong = 0x00008000; pub const SF_SNAPSHOT: ::c_ulong = 0x00200000; +// fcntl commands +pub const F_ADD_SEALS: ::c_int = 19; +pub const F_DUP2FD: ::c_int = 10; +pub const F_DUP2FD_CLOEXEC: ::c_int = 18; +pub const F_GET_SEALS: ::c_int = 20; pub const F_OGETLK: ::c_int = 7; pub const F_OSETLK: ::c_int = 8; pub const F_OSETLKW: ::c_int = 9; -pub const F_DUP2FD: ::c_int = 10; -pub const F_SETLK_REMOTE: ::c_int = 14; -pub const F_READAHEAD: ::c_int = 15; pub const F_RDAHEAD: ::c_int = 16; -pub const F_DUP2FD_CLOEXEC: ::c_int = 18; +pub const F_READAHEAD: ::c_int = 15; +pub const F_SETLK_REMOTE: ::c_int = 14; + +// for use with F_ADD_SEALS +pub const F_SEAL_GROW: ::c_int = 4; +pub const F_SEAL_SEAL: ::c_int = 1; +pub const F_SEAL_SHRINK: ::c_int = 2; +pub const F_SEAL_WRITE: ::c_int = 8; + +// For getrandom() +pub const GRND_NONBLOCK: ::c_uint = 0x1; +pub const GRND_RANDOM: ::c_uint = 0x2; +pub const GRND_INSECURE: ::c_uint = 0x4; // For realhostname* api pub const HOSTNAME_FOUND: ::c_int = 0; @@ -1461,6 +1656,241 @@ pub const MALLOCX_ZERO: ::c_int = 0x40; +/// size of returned wchan message +pub const WMESGLEN: usize = 8; +/// size of returned lock name +pub const LOCKNAMELEN: usize = 8; +/// size of returned thread name +pub const TDNAMLEN: usize = 16; +/// size of returned ki_comm name +pub const COMMLEN: usize = 19; +/// size of returned ki_emul +pub const KI_EMULNAMELEN: usize = 16; +/// number of groups in ki_groups +pub const KI_NGROUPS: usize = 16; +cfg_if! { + if #[cfg(freebsd11)] { + pub const KI_NSPARE_INT: usize = 4; + } else { + pub const KI_NSPARE_INT: usize = 2; + } +} +pub const KI_NSPARE_LONG: usize = 12; +/// Flags for the process credential. 
+pub const KI_CRF_CAPABILITY_MODE: usize = 0x00000001; +/// Steal a bit from ki_cr_flags to indicate that the cred had more than +/// KI_NGROUPS groups. +pub const KI_CRF_GRP_OVERFLOW: usize = 0x80000000; +/// controlling tty vnode active +pub const KI_CTTY: usize = 0x00000001; +/// session leader +pub const KI_SLEADER: usize = 0x00000002; +/// proc blocked on lock ki_lockname +pub const KI_LOCKBLOCK: usize = 0x00000004; +/// size of returned ki_login +pub const LOGNAMELEN: usize = 17; +/// size of returned ki_loginclass +pub const LOGINCLASSLEN: usize = 17; + +pub const KF_ATTR_VALID: ::c_int = 0x0001; +pub const KF_TYPE_NONE: ::c_int = 0; +pub const KF_TYPE_VNODE: ::c_int = 1; +pub const KF_TYPE_SOCKET: ::c_int = 2; +pub const KF_TYPE_PIPE: ::c_int = 3; +pub const KF_TYPE_FIFO: ::c_int = 4; +pub const KF_TYPE_KQUEUE: ::c_int = 5; +pub const KF_TYPE_MQUEUE: ::c_int = 7; +pub const KF_TYPE_SHM: ::c_int = 8; +pub const KF_TYPE_SEM: ::c_int = 9; +pub const KF_TYPE_PTS: ::c_int = 10; +pub const KF_TYPE_PROCDESC: ::c_int = 11; +pub const KF_TYPE_DEV: ::c_int = 12; +pub const KF_TYPE_UNKNOWN: ::c_int = 255; + +pub const KF_VTYPE_VNON: ::c_int = 0; +pub const KF_VTYPE_VREG: ::c_int = 1; +pub const KF_VTYPE_VDIR: ::c_int = 2; +pub const KF_VTYPE_VBLK: ::c_int = 3; +pub const KF_VTYPE_VCHR: ::c_int = 4; +pub const KF_VTYPE_VLNK: ::c_int = 5; +pub const KF_VTYPE_VSOCK: ::c_int = 6; +pub const KF_VTYPE_VFIFO: ::c_int = 7; +pub const KF_VTYPE_VBAD: ::c_int = 8; +pub const KF_VTYPE_UNKNOWN: ::c_int = 255; + +/// Current working directory +pub const KF_FD_TYPE_CWD: ::c_int = -1; +/// Root directory +pub const KF_FD_TYPE_ROOT: ::c_int = -2; +/// Jail directory +pub const KF_FD_TYPE_JAIL: ::c_int = -3; +/// Ktrace vnode +pub const KF_FD_TYPE_TRACE: ::c_int = -4; +pub const KF_FD_TYPE_TEXT: ::c_int = -5; +/// Controlling terminal +pub const KF_FD_TYPE_CTTY: ::c_int = -6; +pub const KF_FLAG_READ: ::c_int = 0x00000001; +pub const KF_FLAG_WRITE: ::c_int = 0x00000002; +pub const 
KF_FLAG_APPEND: ::c_int = 0x00000004; +pub const KF_FLAG_ASYNC: ::c_int = 0x00000008; +pub const KF_FLAG_FSYNC: ::c_int = 0x00000010; +pub const KF_FLAG_NONBLOCK: ::c_int = 0x00000020; +pub const KF_FLAG_DIRECT: ::c_int = 0x00000040; +pub const KF_FLAG_HASLOCK: ::c_int = 0x00000080; +pub const KF_FLAG_SHLOCK: ::c_int = 0x00000100; +pub const KF_FLAG_EXLOCK: ::c_int = 0x00000200; +pub const KF_FLAG_NOFOLLOW: ::c_int = 0x00000400; +pub const KF_FLAG_CREAT: ::c_int = 0x00000800; +pub const KF_FLAG_TRUNC: ::c_int = 0x00001000; +pub const KF_FLAG_EXCL: ::c_int = 0x00002000; +pub const KF_FLAG_EXEC: ::c_int = 0x00004000; + +pub const KVME_TYPE_NONE: ::c_int = 0; +pub const KVME_TYPE_DEFAULT: ::c_int = 1; +pub const KVME_TYPE_VNODE: ::c_int = 2; +pub const KVME_TYPE_SWAP: ::c_int = 3; +pub const KVME_TYPE_DEVICE: ::c_int = 4; +pub const KVME_TYPE_PHYS: ::c_int = 5; +pub const KVME_TYPE_DEAD: ::c_int = 6; +pub const KVME_TYPE_SG: ::c_int = 7; +pub const KVME_TYPE_MGTDEVICE: ::c_int = 8; +// Present in `sys/user.h` but is undefined for whatever reason... +// pub const KVME_TYPE_GUARD: ::c_int = 9; +pub const KVME_TYPE_UNKNOWN: ::c_int = 255; +pub const KVME_PROT_READ: ::c_int = 0x00000001; +pub const KVME_PROT_WRITE: ::c_int = 0x00000002; +pub const KVME_PROT_EXEC: ::c_int = 0x00000004; +pub const KVME_FLAG_COW: ::c_int = 0x00000001; +pub const KVME_FLAG_NEEDS_COPY: ::c_int = 0x00000002; +pub const KVME_FLAG_NOCOREDUMP: ::c_int = 0x00000004; +pub const KVME_FLAG_SUPER: ::c_int = 0x00000008; +pub const KVME_FLAG_GROWS_UP: ::c_int = 0x00000010; +pub const KVME_FLAG_GROWS_DOWN: ::c_int = 0x00000020; +cfg_if! { + if #[cfg(any(freebsd12, freebsd13))] { + pub const KVME_FLAG_USER_WIRED: ::c_int = 0x00000040; + } +} + +pub const KKST_MAXLEN: ::c_int = 1024; +/// Stack is valid. +pub const KKST_STATE_STACKOK: ::c_int = 0; +/// Stack swapped out. +pub const KKST_STATE_SWAPPED: ::c_int = 1; +pub const KKST_STATE_RUNNING: ::c_int = 2; + +// Constants about priority. 
+pub const PRI_MIN: ::c_int = 0; +pub const PRI_MAX: ::c_int = 255; +pub const PRI_MIN_ITHD: ::c_int = PRI_MIN; +pub const PRI_MAX_ITHD: ::c_int = PRI_MIN_REALTIME - 1; +pub const PI_REALTIME: ::c_int = PRI_MIN_ITHD + 0; +pub const PI_AV: ::c_int = PRI_MIN_ITHD + 4; +pub const PI_NET: ::c_int = PRI_MIN_ITHD + 8; +pub const PI_DISK: ::c_int = PRI_MIN_ITHD + 12; +pub const PI_TTY: ::c_int = PRI_MIN_ITHD + 16; +pub const PI_DULL: ::c_int = PRI_MIN_ITHD + 20; +pub const PI_SOFT: ::c_int = PRI_MIN_ITHD + 24; +pub const PRI_MIN_REALTIME: ::c_int = 48; +pub const PRI_MAX_REALTIME: ::c_int = PRI_MIN_KERN - 1; +pub const PRI_MIN_KERN: ::c_int = 80; +pub const PRI_MAX_KERN: ::c_int = PRI_MIN_TIMESHARE - 1; +pub const PSWP: ::c_int = PRI_MIN_KERN + 0; +pub const PVM: ::c_int = PRI_MIN_KERN + 4; +pub const PINOD: ::c_int = PRI_MIN_KERN + 8; +pub const PRIBIO: ::c_int = PRI_MIN_KERN + 12; +pub const PVFS: ::c_int = PRI_MIN_KERN + 16; +pub const PZERO: ::c_int = PRI_MIN_KERN + 20; +pub const PSOCK: ::c_int = PRI_MIN_KERN + 24; +pub const PWAIT: ::c_int = PRI_MIN_KERN + 28; +pub const PLOCK: ::c_int = PRI_MIN_KERN + 32; +pub const PPAUSE: ::c_int = PRI_MIN_KERN + 36; +pub const PRI_MIN_TIMESHARE: ::c_int = 120; +pub const PRI_MAX_TIMESHARE: ::c_int = PRI_MIN_IDLE - 1; +pub const PUSER: ::c_int = PRI_MIN_TIMESHARE; +pub const PRI_MIN_IDLE: ::c_int = 224; +pub const PRI_MAX_IDLE: ::c_int = PRI_MAX; + +// Resource utilization information. +pub const RUSAGE_THREAD: ::c_int = 1; + +cfg_if! { + if #[cfg(any(freebsd11, target_pointer_width = "32"))] { + pub const ARG_MAX: ::c_int = 256 * 1024; + } else { + pub const ARG_MAX: ::c_int = 2 * 256 * 1024; + } +} +pub const CHILD_MAX: ::c_int = 40; +/// max command name remembered +pub const MAXCOMLEN: usize = 19; +/// max interpreter file name length +pub const MAXINTERP: ::c_int = ::PATH_MAX; +/// max login name length (incl. 
NUL) +pub const MAXLOGNAME: ::c_int = 33; +/// max simultaneous processes +pub const MAXUPRC: ::c_int = CHILD_MAX; +/// max bytes for an exec function +pub const NCARGS: ::c_int = ARG_MAX; +/// /* max number groups +pub const NGROUPS: ::c_int = NGROUPS_MAX + 1; +/// max open files per process +pub const NOFILE: ::c_int = OPEN_MAX; +/// marker for empty group set member +pub const NOGROUP: ::c_int = 65535; +/// max hostname size +pub const MAXHOSTNAMELEN: ::c_int = 256; +/// max bytes in term canon input line +pub const MAX_CANON: ::c_int = 255; +/// max bytes in terminal input +pub const MAX_INPUT: ::c_int = 255; +/// max bytes in a file name +pub const NAME_MAX: ::c_int = 255; +pub const MAXSYMLINKS: ::c_int = 32; +/// max supplemental group id's +pub const NGROUPS_MAX: ::c_int = 1023; +/// max open files per process +pub const OPEN_MAX: ::c_int = 64; + +pub const _POSIX_ARG_MAX: ::c_int = 4096; +pub const _POSIX_LINK_MAX: ::c_int = 8; +pub const _POSIX_MAX_CANON: ::c_int = 255; +pub const _POSIX_MAX_INPUT: ::c_int = 255; +pub const _POSIX_NAME_MAX: ::c_int = 14; +pub const _POSIX_PIPE_BUF: ::c_int = 512; +pub const _POSIX_SSIZE_MAX: ::c_int = 32767; +pub const _POSIX_STREAM_MAX: ::c_int = 8; + +/// max ibase/obase values in bc(1) +pub const BC_BASE_MAX: ::c_int = 99; +/// max array elements in bc(1) +pub const BC_DIM_MAX: ::c_int = 2048; +/// max scale value in bc(1) +pub const BC_SCALE_MAX: ::c_int = 99; +/// max const string length in bc(1) +pub const BC_STRING_MAX: ::c_int = 1000; +/// max character class name size +pub const CHARCLASS_NAME_MAX: ::c_int = 14; +/// max weights for order keyword +pub const COLL_WEIGHTS_MAX: ::c_int = 10; +/// max expressions nested in expr(1) +pub const EXPR_NEST_MAX: ::c_int = 32; +/// max bytes in an input line +pub const LINE_MAX: ::c_int = 2048; +/// max RE's in interval notation +pub const RE_DUP_MAX: ::c_int = 255; + +pub const _POSIX2_BC_BASE_MAX: ::c_int = 99; +pub const _POSIX2_BC_DIM_MAX: ::c_int = 2048; +pub const 
_POSIX2_BC_SCALE_MAX: ::c_int = 99; +pub const _POSIX2_BC_STRING_MAX: ::c_int = 1000; +pub const _POSIX2_CHARCLASS_NAME_MAX: ::c_int = 14; +pub const _POSIX2_COLL_WEIGHTS_MAX: ::c_int = 2; +pub const _POSIX2_EQUIV_CLASS_MAX: ::c_int = 2; +pub const _POSIX2_EXPR_NEST_MAX: ::c_int = 32; +pub const _POSIX2_LINE_MAX: ::c_int = 2048; +pub const _POSIX2_RE_DUP_MAX: ::c_int = 255; + const_fn! { {const} fn _ALIGN(p: usize) -> usize { (p + _ALIGNBYTES) & !_ALIGNBYTES @@ -1700,11 +2130,6 @@ msgflg: ::c_int, ) -> ::c_int; pub fn cfmakesane(termios: *mut ::termios); - pub fn fexecve( - fd: ::c_int, - argv: *const *const ::c_char, - envp: *const *const ::c_char, - ) -> ::c_int; pub fn pdfork(fdp: *mut ::c_int, flags: ::c_int) -> ::pid_t; pub fn pdgetpid(fd: ::c_int, pidp: *mut ::pid_t) -> ::c_int; @@ -1806,6 +2231,17 @@ cpusetp: *const cpuset_t, ) -> ::c_int; + pub fn pthread_mutex_consistent(mutex: *mut ::pthread_mutex_t) -> ::c_int; + + pub fn pthread_mutexattr_getrobust( + attr: *mut ::pthread_mutexattr_t, + robust: *mut ::c_int, + ) -> ::c_int; + pub fn pthread_mutexattr_setrobust( + attr: *mut ::pthread_mutexattr_t, + robust: ::c_int, + ) -> ::c_int; + pub fn pthread_spin_init(lock: *mut pthread_spinlock_t, pshared: ::c_int) -> ::c_int; pub fn pthread_spin_destroy(lock: *mut pthread_spinlock_t) -> ::c_int; pub fn pthread_spin_lock(lock: *mut pthread_spinlock_t) -> ::c_int; @@ -1915,6 +2351,77 @@ pub fn nallocx(size: ::size_t, flags: ::c_int) -> ::size_t; pub fn procctl(idtype: ::idtype_t, id: ::id_t, cmd: ::c_int, data: *mut ::c_void) -> ::c_int; + + pub fn getpagesize() -> ::c_int; +} + +#[link(name = "kvm")] +extern "C" { + pub fn kvm_open( + execfile: *const ::c_char, + corefile: *const ::c_char, + swapfile: *const ::c_char, + flags: ::c_int, + errstr: *const ::c_char, + ) -> *mut kvm_t; + pub fn kvm_close(kd: *mut kvm_t) -> ::c_int; + pub fn kvm_dpcpu_setcpu(kd: *mut kvm_t, cpu: ::c_uint) -> ::c_int; + pub fn kvm_getargv(kd: *mut kvm_t, p: *const kinfo_proc, nchr: 
::c_int) -> *mut *mut ::c_char; + pub fn kvm_getcptime(kd: *mut kvm_t, cp_time: *mut ::c_long) -> ::c_int; + pub fn kvm_getenvv(kd: *mut kvm_t, p: *const kinfo_proc, nchr: ::c_int) -> *mut *mut ::c_char; + pub fn kvm_geterr(kd: *mut kvm_t) -> *mut ::c_char; + pub fn kvm_getloadavg(kd: *mut kvm_t, loadavg: *mut ::c_double, nelem: ::c_int) -> ::c_int; + pub fn kvm_getmaxcpu(kd: *mut kvm_t) -> ::c_int; + pub fn kvm_getncpus(kd: *mut kvm_t) -> ::c_int; + pub fn kvm_getpcpu(kd: *mut kvm_t, cpu: ::c_int) -> *mut ::c_void; + pub fn kvm_counter_u64_fetch(kd: *mut kvm_t, base: ::c_ulong) -> u64; + pub fn kvm_getprocs( + kd: *mut kvm_t, + op: ::c_int, + arg: ::c_int, + cnt: *mut ::c_int, + ) -> *mut kinfo_proc; + pub fn kvm_getswapinfo( + kd: *mut kvm_t, + info: *mut kvm_swap, + maxswap: ::c_int, + flags: ::c_int, + ) -> ::c_int; + pub fn kvm_native(kd: *mut kvm_t) -> ::c_int; + pub fn kvm_nlist(kd: *mut kvm_t, nl: *mut nlist) -> ::c_int; + pub fn kvm_nlist2(kd: *mut kvm_t, nl: *mut kvm_nlist) -> ::c_int; + pub fn kvm_openfiles( + execfile: *const ::c_char, + corefile: *const ::c_char, + swapfile: *const ::c_char, + flags: ::c_int, + errbuf: *mut ::c_char, + ) -> *mut kvm_t; + pub fn kvm_read( + kd: *mut kvm_t, + addr: ::c_ulong, + buf: *mut ::c_void, + nbytes: ::size_t, + ) -> ::ssize_t; + pub fn kvm_read_zpcpu( + kd: *mut kvm_t, + base: ::c_ulong, + buf: *mut ::c_void, + size: ::size_t, + cpu: ::c_int, + ) -> ::ssize_t; + pub fn kvm_read2( + kd: *mut kvm_t, + addr: kvaddr_t, + buf: *mut ::c_void, + nbytes: ::size_t, + ) -> ::ssize_t; + pub fn kvm_write( + kd: *mut kvm_t, + addr: ::c_ulong, + buf: *const ::c_void, + nbytes: ::size_t, + ) -> ::ssize_t; } #[link(name = "util")] diff -Nru cargo-0.53.0/vendor/libc/src/unix/bsd/freebsdlike/mod.rs cargo-0.54.0/vendor/libc/src/unix/bsd/freebsdlike/mod.rs --- cargo-0.53.0/vendor/libc/src/unix/bsd/freebsdlike/mod.rs 2021-10-21 23:54:55.000000000 +0000 +++ cargo-0.54.0/vendor/libc/src/unix/bsd/freebsdlike/mod.rs 2021-10-26 
21:08:52.000000000 +0000 @@ -1167,7 +1167,6 @@ pub const NI_MAXHOST: ::size_t = 1025; -pub const XU_NGROUPS: ::c_int = 16; pub const XUCRED_VERSION: ::c_uint = 0; pub const RTLD_LOCAL: ::c_int = 0; @@ -1442,6 +1441,11 @@ pub fn duplocale(base: ::locale_t) -> ::locale_t; pub fn endutxent(); pub fn fchflags(fd: ::c_int, flags: ::c_ulong) -> ::c_int; + pub fn fexecve( + fd: ::c_int, + argv: *const *const ::c_char, + envp: *const *const ::c_char, + ) -> ::c_int; pub fn futimens(fd: ::c_int, times: *const ::timespec) -> ::c_int; pub fn getdomainname(name: *mut ::c_char, len: ::c_int) -> ::c_int; pub fn getgrent_r( diff -Nru cargo-0.53.0/vendor/libc/src/unix/bsd/netbsdlike/netbsd/mod.rs cargo-0.54.0/vendor/libc/src/unix/bsd/netbsdlike/netbsd/mod.rs --- cargo-0.53.0/vendor/libc/src/unix/bsd/netbsdlike/netbsd/mod.rs 2021-10-21 23:54:55.000000000 +0000 +++ cargo-0.54.0/vendor/libc/src/unix/bsd/netbsdlike/netbsd/mod.rs 2021-10-26 21:08:52.000000000 +0000 @@ -1957,6 +1957,14 @@ pub const PT_GET_PROCESS_STATE: ::c_int = 18; pub const PT_FIRSTMACH: ::c_int = 32; +pub const POSIX_SPAWN_RESETIDS: ::c_int = 0x01; +pub const POSIX_SPAWN_SETPGROUP: ::c_int = 0x02; +pub const POSIX_SPAWN_SETSCHEDPARAM: ::c_int = 0x04; +pub const POSIX_SPAWN_SETSCHEDULER: ::c_int = 0x08; +pub const POSIX_SPAWN_SETSIGDEF: ::c_int = 0x10; +pub const POSIX_SPAWN_SETSIGMASK: ::c_int = 0x20; +pub const POSIX_SPAWN_RETURNERROR: ::c_int = 0x40; + // Flags for chflags(2) pub const SF_SNAPSHOT: ::c_ulong = 0x00200000; pub const SF_LOG: ::c_ulong = 0x00400000; diff -Nru cargo-0.53.0/vendor/libc/src/unix/haiku/mod.rs cargo-0.54.0/vendor/libc/src/unix/haiku/mod.rs --- cargo-0.53.0/vendor/libc/src/unix/haiku/mod.rs 2021-10-21 23:54:55.000000000 +0000 +++ cargo-0.54.0/vendor/libc/src/unix/haiku/mod.rs 2021-10-26 21:08:52.000000000 +0000 @@ -1318,6 +1318,16 @@ pub const PRIO_PGRP: ::c_int = 1; pub const PRIO_USER: ::c_int = 2; +// utmpx entry types +pub const EMPTY: ::c_short = 0; +pub const BOOT_TIME: ::c_short = 
1; +pub const OLD_TIME: ::c_short = 2; +pub const NEW_TIME: ::c_short = 3; +pub const USER_PROCESS: ::c_short = 4; +pub const INIT_PROCESS: ::c_short = 5; +pub const LOGIN_PROCESS: ::c_short = 6; +pub const DEAD_PROCESS: ::c_short = 7; + pub const LOG_PID: ::c_int = 1 << 12; pub const LOG_CONS: ::c_int = 2 << 12; pub const LOG_ODELAY: ::c_int = 4 << 12; @@ -1622,6 +1632,12 @@ ) -> ::pid_t; pub fn sethostname(name: *const ::c_char, len: ::size_t) -> ::c_int; pub fn uname(buf: *mut ::utsname) -> ::c_int; + pub fn getutxent() -> *mut utmpx; + pub fn getutxid(ut: *const utmpx) -> *mut utmpx; + pub fn getutxline(ut: *const utmpx) -> *mut utmpx; + pub fn pututxline(ut: *const utmpx) -> *mut utmpx; + pub fn setutxent(); + pub fn endutxent(); } cfg_if! { diff -Nru cargo-0.53.0/vendor/libc/src/unix/linux_like/android/mod.rs cargo-0.54.0/vendor/libc/src/unix/linux_like/android/mod.rs --- cargo-0.53.0/vendor/libc/src/unix/linux_like/android/mod.rs 2021-10-21 23:54:55.000000000 +0000 +++ cargo-0.54.0/vendor/libc/src/unix/linux_like/android/mod.rs 2021-10-26 21:08:52.000000000 +0000 @@ -2447,6 +2447,8 @@ pub const SCHED_RESET_ON_FORK: ::c_int = 0x40000000; +pub const CLONE_PIDFD: ::c_int = 0x1000; + // bits/seek_constants.h pub const SEEK_DATA: ::c_int = 3; pub const SEEK_HOLE: ::c_int = 4; @@ -2480,6 +2482,12 @@ } } + pub fn CPU_ALLOC_SIZE(count: ::c_int) -> ::size_t { + let _dummy: cpu_set_t = ::mem::zeroed(); + let size_in_bits = 8 * ::mem::size_of_val(&_dummy.__bits[0]); + ((count as ::size_t + size_in_bits - 1) / 8) as ::size_t + } + pub fn CPU_ZERO(cpuset: &mut cpu_set_t) -> () { for slot in cpuset.__bits.iter_mut() { *slot = 0; @@ -2487,28 +2495,44 @@ } pub fn CPU_SET(cpu: usize, cpuset: &mut cpu_set_t) -> () { - let size_in___bits = 8 * ::mem::size_of_val(&cpuset.__bits[0]); - let (idx, offset) = (cpu / size_in___bits, cpu % size_in___bits); + let size_in_bits + = 8 * ::mem::size_of_val(&cpuset.__bits[0]); // 32, 64 etc + let (idx, offset) = (cpu / size_in_bits, cpu % 
size_in_bits); cpuset.__bits[idx] |= 1 << offset; () } pub fn CPU_CLR(cpu: usize, cpuset: &mut cpu_set_t) -> () { - let size_in___bits = 8 * ::mem::size_of_val(&cpuset.__bits[0]); - let (idx, offset) = (cpu / size_in___bits, cpu % size_in___bits); + let size_in_bits + = 8 * ::mem::size_of_val(&cpuset.__bits[0]); // 32, 64 etc + let (idx, offset) = (cpu / size_in_bits, cpu % size_in_bits); cpuset.__bits[idx] &= !(1 << offset); () } pub fn CPU_ISSET(cpu: usize, cpuset: &cpu_set_t) -> bool { - let size_in___bits = 8 * ::mem::size_of_val(&cpuset.__bits[0]); - let (idx, offset) = (cpu / size_in___bits, cpu % size_in___bits); + let size_in_bits = 8 * ::mem::size_of_val(&cpuset.__bits[0]); + let (idx, offset) = (cpu / size_in_bits, cpu % size_in_bits); 0 != (cpuset.__bits[idx] & (1 << offset)) } + pub fn CPU_COUNT_S(size: usize, cpuset: &cpu_set_t) -> ::c_int { + let mut s: u32 = 0; + let size_of_mask = ::mem::size_of_val(&cpuset.__bits[0]); + for i in cpuset.__bits[..(size / size_of_mask)].iter() { + s += i.count_ones(); + }; + s as ::c_int + } + + pub fn CPU_COUNT(cpuset: &cpu_set_t) -> ::c_int { + CPU_COUNT_S(::mem::size_of::(), cpuset) + } + pub fn CPU_EQUAL(set1: &cpu_set_t, set2: &cpu_set_t) -> bool { set1.__bits == set2.__bits } + pub fn major(dev: ::dev_t) -> ::c_int { ((dev >> 8) & 0xfff) as ::c_int } diff -Nru cargo-0.53.0/vendor/libc/src/unix/linux_like/linux/gnu/mod.rs cargo-0.54.0/vendor/libc/src/unix/linux_like/linux/gnu/mod.rs --- cargo-0.53.0/vendor/libc/src/unix/linux_like/linux/gnu/mod.rs 2021-10-21 23:54:55.000000000 +0000 +++ cargo-0.54.0/vendor/libc/src/unix/linux_like/linux/gnu/mod.rs 2021-10-26 21:08:52.000000000 +0000 @@ -956,6 +956,25 @@ pub const TIOCM_CD: ::c_int = TIOCM_CAR; pub const TIOCM_RI: ::c_int = TIOCM_RNG; +// elf.h +pub const NT_PRSTATUS: ::c_int = 1; +pub const NT_PRFPREG: ::c_int = 2; +pub const NT_FPREGSET: ::c_int = 2; +pub const NT_PRPSINFO: ::c_int = 3; +pub const NT_PRXREG: ::c_int = 4; +pub const NT_TASKSTRUCT: ::c_int = 4; 
+pub const NT_PLATFORM: ::c_int = 5; +pub const NT_AUXV: ::c_int = 6; +pub const NT_GWINDOWS: ::c_int = 7; +pub const NT_ASRS: ::c_int = 8; +pub const NT_PSTATUS: ::c_int = 10; +pub const NT_PSINFO: ::c_int = 13; +pub const NT_PRCRED: ::c_int = 14; +pub const NT_UTSNAME: ::c_int = 15; +pub const NT_LWPSTATUS: ::c_int = 16; +pub const NT_LWPSINFO: ::c_int = 17; +pub const NT_PRFPXREG: ::c_int = 20; + // linux/keyctl.h pub const KEYCTL_DH_COMPUTE: u32 = 23; pub const KEYCTL_PKEY_QUERY: u32 = 24; diff -Nru cargo-0.53.0/vendor/libc/src/unix/linux_like/linux/mod.rs cargo-0.54.0/vendor/libc/src/unix/linux_like/linux/mod.rs --- cargo-0.53.0/vendor/libc/src/unix/linux_like/linux/mod.rs 2021-10-21 23:54:55.000000000 +0000 +++ cargo-0.54.0/vendor/libc/src/unix/linux_like/linux/mod.rs 2021-10-26 21:08:52.000000000 +0000 @@ -1491,6 +1491,8 @@ pub const PTHREAD_MUTEX_RECURSIVE: ::c_int = 1; pub const PTHREAD_MUTEX_ERRORCHECK: ::c_int = 2; pub const PTHREAD_MUTEX_DEFAULT: ::c_int = PTHREAD_MUTEX_NORMAL; +pub const PTHREAD_MUTEX_STALLED: ::c_int = 0; +pub const PTHREAD_MUTEX_ROBUST: ::c_int = 1; pub const PTHREAD_PROCESS_PRIVATE: ::c_int = 0; pub const PTHREAD_PROCESS_SHARED: ::c_int = 1; pub const __SIZEOF_PTHREAD_COND_T: usize = 48; @@ -1507,6 +1509,8 @@ pub const SCHED_RESET_ON_FORK: ::c_int = 0x40000000; +pub const CLONE_PIDFD: ::c_int = 0x1000; + // netinet/in.h // NOTE: These are in addition to the constants defined in src/unix/mod.rs @@ -1816,6 +1820,10 @@ pub const MFD_ALLOW_SEALING: ::c_uint = 0x0002; pub const MFD_HUGETLB: ::c_uint = 0x0004; +// linux/close_range.h +pub const CLOSE_RANGE_UNSHARE: ::c_uint = 1 << 1; +pub const CLOSE_RANGE_CLOEXEC: ::c_uint = 1 << 2; + // these are used in the p_type field of Elf32_Phdr and Elf64_Phdr, which has // the type Elf32Word and Elf64Word respectively. Luckily, both of those are u32 // so we can use that type here to avoid having to cast. 
@@ -2388,6 +2396,8 @@ pub const NETLINK_LISTEN_ALL_NSID: ::c_int = 8; pub const NETLINK_LIST_MEMBERSHIPS: ::c_int = 9; pub const NETLINK_CAP_ACK: ::c_int = 10; +pub const NETLINK_EXT_ACK: ::c_int = 11; +pub const NETLINK_GET_STRICT_CHK: ::c_int = 12; pub const NLA_F_NESTED: ::c_int = 1 << 15; pub const NLA_F_NET_BYTEORDER: ::c_int = 1 << 14; @@ -3619,6 +3629,7 @@ timeout: *const ::timespec, sigmask: *const sigset_t, ) -> ::c_int; + pub fn pthread_mutex_consistent(mutex: *mut pthread_mutex_t) -> ::c_int; pub fn pthread_mutex_timedlock( lock: *mut pthread_mutex_t, abstime: *const ::timespec, @@ -3734,6 +3745,14 @@ attr: *const pthread_mutexattr_t, pshared: *mut ::c_int, ) -> ::c_int; + pub fn pthread_mutexattr_getrobust( + attr: *const pthread_mutexattr_t, + robustness: *mut ::c_int, + ) -> ::c_int; + pub fn pthread_mutexattr_setrobust( + attr: *mut pthread_mutexattr_t, + robustness: ::c_int, + ) -> ::c_int; pub fn popen(command: *const c_char, mode: *const c_char) -> *mut ::FILE; pub fn faccessat( dirfd: ::c_int, diff -Nru cargo-0.53.0/vendor/libc/src/unix/linux_like/linux/uclibc/x86_64/mod.rs cargo-0.54.0/vendor/libc/src/unix/linux_like/linux/uclibc/x86_64/mod.rs --- cargo-0.53.0/vendor/libc/src/unix/linux_like/linux/uclibc/x86_64/mod.rs 2021-10-21 23:54:55.000000000 +0000 +++ cargo-0.54.0/vendor/libc/src/unix/linux_like/linux/uclibc/x86_64/mod.rs 2021-10-26 21:08:52.000000000 +0000 @@ -292,8 +292,13 @@ } // constants +pub const ENAMETOOLONG: ::c_int = 36; // File name too long +pub const ENOTEMPTY: ::c_int = 39; // Directory not empty +pub const ELOOP: ::c_int = 40; // Too many symbolic links encountered pub const EADDRINUSE: ::c_int = 98; // Address already in use pub const EADDRNOTAVAIL: ::c_int = 99; // Cannot assign requested address +pub const ENETDOWN: ::c_int = 100; // Network is down +pub const ENETUNREACH: ::c_int = 101; // Network is unreachable pub const ECONNABORTED: ::c_int = 103; // Software caused connection abort pub const ECONNREFUSED: ::c_int = 
111; // Connection refused pub const ECONNRESET: ::c_int = 104; // Connection reset by peer @@ -301,6 +306,9 @@ pub const ENOSYS: ::c_int = 38; // Function not implemented pub const ENOTCONN: ::c_int = 107; // Transport endpoint is not connected pub const ETIMEDOUT: ::c_int = 110; // connection timed out +pub const ESTALE: ::c_int = 116; // Stale file handle +pub const EHOSTUNREACH: ::c_int = 113; // No route to host +pub const EDQUOT: ::c_int = 122; // Quota exceeded pub const EOPNOTSUPP: ::c_int = 0x5f; pub const ENODATA: ::c_int = 0x3d; pub const O_APPEND: ::c_int = 02000; diff -Nru cargo-0.53.0/vendor/libc/src/unix/redox/mod.rs cargo-0.54.0/vendor/libc/src/unix/redox/mod.rs --- cargo-0.53.0/vendor/libc/src/unix/redox/mod.rs 2021-10-21 23:54:55.000000000 +0000 +++ cargo-0.54.0/vendor/libc/src/unix/redox/mod.rs 2021-10-26 21:08:52.000000000 +0000 @@ -1001,6 +1001,8 @@ set: *const ::sigset_t, oldset: *mut ::sigset_t, ) -> ::c_int; + pub fn pthread_cancel(thread: ::pthread_t) -> ::c_int; + pub fn pthread_kill(thread: ::pthread_t, sig: ::c_int) -> ::c_int; // sys/epoll.h pub fn epoll_create(size: ::c_int) -> ::c_int; diff -Nru cargo-0.53.0/vendor/opener/debian/patches/disable-vendor.patch cargo-0.54.0/vendor/opener/debian/patches/disable-vendor.patch --- cargo-0.53.0/vendor/opener/debian/patches/disable-vendor.patch 2021-10-21 23:54:55.000000000 +0000 +++ cargo-0.54.0/vendor/opener/debian/patches/disable-vendor.patch 1970-01-01 00:00:00.000000000 +0000 @@ -1,30 +0,0 @@ ---- a/src/lib.rs -+++ b/src/lib.rs -@@ -14,7 +14,7 @@ - //! - //! ## Platform Implementation Details - //! On Windows the `ShellExecuteW` Windows API function is used. On Mac the system `open` command is --//! used. On other platforms, the `xdg-open` script is used. The system `xdg-open` is not used; -+//! used. On other platforms, the system `xdg-open` script is used. - //! instead a version is embedded within this library. 
- - #![warn( -@@ -167,14 +167,12 @@ - - #[cfg(not(any(target_os = "windows", target_os = "macos")))] - fn open_sys(path: &OsStr) -> Result<(), OpenError> { -- const XDG_OPEN_SCRIPT: &[u8] = include_bytes!("xdg-open"); -- - open_not_windows( -- "sh", -+ "xdg-open", - path, -- &["-s"], -- Some(XDG_OPEN_SCRIPT), -- "xdg-open (internal)", -+ &[], -+ None, -+ "xdg-open", - ) - } - diff -Nru cargo-0.53.0/vendor/opener/debian/patches/series cargo-0.54.0/vendor/opener/debian/patches/series --- cargo-0.53.0/vendor/opener/debian/patches/series 2021-10-21 23:54:55.000000000 +0000 +++ cargo-0.54.0/vendor/opener/debian/patches/series 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -disable-vendor.patch diff -Nru cargo-0.53.0/vendor/opener/.pc/applied-patches cargo-0.54.0/vendor/opener/.pc/applied-patches --- cargo-0.53.0/vendor/opener/.pc/applied-patches 2021-10-21 23:54:55.000000000 +0000 +++ cargo-0.54.0/vendor/opener/.pc/applied-patches 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -disable-vendor.patch diff -Nru cargo-0.53.0/vendor/opener/.pc/disable-vendor.patch/src/lib.rs cargo-0.54.0/vendor/opener/.pc/disable-vendor.patch/src/lib.rs --- cargo-0.53.0/vendor/opener/.pc/disable-vendor.patch/src/lib.rs 2021-10-21 23:54:55.000000000 +0000 +++ cargo-0.54.0/vendor/opener/.pc/disable-vendor.patch/src/lib.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,225 +0,0 @@ -//! This crate provides the [`open`] function, which opens a file or link with the default program -//! configured on the system. -//! -//! ```no_run -//! # fn main() -> Result<(), ::opener::OpenError> { -//! // open a website -//! opener::open("https://www.rust-lang.org")?; -//! -//! // open a file -//! opener::open("../Cargo.toml")?; -//! # Ok(()) -//! # } -//! ``` -//! -//! ## Platform Implementation Details -//! On Windows the `ShellExecuteW` Windows API function is used. On Mac the system `open` command is -//! used. On other platforms, the `xdg-open` script is used. The system `xdg-open` is not used; -//! 
instead a version is embedded within this library. - -#![warn( - rust_2018_idioms, - deprecated_in_future, - macro_use_extern_crate, - missing_debug_implementations, - unused_labels, - unused_qualifications, - clippy::cast_possible_truncation -)] - -#[cfg(target_os = "windows")] -use crate::windows::open_sys; - -use std::{ - error::Error, - ffi::OsStr, - fmt::{self, Display, Formatter}, - io, - process::ExitStatus, -}; - -/// An error type representing the failure to open a path. Possibly returned by the [`open`] -/// function. -/// -/// The `ExitStatus` variant will never be returned on Windows. -#[derive(Debug)] -pub enum OpenError { - /// An IO error occurred. - Io(io::Error), - - /// The command exited with a non-zero exit status. - ExitStatus { - /// A string that identifies the command. - cmd: &'static str, - - /// The failed process's exit status. - status: ExitStatus, - - /// Anything the process wrote to stderr. - stderr: String, - }, -} - -impl Display for OpenError { - fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { - match self { - OpenError::Io(_) => { - write!(f, "IO error")?; - } - OpenError::ExitStatus { - cmd, - status, - stderr, - } => { - write!( - f, - "command '{}' did not execute successfully; {}", - cmd, status - )?; - - let stderr = stderr.trim(); - if !stderr.is_empty() { - write!(f, "\ncommand stderr:\n{}", stderr)?; - } - } - } - - Ok(()) - } -} - -impl Error for OpenError { - fn source(&self) -> Option<&(dyn Error + 'static)> { - match self { - OpenError::Io(inner) => Some(inner), - OpenError::ExitStatus { .. } => None, - } - } -} - -impl From for OpenError { - fn from(err: io::Error) -> Self { - OpenError::Io(err) - } -} - -/// Opens a file or link with the system default program. -/// -/// Note that a path like "rustup.rs" could potentially refer to either a file or a website. If you -/// want to open the website, you should add the "http://" prefix, for example. 
-/// -/// Also note that a result of `Ok(())` just means a way of opening the path was found, and no error -/// occurred as a direct result of opening the path. Errors beyond that point aren't caught. For -/// example, `Ok(())` would be returned even if a file was opened with a program that can't read the -/// file, or a dead link was opened in a browser. -pub fn open

(path: P) -> Result<(), OpenError> -where - P: AsRef, -{ - open_sys(path.as_ref()) -} - -#[cfg(target_os = "windows")] -mod windows { - use super::OpenError; - use std::{ffi::OsStr, io, os::windows::ffi::OsStrExt, ptr}; - use winapi::{ctypes::c_int, um::shellapi::ShellExecuteW}; - - pub fn open_sys(path: &OsStr) -> Result<(), OpenError> { - const SW_SHOW: c_int = 5; - - let path = convert_path(path)?; - let operation: Vec = OsStr::new("open\0").encode_wide().collect(); - let result = unsafe { - ShellExecuteW( - ptr::null_mut(), - operation.as_ptr(), - path.as_ptr(), - ptr::null(), - ptr::null(), - SW_SHOW, - ) - }; - if result as c_int > 32 { - Ok(()) - } else { - Err(io::Error::last_os_error().into()) - } - } - - fn convert_path(path: &OsStr) -> io::Result> { - let mut maybe_result: Vec = path.encode_wide().collect(); - if maybe_result.iter().any(|&u| u == 0) { - return Err(io::Error::new( - io::ErrorKind::InvalidInput, - "path contains NUL byte(s)", - )); - } - maybe_result.push(0); - Ok(maybe_result) - } -} - -#[cfg(target_os = "macos")] -fn open_sys(path: &OsStr) -> Result<(), OpenError> { - open_not_windows("open", path, &[], None, "open") -} - -#[cfg(not(any(target_os = "windows", target_os = "macos")))] -fn open_sys(path: &OsStr) -> Result<(), OpenError> { - const XDG_OPEN_SCRIPT: &[u8] = include_bytes!("xdg-open"); - - open_not_windows( - "sh", - path, - &["-s"], - Some(XDG_OPEN_SCRIPT), - "xdg-open (internal)", - ) -} - -#[cfg(not(target_os = "windows"))] -fn open_not_windows( - cmd: &str, - path: &OsStr, - extra_args: &[&str], - piped_input: Option<&[u8]>, - cmd_friendly_name: &'static str, -) -> Result<(), OpenError> { - use std::{ - io::{Read, Write}, - process::{Command, Stdio}, - }; - - let stdin_type = if piped_input.is_some() { - Stdio::piped() - } else { - Stdio::null() - }; - - let mut cmd = Command::new(cmd) - .args(extra_args) - .arg(path) - .stdin(stdin_type) - .stdout(Stdio::null()) - .stderr(Stdio::piped()) - .spawn()?; - - if let Some(stdin) 
= cmd.stdin.as_mut() { - stdin.write_all(piped_input.unwrap())?; - } - - let exit_status = cmd.wait()?; - if exit_status.success() { - Ok(()) - } else { - let mut stderr = String::new(); - cmd.stderr.as_mut().unwrap().read_to_string(&mut stderr)?; - - Err(OpenError::ExitStatus { - cmd: cmd_friendly_name, - status: exit_status, - stderr, - }) - } -} diff -Nru cargo-0.53.0/vendor/opener/.pc/.quilt_patches cargo-0.54.0/vendor/opener/.pc/.quilt_patches --- cargo-0.53.0/vendor/opener/.pc/.quilt_patches 2021-10-21 23:54:55.000000000 +0000 +++ cargo-0.54.0/vendor/opener/.pc/.quilt_patches 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -debian/patches diff -Nru cargo-0.53.0/vendor/opener/.pc/.quilt_series cargo-0.54.0/vendor/opener/.pc/.quilt_series --- cargo-0.53.0/vendor/opener/.pc/.quilt_series 2021-10-21 23:54:55.000000000 +0000 +++ cargo-0.54.0/vendor/opener/.pc/.quilt_series 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -series diff -Nru cargo-0.53.0/vendor/opener/.pc/.version cargo-0.54.0/vendor/opener/.pc/.version --- cargo-0.53.0/vendor/opener/.pc/.version 2021-10-21 23:54:55.000000000 +0000 +++ cargo-0.54.0/vendor/opener/.pc/.version 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -2 diff -Nru cargo-0.53.0/vendor/opener/src/lib.rs cargo-0.54.0/vendor/opener/src/lib.rs --- cargo-0.53.0/vendor/opener/src/lib.rs 2021-10-21 23:54:55.000000000 +0000 +++ cargo-0.54.0/vendor/opener/src/lib.rs 2021-10-26 21:08:52.000000000 +0000 @@ -14,7 +14,7 @@ //! //! ## Platform Implementation Details //! On Windows the `ShellExecuteW` Windows API function is used. On Mac the system `open` command is -//! used. On other platforms, the system `xdg-open` script is used. +//! used. On other platforms, the `xdg-open` script is used. The system `xdg-open` is not used; //! instead a version is embedded within this library. 
#![warn( @@ -167,12 +167,14 @@ #[cfg(not(any(target_os = "windows", target_os = "macos")))] fn open_sys(path: &OsStr) -> Result<(), OpenError> { + const XDG_OPEN_SCRIPT: &[u8] = include_bytes!("xdg-open"); + open_not_windows( - "xdg-open", + "sh", path, - &[], - None, - "xdg-open", + &["-s"], + Some(XDG_OPEN_SCRIPT), + "xdg-open (internal)", ) } diff -Nru cargo-0.53.0/vendor/opener/src/xdg-open cargo-0.54.0/vendor/opener/src/xdg-open --- cargo-0.53.0/vendor/opener/src/xdg-open 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.54.0/vendor/opener/src/xdg-open 2021-10-26 21:08:52.000000000 +0000 @@ -0,0 +1,1066 @@ +#!/bin/sh +#--------------------------------------------- +# xdg-open +# +# Utility script to open a URL in the registered default application. +# +# Refer to the usage() function below for usage. +# +# Copyright 2009-2010, Fathi Boudra +# Copyright 2009-2016, Rex Dieter +# Copyright 2006, Kevin Krammer +# Copyright 2006, Jeremy White +# +# LICENSE: +# +# Permission is hereby granted, free of charge, to any person obtaining a +# copy of this software and associated documentation files (the "Software"), +# to deal in the Software without restriction, including without limitation +# the rights to use, copy, modify, merge, publish, distribute, sublicense, +# and/or sell copies of the Software, and to permit persons to whom the +# Software is furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL +# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR +# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, +# ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +# OTHER DEALINGS IN THE SOFTWARE. +# +#--------------------------------------------- + +manualpage() +{ +cat << _MANUALPAGE +Name + + xdg-open -- opens a file or URL in the user's preferred + application + +Synopsis + + xdg-open { file | URL } + + xdg-open { --help | --manual | --version } + +Description + + xdg-open opens a file or URL in the user's preferred + application. If a URL is provided the URL will be opened in the + user's preferred web browser. If a file is provided the file + will be opened in the preferred application for files of that + type. xdg-open supports file, ftp, http and https URLs. + + xdg-open is for use inside a desktop session only. It is not + recommended to use xdg-open as root. + +Options + + --help + Show command synopsis. + + --manual + Show this manual page. + + --version + Show the xdg-utils version information. + +Exit Codes + + An exit code of 0 indicates success while a non-zero exit code + indicates failure. The following failure codes can be returned: + + 1 + Error in command line syntax. + + 2 + One of the files passed on the command line did not + exist. + + 3 + A required tool could not be found. + + 4 + The action failed. + +See Also + + xdg-mime(1), xdg-settings(1), MIME applications associations + specification + +Examples + +xdg-open 'http://www.freedesktop.org/' + + Opens the freedesktop.org website in the user's default + browser. + +xdg-open /tmp/foobar.png + + Opens the PNG image file /tmp/foobar.png in the user's default + image viewing application. 
+_MANUALPAGE +} + +usage() +{ +cat << _USAGE + xdg-open -- opens a file or URL in the user's preferred + application + +Synopsis + + xdg-open { file | URL } + + xdg-open { --help | --manual | --version } + +_USAGE +} + +#@xdg-utils-common@ + +#---------------------------------------------------------------------------- +# Common utility functions included in all XDG wrapper scripts +#---------------------------------------------------------------------------- + +DEBUG() +{ + [ -z "${XDG_UTILS_DEBUG_LEVEL}" ] && return 0; + [ ${XDG_UTILS_DEBUG_LEVEL} -lt $1 ] && return 0; + shift + echo "$@" >&2 +} + +# This handles backslashes but not quote marks. +first_word() +{ + read first rest + echo "$first" +} + +#------------------------------------------------------------- +# map a binary to a .desktop file +binary_to_desktop_file() +{ + search="${XDG_DATA_HOME:-$HOME/.local/share}:${XDG_DATA_DIRS:-/usr/local/share:/usr/share}" + binary="`which "$1"`" + binary="`readlink -f "$binary"`" + base="`basename "$binary"`" + IFS=: + for dir in $search; do + unset IFS + [ "$dir" ] || continue + [ -d "$dir/applications" ] || [ -d "$dir/applnk" ] || continue + for file in "$dir"/applications/*.desktop "$dir"/applications/*/*.desktop "$dir"/applnk/*.desktop "$dir"/applnk/*/*.desktop; do + [ -r "$file" ] || continue + # Check to make sure it's worth the processing. + grep -q "^Exec.*$base" "$file" || continue + # Make sure it's a visible desktop file (e.g. not "preferred-web-browser.desktop"). 
+ grep -Eq "^(NoDisplay|Hidden)=true" "$file" && continue + command="`grep -E "^Exec(\[[^]=]*])?=" "$file" | cut -d= -f 2- | first_word`" + command="`which "$command"`" + if [ x"`readlink -f "$command"`" = x"$binary" ]; then + # Fix any double slashes that got added path composition + echo "$file" | sed -e 's,//*,/,g' + return + fi + done + done +} + +#------------------------------------------------------------- +# map a .desktop file to a binary +desktop_file_to_binary() +{ + search="${XDG_DATA_HOME:-$HOME/.local/share}:${XDG_DATA_DIRS:-/usr/local/share:/usr/share}" + desktop="`basename "$1"`" + IFS=: + for dir in $search; do + unset IFS + [ "$dir" ] && [ -d "$dir/applications" ] || [ -d "$dir/applnk" ] || continue + # Check if desktop file contains - + if [ "${desktop#*-}" != "$desktop" ]; then + vendor=${desktop%-*} + app=${desktop#*-} + if [ -r $dir/applications/$vendor/$app ]; then + file_path=$dir/applications/$vendor/$app + elif [ -r $dir/applnk/$vendor/$app ]; then + file_path=$dir/applnk/$vendor/$app + fi + fi + if test -z "$file_path" ; then + for indir in "$dir"/applications/ "$dir"/applications/*/ "$dir"/applnk/ "$dir"/applnk/*/; do + file="$indir/$desktop" + if [ -r "$file" ]; then + file_path=$file + break + fi + done + fi + if [ -r "$file_path" ]; then + # Remove any arguments (%F, %f, %U, %u, etc.). + command="`grep -E "^Exec(\[[^]=]*])?=" "$file_path" | cut -d= -f 2- | first_word`" + command="`which "$command"`" + readlink -f "$command" + return + fi + done +} + +#------------------------------------------------------------- +# Exit script on successfully completing the desired operation + +exit_success() +{ + if [ $# -gt 0 ]; then + echo "$@" + echo + fi + + exit 0 +} + + +#----------------------------------------- +# Exit script on malformed arguments, not enough arguments +# or missing required option. 
+# prints usage information + +exit_failure_syntax() +{ + if [ $# -gt 0 ]; then + echo "xdg-open: $@" >&2 + echo "Try 'xdg-open --help' for more information." >&2 + else + usage + echo "Use 'man xdg-open' or 'xdg-open --manual' for additional info." + fi + + exit 1 +} + +#------------------------------------------------------------- +# Exit script on missing file specified on command line + +exit_failure_file_missing() +{ + if [ $# -gt 0 ]; then + echo "xdg-open: $@" >&2 + fi + + exit 2 +} + +#------------------------------------------------------------- +# Exit script on failure to locate necessary tool applications + +exit_failure_operation_impossible() +{ + if [ $# -gt 0 ]; then + echo "xdg-open: $@" >&2 + fi + + exit 3 +} + +#------------------------------------------------------------- +# Exit script on failure returned by a tool application + +exit_failure_operation_failed() +{ + if [ $# -gt 0 ]; then + echo "xdg-open: $@" >&2 + fi + + exit 4 +} + +#------------------------------------------------------------ +# Exit script on insufficient permission to read a specified file + +exit_failure_file_permission_read() +{ + if [ $# -gt 0 ]; then + echo "xdg-open: $@" >&2 + fi + + exit 5 +} + +#------------------------------------------------------------ +# Exit script on insufficient permission to write a specified file + +exit_failure_file_permission_write() +{ + if [ $# -gt 0 ]; then + echo "xdg-open: $@" >&2 + fi + + exit 6 +} + +check_input_file() +{ + if [ ! -e "$1" ]; then + exit_failure_file_missing "file '$1' does not exist" + fi + if [ ! 
-r "$1" ]; then + exit_failure_file_permission_read "no permission to read file '$1'" + fi +} + +check_vendor_prefix() +{ + file_label="$2" + [ -n "$file_label" ] || file_label="filename" + file=`basename "$1"` + case "$file" in + [[:alpha:]]*-*) + return + ;; + esac + + echo "xdg-open: $file_label '$file' does not have a proper vendor prefix" >&2 + echo 'A vendor prefix consists of alpha characters ([a-zA-Z]) and is terminated' >&2 + echo 'with a dash ("-"). An example '"$file_label"' is '"'example-$file'" >&2 + echo "Use --novendor to override or 'xdg-open --manual' for additional info." >&2 + exit 1 +} + +check_output_file() +{ + # if the file exists, check if it is writeable + # if it does not exists, check if we are allowed to write on the directory + if [ -e "$1" ]; then + if [ ! -w "$1" ]; then + exit_failure_file_permission_write "no permission to write to file '$1'" + fi + else + DIR=`dirname "$1"` + if [ ! -w "$DIR" ] || [ ! -x "$DIR" ]; then + exit_failure_file_permission_write "no permission to create file '$1'" + fi + fi +} + +#---------------------------------------- +# Checks for shared commands, e.g. --help + +check_common_commands() +{ + while [ $# -gt 0 ] ; do + parm="$1" + shift + + case "$parm" in + --help) + usage + echo "Use 'man xdg-open' or 'xdg-open --manual' for additional info." 
+ exit_success + ;; + + --manual) + manualpage + exit_success + ;; + + --version) + echo "xdg-open 1.1.3+" + exit_success + ;; + esac + done +} + +check_common_commands "$@" + +[ -z "${XDG_UTILS_DEBUG_LEVEL}" ] && unset XDG_UTILS_DEBUG_LEVEL; +if [ ${XDG_UTILS_DEBUG_LEVEL-0} -lt 1 ]; then + # Be silent + xdg_redirect_output=" > /dev/null 2> /dev/null" +else + # All output to stderr + xdg_redirect_output=" >&2" +fi + +#-------------------------------------- +# Checks for known desktop environments +# set variable DE to the desktop environments name, lowercase + +detectDE() +{ + # see https://bugs.freedesktop.org/show_bug.cgi?id=34164 + unset GREP_OPTIONS + + if [ -n "${XDG_CURRENT_DESKTOP}" ]; then + case "${XDG_CURRENT_DESKTOP}" in + # only recently added to menu-spec, pre-spec X- still in use + Cinnamon|X-Cinnamon) + DE=cinnamon; + ;; + ENLIGHTENMENT) + DE=enlightenment; + ;; + # GNOME, GNOME-Classic:GNOME, or GNOME-Flashback:GNOME + GNOME*) + DE=gnome; + ;; + KDE) + DE=kde; + ;; + DEEPIN|Deepin|deepin) + DE=deepin; + ;; + LXDE) + DE=lxde; + ;; + LXQt) + DE=lxqt; + ;; + MATE) + DE=mate; + ;; + XFCE) + DE=xfce + ;; + X-Generic) + DE=generic + ;; + esac + fi + + if [ x"$DE" = x"" ]; then + # classic fallbacks + if [ x"$KDE_FULL_SESSION" != x"" ]; then DE=kde; + elif [ x"$GNOME_DESKTOP_SESSION_ID" != x"" ]; then DE=gnome; + elif [ x"$MATE_DESKTOP_SESSION_ID" != x"" ]; then DE=mate; + elif `dbus-send --print-reply --dest=org.freedesktop.DBus /org/freedesktop/DBus org.freedesktop.DBus.GetNameOwner string:org.gnome.SessionManager > /dev/null 2>&1` ; then DE=gnome; + elif xprop -root _DT_SAVE_MODE 2> /dev/null | grep ' = \"xfce4\"$' >/dev/null 2>&1; then DE=xfce; + elif xprop -root 2> /dev/null | grep -i '^xfce_desktop_window' >/dev/null 2>&1; then DE=xfce + elif echo $DESKTOP | grep -q '^Enlightenment'; then DE=enlightenment; + elif [ x"$LXQT_SESSION_CONFIG" != x"" ]; then DE=lxqt; + fi + fi + + if [ x"$DE" = x"" ]; then + # fallback to checking $DESKTOP_SESSION + case 
"$DESKTOP_SESSION" in + gnome) + DE=gnome; + ;; + LXDE|Lubuntu) + DE=lxde; + ;; + MATE) + DE=mate; + ;; + xfce|xfce4|'Xfce Session') + DE=xfce; + ;; + esac + fi + + if [ x"$DE" = x"" ]; then + # fallback to uname output for other platforms + case "$(uname 2>/dev/null)" in + CYGWIN*) + DE=cygwin; + ;; + Darwin) + DE=darwin; + ;; + esac + fi + + if [ x"$DE" = x"gnome" ]; then + # gnome-default-applications-properties is only available in GNOME 2.x + # but not in GNOME 3.x + which gnome-default-applications-properties > /dev/null 2>&1 || DE="gnome3" + fi + + if [ -f "$XDG_RUNTIME_DIR/flatpak-info" ]; then + DE="flatpak" + fi +} + +#---------------------------------------------------------------------------- +# kfmclient exec/openURL can give bogus exit value in KDE <= 3.5.4 +# It also always returns 1 in KDE 3.4 and earlier +# Simply return 0 in such case + +kfmclient_fix_exit_code() +{ + version=`LC_ALL=C.UTF-8 kde-config --version 2>/dev/null | grep '^KDE'` + major=`echo $version | sed 's/KDE.*: \([0-9]\).*/\1/'` + minor=`echo $version | sed 's/KDE.*: [0-9]*\.\([0-9]\).*/\1/'` + release=`echo $version | sed 's/KDE.*: [0-9]*\.[0-9]*\.\([0-9]\).*/\1/'` + test "$major" -gt 3 && return $1 + test "$minor" -gt 5 && return $1 + test "$release" -gt 4 && return $1 + return 0 +} + +#---------------------------------------------------------------------------- +# Returns true if there is a graphical display attached. + +has_display() +{ + if [ -n "$DISPLAY" ] || [ -n "$WAYLAND_DISPLAY" ]; then + return 0 + else + return 1 + fi +} + +# This handles backslashes but not quote marks. +last_word() +{ + read first rest + echo "$rest" +} + +# Get the value of a key in a desktop file's Desktop Entry group. +# Example: Use get_key foo.desktop Exec +# to get the values of the Exec= key for the Desktop Entry group. 
+get_key() +{ + local file="${1}" + local key="${2}" + local desktop_entry="" + + IFS_="${IFS}" + IFS="" + while read line + do + case "$line" in + "[Desktop Entry]") + desktop_entry="y" + ;; + # Reset match flag for other groups + "["*) + desktop_entry="" + ;; + "${key}="*) + # Only match Desktop Entry group + if [ -n "${desktop_entry}" ] + then + echo "${line}" | cut -d= -f 2- + fi + esac + done < "${file}" + IFS="${IFS_}" +} + +# Returns true if argument is a file:// URL or path +is_file_url_or_path() +{ + if echo "$1" | grep -q '^file://' \ + || ! echo "$1" | egrep -q '^[[:alpha:]][[:alpha:][:digit:]+\.\-]*:'; then + return 0 + else + return 1 + fi +} + +# If argument is a file URL, convert it to a (percent-decoded) path. +# If not, leave it as it is. +file_url_to_path() +{ + local file="$1" + if echo "$file" | grep -q '^file://\(localhost\)\?/'; then + file=${file#file://localhost} + file=${file#file://} + file=${file%%#*} + file=$(echo "$file" | sed -r 's/\?.*$//') + local printf=printf + if [ -x /usr/bin/printf ]; then + printf=/usr/bin/printf + fi + file=$($printf "$(echo "$file" | sed -e 's@%\([a-f0-9A-F]\{2\}\)@\\x\1@g')") + fi + echo "$file" +} + +open_cygwin() +{ + cygstart "$1" + + if [ $? -eq 0 ]; then + exit_success + else + exit_failure_operation_failed + fi +} + +open_darwin() +{ + open "$1" + + if [ $? -eq 0 ]; then + exit_success + else + exit_failure_operation_failed + fi +} + +open_kde() +{ + if [ -n "${KDE_SESSION_VERSION}" ]; then + case "${KDE_SESSION_VERSION}" in + 4) + kde-open "$1" + ;; + 5) + kde-open${KDE_SESSION_VERSION} "$1" + ;; + esac + else + kfmclient exec "$1" + kfmclient_fix_exit_code $? + fi + + if [ $? -eq 0 ]; then + exit_success + else + exit_failure_operation_failed + fi +} + +open_deepin() +{ + if dde-open -version >/dev/null 2>&1; then + dde-open "$1" + else + open_generic "$1" + fi + + if [ $? 
-eq 0 ]; then + exit_success + else + exit_failure_operation_failed + fi +} + +open_gnome3() +{ + if gio help open 2>/dev/null 1>&2; then + gio open "$1" + elif gvfs-open --help 2>/dev/null 1>&2; then + gvfs-open "$1" + else + open_generic "$1" + fi + + if [ $? -eq 0 ]; then + exit_success + else + exit_failure_operation_failed + fi +} + +open_gnome() +{ + if gio help open 2>/dev/null 1>&2; then + gio open "$1" + elif gvfs-open --help 2>/dev/null 1>&2; then + gvfs-open "$1" + elif gnome-open --help 2>/dev/null 1>&2; then + gnome-open "$1" + else + open_generic "$1" + fi + + if [ $? -eq 0 ]; then + exit_success + else + exit_failure_operation_failed + fi +} + +open_mate() +{ + if gio help open 2>/dev/null 1>&2; then + gio open "$1" + elif gvfs-open --help 2>/dev/null 1>&2; then + gvfs-open "$1" + elif mate-open --help 2>/dev/null 1>&2; then + mate-open "$1" + else + open_generic "$1" + fi + + if [ $? -eq 0 ]; then + exit_success + else + exit_failure_operation_failed + fi +} + +open_xfce() +{ + if exo-open --help 2>/dev/null 1>&2; then + exo-open "$1" + elif gio help open 2>/dev/null 1>&2; then + gio open "$1" + elif gvfs-open --help 2>/dev/null 1>&2; then + gvfs-open "$1" + else + open_generic "$1" + fi + + if [ $? -eq 0 ]; then + exit_success + else + exit_failure_operation_failed + fi +} + +open_enlightenment() +{ + if enlightenment_open --help 2>/dev/null 1>&2; then + enlightenment_open "$1" + else + open_generic "$1" + fi + + if [ $? -eq 0 ]; then + exit_success + else + exit_failure_operation_failed + fi +} + +open_flatpak() +{ + gdbus call --session \ + --dest org.freedesktop.portal.Desktop \ + --object-path /org/freedesktop/portal/desktop \ + --method org.freedesktop.portal.OpenURI.OpenURI \ + "" "$1" {} + + if [ $? 
-eq 0 ]; then + exit_success + else + exit_failure_operation_failed + fi +} + +#----------------------------------------- +# Recursively search .desktop file + +search_desktop_file() +{ + local default="$1" + local dir="$2" + local target="$3" + + local file="" + # look for both vendor-app.desktop, vendor/app.desktop + if [ -r "$dir/$default" ]; then + file="$dir/$default" + elif [ -r "$dir/`echo $default | sed -e 's|-|/|'`" ]; then + file="$dir/`echo $default | sed -e 's|-|/|'`" + fi + + if [ -r "$file" ] ; then + command="$(get_key "${file}" "Exec" | first_word)" + command_exec=`which $command 2>/dev/null` + icon="$(get_key "${file}" "Icon")" + # FIXME: Actually LC_MESSAGES should be used as described in + # http://standards.freedesktop.org/desktop-entry-spec/latest/ar01s04.html + localised_name="$(get_key "${file}" "Name")" + set -- $(get_key "${file}" "Exec" | last_word) + # We need to replace any occurrence of "%f", "%F" and + # the like by the target file. We examine each + # argument and append the modified argument to the + # end then shift. + local args=$# + local replaced=0 + while [ $args -gt 0 ]; do + case $1 in + %[c]) + replaced=1 + arg="${localised_name}" + shift + set -- "$@" "$arg" + ;; + %[fFuU]) + replaced=1 + arg="$target" + shift + set -- "$@" "$arg" + ;; + %[i]) + replaced=1 + shift + set -- "$@" "--icon" "$icon" + ;; + *) + arg="$1" + shift + set -- "$@" "$arg" + ;; + esac + args=$(( $args - 1 )) + done + [ $replaced -eq 1 ] || set -- "$@" "$target" + "$command_exec" "$@" + + if [ $? 
-eq 0 ]; then + exit_success + fi + fi + + for d in $dir/*/; do + [ -d "$d" ] && search_desktop_file "$default" "$d" "$target" + done +} + + +open_generic_xdg_mime() +{ + filetype="$2" + default=`xdg-mime query default "$filetype"` + if [ -n "$default" ] ; then + xdg_user_dir="$XDG_DATA_HOME" + [ -n "$xdg_user_dir" ] || xdg_user_dir="$HOME/.local/share" + + xdg_system_dirs="$XDG_DATA_DIRS" + [ -n "$xdg_system_dirs" ] || xdg_system_dirs=/usr/local/share/:/usr/share/ + +DEBUG 3 "$xdg_user_dir:$xdg_system_dirs" + for x in `echo "$xdg_user_dir:$xdg_system_dirs" | sed 's/:/ /g'`; do + search_desktop_file "$default" "$x/applications/" "$1" + done + fi +} + +open_generic_xdg_file_mime() +{ + filetype=`xdg-mime query filetype "$1" | sed "s/;.*//"` + open_generic_xdg_mime "$1" "$filetype" +} + +open_generic_xdg_x_scheme_handler() +{ + scheme="`echo $1 | sed -n 's/\(^[[:alnum:]+\.-]*\):.*$/\1/p'`" + if [ -n $scheme ]; then + filetype="x-scheme-handler/$scheme" + open_generic_xdg_mime "$1" "$filetype" + fi +} + +has_single_argument() +{ + test $# = 1 +} + +open_envvar() +{ + local oldifs="$IFS" + local browser browser_with_arg + + IFS=":" + for browser in $BROWSER; do + IFS="$oldifs" + + if [ -z "$browser" ]; then + continue + fi + + if echo "$browser" | grep -q %s; then + # Avoid argument injection. + # See https://bugs.freedesktop.org/show_bug.cgi?id=103807 + # URIs don't have IFS characters spaces anyway. + has_single_argument $1 && $(printf "$browser" "$1") + else + $browser "$1" + fi + + if [ $? -eq 0 ]; then + exit_success + fi + done +} + +open_generic() +{ + if is_file_url_or_path "$1"; then + local file="$(file_url_to_path "$1")" + + check_input_file "$file" + + if has_display; then + filetype=`xdg-mime query filetype "$file" | sed "s/;.*//"` + open_generic_xdg_mime "$file" "$filetype" + fi + + if which run-mailcap 2>/dev/null 1>&2; then + run-mailcap --action=view "$file" + if [ $? 
-eq 0 ]; then + exit_success + fi + fi + + if has_display && mimeopen -v 2>/dev/null 1>&2; then + mimeopen -L -n "$file" + if [ $? -eq 0 ]; then + exit_success + fi + fi + fi + + if has_display; then + open_generic_xdg_x_scheme_handler "$1" + fi + + if [ -n "$BROWSER" ]; then + open_envvar "$1" + fi + + # if BROWSER variable is not set, check some well known browsers instead + if [ x"$BROWSER" = x"" ]; then + BROWSER=www-browser:links2:elinks:links:lynx:w3m + if has_display; then + BROWSER=x-www-browser:firefox:iceweasel:seamonkey:mozilla:epiphany:konqueror:chromium:chromium-browser:google-chrome:$BROWSER + fi + fi + + open_envvar "$1" + + exit_failure_operation_impossible "no method available for opening '$1'" +} + +open_lxde() +{ + + # pcmanfm only knows how to handle file:// urls and filepaths, it seems. + if pcmanfm --help >/dev/null 2>&1 && is_file_url_or_path "$1"; then + local file="$(file_url_to_path "$1")" + + # handle relative paths + if ! echo "$file" | grep -q ^/; then + file="$(pwd)/$file" + fi + + pcmanfm "$file" + else + open_generic "$1" + fi + + if [ $? 
-eq 0 ]; then + exit_success + else + exit_failure_operation_failed + fi +} + +open_lxqt() +{ + open_generic "$1" +} + +[ x"$1" != x"" ] || exit_failure_syntax + +url= +while [ $# -gt 0 ] ; do + parm="$1" + shift + + case "$parm" in + -*) + exit_failure_syntax "unexpected option '$parm'" + ;; + + *) + if [ -n "$url" ] ; then + exit_failure_syntax "unexpected argument '$parm'" + fi + url="$parm" + ;; + esac +done + +if [ -z "${url}" ] ; then + exit_failure_syntax "file or URL argument missing" +fi + +detectDE + +if [ x"$DE" = x"" ]; then + DE=generic +fi + +DEBUG 2 "Selected DE $DE" + +# sanitize BROWSER (avoid calling ourselves in particular) +case "${BROWSER}" in + *:"xdg-open"|"xdg-open":*) + BROWSER=$(echo $BROWSER | sed -e 's|:xdg-open||g' -e 's|xdg-open:||g') + ;; + "xdg-open") + BROWSER= + ;; +esac + +case "$DE" in + kde) + open_kde "$url" + ;; + + deepin) + open_deepin "$url" + ;; + + gnome3|cinnamon) + open_gnome3 "$url" + ;; + + gnome) + open_gnome "$url" + ;; + + mate) + open_mate "$url" + ;; + + xfce) + open_xfce "$url" + ;; + + lxde) + open_lxde "$url" + ;; + + lxqt) + open_lxqt "$url" + ;; + + enlightenment) + open_enlightenment "$url" + ;; + + cygwin) + open_cygwin "$url" + ;; + + darwin) + open_darwin "$url" + ;; + + flatpak) + open_flatpak "$url" + ;; + + generic) + open_generic "$url" + ;; + + *) + exit_failure_operation_impossible "no method available for opening '$url'" + ;; +esac diff -Nru cargo-0.53.0/vendor/pkg-config/.cargo-checksum.json cargo-0.54.0/vendor/pkg-config/.cargo-checksum.json --- cargo-0.53.0/vendor/pkg-config/.cargo-checksum.json 2021-10-21 23:54:55.000000000 +0000 +++ cargo-0.54.0/vendor/pkg-config/.cargo-checksum.json 2021-10-26 21:08:52.000000000 +0000 @@ -1 +1 @@ -{"files":{},"package":"7c9b1041b4387893b91ee6746cddfc28516aff326a3519fb2adf820932c5e6cb"} \ No newline at end of file +{"files":{},"package":"12295df4f294471248581bc09bef3c38a5e46f1e36d6a37353621a0c6c357e1f"} \ No newline at end of file diff -Nru 
cargo-0.53.0/vendor/pkg-config/Cargo.toml cargo-0.54.0/vendor/pkg-config/Cargo.toml --- cargo-0.53.0/vendor/pkg-config/Cargo.toml 2021-10-21 23:54:55.000000000 +0000 +++ cargo-0.54.0/vendor/pkg-config/Cargo.toml 2021-10-26 21:08:52.000000000 +0000 @@ -11,7 +11,7 @@ [package] name = "pkg-config" -version = "0.3.20" +version = "0.3.22" authors = ["Alex Crichton "] description = "A library to run the pkg-config system tool at build time in order to be used in\nCargo build scripts.\n" documentation = "https://docs.rs/pkg-config" diff -Nru cargo-0.53.0/vendor/pkg-config/CHANGELOG.md cargo-0.54.0/vendor/pkg-config/CHANGELOG.md --- cargo-0.53.0/vendor/pkg-config/CHANGELOG.md 2021-10-21 23:54:55.000000000 +0000 +++ cargo-0.54.0/vendor/pkg-config/CHANGELOG.md 2021-10-26 21:08:52.000000000 +0000 @@ -5,6 +5,29 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). +## [0.3.22] - 2021-10-24 + +### Fixed + +- `pkg-config` compiles again with Rust 1.30 or newer. 0.3.21 accidentally + made use of API only available since 1.40 (#124, #125). + +### Changed + +- Switched from Travis to GitHub Actions for the CI. Travis is dysfunctional + since quite some time (#126). + +## [0.3.21] - 2021-10-22 + +### Fixed + +- Tests succeed again on macOS (#122). + +### Changed + +- Improve error message in case of missing pkg-config and provide instructions + how it can be installed (#121). + ## [0.3.20] - 2021-09-25 ### Fixed diff -Nru cargo-0.53.0/vendor/pkg-config/debian/patches/no-special-snowflake-env.patch cargo-0.54.0/vendor/pkg-config/debian/patches/no-special-snowflake-env.patch --- cargo-0.53.0/vendor/pkg-config/debian/patches/no-special-snowflake-env.patch 2021-10-21 23:54:55.000000000 +0000 +++ cargo-0.54.0/vendor/pkg-config/debian/patches/no-special-snowflake-env.patch 2021-10-26 21:08:52.000000000 +0000 @@ -14,22 +14,22 @@ //! 
There are also a number of environment variables which can configure how a //! library is linked to (dynamically vs statically). These variables control //! whether the `--static` flag is passed. Note that this behavior can be -@@ -139,14 +132,6 @@ +@@ -145,14 +138,6 @@ fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { match *self { Error::EnvNoPkgConfig(ref name) => write!(f, "Aborted because {} is set", name), - Error::CrossCompilation => f.write_str( -- "pkg-config has not been configured to support cross-compilation. -- -- Install a sysroot for the target platform and configure it via -- PKG_CONFIG_SYSROOT_DIR and PKG_CONFIG_PATH, or install a -- cross-compiling wrapper for pkg-config and set it via +- "pkg-config has not been configured to support cross-compilation.\n\ +- \n\ +- Install a sysroot for the target platform and configure it via\n\ +- PKG_CONFIG_SYSROOT_DIR and PKG_CONFIG_PATH, or install a\n\ +- cross-compiling wrapper for pkg-config and set it via\n\ - PKG_CONFIG environment variable.", - ), Error::Command { ref command, ref cause, -@@ -170,7 +155,7 @@ +@@ -205,7 +190,7 @@ } Ok(()) } @@ -38,7 +38,7 @@ } } } -@@ -336,6 +321,8 @@ +@@ -371,6 +356,8 @@ if host == target { return true; } diff -Nru cargo-0.53.0/vendor/pkg-config/.pc/debian-auto-cross-compile.patch/src/lib.rs cargo-0.54.0/vendor/pkg-config/.pc/debian-auto-cross-compile.patch/src/lib.rs --- cargo-0.53.0/vendor/pkg-config/.pc/debian-auto-cross-compile.patch/src/lib.rs 2021-10-21 23:54:55.000000000 +0000 +++ cargo-0.54.0/vendor/pkg-config/.pc/debian-auto-cross-compile.patch/src/lib.rs 2021-10-26 21:08:52.000000000 +0000 @@ -97,7 +97,6 @@ } /// Represents all reasons `pkg-config` might not succeed or be run at all. -#[derive(Debug)] pub enum Error { /// Aborted because of `*_NO_PKG_CONFIG` environment variable. 
/// @@ -128,6 +127,13 @@ impl error::Error for Error {} +impl fmt::Debug for Error { + fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { + // Failed `unwrap()` prints Debug representation, but the default debug format lacks helpful instructions for the end users + ::fmt(self, f) + } +} + impl fmt::Display for Error { fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { match *self { @@ -135,7 +141,36 @@ Error::Command { ref command, ref cause, - } => write!(f, "Failed to run `{}`: {}", command, cause), + } => { + match cause.kind() { + io::ErrorKind::NotFound => { + let crate_name = + std::env::var("CARGO_PKG_NAME").unwrap_or_else(|_| "sys".to_owned()); + let instructions = if cfg!(target_os = "macos") || cfg!(target_os = "ios") { + "Try `brew install pkg-config` if you have Homebrew.\n" + } else if cfg!(unix) { + "Try `apt install pkg-config`, or `yum install pkg-config`,\n\ + or `pkg install pkg-config` depending on your distribution.\n" + } else { + "" // There's no easy fix for Windows users + }; + write!(f, "Could not run `{command}`\n\ + The pkg-config command could not be found.\n\ + \n\ + Most likely, you need to install a pkg-config package for your OS.\n\ + {instructions}\ + \n\ + If you've already installed it, ensure the pkg-config command is one of the\n\ + directories in the PATH environment variable.\n\ + \n\ + If you did not expect this build to link to a pre-installed system library,\n\ + then check documentation of the {crate_name} crate for an option to\n\ + build the library from source, or disable features or dependencies\n\ + that require pkg-config.", command = command, instructions = instructions, crate_name = crate_name) + } + _ => write!(f, "Failed to run command `{}`, because: {}", command, cause), + } + } Error::Failure { ref command, ref output, @@ -664,23 +699,25 @@ #[test] #[cfg(target_os = "macos")] fn system_library_mac_test() { + use std::path::Path; + let system_roots = vec![PathBuf::from("/Library"), 
PathBuf::from("/System")]; assert!(!is_static_available( "PluginManager", - system_roots, + &system_roots, &[PathBuf::from("/Library/Frameworks")] )); assert!(!is_static_available( "python2.7", - system_roots, + &system_roots, &[PathBuf::from( "/System/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/config" )] )); assert!(!is_static_available( "ffi_convenience", - system_roots, + &system_roots, &[PathBuf::from( "/Library/Ruby/Gems/2.0.0/gems/ffi-1.9.10/ext/ffi_c/libffi-x86_64/.libs" )] @@ -690,7 +727,7 @@ if Path::new("/usr/local/lib/libpng16.a").exists() { assert!(is_static_available( "png16", - system_roots, + &system_roots, &[PathBuf::from("/usr/local/lib")] )); diff -Nru cargo-0.53.0/vendor/pkg-config/.pc/no-special-snowflake-env.patch/src/lib.rs cargo-0.54.0/vendor/pkg-config/.pc/no-special-snowflake-env.patch/src/lib.rs --- cargo-0.53.0/vendor/pkg-config/.pc/no-special-snowflake-env.patch/src/lib.rs 2021-10-21 23:54:55.000000000 +0000 +++ cargo-0.54.0/vendor/pkg-config/.pc/no-special-snowflake-env.patch/src/lib.rs 2021-10-26 21:08:52.000000000 +0000 @@ -104,7 +104,6 @@ } /// Represents all reasons `pkg-config` might not succeed or be run at all. -#[derive(Debug)] pub enum Error { /// Aborted because of `*_NO_PKG_CONFIG` environment variable. /// @@ -135,22 +134,58 @@ impl error::Error for Error {} +impl fmt::Debug for Error { + fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { + // Failed `unwrap()` prints Debug representation, but the default debug format lacks helpful instructions for the end users + ::fmt(self, f) + } +} + impl fmt::Display for Error { fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { match *self { Error::EnvNoPkgConfig(ref name) => write!(f, "Aborted because {} is set", name), Error::CrossCompilation => f.write_str( - "pkg-config has not been configured to support cross-compilation. 
- - Install a sysroot for the target platform and configure it via - PKG_CONFIG_SYSROOT_DIR and PKG_CONFIG_PATH, or install a - cross-compiling wrapper for pkg-config and set it via + "pkg-config has not been configured to support cross-compilation.\n\ + \n\ + Install a sysroot for the target platform and configure it via\n\ + PKG_CONFIG_SYSROOT_DIR and PKG_CONFIG_PATH, or install a\n\ + cross-compiling wrapper for pkg-config and set it via\n\ PKG_CONFIG environment variable.", ), Error::Command { ref command, ref cause, - } => write!(f, "Failed to run `{}`: {}", command, cause), + } => { + match cause.kind() { + io::ErrorKind::NotFound => { + let crate_name = + std::env::var("CARGO_PKG_NAME").unwrap_or_else(|_| "sys".to_owned()); + let instructions = if cfg!(target_os = "macos") || cfg!(target_os = "ios") { + "Try `brew install pkg-config` if you have Homebrew.\n" + } else if cfg!(unix) { + "Try `apt install pkg-config`, or `yum install pkg-config`,\n\ + or `pkg install pkg-config` depending on your distribution.\n" + } else { + "" // There's no easy fix for Windows users + }; + write!(f, "Could not run `{command}`\n\ + The pkg-config command could not be found.\n\ + \n\ + Most likely, you need to install a pkg-config package for your OS.\n\ + {instructions}\ + \n\ + If you've already installed it, ensure the pkg-config command is one of the\n\ + directories in the PATH environment variable.\n\ + \n\ + If you did not expect this build to link to a pre-installed system library,\n\ + then check documentation of the {crate_name} crate for an option to\n\ + build the library from source, or disable features or dependencies\n\ + that require pkg-config.", command = command, instructions = instructions, crate_name = crate_name) + } + _ => write!(f, "Failed to run command `{}`, because: {}", command, cause), + } + } Error::Failure { ref command, ref output, @@ -677,23 +712,25 @@ #[test] #[cfg(target_os = "macos")] fn system_library_mac_test() { + use std::path::Path; + 
let system_roots = vec![PathBuf::from("/Library"), PathBuf::from("/System")]; assert!(!is_static_available( "PluginManager", - system_roots, + &system_roots, &[PathBuf::from("/Library/Frameworks")] )); assert!(!is_static_available( "python2.7", - system_roots, + &system_roots, &[PathBuf::from( "/System/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/config" )] )); assert!(!is_static_available( "ffi_convenience", - system_roots, + &system_roots, &[PathBuf::from( "/Library/Ruby/Gems/2.0.0/gems/ffi-1.9.10/ext/ffi_c/libffi-x86_64/.libs" )] @@ -703,7 +740,7 @@ if Path::new("/usr/local/lib/libpng16.a").exists() { assert!(is_static_available( "png16", - system_roots, + &system_roots, &[PathBuf::from("/usr/local/lib")] )); diff -Nru cargo-0.53.0/vendor/pkg-config/README.md cargo-0.54.0/vendor/pkg-config/README.md --- cargo-0.53.0/vendor/pkg-config/README.md 2021-10-21 23:54:55.000000000 +0000 +++ cargo-0.54.0/vendor/pkg-config/README.md 2021-10-26 21:08:52.000000000 +0000 @@ -1,6 +1,6 @@ # pkg-config-rs -[![Build Status](https://travis-ci.com/rust-lang/pkg-config-rs.svg?branch=master)](https://travis-ci.com/rust-lang/pkg-config-rs) +[![Build Status](https://github.com/rust-lang/pkg-config-rs/actions/workflows/ci.yml/badge.svg)](https://github.com/rust-lang/pkg-config-rs/actions) [![Rust](https://img.shields.io/badge/rust-1.30%2B-blue.svg?maxAge=3600)](https://github.com/rust-lang/pkg-config-rs/) [Documentation](https://docs.rs/pkg-config) @@ -10,7 +10,7 @@ library is located. You can use this crate directly to probe for specific libraries, or use -[metadeps](https://github.com/joshtriplett/metadeps) to declare all your +[system-deps](https://github.com/gdesmott/system-deps) to declare all your `pkg-config` dependencies in `Cargo.toml`. This library requires Rust 1.30+. 
diff -Nru cargo-0.53.0/vendor/pkg-config/src/lib.rs cargo-0.54.0/vendor/pkg-config/src/lib.rs --- cargo-0.53.0/vendor/pkg-config/src/lib.rs 2021-10-21 23:54:55.000000000 +0000 +++ cargo-0.54.0/vendor/pkg-config/src/lib.rs 2021-10-26 21:08:52.000000000 +0000 @@ -97,7 +97,6 @@ } /// Represents all reasons `pkg-config` might not succeed or be run at all. -#[derive(Debug)] pub enum Error { /// Aborted because of `*_NO_PKG_CONFIG` environment variable. /// @@ -128,6 +127,13 @@ impl error::Error for Error {} +impl fmt::Debug for Error { + fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { + // Failed `unwrap()` prints Debug representation, but the default debug format lacks helpful instructions for the end users + ::fmt(self, f) + } +} + impl fmt::Display for Error { fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { match *self { @@ -135,7 +141,36 @@ Error::Command { ref command, ref cause, - } => write!(f, "Failed to run `{}`: {}", command, cause), + } => { + match cause.kind() { + io::ErrorKind::NotFound => { + let crate_name = + std::env::var("CARGO_PKG_NAME").unwrap_or_else(|_| "sys".to_owned()); + let instructions = if cfg!(target_os = "macos") || cfg!(target_os = "ios") { + "Try `brew install pkg-config` if you have Homebrew.\n" + } else if cfg!(unix) { + "Try `apt install pkg-config`, or `yum install pkg-config`,\n\ + or `pkg install pkg-config` depending on your distribution.\n" + } else { + "" // There's no easy fix for Windows users + }; + write!(f, "Could not run `{command}`\n\ + The pkg-config command could not be found.\n\ + \n\ + Most likely, you need to install a pkg-config package for your OS.\n\ + {instructions}\ + \n\ + If you've already installed it, ensure the pkg-config command is one of the\n\ + directories in the PATH environment variable.\n\ + \n\ + If you did not expect this build to link to a pre-installed system library,\n\ + then check documentation of the {crate_name} crate for an option to\n\ + build the 
library from source, or disable features or dependencies\n\ + that require pkg-config.", command = command, instructions = instructions, crate_name = crate_name) + } + _ => write!(f, "Failed to run command `{}`, because: {}", command, cause), + } + } Error::Failure { ref command, ref output, @@ -668,23 +703,25 @@ #[test] #[cfg(target_os = "macos")] fn system_library_mac_test() { + use std::path::Path; + let system_roots = vec![PathBuf::from("/Library"), PathBuf::from("/System")]; assert!(!is_static_available( "PluginManager", - system_roots, + &system_roots, &[PathBuf::from("/Library/Frameworks")] )); assert!(!is_static_available( "python2.7", - system_roots, + &system_roots, &[PathBuf::from( "/System/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/config" )] )); assert!(!is_static_available( "ffi_convenience", - system_roots, + &system_roots, &[PathBuf::from( "/Library/Ruby/Gems/2.0.0/gems/ffi-1.9.10/ext/ffi_c/libffi-x86_64/.libs" )] @@ -694,7 +731,7 @@ if Path::new("/usr/local/lib/libpng16.a").exists() { assert!(is_static_available( "png16", - system_roots, + &system_roots, &[PathBuf::from("/usr/local/lib")] )); diff -Nru cargo-0.53.0/vendor/pkg-config/src/lib.rs.rej cargo-0.54.0/vendor/pkg-config/src/lib.rs.rej --- cargo-0.53.0/vendor/pkg-config/src/lib.rs.rej 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.54.0/vendor/pkg-config/src/lib.rs.rej 2021-10-26 21:08:52.000000000 +0000 @@ -0,0 +1,17 @@ +--- src/lib.rs ++++ src/lib.rs +@@ -132,14 +125,6 @@ + fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { + match *self { + Error::EnvNoPkgConfig(ref name) => write!(f, "Aborted because {} is set", name), +- Error::CrossCompilation => f.write_str( +- "pkg-config has not been configured to support cross-compilation. 
+- +- Install a sysroot for the target platform and configure it via +- PKG_CONFIG_SYSROOT_DIR and PKG_CONFIG_PATH, or install a +- cross-compiling wrapper for pkg-config and set it via +- PKG_CONFIG environment variable.", +- ), + Error::Command { + ref command, + ref cause, diff -Nru cargo-0.53.0/vendor/ppv-lite86/.cargo-checksum.json cargo-0.54.0/vendor/ppv-lite86/.cargo-checksum.json --- cargo-0.53.0/vendor/ppv-lite86/.cargo-checksum.json 2021-10-21 23:54:55.000000000 +0000 +++ cargo-0.54.0/vendor/ppv-lite86/.cargo-checksum.json 2021-10-26 21:08:52.000000000 +0000 @@ -1 +1 @@ -{"files":{},"package":"c3ca011bd0129ff4ae15cd04c4eef202cadf6c51c21e47aba319b4e0501db741"} \ No newline at end of file +{"files":{},"package":"ed0cfbc8191465bed66e1718596ee0b0b35d5ee1f41c5df2189d0fe8bde535ba"} \ No newline at end of file diff -Nru cargo-0.53.0/vendor/ppv-lite86/Cargo.toml cargo-0.54.0/vendor/ppv-lite86/Cargo.toml --- cargo-0.53.0/vendor/ppv-lite86/Cargo.toml 2021-10-21 23:54:55.000000000 +0000 +++ cargo-0.54.0/vendor/ppv-lite86/Cargo.toml 2021-10-26 21:08:52.000000000 +0000 @@ -12,7 +12,7 @@ [package] edition = "2018" name = "ppv-lite86" -version = "0.2.14" +version = "0.2.15" authors = ["The CryptoCorrosion Contributors"] description = "Implementation of the crypto-simd API for x86" keywords = ["crypto", "simd", "x86"] diff -Nru cargo-0.53.0/vendor/ppv-lite86/src/soft.rs cargo-0.54.0/vendor/ppv-lite86/src/soft.rs --- cargo-0.53.0/vendor/ppv-lite86/src/soft.rs 2021-10-21 23:54:55.000000000 +0000 +++ cargo-0.54.0/vendor/ppv-lite86/src/soft.rs 2021-10-26 21:08:52.000000000 +0000 @@ -175,26 +175,50 @@ impl StoreBytes for x2 { #[inline(always)] unsafe fn unsafe_read_le(input: &[u8]) -> Self { - let input = input.split_at(16); + let input = input.split_at(input.len() / 2); x2::new([W::unsafe_read_le(input.0), W::unsafe_read_le(input.1)]) } #[inline(always)] unsafe fn unsafe_read_be(input: &[u8]) -> Self { - x2::unsafe_read_le(input).bswap() + let input = 
input.split_at(input.len() / 2); + x2::new([W::unsafe_read_be(input.0), W::unsafe_read_be(input.1)]) } #[inline(always)] fn write_le(self, out: &mut [u8]) { - let out = out.split_at_mut(16); + let out = out.split_at_mut(out.len() / 2); self.0[0].write_le(out.0); self.0[1].write_le(out.1); } #[inline(always)] fn write_be(self, out: &mut [u8]) { - let out = out.split_at_mut(16); + let out = out.split_at_mut(out.len() / 2); self.0[0].write_be(out.0); self.0[1].write_be(out.1); } } +impl LaneWords4 for x2 { + #[inline(always)] + fn shuffle_lane_words2301(self) -> Self { + Self::new([ + self.0[0].shuffle_lane_words2301(), + self.0[1].shuffle_lane_words2301(), + ]) + } + #[inline(always)] + fn shuffle_lane_words1230(self) -> Self { + Self::new([ + self.0[0].shuffle_lane_words1230(), + self.0[1].shuffle_lane_words1230(), + ]) + } + #[inline(always)] + fn shuffle_lane_words3012(self) -> Self { + Self::new([ + self.0[0].shuffle_lane_words3012(), + self.0[1].shuffle_lane_words3012(), + ]) + } +} #[derive(Copy, Clone, Default)] #[allow(non_camel_case_types)] @@ -379,30 +403,39 @@ impl StoreBytes for x4 { #[inline(always)] unsafe fn unsafe_read_le(input: &[u8]) -> Self { + let n = input.len() / 4; x4([ - W::unsafe_read_le(&input[0..16]), - W::unsafe_read_le(&input[16..32]), - W::unsafe_read_le(&input[32..48]), - W::unsafe_read_le(&input[48..64]), + W::unsafe_read_le(&input[..n]), + W::unsafe_read_le(&input[n..n * 2]), + W::unsafe_read_le(&input[n * 2..n * 3]), + W::unsafe_read_le(&input[n * 3..]), ]) } #[inline(always)] unsafe fn unsafe_read_be(input: &[u8]) -> Self { - x4::unsafe_read_le(input).bswap() + let n = input.len() / 4; + x4([ + W::unsafe_read_be(&input[..n]), + W::unsafe_read_be(&input[n..n * 2]), + W::unsafe_read_be(&input[n * 2..n * 3]), + W::unsafe_read_be(&input[n * 3..]), + ]) } #[inline(always)] fn write_le(self, out: &mut [u8]) { - self.0[0].write_le(&mut out[0..16]); - self.0[1].write_le(&mut out[16..32]); - self.0[2].write_le(&mut out[32..48]); - 
self.0[3].write_le(&mut out[48..64]); + let n = out.len() / 4; + self.0[0].write_le(&mut out[..n]); + self.0[1].write_le(&mut out[n..n * 2]); + self.0[2].write_le(&mut out[n * 2..n * 3]); + self.0[3].write_le(&mut out[n * 3..]); } #[inline(always)] fn write_be(self, out: &mut [u8]) { - self.0[0].write_be(&mut out[0..16]); - self.0[1].write_be(&mut out[16..32]); - self.0[2].write_be(&mut out[32..48]); - self.0[3].write_be(&mut out[48..64]); + let n = out.len() / 4; + self.0[0].write_be(&mut out[..n]); + self.0[1].write_be(&mut out[n..n * 2]); + self.0[2].write_be(&mut out[n * 2..n * 3]); + self.0[3].write_be(&mut out[n * 3..]); } } impl LaneWords4 for x4 { diff -Nru cargo-0.53.0/vendor/ppv-lite86/src/types.rs cargo-0.54.0/vendor/ppv-lite86/src/types.rs --- cargo-0.53.0/vendor/ppv-lite86/src/types.rs 2021-10-21 23:54:55.000000000 +0000 +++ cargo-0.54.0/vendor/ppv-lite86/src/types.rs 2021-10-26 21:08:52.000000000 +0000 @@ -141,6 +141,7 @@ + MultiLane<[M::u32x4; 2]> + ArithOps + Into + + StoreBytes { } pub trait u64x2x2: @@ -184,6 +185,7 @@ + ArithOps + LaneWords4 + Into + + StoreBytes { } pub trait u64x2x4: diff -Nru cargo-0.53.0/vendor/ppv-lite86/src/x86_64/mod.rs cargo-0.54.0/vendor/ppv-lite86/src/x86_64/mod.rs --- cargo-0.53.0/vendor/ppv-lite86/src/x86_64/mod.rs 2021-10-21 23:54:55.000000000 +0000 +++ cargo-0.54.0/vendor/ppv-lite86/src/x86_64/mod.rs 2021-10-26 21:08:52.000000000 +0000 @@ -79,7 +79,7 @@ type u64x2 = sse2::u64x2_sse2; type u128x1 = sse2::u128x1_sse2; - type u32x4x2 = sse2::u32x4x2_sse2; + type u32x4x2 = sse2::avx2::u32x4x2_avx2; type u64x2x2 = sse2::u64x2x2_sse2; type u64x4 = sse2::u64x4_sse2; type u128x2 = sse2::u128x2_sse2; diff -Nru cargo-0.53.0/vendor/ppv-lite86/src/x86_64/sse2.rs cargo-0.54.0/vendor/ppv-lite86/src/x86_64/sse2.rs --- cargo-0.53.0/vendor/ppv-lite86/src/x86_64/sse2.rs 2021-10-21 23:54:55.000000000 +0000 +++ cargo-0.54.0/vendor/ppv-lite86/src/x86_64/sse2.rs 2021-10-26 21:08:52.000000000 +0000 @@ -1015,16 +1015,6 @@ { } -impl 
u32x4x4> for u32x4x4_sse2 -where - u32x4_sse2: RotateEachWord32 + BSwap, - Avx2Machine: Machine, - u32x4x4_sse2: MultiLane<[ as Machine>::u32x4; 4]>, - u32x4x4_sse2: Vec4< as Machine>::u32x4>, - u32x4x4_sse2: Vec4Ext< as Machine>::u32x4>, - u32x4x4_sse2: Vector<[u32; 16]>, -{ -} impl u64x2x4> for u64x2x4_sse2 where u64x2_sse2: RotateEachWord64 + RotateEachWord32 + BSwap, @@ -1387,65 +1377,80 @@ pub mod avx2 { #![allow(non_camel_case_types)] - use crate::soft::x4; + use crate::soft::{x2, x4}; use crate::types::*; - use crate::x86_64::sse2::{u128x1_sse2, u32x4_sse2}; + use crate::x86_64::sse2::{u128x1_sse2, u32x4_sse2, G0}; use crate::x86_64::{vec256_storage, vec512_storage, Avx2Machine, YesS3, YesS4}; use core::arch::x86_64::*; use core::marker::PhantomData; use core::ops::*; #[derive(Copy, Clone)] - pub struct u32x4x4_avx2 { - x: [__m256i; 2], + pub struct u32x4x2_avx2 { + x: __m256i, ni: PhantomData, } - impl u32x4x4_avx2 { + impl u32x4x2_avx2 { #[inline(always)] - fn new(x: [__m256i; 2]) -> Self { + fn new(x: __m256i) -> Self { Self { x, ni: PhantomData } } } - impl u32x4x4> for u32x4x4_avx2 where NI: Copy {} - impl Store for u32x4x4_avx2 { + impl u32x4x2> for u32x4x2_avx2 where NI: Copy {} + impl Store for u32x4x2_avx2 { #[inline(always)] - unsafe fn unpack(p: vec512_storage) -> Self { - Self::new([p.avx[0].avx, p.avx[1].avx]) + unsafe fn unpack(p: vec256_storage) -> Self { + Self::new(p.avx) } } - impl MultiLane<[u32x4_sse2; 4]> for u32x4x4_avx2 { + impl StoreBytes for u32x4x2_avx2 { #[inline(always)] - fn to_lanes(self) -> [u32x4_sse2; 4] { + unsafe fn unsafe_read_le(input: &[u8]) -> Self { + assert_eq!(input.len(), 32); + Self::new(_mm256_loadu_si256(input.as_ptr() as *const _)) + } + #[inline(always)] + unsafe fn unsafe_read_be(input: &[u8]) -> Self { + Self::unsafe_read_le(input).bswap() + } + #[inline(always)] + fn write_le(self, out: &mut [u8]) { + unsafe { + assert_eq!(out.len(), 32); + _mm256_storeu_si256(out.as_mut_ptr() as *mut _, self.x) + } + } + 
#[inline(always)] + fn write_be(self, out: &mut [u8]) { + self.bswap().write_le(out) + } + } + impl MultiLane<[u32x4_sse2; 2]> for u32x4x2_avx2 { + #[inline(always)] + fn to_lanes(self) -> [u32x4_sse2; 2] { unsafe { [ - u32x4_sse2::new(_mm256_extracti128_si256(self.x[0], 0)), - u32x4_sse2::new(_mm256_extracti128_si256(self.x[0], 1)), - u32x4_sse2::new(_mm256_extracti128_si256(self.x[1], 0)), - u32x4_sse2::new(_mm256_extracti128_si256(self.x[1], 1)), + u32x4_sse2::new(_mm256_extracti128_si256(self.x, 0)), + u32x4_sse2::new(_mm256_extracti128_si256(self.x, 1)), ] } } #[inline(always)] - fn from_lanes(x: [u32x4_sse2; 4]) -> Self { + fn from_lanes(x: [u32x4_sse2; 2]) -> Self { Self::new(unsafe { - [ - _mm256_setr_m128i(x[0].x, x[1].x), - _mm256_setr_m128i(x[2].x, x[3].x), - ] + _mm256_setr_m128i(x[0].x, x[1].x) }) } } - impl Vec4> for u32x4x4_avx2 { + impl Vec2> for u32x4x2_avx2 { #[inline(always)] fn extract(self, i: u32) -> u32x4_sse2 { unsafe { match i { - 0 => u32x4_sse2::new(_mm256_extracti128_si256(self.x[0], 0)), - 1 => u32x4_sse2::new(_mm256_extracti128_si256(self.x[0], 1)), - 2 => u32x4_sse2::new(_mm256_extracti128_si256(self.x[1], 0)), - 3 => u32x4_sse2::new(_mm256_extracti128_si256(self.x[1], 1)), + 0 => u32x4_sse2::new(_mm256_extracti128_si256(self.x, 0)), + 1 => u32x4_sse2::new(_mm256_extracti128_si256(self.x, 1)), _ => panic!(), } } @@ -1454,95 +1459,21 @@ fn insert(self, w: u32x4_sse2, i: u32) -> Self { Self::new(unsafe { match i { - 0 => [_mm256_inserti128_si256(self.x[0], w.x, 0), self.x[1]], - 1 => [_mm256_inserti128_si256(self.x[0], w.x, 1), self.x[1]], - 2 => [self.x[0], _mm256_inserti128_si256(self.x[1], w.x, 0)], - 3 => [self.x[0], _mm256_inserti128_si256(self.x[1], w.x, 1)], + 0 => _mm256_inserti128_si256(self.x, w.x, 0), + 1 => _mm256_inserti128_si256(self.x, w.x, 1), _ => panic!(), } }) } } - impl Vec4Ext> for u32x4x4_avx2 { - #[inline(always)] - fn transpose4(a: Self, b: Self, c: Self, d: Self) -> (Self, Self, Self, Self) { - /* - * a00:a01 
a10:a11 - * b00:b01 b10:b11 - * c00:c01 c10:c11 - * d00:d01 d10:d11 - * => - * a00:b00 c00:d00 - * a01:b01 c01:d01 - * a10:b10 c10:d10 - * a11:b11 c11:d11 - */ - unsafe { - let ab00 = _mm256_permute2x128_si256(a.x[0], b.x[0], 0x20); - let ab01 = _mm256_permute2x128_si256(a.x[0], b.x[0], 0x31); - let ab10 = _mm256_permute2x128_si256(a.x[1], b.x[1], 0x20); - let ab11 = _mm256_permute2x128_si256(a.x[1], b.x[1], 0x31); - let cd00 = _mm256_permute2x128_si256(c.x[0], d.x[0], 0x20); - let cd01 = _mm256_permute2x128_si256(c.x[0], d.x[0], 0x31); - let cd10 = _mm256_permute2x128_si256(c.x[1], d.x[1], 0x20); - let cd11 = _mm256_permute2x128_si256(c.x[1], d.x[1], 0x31); - ( - Self { x: [ab00, cd00], ni: a.ni }, - Self { x: [ab01, cd01], ni: a.ni }, - Self { x: [ab10, cd10], ni: a.ni }, - Self { x: [ab11, cd11], ni: a.ni }, - ) - } - } - } - impl Vector<[u32; 16]> for u32x4x4_avx2 { - #[inline(always)] - fn to_scalars(self) -> [u32; 16] { - unsafe { - core::mem::transmute(self) - } - } - } - impl LaneWords4 for u32x4x4_avx2 { - #[inline(always)] - fn shuffle_lane_words1230(self) -> Self { - Self::new(unsafe { - [ - _mm256_shuffle_epi32(self.x[0], 0b1001_0011), - _mm256_shuffle_epi32(self.x[1], 0b1001_0011), - ] - }) - } - #[inline(always)] - fn shuffle_lane_words2301(self) -> Self { - Self::new(unsafe { - [ - _mm256_shuffle_epi32(self.x[0], 0b0100_1110), - _mm256_shuffle_epi32(self.x[1], 0b0100_1110), - ] - }) - } - #[inline(always)] - fn shuffle_lane_words3012(self) -> Self { - Self::new(unsafe { - [ - _mm256_shuffle_epi32(self.x[0], 0b0011_1001), - _mm256_shuffle_epi32(self.x[1], 0b0011_1001), - ] - }) - } - } - impl BitOps32 for u32x4x4_avx2 where NI: Copy {} - impl ArithOps for u32x4x4_avx2 where NI: Copy {} + impl BitOps32 for u32x4x2_avx2 where NI: Copy {} + impl ArithOps for u32x4x2_avx2 where NI: Copy {} macro_rules! 
shuf_lane_bytes { ($name:ident, $k0:expr, $k1:expr) => { #[inline(always)] fn $name(self) -> Self { Self::new(unsafe { - [ - _mm256_shuffle_epi8(self.x[0], _mm256_set_epi64x($k0, $k1, $k0, $k1)), - _mm256_shuffle_epi8(self.x[1], _mm256_set_epi64x($k0, $k1, $k0, $k1)), - ] + _mm256_shuffle_epi8(self.x, _mm256_set_epi64x($k0, $k1, $k0, $k1)) }) } }; @@ -1552,21 +1483,15 @@ #[inline(always)] fn $name(self) -> Self { Self::new(unsafe { - [ - _mm256_or_si256( - _mm256_srli_epi32(self.x[0], $i as i32), - _mm256_slli_epi32(self.x[0], 32 - $i as i32), - ), - _mm256_or_si256( - _mm256_srli_epi32(self.x[1], $i as i32), - _mm256_slli_epi32(self.x[1], 32 - $i as i32), - ), - ] + _mm256_or_si256( + _mm256_srli_epi32(self.x, $i as i32), + _mm256_slli_epi32(self.x, 32 - $i as i32), + ) }) } }; } - impl RotateEachWord32 for u32x4x4_avx2 { + impl RotateEachWord32 for u32x4x2_avx2 { rotr_32!(rotate_each_word_right7, 7); shuf_lane_bytes!( rotate_each_word_right8, @@ -1588,15 +1513,12 @@ ); rotr_32!(rotate_each_word_right25, 25); } - impl BitOps0 for u32x4x4_avx2 where NI: Copy {} - impl From> for vec512_storage { + impl BitOps0 for u32x4x2_avx2 where NI: Copy {} + impl From> for vec256_storage { #[inline(always)] - fn from(x: u32x4x4_avx2) -> Self { + fn from(x: u32x4x2_avx2) -> Self { Self { - avx: [ - vec256_storage { avx: x.x[0] }, - vec256_storage { avx: x.x[1] }, - ], + avx: x.x, } } } @@ -1614,55 +1536,182 @@ } }; } - impl_assign!(u32x4x4_avx2, BitXorAssign, bitxor_assign, bitxor); - impl_assign!(u32x4x4_avx2, BitOrAssign, bitor_assign, bitor); - impl_assign!(u32x4x4_avx2, BitAndAssign, bitand_assign, bitand); - impl_assign!(u32x4x4_avx2, AddAssign, add_assign, add); + impl_assign!(u32x4x2_avx2, BitXorAssign, bitxor_assign, bitxor); + impl_assign!(u32x4x2_avx2, BitOrAssign, bitor_assign, bitor); + impl_assign!(u32x4x2_avx2, BitAndAssign, bitand_assign, bitand); + impl_assign!(u32x4x2_avx2, AddAssign, add_assign, add); - macro_rules! impl_bitop_x2 { + macro_rules! 
impl_bitop { ($vec:ident, $Op:ident, $op_fn:ident, $impl_fn:ident) => { impl $Op for $vec { type Output = Self; #[inline(always)] fn $op_fn(self, rhs: Self) -> Self::Output { Self::new(unsafe { - [$impl_fn(self.x[0], rhs.x[0]), $impl_fn(self.x[1], rhs.x[1])] + $impl_fn(self.x, rhs.x) }) } } }; } - impl_bitop_x2!(u32x4x4_avx2, BitXor, bitxor, _mm256_xor_si256); - impl_bitop_x2!(u32x4x4_avx2, BitOr, bitor, _mm256_or_si256); - impl_bitop_x2!(u32x4x4_avx2, BitAnd, bitand, _mm256_and_si256); - impl_bitop_x2!(u32x4x4_avx2, AndNot, andnot, _mm256_andnot_si256); - impl_bitop_x2!(u32x4x4_avx2, Add, add, _mm256_add_epi32); + impl_bitop!(u32x4x2_avx2, BitXor, bitxor, _mm256_xor_si256); + impl_bitop!(u32x4x2_avx2, BitOr, bitor, _mm256_or_si256); + impl_bitop!(u32x4x2_avx2, BitAnd, bitand, _mm256_and_si256); + impl_bitop!(u32x4x2_avx2, AndNot, andnot, _mm256_andnot_si256); + impl_bitop!(u32x4x2_avx2, Add, add, _mm256_add_epi32); - impl Not for u32x4x4_avx2 { + impl Not for u32x4x2_avx2 { type Output = Self; #[inline(always)] fn not(self) -> Self::Output { unsafe { let f = _mm256_set1_epi8(-0x7f); - Self::new([f, f]) ^ self + Self::new(f) ^ self } } } - impl BSwap for u32x4x4_avx2 { + impl BSwap for u32x4x2_avx2 { shuf_lane_bytes!(bswap, 0x0c0d_0e0f_0809_0a0b, 0x0405_0607_0001_0203); } - impl From>> for u32x4x4_avx2 + impl From, G0>> for u32x4x2_avx2 where NI: Copy, { #[inline(always)] + fn from(x: x2, G0>) -> Self { + Self::new(unsafe { + _mm256_setr_m128i(x.0[0].x, x.0[1].x) + }) + } + } + + impl LaneWords4 for u32x4x2_avx2 { + #[inline(always)] + fn shuffle_lane_words1230(self) -> Self { + Self::new(unsafe { + _mm256_shuffle_epi32(self.x, 0b1001_0011) + }) + } + #[inline(always)] + fn shuffle_lane_words2301(self) -> Self { + Self::new(unsafe { + _mm256_shuffle_epi32(self.x, 0b0100_1110) + }) + } + #[inline(always)] + fn shuffle_lane_words3012(self) -> Self { + Self::new(unsafe { + _mm256_shuffle_epi32(self.x, 0b0011_1001) + }) + } + } + + 
/////////////////////////////////////////////////////////////////////////////////////////// + + pub type u32x4x4_avx2 = x2, G0>; + impl u32x4x4> for u32x4x4_avx2 {} + + impl Store for u32x4x4_avx2 { + #[inline(always)] + unsafe fn unpack(p: vec512_storage) -> Self { + Self::new([u32x4x2_avx2::unpack(p.avx[0]), u32x4x2_avx2::unpack(p.avx[1])]) + } + } + impl MultiLane<[u32x4_sse2; 4]> for u32x4x4_avx2 { + #[inline(always)] + fn to_lanes(self) -> [u32x4_sse2; 4] { + let [a, b] = self.0[0].to_lanes(); + let [c, d] = self.0[1].to_lanes(); + [a, b, c, d] + } + #[inline(always)] + fn from_lanes(x: [u32x4_sse2; 4]) -> Self { + let ab = u32x4x2_avx2::from_lanes([x[0], x[1]]); + let cd = u32x4x2_avx2::from_lanes([x[2], x[3]]); + Self::new([ab, cd]) + } + } + impl Vec4> for u32x4x4_avx2 { + #[inline(always)] + fn extract(self, i: u32) -> u32x4_sse2 { + match i { + 0 => self.0[0].extract(0), + 1 => self.0[0].extract(1), + 2 => self.0[1].extract(0), + 3 => self.0[1].extract(1), + _ => panic!(), + } + } + #[inline(always)] + fn insert(self, w: u32x4_sse2, i: u32) -> Self { + Self::new(match i { + 0 | 1 => [self.0[0].insert(w, i), self.0[1]], + 2 | 3 => [self.0[0], self.0[1].insert(w, i - 2)], + _ => panic!(), + }) + } + } + impl Vec4Ext> for u32x4x4_avx2 { + #[inline(always)] + fn transpose4(a: Self, b: Self, c: Self, d: Self) -> (Self, Self, Self, Self) { + /* + * a00:a01 a10:a11 + * b00:b01 b10:b11 + * c00:c01 c10:c11 + * d00:d01 d10:d11 + * => + * a00:b00 c00:d00 + * a01:b01 c01:d01 + * a10:b10 c10:d10 + * a11:b11 c11:d11 + */ + unsafe { + let ab00 = u32x4x2_avx2::new(_mm256_permute2x128_si256(a.0[0].x, b.0[0].x, 0x20)); + let ab01 = u32x4x2_avx2::new(_mm256_permute2x128_si256(a.0[0].x, b.0[0].x, 0x31)); + let ab10 = u32x4x2_avx2::new(_mm256_permute2x128_si256(a.0[1].x, b.0[1].x, 0x20)); + let ab11 = u32x4x2_avx2::new(_mm256_permute2x128_si256(a.0[1].x, b.0[1].x, 0x31)); + let cd00 = u32x4x2_avx2::new(_mm256_permute2x128_si256(c.0[0].x, d.0[0].x, 0x20)); + let cd01 = 
u32x4x2_avx2::new(_mm256_permute2x128_si256(c.0[0].x, d.0[0].x, 0x31)); + let cd10 = u32x4x2_avx2::new(_mm256_permute2x128_si256(c.0[1].x, d.0[1].x, 0x20)); + let cd11 = u32x4x2_avx2::new(_mm256_permute2x128_si256(c.0[1].x, d.0[1].x, 0x31)); + ( + Self::new([ab00, cd00]), + Self::new([ab01, cd01]), + Self::new([ab10, cd10]), + Self::new([ab11, cd11]), + ) + } + } + } + impl Vector<[u32; 16]> for u32x4x4_avx2 { + #[inline(always)] + fn to_scalars(self) -> [u32; 16] { + unsafe { + core::mem::transmute(self) + } + } + } + impl From> for vec512_storage { + #[inline(always)] + fn from(x: u32x4x4_avx2) -> Self { + Self { + avx: [ + vec256_storage { avx: x.0[0].x }, + vec256_storage { avx: x.0[1].x }, + ], + } + } + } + impl From>> for u32x4x4_avx2 + { + #[inline(always)] fn from(x: x4>) -> Self { Self::new(unsafe { [ - _mm256_setr_m128i(x.0[0].x, x.0[1].x), - _mm256_setr_m128i(x.0[2].x, x.0[3].x), + u32x4x2_avx2::new(_mm256_setr_m128i(x.0[0].x, x.0[1].x)), + u32x4x2_avx2::new(_mm256_setr_m128i(x.0[2].x, x.0[3].x)), ] }) } diff -Nru cargo-0.53.0/vendor/proc-macro2/build.rs cargo-0.54.0/vendor/proc-macro2/build.rs --- cargo-0.53.0/vendor/proc-macro2/build.rs 2021-10-21 23:54:55.000000000 +0000 +++ cargo-0.54.0/vendor/proc-macro2/build.rs 2021-10-26 21:08:52.000000000 +0000 @@ -60,7 +60,7 @@ let semver_exempt = cfg!(procmacro2_semver_exempt); if semver_exempt { - // https://github.com/alexcrichton/proc-macro2/issues/147 + // https://github.com/dtolnay/proc-macro2/issues/147 println!("cargo:rustc-cfg=procmacro2_semver_exempt"); } diff -Nru cargo-0.53.0/vendor/proc-macro2/.cargo-checksum.json cargo-0.54.0/vendor/proc-macro2/.cargo-checksum.json --- cargo-0.53.0/vendor/proc-macro2/.cargo-checksum.json 2021-10-21 23:54:55.000000000 +0000 +++ cargo-0.54.0/vendor/proc-macro2/.cargo-checksum.json 2021-10-26 21:08:52.000000000 +0000 @@ -1 +1 @@ -{"files":{},"package":"edc3358ebc67bc8b7fa0c007f945b0b18226f78437d61bec735a9eb96b61ee70"} \ No newline at end of file 
+{"files":{},"package":"ba508cc11742c0dc5c1659771673afbab7a0efab23aa17e854cbab0837ed0b43"} \ No newline at end of file diff -Nru cargo-0.53.0/vendor/proc-macro2/Cargo.toml cargo-0.54.0/vendor/proc-macro2/Cargo.toml --- cargo-0.53.0/vendor/proc-macro2/Cargo.toml 2021-10-21 23:54:55.000000000 +0000 +++ cargo-0.54.0/vendor/proc-macro2/Cargo.toml 2021-10-26 21:08:52.000000000 +0000 @@ -13,15 +13,15 @@ edition = "2018" rust-version = "1.31" name = "proc-macro2" -version = "1.0.30" -authors = ["Alex Crichton ", "David Tolnay "] +version = "1.0.32" +authors = ["David Tolnay ", "Alex Crichton "] description = "A substitute implementation of the compiler's `proc_macro` API to decouple\ntoken-based libraries from the procedural macro use case.\n" documentation = "https://docs.rs/proc-macro2" readme = "README.md" keywords = ["macros"] categories = ["development-tools::procedural-macro-helpers"] license = "MIT OR Apache-2.0" -repository = "https://github.com/alexcrichton/proc-macro2" +repository = "https://github.com/dtolnay/proc-macro2" [package.metadata.docs.rs] rustc-args = ["--cfg", "procmacro2_semver_exempt"] rustdoc-args = ["--cfg", "procmacro2_semver_exempt", "--cfg", "doc_cfg"] diff -Nru cargo-0.53.0/vendor/proc-macro2/README.md cargo-0.54.0/vendor/proc-macro2/README.md --- cargo-0.53.0/vendor/proc-macro2/README.md 2021-10-21 23:54:55.000000000 +0000 +++ cargo-0.54.0/vendor/proc-macro2/README.md 2021-10-26 21:08:52.000000000 +0000 @@ -1,8 +1,9 @@ # proc-macro2 -[![Build Status](https://img.shields.io/github/workflow/status/alexcrichton/proc-macro2/build%20and%20test)](https://github.com/alexcrichton/proc-macro2/actions) -[![Latest Version](https://img.shields.io/crates/v/proc-macro2.svg)](https://crates.io/crates/proc-macro2) -[![Rust Documentation](https://img.shields.io/badge/api-rustdoc-blue.svg)](https://docs.rs/proc-macro2) +[github](https://github.com/dtolnay/proc-macro2) +[crates.io](https://crates.io/crates/proc-macro2) +[docs.rs](https://docs.rs/proc-macro2) 
+[build status](https://github.com/dtolnay/proc-macro2/actions?query=branch%3Amaster) A wrapper around the procedural macro API of the compiler's `proc_macro` crate. This library serves two purposes: diff -Nru cargo-0.53.0/vendor/proc-macro2/src/fallback.rs cargo-0.54.0/vendor/proc-macro2/src/fallback.rs --- cargo-0.53.0/vendor/proc-macro2/src/fallback.rs 2021-10-21 23:54:55.000000000 +0000 +++ cargo-0.54.0/vendor/proc-macro2/src/fallback.rs 2021-10-26 21:08:52.000000000 +0000 @@ -65,7 +65,7 @@ } fn push_token(&mut self, token: TokenTree) { - // https://github.com/alexcrichton/proc-macro2/issues/235 + // https://github.com/dtolnay/proc-macro2/issues/235 match token { #[cfg(not(no_bind_by_move_pattern_guard))] TokenTree::Literal(crate::Literal { @@ -896,10 +896,20 @@ impl FromStr for Literal { type Err = LexError; - fn from_str(repr: &str) -> Result { + fn from_str(mut repr: &str) -> Result { + let negative = repr.starts_with('-'); + if negative { + repr = &repr[1..]; + if !repr.starts_with(|ch: char| ch.is_ascii_digit()) { + return Err(LexError::call_site()); + } + } let cursor = get_cursor(repr); - if let Ok((_rest, literal)) = parse::literal(cursor) { + if let Ok((_rest, mut literal)) = parse::literal(cursor) { if literal.text.len() == repr.len() { + if negative { + literal.text.insert(0, '-'); + } return Ok(literal); } } diff -Nru cargo-0.53.0/vendor/proc-macro2/src/lib.rs cargo-0.54.0/vendor/proc-macro2/src/lib.rs --- cargo-0.53.0/vendor/proc-macro2/src/lib.rs 2021-10-21 23:54:55.000000000 +0000 +++ cargo-0.54.0/vendor/proc-macro2/src/lib.rs 2021-10-26 21:08:52.000000000 +0000 @@ -1,3 +1,11 @@ +//! [![github]](https://github.com/dtolnay/proc-macro2) [![crates-io]](https://crates.io/crates/proc-macro2) [![docs-rs]](crate) +//! +//! [github]: https://img.shields.io/badge/github-8da0cb?style=for-the-badge&labelColor=555555&logo=github +//! [crates-io]: https://img.shields.io/badge/crates.io-fc8d62?style=for-the-badge&labelColor=555555&logo=rust +//! 
[docs-rs]: https://img.shields.io/badge/docs.rs-66c2a5?style=for-the-badge&labelColor=555555&logoColor=white&logo=data:image/svg+xml;base64,PHN2ZyByb2xlPSJpbWciIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyIgdmlld0JveD0iMCAwIDUxMiA1MTIiPjxwYXRoIGZpbGw9IiNmNWY1ZjUiIGQ9Ik00ODguNiAyNTAuMkwzOTIgMjE0VjEwNS41YzAtMTUtOS4zLTI4LjQtMjMuNC0zMy43bC0xMDAtMzcuNWMtOC4xLTMuMS0xNy4xLTMuMS0yNS4zIDBsLTEwMCAzNy41Yy0xNC4xIDUuMy0yMy40IDE4LjctMjMuNCAzMy43VjIxNGwtOTYuNiAzNi4yQzkuMyAyNTUuNSAwIDI2OC45IDAgMjgzLjlWMzk0YzAgMTMuNiA3LjcgMjYuMSAxOS45IDMyLjJsMTAwIDUwYzEwLjEgNS4xIDIyLjEgNS4xIDMyLjIgMGwxMDMuOS01MiAxMDMuOSA1MmMxMC4xIDUuMSAyMi4xIDUuMSAzMi4yIDBsMTAwLTUwYzEyLjItNi4xIDE5LjktMTguNiAxOS45LTMyLjJWMjgzLjljMC0xNS05LjMtMjguNC0yMy40LTMzLjd6TTM1OCAyMTQuOGwtODUgMzEuOXYtNjguMmw4NS0zN3Y3My4zek0xNTQgMTA0LjFsMTAyLTM4LjIgMTAyIDM4LjJ2LjZsLTEwMiA0MS40LTEwMi00MS40di0uNnptODQgMjkxLjFsLTg1IDQyLjV2LTc5LjFsODUtMzguOHY3NS40em0wLTExMmwtMTAyIDQxLjQtMTAyLTQxLjR2LS42bDEwMi0zOC4yIDEwMiAzOC4ydi42em0yNDAgMTEybC04NSA0Mi41di03OS4xbDg1LTM4Ljh2NzUuNHptMC0xMTJsLTEwMiA0MS40LTEwMi00MS40di0uNmwxMDItMzguMiAxMDIgMzguMnYuNnoiPjwvcGF0aD48L3N2Zz4K +//! +//!
+//! //! A wrapper around the procedural macro API of the compiler's [`proc_macro`] //! crate. This library serves two purposes: //! @@ -78,7 +86,7 @@ //! a different thread. // Proc-macro2 types in rustdoc of other crates get linked to here. -#![doc(html_root_url = "https://docs.rs/proc-macro2/1.0.30")] +#![doc(html_root_url = "https://docs.rs/proc-macro2/1.0.32")] #![cfg_attr(any(proc_macro_span, super_unstable), feature(proc_macro_span))] #![cfg_attr(super_unstable, feature(proc_macro_def_site))] #![cfg_attr(doc_cfg, feature(doc_cfg))] diff -Nru cargo-0.53.0/vendor/proc-macro2/tests/test.rs cargo-0.54.0/vendor/proc-macro2/tests/test.rs --- cargo-0.53.0/vendor/proc-macro2/tests/test.rs 2021-10-21 23:54:55.000000000 +0000 +++ cargo-0.54.0/vendor/proc-macro2/tests/test.rs 2021-10-26 21:08:52.000000000 +0000 @@ -168,7 +168,11 @@ #[test] fn literal_parse() { assert!("1".parse::().is_ok()); + assert!("-1".parse::().is_ok()); + assert!("-1u12".parse::().is_ok()); assert!("1.0".parse::().is_ok()); + assert!("-1.0".parse::().is_ok()); + assert!("-1.0f12".parse::().is_ok()); assert!("'a'".parse::().is_ok()); assert!("\"\n\"".parse::().is_ok()); assert!("0 1".parse::().is_err()); @@ -177,6 +181,9 @@ assert!("/* comment */0".parse::().is_err()); assert!("0/* comment */".parse::().is_err()); assert!("0// comment".parse::().is_err()); + assert!("- 1".parse::().is_err()); + assert!("- 1.0".parse::().is_err()); + assert!("-\"\"".parse::().is_err()); } #[test] diff -Nru cargo-0.53.0/vendor/syn/build.rs cargo-0.54.0/vendor/syn/build.rs --- cargo-0.53.0/vendor/syn/build.rs 2021-10-21 23:54:55.000000000 +0000 +++ cargo-0.54.0/vendor/syn/build.rs 2021-10-26 21:08:52.000000000 +0000 @@ -19,6 +19,10 @@ println!("cargo:rustc-cfg=syn_no_const_vec_new"); } + if compiler.minor < 56 { + println!("cargo:rustc-cfg=syn_no_negative_literal_parse"); + } + if !compiler.nightly { println!("cargo:rustc-cfg=syn_disable_nightly_tests"); } diff -Nru cargo-0.53.0/vendor/syn/.cargo-checksum.json 
cargo-0.54.0/vendor/syn/.cargo-checksum.json --- cargo-0.53.0/vendor/syn/.cargo-checksum.json 2021-10-21 23:54:55.000000000 +0000 +++ cargo-0.54.0/vendor/syn/.cargo-checksum.json 2021-10-26 21:08:52.000000000 +0000 @@ -1 +1 @@ -{"files":{},"package":"d010a1623fbd906d51d650a9916aaefc05ffa0e4053ff7fe601167f3e715d194"} \ No newline at end of file +{"files":{},"package":"f2afee18b8beb5a596ecb4a2dce128c719b4ba399d34126b9e4396e3f9860966"} \ No newline at end of file diff -Nru cargo-0.53.0/vendor/syn/Cargo.toml cargo-0.54.0/vendor/syn/Cargo.toml --- cargo-0.53.0/vendor/syn/Cargo.toml 2021-10-21 23:54:55.000000000 +0000 +++ cargo-0.54.0/vendor/syn/Cargo.toml 2021-10-26 21:08:52.000000000 +0000 @@ -13,7 +13,7 @@ edition = "2018" rust-version = "1.31" name = "syn" -version = "1.0.80" +version = "1.0.81" authors = ["David Tolnay "] include = ["/benches/**", "/build.rs", "/Cargo.toml", "/LICENSE-APACHE", "/LICENSE-MIT", "/README.md", "/src/**", "/tests/**"] description = "Parser for Rust source code" @@ -39,7 +39,7 @@ name = "file" required-features = ["full", "parsing"] [dependencies.proc-macro2] -version = "1.0" +version = "1.0.32" default-features = false [dependencies.quote] diff -Nru cargo-0.53.0/vendor/syn/debian/patches/relax-deps.patch cargo-0.54.0/vendor/syn/debian/patches/relax-deps.patch --- cargo-0.53.0/vendor/syn/debian/patches/relax-deps.patch 2021-10-21 23:54:55.000000000 +0000 +++ cargo-0.54.0/vendor/syn/debian/patches/relax-deps.patch 2021-10-26 21:08:52.000000000 +0000 @@ -1,15 +1,6 @@ --- a/Cargo.toml +++ b/Cargo.toml -@@ -38,7 +38,7 @@ required-features = ["full", "parsing"] - name = "file" - required-features = ["full", "parsing"] - [dependencies.proc-macro2] --version = "1.0.26" -+version = "1.0" - default-features = false - - [dependencies.quote] -@@ -67,11 +67,7 @@ version = "1.0" +@@ -68,11 +68,7 @@ version = "1.0" [dev-dependencies.reqwest] @@ -22,7 +13,7 @@ [dev-dependencies.tar] version = "0.4.16" -@@ -92,6 +88,5 @@ full = [] +@@ -93,6 +89,5 @@ 
parsing = [] printing = ["quote"] proc-macro = ["proc-macro2/proc-macro", "quote/proc-macro"] @@ -31,7 +22,7 @@ visit-mut = [] --- a/tests/repo/mod.rs +++ b/tests/repo/mod.rs -@@ -135,7 +135,7 @@ fn download_and_unpack() -> Result<()> { +@@ -127,7 +127,7 @@ "https://github.com/rust-lang/rust/archive/{}.tar.gz", REVISION ); diff -Nru cargo-0.53.0/vendor/syn/.pc/relax-deps.patch/Cargo.toml cargo-0.54.0/vendor/syn/.pc/relax-deps.patch/Cargo.toml --- cargo-0.53.0/vendor/syn/.pc/relax-deps.patch/Cargo.toml 2021-10-21 23:54:55.000000000 +0000 +++ cargo-0.54.0/vendor/syn/.pc/relax-deps.patch/Cargo.toml 2021-10-26 21:08:52.000000000 +0000 @@ -13,7 +13,7 @@ edition = "2018" rust-version = "1.31" name = "syn" -version = "1.0.80" +version = "1.0.81" authors = ["David Tolnay "] include = ["/benches/**", "/build.rs", "/Cargo.toml", "/LICENSE-APACHE", "/LICENSE-MIT", "/README.md", "/src/**", "/tests/**"] description = "Parser for Rust source code" @@ -39,7 +39,7 @@ name = "file" required-features = ["full", "parsing"] [dependencies.proc-macro2] -version = "1.0.26" +version = "1.0.32" default-features = false [dependencies.quote] diff -Nru cargo-0.53.0/vendor/syn/src/lib.rs cargo-0.54.0/vendor/syn/src/lib.rs --- cargo-0.53.0/vendor/syn/src/lib.rs 2021-10-21 23:54:55.000000000 +0000 +++ cargo-0.54.0/vendor/syn/src/lib.rs 2021-10-26 21:08:52.000000000 +0000 @@ -250,7 +250,7 @@ //! dynamic library libproc_macro from rustc toolchain. // Syn types in rustdoc of other crates get linked to here. -#![doc(html_root_url = "https://docs.rs/syn/1.0.80")] +#![doc(html_root_url = "https://docs.rs/syn/1.0.81")] #![cfg_attr(doc_cfg, feature(doc_cfg))] #![allow(non_camel_case_types)] // Ignored clippy lints. 
@@ -824,6 +824,7 @@ #[cfg(all(any(feature = "full", feature = "derive"), feature = "printing"))] mod print; +#[cfg(any(feature = "full", feature = "derive"))] use crate::__private::private; //////////////////////////////////////////////////////////////////////////////// diff -Nru cargo-0.53.0/vendor/syn/src/lit.rs cargo-0.54.0/vendor/syn/src/lit.rs --- cargo-0.53.0/vendor/syn/src/lit.rs 2021-10-21 23:54:55.000000000 +0000 +++ cargo-0.54.0/vendor/syn/src/lit.rs 2021-10-26 21:08:52.000000000 +0000 @@ -925,7 +925,6 @@ mod value { use super::*; use crate::bigint::BigInt; - use proc_macro2::TokenStream; use std::char; use std::ops::{Index, RangeFrom}; @@ -1540,35 +1539,37 @@ } } + #[allow(clippy::unnecessary_wraps)] pub fn to_literal(repr: &str, digits: &str, suffix: &str) -> Option { - if repr.starts_with('-') { - let f64_parse_finite = || digits.parse().ok().filter(|x: &f64| x.is_finite()); - let f32_parse_finite = || digits.parse().ok().filter(|x: &f32| x.is_finite()); - if suffix == "f64" { - f64_parse_finite().map(Literal::f64_suffixed) - } else if suffix == "f32" { - f32_parse_finite().map(Literal::f32_suffixed) - } else if suffix == "i64" { - digits.parse().ok().map(Literal::i64_suffixed) - } else if suffix == "i32" { - digits.parse().ok().map(Literal::i32_suffixed) - } else if suffix == "i16" { - digits.parse().ok().map(Literal::i16_suffixed) - } else if suffix == "i8" { - digits.parse().ok().map(Literal::i8_suffixed) - } else if !suffix.is_empty() { - None - } else if digits.contains('.') { - f64_parse_finite().map(Literal::f64_unsuffixed) - } else { - digits.parse().ok().map(Literal::i64_unsuffixed) - } - } else { - let stream = repr.parse::().unwrap(); - match stream.into_iter().next().unwrap() { - TokenTree::Literal(l) => Some(l), - _ => unreachable!(), - } - } + #[cfg(syn_no_negative_literal_parse)] + { + // Rustc older than https://github.com/rust-lang/rust/pull/87262. 
+ if repr.starts_with('-') { + let f64_parse_finite = || digits.parse().ok().filter(|x: &f64| x.is_finite()); + let f32_parse_finite = || digits.parse().ok().filter(|x: &f32| x.is_finite()); + return if suffix == "f64" { + f64_parse_finite().map(Literal::f64_suffixed) + } else if suffix == "f32" { + f32_parse_finite().map(Literal::f32_suffixed) + } else if suffix == "i64" { + digits.parse().ok().map(Literal::i64_suffixed) + } else if suffix == "i32" { + digits.parse().ok().map(Literal::i32_suffixed) + } else if suffix == "i16" { + digits.parse().ok().map(Literal::i16_suffixed) + } else if suffix == "i8" { + digits.parse().ok().map(Literal::i8_suffixed) + } else if !suffix.is_empty() { + None + } else if digits.contains('.') { + f64_parse_finite().map(Literal::f64_unsuffixed) + } else { + digits.parse().ok().map(Literal::i64_unsuffixed) + }; + } + } + let _ = digits; + let _ = suffix; + Some(repr.parse::().unwrap()) } } diff -Nru cargo-0.53.0/vendor/syn/tests/debug/mod.rs cargo-0.54.0/vendor/syn/tests/debug/mod.rs --- cargo-0.53.0/vendor/syn/tests/debug/mod.rs 2021-10-21 23:54:55.000000000 +0000 +++ cargo-0.54.0/vendor/syn/tests/debug/mod.rs 2021-10-26 21:08:52.000000000 +0000 @@ -1,4 +1,8 @@ -#![allow(clippy::too_many_lines, clippy::used_underscore_binding)] +#![allow( + clippy::no_effect_underscore_binding, + clippy::too_many_lines, + clippy::used_underscore_binding +)] mod gen; diff -Nru cargo-0.53.0/vendor/syn/tests/test_lit.rs cargo-0.54.0/vendor/syn/tests/test_lit.rs --- cargo-0.53.0/vendor/syn/tests/test_lit.rs 2021-10-21 23:54:55.000000000 +0000 +++ cargo-0.54.0/vendor/syn/tests/test_lit.rs 2021-10-26 21:08:52.000000000 +0000 @@ -216,12 +216,6 @@ } #[test] -fn negative_overflow() { - assert!(syn::parse_str::("-1.0e99f64").is_ok()); - assert!(syn::parse_str::("-1.0e999f64").is_err()); -} - -#[test] fn suffix() { fn get_suffix(token: &str) -> String { let lit = syn::parse_str::(token).unwrap();