diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/benches/capture/Cargo.toml cargo-0.67.1+ds0ubuntu0.libgit2/benches/capture/Cargo.toml --- cargo-0.66.0+ds0ubuntu0.libgit2/benches/capture/Cargo.toml 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/benches/capture/Cargo.toml 2023-01-10 13:41:19.000000000 +0000 @@ -9,4 +9,4 @@ cargo_metadata = "0.14.0" flate2 = { version = "1.0.3", default-features = false, features = ["zlib"] } tar = { version = "0.4.38", default-features = false } -toml_edit = { version = "0.14.3", features = ["serde", "easy", "perf"] } +toml_edit = { version = "0.15.0", features = ["serde", "easy", "perf"] } diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/Cargo.toml cargo-0.67.1+ds0ubuntu0.libgit2/Cargo.toml --- cargo-0.66.0+ds0ubuntu0.libgit2/Cargo.toml 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/Cargo.toml 2023-01-10 13:41:19.000000000 +0000 @@ -1,6 +1,6 @@ [package] name = "cargo" -version = "0.66.0" +version = "0.67.1" edition = "2021" license = "MIT OR Apache-2.0" homepage = "https://crates.io" @@ -17,6 +17,7 @@ [dependencies] atty = "0.2" +base64 = "0.13.1" bytesize = "1.0" cargo-platform = { path = "crates/cargo-platform", version = "0.1.2" } cargo-util = { path = "crates/cargo-util", version = "0.2.1" } @@ -28,10 +29,11 @@ anyhow = "1.0" filetime = "0.2.9" flate2 = { version = "1.0.3", default-features = false, features = ["zlib"] } -git2 = "0.15.0" -git2-curl = "0.16.0" +git2 = "0.16.0" +git2-curl = "0.17.0" glob = "0.3.0" hex = "0.4" +hmac = "0.12.1" home = "0.5" humantime = "2.0.0" indexmap = "1" @@ -41,7 +43,7 @@ lazycell = "1.2.0" libc = "0.2" log = "0.4.6" -libgit2-sys = "0.14.0" +libgit2-sys = "0.14.1" memchr = "2.1.3" opener = "0.5" os_info = "3.5.0" @@ -52,16 +54,18 @@ serde = { version = "1.0.123", features = ["derive"] } serde_ignored = "0.1.0" serde_json = { version = "1.0.30", features = ["raw_value"] } +serde-value = "0.7.0" +sha1 = "0.10.5" shell-escape = "0.1.4" strip-ansi-escapes = "0.1.0" tar = { version = "0.4.38", default-features = false } tempfile = "3.0" termcolor = "1.1" -toml_edit = { version = "0.14.3", features = ["serde", "easy", "perf"] } +toml_edit = { version = "0.15.0", features = ["serde", "easy", "perf"] } unicode-xid = "0.2.0" url = "2.2.2" walkdir = "2.2" -clap = "3.2.18" +clap = "4.0.15" unicode-width = "0.1.5" openssl = { version = '0.10.11', optional = true } im-rc = "15.0.0" @@ -99,6 +103,7 @@ [dev-dependencies] cargo-test-macro = { path = "crates/cargo-test-macro" } cargo-test-support = { path = "crates/cargo-test-support" } +same-file = "1.0.6" snapbox = { version = "0.3.0", features = ["diff", "path"] } [build-dependencies] @@ -113,4 +118,5 @@ [features] deny-warnings = [] vendored-openssl = ["openssl/vendored"] +vendored-libgit2 = ["libgit2-sys/vendored"] pretty-env-logger = ["pretty_env_logger"] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/CHANGELOG.md cargo-0.67.1+ds0ubuntu0.libgit2/CHANGELOG.md --- cargo-0.66.0+ds0ubuntu0.libgit2/CHANGELOG.md 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/CHANGELOG.md 2023-01-10 13:41:19.000000000 +0000 @@ -1,20 +1,93 @@ # Changelog +## Cargo 1.66.1 (2023-01-10) + +### Fixed +- [CVE-2022-46176](https://github.com/rust-lang/cargo/security/advisories/GHSA-r5w3-xm58-jv6j): + Added validation of SSH host keys for git URLs. + See [the docs](https://doc.rust-lang.org/cargo/appendix/git-authentication.html#ssh-known-hosts) for more information on how to configure the known host keys. 
+ + +## Cargo 1.66 (2022-12-15) +[08250398...HEAD](https://github.com/rust-lang/cargo/compare/08250398...HEAD) + +### Added + +### Changed + +### Fixed + +### Nightly only + ## Cargo 1.65 (2022-11-03) -[4fd148c4...HEAD](https://github.com/rust-lang/cargo/compare/4fd148c4...HEAD) +[4fd148c4...rust-1.65.0](https://github.com/rust-lang/cargo/compare/4fd148c4...rust-1.65.0) ### Added +- External subcommands can now inherit jobserver file descriptors from Cargo. + [#10511](https://github.com/rust-lang/cargo/pull/10511) +- Added an API documentation for private items in cargo-the-library. See + . + [#11019](https://github.com/rust-lang/cargo/pull/11019) + ### Changed +- Cargo now stops adding its bin path to `PATH` if it's already there. + [#11023](https://github.com/rust-lang/cargo/pull/11023) +- Improved the performance of Cargo build scheduling + by sorting the queue of pending jobs. + [#11032](https://github.com/rust-lang/cargo/pull/11032) +- Improved the performance fetching git dependencies from GitHub even + when using a partial hash in the `rev` field. + [#10807](https://github.com/rust-lang/cargo/pull/10807) +- Cargo now uses git2 v0.15 and libgit2-sys v0.14, + which bring several compatibility fixes with git's new behaviors. + [#11004](https://github.com/rust-lang/cargo/pull/11004) +- Registry index files are cached in a more granular way based on content hash. + [#11044](https://github.com/rust-lang/cargo/pull/11044) +- Cargo now uses the standard library's `std::thread::scope` instead of the + `crossbeam` crate for spawning scoped threads. + [#10977](https://github.com/rust-lang/cargo/pull/10977) - Cargo now uses the standard library's `available_parallelism` instead of the `num_cpus` crate for determining the default parallelism. [#10969](https://github.com/rust-lang/cargo/pull/10969) +- Cargo now guides you how to solve it when seeing an error message of + `rust-version` requirement not satisfied. + [#10891](https://github.com/rust-lang/cargo/pull/10891) +- Cargo now tells you more about possible causes and how to fix it + when a subcommand cannot be found. + [#10924](https://github.com/rust-lang/cargo/pull/10924) +- Cargo now lists available target names when a given Cargo target cannot be found. + [#10999](https://github.com/rust-lang/cargo/pull/10999) +- `cargo update` now warns if `--precise` is given without `--package` flag. + This will become a hard error after a transition period. + [#10988](https://github.com/rust-lang/cargo/pull/10988) + [#11011](https://github.com/rust-lang/cargo/pull/11011) +- `cargo bench` and `cargo test` now report a more precise test execution error + right after a test fails. + [#11028](https://github.com/rust-lang/cargo/pull/11028) +- `cargo add` now tells you for which version the features are added. + [#11075](https://github.com/rust-lang/cargo/pull/11075) +- Call out that non-ASCII crate names are not supported by Rust anymore. + [#11017](https://github.com/rust-lang/cargo/pull/11017) +- Enhanced the error message when in the manifest a field is expected to be + an array but a string is used. + [#10944](https://github.com/rust-lang/cargo/pull/10944) ### Fixed -### Nightly only +- Removed the restriction on file locking supports on platforms other than Linux. + [#10975](https://github.com/rust-lang/cargo/pull/10975) +- Fixed incorrect OS detection by bumping os_info to 3.5.0. + [#10943](https://github.com/rust-lang/cargo/pull/10943) +- Scanning the package directory now ignores errors from broken + but excluded symlink files. 
+ [#11008](https://github.com/rust-lang/cargo/pull/11008) +### Nightly + +- Progress indicator for sparse registries becomes more straightfoward. + [#11068](https://github.com/rust-lang/cargo/pull/11068) ## Cargo 1.64 (2022-09-22) [a5e08c47...rust-1.64.0](https://github.com/rust-lang/cargo/compare/a5e08c47...rust-1.64.0) @@ -23,9 +96,9 @@ - 🎉 Packages can now inherit settings from the workspace so that the settings can be centralized in one place. See - [`workspace.package`](https://doc.rust-lang.org/nightly/cargo/reference/workspaces.html#the-workspacepackage-table) + [`workspace.package`](https://doc.rust-lang.org/nightly/cargo/reference/workspaces.html#the-package-table) and - [`workspace.dependencies`](https://doc.rust-lang.org/nightly/cargo/reference/workspaces.html#the-workspacedependencies-table) + [`workspace.dependencies`](https://doc.rust-lang.org/nightly/cargo/reference/workspaces.html#the-dependencies-table) for more details on how to define these common settings. [#10859](https://github.com/rust-lang/cargo/pull/10859) - Added the @@ -64,6 +137,15 @@ [#10784](https://github.com/rust-lang/cargo/pull/10784) ### Fixed + +- [CVE-2022-36113](https://github.com/rust-lang/cargo/security/advisories/GHSA-rfj2-q3h3-hm5j): + Extracting malicious crates can corrupt arbitrary files. + [#11089](https://github.com/rust-lang/cargo/pull/11089) + [#11088](https://github.com/rust-lang/cargo/pull/11088) +- [CVE-2022-36114](https://github.com/rust-lang/cargo/security/advisories/GHSA-2hvr-h6gw-qrxp): + Extracting malicious crates can fill the file system. + [#11089](https://github.com/rust-lang/cargo/pull/11089) + [#11088](https://github.com/rust-lang/cargo/pull/11088) - The `os` output in `cargo --version --verbose` now supports more platforms. [#10802](https://github.com/rust-lang/cargo/pull/10802) - Cached git checkouts will now be rebuilt if they are corrupted. 
This may diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/crates/cargo-test-support/Cargo.toml cargo-0.67.1+ds0ubuntu0.libgit2/crates/cargo-test-support/Cargo.toml --- cargo-0.66.0+ds0ubuntu0.libgit2/crates/cargo-test-support/Cargo.toml 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/crates/cargo-test-support/Cargo.toml 2023-01-10 13:41:19.000000000 +0000 @@ -11,10 +11,11 @@ anyhow = "1.0.34" cargo-test-macro = { path = "../cargo-test-macro" } cargo-util = { path = "../cargo-util" } +crates-io = { path = "../crates-io" } snapbox = { version = "0.3.0", features = ["diff", "path"] } filetime = "0.2" flate2 = { version = "1.0", default-features = false, features = ["zlib"] } -git2 = "0.15.0" +git2 = "0.16.0" glob = "0.3" itertools = "0.10.0" lazy_static = "1.0" @@ -22,7 +23,7 @@ serde_json = "1.0" tar = { version = "0.4.38", default-features = false } termcolor = "1.1.2" -toml_edit = { version = "0.14.3", features = ["serde", "easy", "perf"] } +toml_edit = { version = "0.15.0", features = ["serde", "easy", "perf"] } url = "2.2.2" [target.'cfg(windows)'.dependencies] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/crates/cargo-test-support/src/compare.rs cargo-0.67.1+ds0ubuntu0.libgit2/crates/cargo-test-support/src/compare.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/crates/cargo-test-support/src/compare.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/crates/cargo-test-support/src/compare.rs 2023-01-10 13:41:19.000000000 +0000 @@ -197,6 +197,7 @@ ("[MIGRATING]", " Migrating"), ("[EXECUTABLE]", " Executable"), ("[SKIPPING]", " Skipping"), + ("[WAITING]", " Waiting"), ]; let mut result = input.to_owned(); for &(pat, subst) in ¯os { diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/crates/cargo-test-support/src/lib.rs cargo-0.67.1+ds0ubuntu0.libgit2/crates/cargo-test-support/src/lib.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/crates/cargo-test-support/src/lib.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/crates/cargo-test-support/src/lib.rs 2023-01-10 13:41:19.000000000 +0000 @@ -818,6 +818,17 @@ self } + /// Overrides the crates.io URL for testing. + /// + /// Can be used for testing crates-io functionality where alt registries + /// cannot be used. 
+ pub fn replace_crates_io(&mut self, url: &Url) -> &mut Self { + if let Some(ref mut p) = self.process_builder { + p.env("__CARGO_TEST_CRATES_IO_URL_DO_NOT_USE_THIS", url.as_str()); + } + self + } + pub fn enable_mac_dsym(&mut self) -> &mut Self { if cfg!(target_os = "macos") { self.env("CARGO_PROFILE_DEV_SPLIT_DEBUGINFO", "packed") diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/crates/cargo-test-support/src/publish.rs cargo-0.67.1+ds0ubuntu0.libgit2/crates/cargo-test-support/src/publish.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/crates/cargo-test-support/src/publish.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/crates/cargo-test-support/src/publish.rs 2023-01-10 13:41:19.000000000 +0000 @@ -1,7 +1,8 @@ use crate::compare::{assert_match_exact, find_json_mismatch}; -use crate::registry::{self, alt_api_path}; +use crate::registry::{self, alt_api_path, FeatureMap}; use flate2::read::GzDecoder; use std::collections::{HashMap, HashSet}; +use std::fs; use std::fs::File; use std::io::{self, prelude::*, SeekFrom}; use std::path::{Path, PathBuf}; @@ -155,3 +156,90 @@ } } } + +pub(crate) fn create_index_line( + name: serde_json::Value, + vers: &str, + deps: Vec, + cksum: &str, + features: crate::registry::FeatureMap, + yanked: bool, + links: Option, + v: Option, +) -> String { + // This emulates what crates.io does to retain backwards compatibility. + let (features, features2) = split_index_features(features.clone()); + let mut json = serde_json::json!({ + "name": name, + "vers": vers, + "deps": deps, + "cksum": cksum, + "features": features, + "yanked": yanked, + "links": links, + }); + if let Some(f2) = &features2 { + json["features2"] = serde_json::json!(f2); + json["v"] = serde_json::json!(2); + } + if let Some(v) = v { + json["v"] = serde_json::json!(v); + } + + json.to_string() +} + +pub(crate) fn write_to_index(registry_path: &PathBuf, name: &str, line: String, local: bool) { + let file = cargo_util::registry::make_dep_path(name, false); + + // Write file/line in the index. + let dst = if local { + registry_path.join("index").join(&file) + } else { + registry_path.join(&file) + }; + let prev = fs::read_to_string(&dst).unwrap_or_default(); + t!(fs::create_dir_all(dst.parent().unwrap())); + t!(fs::write(&dst, prev + &line[..] + "\n")); + + // Add the new file to the index. + if !local { + let repo = t!(git2::Repository::open(®istry_path)); + let mut index = t!(repo.index()); + t!(index.add_path(Path::new(&file))); + t!(index.write()); + let id = t!(index.write_tree()); + + // Commit this change. 
+ let tree = t!(repo.find_tree(id)); + let sig = t!(repo.signature()); + let parent = t!(repo.refname_to_id("refs/heads/master")); + let parent = t!(repo.find_commit(parent)); + t!(repo.commit( + Some("HEAD"), + &sig, + &sig, + "Another commit", + &tree, + &[&parent] + )); + } +} + +fn split_index_features(mut features: FeatureMap) -> (FeatureMap, Option) { + let mut features2 = FeatureMap::new(); + for (feat, values) in features.iter_mut() { + if values + .iter() + .any(|value| value.starts_with("dep:") || value.contains("?/")) + { + let new_values = values.drain(..).collect(); + features2.insert(feat.clone(), new_values); + } + } + if features2.is_empty() { + (features, None) + } else { + (features, Some(features2)) + } +} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/crates/cargo-test-support/src/registry.rs cargo-0.67.1+ds0ubuntu0.libgit2/crates/cargo-test-support/src/registry.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/crates/cargo-test-support/src/registry.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/crates/cargo-test-support/src/registry.rs 2023-01-10 13:41:19.000000000 +0000 @@ -1,14 +1,16 @@ use crate::git::repo; use crate::paths; +use crate::publish::{create_index_line, write_to_index}; use cargo_util::paths::append; -use cargo_util::{registry::make_dep_path, Sha256}; +use cargo_util::Sha256; use flate2::write::GzEncoder; use flate2::Compression; use std::collections::{BTreeMap, HashMap}; +use std::fmt; use std::fs::{self, File}; -use std::io::{BufRead, BufReader, Write}; +use std::io::{BufRead, BufReader, Read, Write}; use std::net::{SocketAddr, TcpListener, TcpStream}; -use std::path::{Path, PathBuf}; +use std::path::PathBuf; use std::thread; use tar::{Builder, Header}; use url::Url; @@ -70,7 +72,7 @@ /// Write the registry in configuration. configure_registry: bool, /// API responders. - custom_responders: HashMap<&'static str, Box Response>>, + custom_responders: HashMap<&'static str, Box Response>>, } pub struct TestRegistry { @@ -103,7 +105,7 @@ pub fn new() -> RegistryBuilder { RegistryBuilder { alternative: None, - token: Some("api-token".to_string()), + token: None, http_api: false, http_index: false, api: true, @@ -115,7 +117,7 @@ /// Adds a custom HTTP response for a specific url #[must_use] - pub fn add_responder Response>( + pub fn add_responder Response>( mut self, url: &'static str, responder: R, @@ -195,6 +197,7 @@ let dl_url = generate_url(&format!("{prefix}dl")); let dl_path = generate_path(&format!("{prefix}dl")); let api_path = generate_path(&format!("{prefix}api")); + let token = Some(self.token.unwrap_or_else(|| format!("{prefix}sekrit"))); let (server, index_url, api_url, dl_url) = if !self.http_index && !self.http_api { // No need to start the HTTP server. @@ -203,7 +206,7 @@ let server = HttpServer::new( registry_path.clone(), dl_path, - self.token.clone(), + token.clone(), self.custom_responders, ); let index_url = if self.http_index { @@ -226,7 +229,7 @@ _server: server, dl_url, path: registry_path, - token: self.token, + token, }; if self.configure_registry { @@ -250,8 +253,8 @@ [source.crates-io] replace-with = 'dummy-registry' - [source.dummy-registry] - registry = '{}'", + [registries.dummy-registry] + index = '{}'", registry.index_url ) .as_bytes(), @@ -388,7 +391,7 @@ v: Option, } -type FeatureMap = BTreeMap>; +pub(crate) type FeatureMap = BTreeMap>; #[derive(Clone)] pub struct Dependency { @@ -403,10 +406,17 @@ optional: bool, } +/// Entry with data that corresponds to [`tar::EntryType`]. 
+#[non_exhaustive] +enum EntryData { + Regular(String), + Symlink(PathBuf), +} + /// A file to be created in a package. struct PackageFile { path: String, - contents: String, + contents: EntryData, /// The Unix mode for the file. Note that when extracted on Windows, this /// is mostly ignored since it doesn't have the same style of permissions. mode: u32, @@ -459,15 +469,28 @@ } /// Request to the test http server -#[derive(Debug)] pub struct Request { pub url: Url, pub method: String, + pub body: Option>, pub authorization: Option, pub if_modified_since: Option, pub if_none_match: Option, } +impl fmt::Debug for Request { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + // body is not included as it can produce long debug outputs + f.debug_struct("Request") + .field("url", &self.url) + .field("method", &self.method) + .field("authorization", &self.authorization) + .field("if_modified_since", &self.if_modified_since) + .field("if_none_match", &self.if_none_match) + .finish() + } +} + /// Response from the test http server pub struct Response { pub code: u32, @@ -475,12 +498,12 @@ pub body: Vec, } -struct HttpServer { +pub struct HttpServer { listener: TcpListener, registry_path: PathBuf, dl_path: PathBuf, token: Option, - custom_responders: HashMap<&'static str, Box Response>>, + custom_responders: HashMap<&'static str, Box Response>>, } impl HttpServer { @@ -488,7 +511,10 @@ registry_path: PathBuf, dl_path: PathBuf, token: Option, - api_responders: HashMap<&'static str, Box Response>>, + api_responders: HashMap< + &'static str, + Box Response>, + >, ) -> HttpServerHandle { let listener = TcpListener::bind("127.0.0.1:0").unwrap(); let addr = listener.local_addr().unwrap(); @@ -532,6 +558,7 @@ let mut if_modified_since = None; let mut if_none_match = None; let mut authorization = None; + let mut content_len = None; loop { line.clear(); if buf.read_line(&mut line).unwrap() == 0 { @@ -549,15 +576,26 @@ "if-modified-since" => if_modified_since = Some(value), "if-none-match" => if_none_match = Some(value), "authorization" => authorization = Some(value), + "content-length" => content_len = Some(value), _ => {} } } + + let mut body = None; + if let Some(con_len) = content_len { + let len = con_len.parse::().unwrap(); + let mut content = vec![0u8; len as usize]; + buf.read_exact(&mut content).unwrap(); + body = Some(content) + } + let req = Request { authorization, if_modified_since, if_none_match, method, url, + body, }; println!("req: {:#?}", req); let response = self.route(&req); @@ -586,7 +624,7 @@ // Check for custom responder if let Some(responder) = self.custom_responders.get(req.url.path()) { - return responder(&req); + return responder(&req, self); } let path: Vec<_> = req.url.path()[1..].split('/').collect(); match (req.method.as_str(), path.as_slice()) { @@ -604,16 +642,21 @@ self.dl(&req) } } + // publish + ("put", ["api", "v1", "crates", "new"]) => { + if !authorized(true) { + self.unauthorized(req) + } else { + self.publish(req) + } + } // The remainder of the operators in the test framework do nothing other than responding 'ok'. // - // Note: We don't need to support anything real here because the testing framework publishes crates - // by writing directly to the filesystem instead. If the test framework is changed to publish - // via the HTTP API, then this should be made more complete. + // Note: We don't need to support anything real here because there are no tests that + // currently require anything other than publishing via the http api. 
- // publish - ("put", ["api", "v1", "crates", "new"]) // yank - | ("delete", ["api", "v1", "crates", .., "yank"]) + ("delete", ["api", "v1", "crates", .., "yank"]) // unyank | ("put", ["api", "v1", "crates", .., "unyank"]) // owners @@ -629,7 +672,7 @@ } /// Unauthorized response - fn unauthorized(&self, _req: &Request) -> Response { + pub fn unauthorized(&self, _req: &Request) -> Response { Response { code: 401, headers: vec![], @@ -638,7 +681,7 @@ } /// Not found response - fn not_found(&self, _req: &Request) -> Response { + pub fn not_found(&self, _req: &Request) -> Response { Response { code: 404, headers: vec![], @@ -647,7 +690,7 @@ } /// Respond OK without doing anything - fn ok(&self, _req: &Request) -> Response { + pub fn ok(&self, _req: &Request) -> Response { Response { code: 200, headers: vec![], @@ -655,8 +698,17 @@ } } + /// Return an internal server error (HTTP 500) + pub fn internal_server_error(&self, _req: &Request) -> Response { + Response { + code: 500, + headers: vec![], + body: br#"internal server error"#.to_vec(), + } + } + /// Serve the download endpoint - fn dl(&self, req: &Request) -> Response { + pub fn dl(&self, req: &Request) -> Response { let file = self .dl_path .join(req.url.path().strip_prefix("/dl/").unwrap()); @@ -672,7 +724,7 @@ } /// Serve the registry index - fn index(&self, req: &Request) -> Response { + pub fn index(&self, req: &Request) -> Response { let file = self .registry_path .join(req.url.path().strip_prefix("/index/").unwrap()); @@ -721,6 +773,72 @@ } } } + + pub fn publish(&self, req: &Request) -> Response { + if let Some(body) = &req.body { + // Get the metadata of the package + let (len, remaining) = body.split_at(4); + let json_len = u32::from_le_bytes(len.try_into().unwrap()); + let (json, remaining) = remaining.split_at(json_len as usize); + let new_crate = serde_json::from_slice::(json).unwrap(); + // Get the `.crate` file + let (len, remaining) = remaining.split_at(4); + let file_len = u32::from_le_bytes(len.try_into().unwrap()); + let (file, _remaining) = remaining.split_at(file_len as usize); + + // Write the `.crate` + let dst = self + .dl_path + .join(&new_crate.name) + .join(&new_crate.vers) + .join("download"); + t!(fs::create_dir_all(dst.parent().unwrap())); + t!(fs::write(&dst, file)); + + let deps = new_crate + .deps + .iter() + .map(|dep| { + let (name, package) = match &dep.explicit_name_in_toml { + Some(explicit) => (explicit.to_string(), Some(dep.name.to_string())), + None => (dep.name.to_string(), None), + }; + serde_json::json!({ + "name": name, + "req": dep.version_req, + "features": dep.features, + "default_features": true, + "target": dep.target, + "optional": dep.optional, + "kind": dep.kind, + "registry": dep.registry, + "package": package, + }) + }) + .collect::>(); + + let line = create_index_line( + serde_json::json!(new_crate.name), + &new_crate.vers, + deps, + &cksum(file), + new_crate.features, + false, + new_crate.links, + None, + ); + + write_to_index(&self.registry_path, &new_crate.name, line, false); + + self.ok(&req) + } else { + Response { + code: 400, + headers: vec![], + body: b"The request was missing a body".to_vec(), + } + } + } } impl Package { @@ -780,13 +898,24 @@ pub fn file_with_mode(&mut self, path: &str, mode: u32, contents: &str) -> &mut Package { self.files.push(PackageFile { path: path.to_string(), - contents: contents.to_string(), + contents: EntryData::Regular(contents.into()), mode, extra: false, }); self } + /// Adds a symlink to a path to the package. 
+ pub fn symlink(&mut self, dst: &str, src: &str) -> &mut Package { + self.files.push(PackageFile { + path: dst.to_string(), + contents: EntryData::Symlink(src.into()), + mode: DEFAULT_MODE, + extra: false, + }); + self + } + /// Adds an "extra" file that is not rooted within the package. /// /// Normal files are automatically placed within a directory named @@ -795,7 +924,7 @@ pub fn extra_file(&mut self, path: &str, contents: &str) -> &mut Package { self.files.push(PackageFile { path: path.to_string(), - contents: contents.to_string(), + contents: EntryData::Regular(contents.to_string()), mode: DEFAULT_MODE, extra: true, }); @@ -955,27 +1084,16 @@ } else { serde_json::json!(self.name) }; - // This emulates what crates.io may do in the future. - let (features, features2) = split_index_features(self.features.clone()); - let mut json = serde_json::json!({ - "name": name, - "vers": self.vers, - "deps": deps, - "cksum": cksum, - "features": features, - "yanked": self.yanked, - "links": self.links, - }); - if let Some(f2) = &features2 { - json["features2"] = serde_json::json!(f2); - json["v"] = serde_json::json!(2); - } - if let Some(v) = self.v { - json["v"] = serde_json::json!(v); - } - let line = json.to_string(); - - let file = make_dep_path(&self.name, false); + let line = create_index_line( + name, + &self.vers, + deps, + &cksum, + self.features.clone(), + self.yanked, + self.links.clone(), + self.v, + ); let registry_path = if self.alternative { alt_registry_path() @@ -983,38 +1101,7 @@ registry_path() }; - // Write file/line in the index. - let dst = if self.local { - registry_path.join("index").join(&file) - } else { - registry_path.join(&file) - }; - let prev = fs::read_to_string(&dst).unwrap_or_default(); - t!(fs::create_dir_all(dst.parent().unwrap())); - t!(fs::write(&dst, prev + &line[..] + "\n")); - - // Add the new file to the index. - if !self.local { - let repo = t!(git2::Repository::open(®istry_path)); - let mut index = t!(repo.index()); - t!(index.add_path(Path::new(&file))); - t!(index.write()); - let id = t!(index.write_tree()); - - // Commit this change. 
- let tree = t!(repo.find_tree(id)); - let sig = t!(repo.signature()); - let parent = t!(repo.refname_to_id("refs/heads/master")); - let parent = t!(repo.find_commit(parent)); - t!(repo.commit( - Some("HEAD"), - &sig, - &sig, - "Another commit", - &tree, - &[&parent] - )); - } + write_to_index(®istry_path, &self.name, line, self.local); cksum } @@ -1033,7 +1120,12 @@ self.append_manifest(&mut a); } if self.files.is_empty() { - self.append(&mut a, "src/lib.rs", DEFAULT_MODE, ""); + self.append( + &mut a, + "src/lib.rs", + DEFAULT_MODE, + &EntryData::Regular("".into()), + ); } else { for PackageFile { path, @@ -1107,10 +1199,15 @@ manifest.push_str("[lib]\nproc-macro = true\n"); } - self.append(ar, "Cargo.toml", DEFAULT_MODE, &manifest); + self.append( + ar, + "Cargo.toml", + DEFAULT_MODE, + &EntryData::Regular(manifest.into()), + ); } - fn append(&self, ar: &mut Builder, file: &str, mode: u32, contents: &str) { + fn append(&self, ar: &mut Builder, file: &str, mode: u32, contents: &EntryData) { self.append_raw( ar, &format!("{}-{}/{}", self.name, self.vers, file), @@ -1119,8 +1216,22 @@ ); } - fn append_raw(&self, ar: &mut Builder, path: &str, mode: u32, contents: &str) { + fn append_raw( + &self, + ar: &mut Builder, + path: &str, + mode: u32, + contents: &EntryData, + ) { let mut header = Header::new_ustar(); + let contents = match contents { + EntryData::Regular(contents) => contents.as_str(), + EntryData::Symlink(src) => { + header.set_entry_type(tar::EntryType::Symlink); + t!(header.set_link_name(src)); + "" // Symlink has no contents. + } + }; header.set_size(contents.len() as u64); t!(header.set_path(path)); header.set_mode(mode); @@ -1211,21 +1322,3 @@ self } } - -fn split_index_features(mut features: FeatureMap) -> (FeatureMap, Option) { - let mut features2 = FeatureMap::new(); - for (feat, values) in features.iter_mut() { - if values - .iter() - .any(|value| value.starts_with("dep:") || value.contains("?/")) - { - let new_values = values.drain(..).collect(); - features2.insert(feat.clone(), new_values); - } - } - if features2.is_empty() { - (features, None) - } else { - (features, Some(features2)) - } -} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/crates/cargo-util/src/paths.rs cargo-0.67.1+ds0ubuntu0.libgit2/crates/cargo-util/src/paths.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/crates/cargo-util/src/paths.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/crates/cargo-util/src/paths.rs 2023-01-10 13:41:19.000000000 +0000 @@ -636,7 +636,7 @@ // it from backups, then rename it to the desired name. If we created the // directory directly where it should be and then excluded it from backups // we would risk a situation where cargo is interrupted right after the directory - // creation but before the exclusion the the directory would remain non-excluded from + // creation but before the exclusion the directory would remain non-excluded from // backups because we only perform exclusion right after we created the directory // ourselves. // @@ -651,7 +651,7 @@ // the directory being created concurrently by another thread or process as success, // hence the check below to follow the existing behavior. If we get an error at // rename() and suddently the directory (which didn't exist a moment earlier) exists - // we can infer from it it's another cargo process doing work. + // we can infer from it's another cargo process doing work. 
if let Err(e) = fs::rename(tempdir.path(), path) { if !path.exists() { return Err(anyhow::Error::from(e)); diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/crates/crates-io/lib.rs cargo-0.67.1+ds0ubuntu0.libgit2/crates/crates-io/lib.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/crates/crates-io/lib.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/crates/crates-io/lib.rs 2023-01-10 13:41:19.000000000 +0000 @@ -36,7 +36,7 @@ pub max_version: String, } -#[derive(Serialize)] +#[derive(Serialize, Deserialize)] pub struct NewCrate { pub name: String, pub vers: String, @@ -57,7 +57,7 @@ pub links: Option, } -#[derive(Serialize)] +#[derive(Serialize, Deserialize)] pub struct NewCrateDependency { pub optional: bool, pub default_features: bool, diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/debian/changelog cargo-0.67.1+ds0ubuntu0.libgit2/debian/changelog --- cargo-0.66.0+ds0ubuntu0.libgit2/debian/changelog 2023-01-18 20:42:34.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/debian/changelog 2023-02-26 01:05:50.000000000 +0000 @@ -1,11 +1,25 @@ -cargo (0.66.0+ds0ubuntu0.libgit2-0ubuntu0.22.10) kinetic; urgency=medium +cargo (0.67.1+ds0ubuntu0.libgit2-0ubuntu0.22.10.1) kinetic; urgency=medium - * Backport to Kinetic (LP: #2000839) + * Backport to Kinetic (LP: #2005123) * Re-enable libgit2 vendoring: - d/control: remove libgit2-dev and libhttp-parser-dev from B-D - - include Rust provided patches to fix CVE-2022-46176 - -- Zixing Liu Wed, 18 Jan 2023 13:42:34 -0700 + -- Zixing Liu Sat, 25 Feb 2023 18:05:50 -0700 + +cargo (0.67.1+ds0ubuntu1-0ubuntu1) lunar; urgency=medium + + * Update to Cargo 0.67.1 (LP: #2005123) + - d/vendor-tarball-unsuspicious.txt: update unsuspicious list to exclude + removed source files + - d/p/cve/*: remove patches that are merged upstream + - d/make_orig_multi-pre-vendor.sh: downgrade clap to 4.0.15 to avoid + some UI tests behaving incorrectly + - d/p: refresh debian patches to adapt to Cargo 0.67.1 + - d/debcargo-conf.patch: refresh debconf patches to adapt to newer + vendored dependencies + - d/copyright: update copyright information for vendored dependencies + + -- Zixing Liu Wed, 15 Feb 2023 13:08:31 -0700 cargo (0.66.0+ds1-1ubuntu1) lunar; urgency=medium diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/debian/control cargo-0.67.1+ds0ubuntu0.libgit2/debian/control --- cargo-0.66.0+ds0ubuntu0.libgit2/debian/control 2023-01-18 20:42:34.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/debian/control 2023-02-26 01:05:50.000000000 +0000 @@ -30,7 +30,7 @@ XSBC-Original-Vcs-Browser: https://salsa.debian.org/rust-team/cargo Vcs-Git: https://git.launchpad.net/~canonical-foundations/ubuntu/+source/cargo Vcs-Browser: https://git.launchpad.net/~canonical-foundations/ubuntu/+source/cargo -XS-Vendored-Sources-Rust: adler@1.0.2, aho-corasick@0.7.20, anyhow@1.0.68, arrayvec@0.5.2, atty@0.2.14, autocfg@1.1.0, base64@0.13.1, bitflags@1.3.2, bitmaps@2.1.0, block-buffer@0.10.3, bstr@0.2.17, bstr@1.1.0, bytes@1.3.0, bytesize@1.1.0, cc@1.0.78, cfg-if@1.0.0, clap@3.2.23, clap_lex@0.2.4, combine@4.6.6, commoncrypto-sys@0.2.0, commoncrypto@0.2.0, concolor-query@0.0.5, concolor@0.0.8, content_inspector@0.2.4, core-foundation-sys@0.8.3, core-foundation@0.9.3, cpufeatures@0.2.5, crc32fast@1.3.2, crypto-common@0.1.6, crypto-hash@0.3.4, curl-sys@0.4.59+curl-7.86.0, curl@0.4.44, digest@0.10.6, dunce@1.0.3, either@1.8.0, env_logger@0.7.1, env_logger@0.9.3, fastrand@1.8.0, filetime@0.2.19, flate2@1.0.25, fnv@1.0.7, foreign-types-shared@0.1.1, foreign-types@0.3.2, form_urlencoded@1.1.0, 
fwdansi@1.1.0, generic-array@0.14.6, git2-curl@0.17.0, git2@0.16.0, glob@0.3.1, globset@0.4.10, hashbrown@0.12.3, hex@0.4.3, hmac@0.12.1, home@0.5.4, humantime@1.3.0, humantime@2.1.0, idna@0.3.0, ignore@0.4.19, im-rc@15.1.0, indexmap@1.9.2, itertools@0.10.5, itoa@1.0.5, jobserver@0.1.25, kstring@2.0.0, lazy_static@1.4.0, lazycell@1.3.0, libc@0.2.139, libgit2-sys@0.14.1+1.5.0, libnghttp2-sys@0.1.7+1.45.0, libssh2-sys@0.2.23, libz-sys@1.1.8, log@0.4.17, memchr@2.5.0, miniz_oxide@0.6.2, miow@0.3.7, normalize-line-endings@0.3.0, once_cell@1.17.0, opener@0.5.0, openssl-macros@0.1.0, openssl-probe@0.1.5, openssl-sys@0.9.80, openssl@0.10.45, os_info@3.5.1, os_str_bytes@6.4.1, pathdiff@0.2.1, percent-encoding@2.2.0, pkg-config@0.3.26, pretty_env_logger@0.4.0, proc-macro2@1.0.49, quick-error@1.2.3, quote@1.0.23, rand_core@0.6.4, rand_xoshiro@0.6.0, redox_syscall@0.2.16, regex-automata@0.1.10, regex-syntax@0.6.28, regex@1.7.0, remove_dir_all@0.5.3, rustc-workspace-hack@1.0.0, rustfix@0.6.1, ryu@1.0.12, same-file@1.0.6, schannel@0.1.19, semver@1.0.16, serde@1.0.152, serde_derive@1.0.152, serde_ignored@0.1.7, serde_json@1.0.91, sha1@0.10.5, shell-escape@0.1.5, similar@2.2.1, sized-chunks@0.6.5, snapbox-macros@0.3.1, snapbox@0.3.3, socket2@0.4.7, static_assertions@1.1.0, strip-ansi-escapes@0.1.1, strsim@0.10.0, subtle@2.4.1, syn@1.0.107, tar@0.4.38, tempfile@3.3.0, termcolor@1.1.3, textwrap@0.16.0, thread_local@1.1.4, tinyvec@1.6.0, tinyvec_macros@0.1.0, toml_edit@0.14.4, typenum@1.16.0, unicode-bidi@0.3.8, unicode-ident@1.0.6, unicode-normalization@0.1.22, unicode-width@0.1.10, unicode-xid@0.2.4, url@2.3.1, utf8parse@0.2.0, vcpkg@0.2.15, version_check@0.9.4, vte@0.10.1, vte_generate_state_changes@0.1.1, walkdir@2.3.2, winapi-i686-pc-windows-gnu@0.4.0, winapi-util@0.1.5, winapi-x86_64-pc-windows-gnu@0.4.0, winapi@0.3.9, yansi@0.5.1 +XS-Vendored-Sources-Rust: adler@1.0.2, aho-corasick@0.7.20, anyhow@1.0.68, arrayvec@0.5.2, atty@0.2.14, autocfg@1.1.0, base64@0.13.1, bitflags@1.3.2, bitmaps@2.1.0, block-buffer@0.10.3, bstr@1.1.0, bytes@1.4.0, bytesize@1.1.0, cc@1.0.79, cfg-if@1.0.0, clap@4.0.15, clap_lex@0.3.1, combine@4.6.6, commoncrypto-sys@0.2.0, commoncrypto@0.2.0, concolor-query@0.0.5, concolor@0.0.8, content_inspector@0.2.4, core-foundation-sys@0.8.3, core-foundation@0.9.3, cpufeatures@0.2.5, crc32fast@1.3.2, crypto-common@0.1.6, crypto-hash@0.3.4, curl-sys@0.4.59+curl-7.86.0, curl@0.4.44, digest@0.10.6, dunce@1.0.3, either@1.8.1, env_logger@0.7.1, env_logger@0.9.3, fastrand@1.8.0, filetime@0.2.19, flate2@1.0.25, fnv@1.0.7, foreign-types-shared@0.1.1, foreign-types@0.3.2, form_urlencoded@1.1.0, fwdansi@1.1.0, generic-array@0.14.6, git2-curl@0.17.0, git2@0.16.1, glob@0.3.1, globset@0.4.10, hashbrown@0.12.3, hex@0.4.3, hmac@0.12.1, home@0.5.4, humantime@1.3.0, humantime@2.1.0, idna@0.3.0, ignore@0.4.20, im-rc@15.1.0, indexmap@1.9.2, itertools@0.10.5, itoa@1.0.5, jobserver@0.1.25, kstring@2.0.0, lazy_static@1.4.0, lazycell@1.3.0, libc@0.2.139, libgit2-sys@0.14.2+1.5.1, libnghttp2-sys@0.1.7+1.45.0, libssh2-sys@0.2.23, libz-sys@1.1.8, log@0.4.17, memchr@2.5.0, miniz_oxide@0.6.2, miow@0.3.7, normalize-line-endings@0.3.0, num-traits@0.2.15, once_cell@1.17.0, opener@0.5.2, openssl-macros@0.1.0, openssl-probe@0.1.5, openssl-sys@0.9.80, openssl@0.10.45, ordered-float@2.10.0, os_info@3.6.0, os_str_bytes@6.4.1, pathdiff@0.2.1, percent-encoding@2.2.0, pkg-config@0.3.26, pretty_env_logger@0.4.0, proc-macro2@1.0.50, quick-error@1.2.3, quote@1.0.23, rand_core@0.6.4, rand_xoshiro@0.6.0, redox_syscall@0.2.16, 
regex-automata@0.1.10, regex-syntax@0.6.28, regex@1.7.1, remove_dir_all@0.5.3, rustc-workspace-hack@1.0.0, rustfix@0.6.1, ryu@1.0.12, same-file@1.0.6, schannel@0.1.19, semver@1.0.16, serde-value@0.7.0, serde@1.0.152, serde_derive@1.0.152, serde_ignored@0.1.7, serde_json@1.0.91, sha1@0.10.5, shell-escape@0.1.5, similar@2.2.1, sized-chunks@0.6.5, snapbox-macros@0.3.1, snapbox@0.3.3, socket2@0.4.7, static_assertions@1.1.0, strip-ansi-escapes@0.1.1, strsim@0.10.0, subtle@2.4.1, syn@1.0.107, tar@0.4.38, tempfile@3.3.0, termcolor@1.2.0, thread_local@1.1.4, tinyvec@1.6.0, tinyvec_macros@0.1.0, toml_datetime@0.5.1, toml_edit@0.15.0, typenum@1.16.0, unicode-bidi@0.3.10, unicode-ident@1.0.6, unicode-normalization@0.1.22, unicode-width@0.1.10, unicode-xid@0.2.4, url@2.3.1, utf8parse@0.2.0, vcpkg@0.2.15, version_check@0.9.4, vte@0.10.1, vte_generate_state_changes@0.1.1, walkdir@2.3.2, winapi-i686-pc-windows-gnu@0.4.0, winapi-util@0.1.5, winapi-x86_64-pc-windows-gnu@0.4.0, winapi@0.3.9, yansi@0.5.1 Package: cargo Architecture: any diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/debian/copyright cargo-0.67.1+ds0ubuntu0.libgit2/debian/copyright --- cargo-0.66.0+ds0ubuntu0.libgit2/debian/copyright 2023-01-17 14:05:25.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/debian/copyright 2023-02-26 01:05:50.000000000 +0000 @@ -29,6 +29,7 @@ vendor/glob/* vendor/libc/* vendor/log/* + vendor/num-traits/* vendor/regex/* vendor/regex-syntax/* vendor/semver/* @@ -138,7 +139,6 @@ Files: vendor/bstr/* - vendor/bstr-*/* Copyright: 2015-2019 Andrew Gallant License: MIT or Apache-2.0 Comment: @@ -200,11 +200,6 @@ 2018, Alex Crichton License: MIT or Apache-2.0 -Files: vendor/crossbeam-utils/* -Copyright: 2017-2018 The Crossbeam Project Developers -License: MIT or Apache-2.0 -Comment: see https://github.com/crossbeam-rs - Files: vendor/crypto-common/* Copyright: 2017-2023 RustCrypto Developers License: MIT OR Apache-2.0 @@ -345,15 +340,17 @@ License: Apache-2.0 Comment: see https://github.com/derekdreery/normalize-line-endings -Files: vendor/num_cpus/* -Copyright: 2015, Sean McArthur -License: MIT or Apache-2.0 - Files: vendor/once_cell/* Copyright: 2018-2021 Aleksey Kladov License: MIT OR Apache-2.0 Comment: see https://github.com/matklad/once_cell +Files: vendor/ordered-float/* +Copyright: 2014-2018 Matt Brubeck + 2014-2018 Jonathan Reem +License: MIT +Comment: see https://github.com/reem/rust-ordered-float + Files: vendor/opener/* Copyright: 2018 Brian Bowman License: MIT or Apache-2.0 @@ -439,6 +436,11 @@ Comment: see https://github.com/serde-rs see https://github.com/dtolnay/serde-ignored +Files: vendor/serde-value/* +Copyright: arcnmx +License: MIT +Comment: see https://github.com/arcnmx/serde-value + Files: vendor/sha1/* Copyright: 2016-2022 RustCrypto Developers License: MIT OR Apache-2.0 @@ -483,10 +485,6 @@ License: MIT or Apache-2.0 Comment: see https://github.com/dtolnay/syn -Files: vendor/textwrap/* -Copyright: 2016, Martin Geisler -License: MIT - Files: vendor/tempfile/* Copyright: 2015, Steven Allen License: MIT or Apache-2.0 @@ -512,6 +510,11 @@ License: MIT or Apache-2.0 Comment: see https://github.com/ordian/toml_edit +Files: vendor/toml_datetime/* +Copyright: 2017-2019 Alex Crichton +License: MIT or Apache-2.0 +Comment: see https://github.com/toml-rs/toml + Files: vendor/typenum/* Copyright: 2015-2018 Paho Lurie-Gregg 2015-2018 Andre Bogus diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/debian/debcargo-conf.patch cargo-0.67.1+ds0ubuntu0.libgit2/debian/debcargo-conf.patch --- 
cargo-0.66.0+ds0ubuntu0.libgit2/debian/debcargo-conf.patch 2023-01-17 13:25:36.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/debian/debcargo-conf.patch 2023-02-26 01:05:50.000000000 +0000 @@ -1,24 +1,6 @@ -For various reasons sometimes the debcargo-conf patches can't be applied as-is, -so we further patch them here. The reasons are as follows: - -- commoncrypto, commoncrypto-sys: these crates were released before crates.io - automatically re-wrote all Cargo.toml files. cargo-vendor outputs the - un-re-written original Cargo.toml files, but debcargo will forcibly rewrite - even un-re-written ones that were published to crates.io. The patch below - rewrites our existing patches so they work against the un-re-written ones. - -- unicode-normalization, vte, tempfile: have dependency relaxation that applies, - but doesn't match the vendored copy - -- concolor: more recent version in debcargo-conf, can be dropped once cargo - upstream updates - -- clap: clap is v4 in debcargo-conf, patches from clap-3 temporarily patched in - until we update to a cargo version using clap 4.x - diff --git a/src/clap/debian/patches/disable-derive-deprecated.patch b/src/clap/debian/patches/disable-derive-deprecated.patch new file mode 100644 -index 000000000..6ed82171f +index 000000000..46d269729 --- /dev/null +++ b/src/clap/debian/patches/disable-derive-deprecated.patch @@ -0,0 +1,11 @@ @@ -28,13 +10,13 @@ + "color", + "suggestions", + ] -+-deprecated = ["clap_derive/deprecated"] ++-deprecated = ["clap_derive?/deprecated"] ++deprecated = [] + derive = [ + "clap_derive", + "once_cell", diff --git a/src/clap/debian/patches/disable-snapbox.patch b/src/clap/debian/patches/disable-snapbox.patch -index a6fbd083a..9335518c4 100644 +index a6fbd083a..a702a4fb9 100644 --- a/src/clap/debian/patches/disable-snapbox.patch +++ b/src/clap/debian/patches/disable-snapbox.patch @@ -1,6 +1,12 @@ @@ -46,37 +28,24 @@ + version = "1.1.0" + -[dev-dependencies.snapbox] ---version = "0.4" -+-version = "0.2.9" + -version = "0.4" - + [dev-dependencies.static_assertions] + version = "1.1.0" + diff --git a/src/clap/debian/patches/disable-trycmd.diff b/src/clap/debian/patches/disable-trycmd.diff -index aeb0bbad4..053bf14f9 100644 +index aeb0bbad4..1a3cf5201 100644 --- a/src/clap/debian/patches/disable-trycmd.diff +++ b/src/clap/debian/patches/disable-trycmd.diff -@@ -1,8 +1,11 @@ - --- a/Cargo.toml +@@ -2,7 +2,7 @@ +++ b/Cargo.toml --@@ -413,9 +412,0 @@ -+@@ -410,15 +410,6 @@ -+ [dev-dependencies.trybuild] -+ version = "1.0.18" -+ + @@ -413,9 +412,0 @@ -[dev-dependencies.trycmd] --version = "0.14.3" +-version = "0.13" -features = [ - "color-auto", - "diff", -@@ -10,3 +13,6 @@ - -] - -default-features = false - - -+ [features] -+ cargo = ["once_cell"] -+ color = [ diff --git a/src/clap/debian/patches/disable-unic-emoji-char.patch b/src/clap/debian/patches/disable-unic-emoji-char.patch deleted file mode 100644 index 97aa25af0..000000000 @@ -119,88 +88,9 @@ - - #[test] - #[cfg(feature = "unicode")] -diff --git a/src/clap/debian/patches/disable-unstable-doc.patch b/src/clap/debian/patches/disable-unstable-doc.patch -index 00de9ce79..404f1d9cf 100644 ---- a/src/clap/debian/patches/disable-unstable-doc.patch -+++ b/src/clap/debian/patches/disable-unstable-doc.patch -@@ -1,25 +1,38 @@ - --- a/Cargo.toml - +++ b/Cargo.toml --@@ -39,3 +39,2 @@ -+@@ -37,7 +37,6 @@ -+ repository = "https://github.com/clap-rs/clap" -+ - [package.metadata.docs.rs] - -features = ["unstable-doc"] - rustdoc-args = [ --@@ -49,5 +48,2 @@ -+ "--cfg", -+ "docsrs", -+@@ -47,9 
+46,6 @@ -+ "-Zrustdoc-scrape-examples=examples", -+ ] - - -[package.metadata.playground] - -features = ["unstable-doc"] - - - [package.metadata.release] --@@ -439,12 +435,2 @@ -+ shared-version = true -+ tag-name = "v{{version}}" -+@@ -437,17 +433,6 @@ -+ "textwrap/unicode-width", -+ "unicase", - ] - -unstable-doc = [ - - "derive", - - "cargo", - - "wrap_help", -+- "yaml", - - "env", - - "unicode", --- "string", -+- "regex", - - "unstable-replace", - - "unstable-grouped", - -] - unstable-grouped = [] -+ unstable-replace = [] -+ unstable-v4 = [ -diff --git a/src/clap/debian/patches/disable-unstable-v4.patch b/src/clap/debian/patches/disable-unstable-v4.patch -new file mode 100644 -index 000000000..2993a959a ---- /dev/null -+++ b/src/clap/debian/patches/disable-unstable-v4.patch -@@ -0,0 +1,13 @@ -+--- a/Cargo.toml -++++ b/Cargo.toml -+@@ -435,10 +435,6 @@ -+ ] -+ unstable-grouped = [] -+ unstable-replace = [] -+-unstable-v4 = [ -+- "clap_derive/unstable-v4", -+- "deprecated", -+-] -+ wrap_help = [ -+ "terminal_size", -+ "textwrap/terminal_size", -diff --git a/src/clap/debian/patches/disable-unstable-v5.patch b/src/clap/debian/patches/disable-unstable-v5.patch -deleted file mode 100644 -index 5c2a59a30..000000000 ---- a/src/clap/debian/patches/disable-unstable-v5.patch -+++ /dev/null -@@ -1,7 +0,0 @@ ----- a/Cargo.toml --+++ b/Cargo.toml --@@ -438,4 +437,0 @@ ---unstable-v5 = [ --- "clap_derive?/unstable-v5", --- "deprecated", ---] diff --git a/src/clap/debian/patches/once-cell-non-optional.patch b/src/clap/debian/patches/once-cell-non-optional.patch new file mode 100644 -index 000000000..0e6692e81 +index 000000000..d77aad42a --- /dev/null +++ b/src/clap/debian/patches/once-cell-non-optional.patch @@ -0,0 +1,29 @@ @@ -218,7 +108,7 @@ + version = "1.0.18" + + [features] -+-cargo = ["once_cell"] ++-cargo = ["dep:once_cell"] ++cargo = [] + color = [ + "atty", @@ -227,9 +117,9 @@ + ] + deprecated = [] + derive = [ -+- "clap_derive", -+- "once_cell", -++ "clap_derive" ++- "dep:clap_derive", ++- "dep:once_cell", +++ "dep:clap_derive" + ] + env = [] + std = ["indexmap/std"] @@ -284,14 +174,12 @@ - ] - debug = [ diff --git a/src/clap/debian/patches/series b/src/clap/debian/patches/series -index 137658685..47bdb2c6b 100644 +index 137658685..847c866d5 100644 --- a/src/clap/debian/patches/series +++ b/src/clap/debian/patches/series -@@ -1,6 +1,6 @@ - disable-trycmd.diff +@@ -2,5 +2,5 @@ disable-trycmd.diff disable-unstable-doc.patch --disable-unstable-v5.patch -+disable-unstable-v4.patch + disable-unstable-v5.patch disable-snapbox.patch -revert-switch-to-is-terminal.patch -disable-unic-emoji-char.patch @@ -378,6 +266,77 @@ @@ -1 +1 @@ -relax-dep.diff +#relax-dep.diff +diff --git a/src/toml-edit/debian/patches/series b/src/toml-edit/debian/patches/series +index c822905c1..8fe71944e 100644 +--- a/src/toml_edit/debian/patches/series ++++ b/src/toml_edit/debian/patches/series +@@ -1,4 +1,4 @@ + drop-criterion.patch +-drop-pretty-assertions.patch ++#drop-pretty-assertions.patch + drop-snapbox.patch + drop-toml-test-harness.patch +diff --git a/src/toml_edit/debian/patches/drop-criterion.patch b/src/toml_edit/debian/patches/drop-criterion.patch +index fcb7024e0..9aa594ed9 100644 +--- a/src/toml_edit/debian/patches/drop-criterion.patch ++++ b/src/toml_edit/debian/patches/drop-criterion.patch +@@ -25,13 +25,12 @@ Index: toml-edit/Cargo.toml + [dependencies.combine] + version = "4.6.3" + +-@@ -133,9 +126,6 @@ version = "1" +- features = ["derive"] +- optional = true ++@@ -133,9 +126,6 @@ ++ 
[dependencies.toml_datetime] ++ version = "0.5.0" + + -[dev-dependencies.criterion] +--version = "0.3" ++-version = "0.4.0" + - +- [dev-dependencies.pretty_assertions] +- version = "1.0.0" +- ++ [dev-dependencies.serde_json] ++ version = "1.0.85" +diff --git a/src/toml_edit/debian/patches/drop-snapbox.patch b/src/toml_edit/debian/patches/drop-snapbox.patch +index 058617c71..74c5657b2 100644 +--- a/src/toml_edit/debian/patches/drop-snapbox.patch ++++ b/src/toml_edit/debian/patches/drop-snapbox.patch +@@ -74,12 +74,12 @@ Index: toml-edit/Cargo.toml + version = "4.6.3" + @@ -129,10 +165,6 @@ optional = true + [dev-dependencies.serde_json] +- version = "1.0.44" ++ version = "1.0.85" + + -[dev-dependencies.snapbox] +--version = "0.2.9" ++-version = "0.4.0" + -features = ["harness"] + - + [dev-dependencies.toml] +- version = "0.5" ++ version = "0.5.9" + +diff --git a/src/toml-edit/debian/patches/drop-toml-test-harness.patch b/src/toml-edit/debian/patches/drop-toml-test-harness.patch +index 8599ddfd1..2ec06c19e 100644 +--- a/src/toml_edit/debian/patches/drop-toml-test-harness.patch ++++ b/src/toml_edit/debian/patches/drop-toml-test-harness.patch +@@ -39,10 +39,10 @@ Index: toml-edit/Cargo.toml + [[test]] + @@ -169,9 +141,6 @@ version = "1.0.44" + [dev-dependencies.toml] +- version = "0.5" ++ version = "0.5.9" + + -[dev-dependencies.toml-test-harness] +--version = "0.3" ++-version = "0.4.1" + - + [features] + default = [] diff --git a/src/unicode-normalization/debian/patches/series b/src/unicode-normalization/debian/patches/series index 2c1db10bd..39fe7a4da 100644 --- a/src/unicode-normalization/debian/patches/series diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/debian/make_orig_multi-pre-vendor.sh cargo-0.67.1+ds0ubuntu0.libgit2/debian/make_orig_multi-pre-vendor.sh --- cargo-0.66.0+ds0ubuntu0.libgit2/debian/make_orig_multi-pre-vendor.sh 2023-01-18 20:42:34.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/debian/make_orig_multi-pre-vendor.sh 2023-02-26 01:05:50.000000000 +0000 @@ -17,12 +17,5 @@ # avoid pulling in windows-sys for now cargo update -p schannel --precise 0.1.19 - -# pin the dependencies as Debian intended -cargo update -p ignore --precise 0.4.19 -cargo update -p regex --precise 1.7.0 -cargo update -p proc-macro2 --precise 1.0.49 -cargo update -p termcolor --precise 1.1.3 -cargo update -p git2 --precise 0.16.0 -cargo update -p libgit2-sys --precise 0.14.1+1.5.0 -cargo update -p unicode-bidi --precise 0.3.8 +# the UI tests are targeted at clap/4.0.15 +cargo update -p clap --precise 4.0.15 diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/debian/make_orig_multi.sh cargo-0.67.1+ds0ubuntu0.libgit2/debian/make_orig_multi.sh --- cargo-0.66.0+ds0ubuntu0.libgit2/debian/make_orig_multi.sh 2023-01-17 14:08:25.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/debian/make_orig_multi.sh 2023-02-26 01:05:50.000000000 +0000 @@ -34,14 +34,6 @@ tar -xaf "${TMPDIR}/cargo_${CARGO_VER}.orig.tar.gz" -C cargo --strip-components=1 cd cargo -# special patch for CVE fix - we want to vendor the updated/fixed dependencies! 
-echo "Applying CVE-2022-46176 patches"; -for p in "${SRCDIR}/debian/patches/cve/"*.patch; do - echo "$(basename "$p")" - patch -p1 < "$p" - echo "$p" >> .cve-patches -done - # Download build-deps via cargo-vendor export GIT_AUTHOR_NAME="deb-build" export GIT_AUTHOR_EMAIL="<>" @@ -63,14 +55,6 @@ rm -rf vendor-scan -# special patch for CVE fix - unapply to keep orig.tar.gz pristine -echo "Unapplying CVE-2022-46176 patches"; -tac .cve-patches | while read p; do - echo "$(basename "$p")" - patch -Rp1 < "$p" -done -rm .cve-patches - # Pack it up, reproducibly tar --sort=name \ --auto-compress \ diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/debian/patches/2002_disable-net-tests.patch cargo-0.67.1+ds0ubuntu0.libgit2/debian/patches/2002_disable-net-tests.patch --- cargo-0.66.0+ds0ubuntu0.libgit2/debian/patches/2002_disable-net-tests.patch 2023-01-17 13:46:33.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/debian/patches/2002_disable-net-tests.patch 2023-02-26 01:05:50.000000000 +0000 @@ -43,14 +43,3 @@ fn net_retry_git_outputs_warning() { let p = project() .file( ---- a/tests/testsuite/publish.rs -+++ b/tests/testsuite/publish.rs -@@ -1584,7 +1584,7 @@ - .run(); - } - --#[cargo_test] -+#[allow(dead_code)] - fn api_curl_error() { - // Registry has a network error. - let _registry = registry::RegistryBuilder::new() diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/debian/patches/cve/CVE-2022-46176-01-validate-ssh-host.keys.patch cargo-0.67.1+ds0ubuntu0.libgit2/debian/patches/cve/CVE-2022-46176-01-validate-ssh-host.keys.patch --- cargo-0.66.0+ds0ubuntu0.libgit2/debian/patches/cve/CVE-2022-46176-01-validate-ssh-host.keys.patch 2023-01-17 14:05:25.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/debian/patches/cve/CVE-2022-46176-01-validate-ssh-host.keys.patch 1970-01-01 00:00:00.000000000 +0000 @@ -1,586 +0,0 @@ -This patch is based on the upstream commit described below, adapted for use -in the Debian package by Peter Michael Green. - -commit 1387fd4105b242fa2d24ad99d10a5b1af23f293e -Author: Eric Huss -Date: Wed Dec 7 18:52:00 2022 -0800 - - Validate SSH host keys - -Index: cargo/src/cargo/sources/git/known_hosts.rs -=================================================================== ---- /dev/null -+++ cargo/src/cargo/sources/git/known_hosts.rs -@@ -0,0 +1,439 @@ -+//! SSH host key validation support. -+//! -+//! A primary goal with this implementation is to provide user-friendly error -+//! messages, guiding them to understand the issue and how to resolve it. -+//! -+//! Note that there are a lot of limitations here. This reads OpenSSH -+//! known_hosts files from well-known locations, but it does not read OpenSSH -+//! config files. The config file can change the behavior of how OpenSSH -+//! handles known_hosts files. For example, some things we don't handle: -+//! -+//! - `GlobalKnownHostsFile` — Changes the location of the global host file. -+//! - `UserKnownHostsFile` — Changes the location of the user's host file. -+//! - `KnownHostsCommand` — A command to fetch known hosts. -+//! - `CheckHostIP` — DNS spoofing checks. -+//! - `VisualHostKey` — Shows a visual ascii-art key. -+//! - `VerifyHostKeyDNS` — Uses SSHFP DNS records to fetch a host key. -+//! -+//! There's also a number of things that aren't supported but could be easily -+//! added (it just adds a little complexity). For example, hashed hostnames, -+//! hostname patterns, and revoked markers. See "FIXME" comments littered in -+//! this file. 
-+ -+use git2::cert::Cert; -+use git2::CertificateCheckStatus; -+use std::collections::HashSet; -+use std::fmt::Write; -+use std::path::{Path, PathBuf}; -+ -+/// These are host keys that are hard-coded in cargo to provide convenience. -+/// -+/// If GitHub ever publishes new keys, the user can add them to their own -+/// configuration file to use those instead. -+/// -+/// The GitHub keys are sourced from or -+/// . -+/// -+/// These will be ignored if the user adds their own entries for `github.com`, -+/// which can be useful if GitHub ever revokes their old keys. -+static BUNDLED_KEYS: &[(&str, &str, &str)] = &[ -+ ("github.com", "ssh-ed25519", "AAAAC3NzaC1lZDI1NTE5AAAAIOMqqnkVzrm0SdG6UOoqKLsabgH5C9okWi0dh2l9GKJl"), -+ ("github.com", "ecdsa-sha2-nistp256", "AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBEmKSENjQEezOmxkZMy7opKgwFB9nkt5YRrYMjNuG5N87uRgg6CLrbo5wAdT/y6v0mKV0U2w0WZ2YB/++Tpockg="), -+ ("github.com", "ssh-rsa", "AAAAB3NzaC1yc2EAAAABIwAAAQEAq2A7hRGmdnm9tUDbO9IDSwBK6TbQa+PXYPCPy6rbTrTtw7PHkccKrpp0yVhp5HdEIcKr6pLlVDBfOLX9QUsyCOV0wzfjIJNlGEYsdlLJizHhbn2mUjvSAHQqZETYP81eFzLQNnPHt4EVVUh7VfDESU84KezmD5QlWpXLmvU31/yMf+Se8xhHTvKSCZIFImWwoG6mbUoWf9nzpIoaSjB+weqqUUmpaaasXVal72J+UX2B+2RPW3RcT0eOzQgqlJL3RKrTJvdsjE3JEAvGq3lGHSZXy28G3skua2SmVi/w4yCE6gbODqnTWlg7+wC604ydGXA8VJiS5ap43JXiUFFAaQ=="), -+]; -+ -+enum KnownHostError { -+ /// Some general error happened while validating the known hosts. -+ CheckError(anyhow::Error), -+ /// The host key was not found. -+ HostKeyNotFound { -+ hostname: String, -+ key_type: git2::cert::SshHostKeyType, -+ remote_host_key: String, -+ remote_fingerprint: String, -+ other_hosts: Vec, -+ }, -+ /// The host key was found, but does not match the remote's key. -+ HostKeyHasChanged { -+ hostname: String, -+ key_type: git2::cert::SshHostKeyType, -+ old_known_host: KnownHost, -+ remote_host_key: String, -+ remote_fingerprint: String, -+ }, -+} -+ -+impl From for KnownHostError { -+ fn from(err: anyhow::Error) -> KnownHostError { -+ KnownHostError::CheckError(err.into()) -+ } -+} -+ -+/// The location where a host key was located. -+#[derive(Clone)] -+enum KnownHostLocation { -+ /// Loaded from a file from disk. -+ File { path: PathBuf, lineno: u32 }, -+ /// Part of the hard-coded bundled keys in Cargo. -+ Bundled, -+} -+ -+/// The git2 callback used to validate a certificate (only ssh known hosts are validated). -+pub fn certificate_check( -+ cert: &Cert<'_>, -+ host: &str, -+ port: Option, -+) -> Result { -+ let Some(host_key) = cert.as_hostkey() else { -+ // Return passthrough for TLS X509 certificates to use whatever validation -+ // was done in git2. -+ return Ok(CertificateCheckStatus::CertificatePassthrough) -+ }; -+ // If a nonstandard port is in use, check for that first. -+ // The fallback to check without a port is handled in the HostKeyNotFound handler. -+ let host_maybe_port = match port { -+ Some(port) if port != 22 => format!("[{host}]:{port}"), -+ _ => host.to_string(), -+ }; -+ // The error message must be constructed as a string to pass through the libgit2 C API. -+ let err_msg = match check_ssh_known_hosts(host_key, &host_maybe_port) { -+ Ok(()) => { -+ return Ok(CertificateCheckStatus::CertificateOk); -+ } -+ Err(KnownHostError::CheckError(e)) => { -+ format!("error: failed to validate host key:\n{:#}", e) -+ } -+ Err(KnownHostError::HostKeyNotFound { -+ hostname, -+ key_type, -+ remote_host_key, -+ remote_fingerprint, -+ other_hosts, -+ }) => { -+ // Try checking without the port. 
-+ if port.is_some() -+ && !matches!(port, Some(22)) -+ && check_ssh_known_hosts(host_key, host).is_ok() -+ { -+ return Ok(CertificateCheckStatus::CertificateOk); -+ } -+ let key_type_short_name = key_type.short_name(); -+ let key_type_name = key_type.name(); -+ let known_hosts_location = user_known_host_location_to_add(); -+ let other_hosts_message = if other_hosts.is_empty() { -+ String::new() -+ } else { -+ let mut msg = String::from( -+ "Note: This host key was found, \ -+ but is associated with a different host:\n", -+ ); -+ for known_host in other_hosts { -+ let loc = match known_host.location { -+ KnownHostLocation::File { path, lineno } => { -+ format!("{} line {lineno}", path.display()) -+ } -+ KnownHostLocation::Bundled => format!("bundled with cargo"), -+ }; -+ write!(msg, " {loc}: {}\n", known_host.patterns).unwrap(); -+ } -+ msg -+ }; -+ format!("error: unknown SSH host key\n\ -+ The SSH host key for `{hostname}` is not known and cannot be validated.\n\ -+ \n\ -+ To resolve this issue, add the host key to {known_hosts_location}\n\ -+ \n\ -+ The key to add is:\n\ -+ \n\ -+ {hostname} {key_type_name} {remote_host_key}\n\ -+ \n\ -+ The {key_type_short_name} key fingerprint is: SHA256:{remote_fingerprint}\n\ -+ This fingerprint should be validated with the server administrator that it is correct.\n\ -+ {other_hosts_message}\n\ -+ See https://doc.rust-lang.org/nightly/cargo/appendix/git-authentication.html#ssh-known-hosts \ -+ for more information.\n\ -+ ") -+ } -+ Err(KnownHostError::HostKeyHasChanged { -+ hostname, -+ key_type, -+ old_known_host, -+ remote_host_key, -+ remote_fingerprint, -+ }) => { -+ let key_type_short_name = key_type.short_name(); -+ let key_type_name = key_type.name(); -+ let known_hosts_location = user_known_host_location_to_add(); -+ let old_key_resolution = match old_known_host.location { -+ KnownHostLocation::File { path, lineno } => { -+ let old_key_location = path.display(); -+ format!( -+ "removing the old {key_type_name} key for `{hostname}` \ -+ located at {old_key_location} line {lineno}, \ -+ and adding the new key to {known_hosts_location}", -+ ) -+ } -+ KnownHostLocation::Bundled => { -+ format!( -+ "adding the new key to {known_hosts_location}\n\ -+ The current host key is bundled as part of Cargo." -+ ) -+ } -+ }; -+ format!("error: SSH host key has changed for `{hostname}`\n\ -+ *********************************\n\ -+ * WARNING: HOST KEY HAS CHANGED *\n\ -+ *********************************\n\ -+ This may be caused by a man-in-the-middle attack, or the \ -+ server may have changed its host key.\n\ -+ \n\ -+ The {key_type_short_name} fingerprint for the key from the remote host is:\n\ -+ SHA256:{remote_fingerprint}\n\ -+ \n\ -+ You are strongly encouraged to contact the server \ -+ administrator for `{hostname}` to verify that this new key is \ -+ correct.\n\ -+ \n\ -+ If you can verify that the server has a new key, you can \ -+ resolve this error by {old_key_resolution}\n\ -+ \n\ -+ The key provided by the remote host is:\n\ -+ \n\ -+ {hostname} {key_type_name} {remote_host_key}\n\ -+ \n\ -+ See https://doc.rust-lang.org/nightly/cargo/appendix/git-authentication.html#ssh-known-hosts \ -+ for more information.\n\ -+ ") -+ } -+ }; -+ Err(git2::Error::new( -+ git2::ErrorCode::GenericError, -+ git2::ErrorClass::Callback, -+ err_msg, -+ )) -+} -+ -+/// Checks if the given host/host key pair is known. 
-+fn check_ssh_known_hosts( -+ cert_host_key: &git2::cert::CertHostkey<'_>, -+ host: &str, -+) -> Result<(), KnownHostError> { -+ let Some(remote_host_key) = cert_host_key.hostkey() else { -+ return Err(anyhow::format_err!("remote host key is not available").into()); -+ }; -+ let remote_key_type = cert_host_key.hostkey_type().unwrap(); -+ // `changed_key` keeps track of any entries where the key has changed. -+ let mut changed_key = None; -+ // `other_hosts` keeps track of any entries that have an identical key, -+ // but a different hostname. -+ let mut other_hosts = Vec::new(); -+ -+ // Collect all the known host entries from disk. -+ let mut known_hosts = Vec::new(); -+ for path in known_host_files() { -+ if !path.exists() { -+ continue; -+ } -+ let hosts = load_hostfile(&path)?; -+ known_hosts.extend(hosts); -+ } -+ // Load the bundled keys. Don't add keys for hosts that the user has -+ // configured, which gives them the option to override them. This could be -+ // useful if the keys are ever revoked. -+ let configured_hosts: HashSet<_> = known_hosts -+ .iter() -+ .flat_map(|known_host| { -+ known_host -+ .patterns -+ .split(',') -+ .map(|pattern| pattern.to_lowercase()) -+ }) -+ .collect(); -+ for (patterns, key_type, key) in BUNDLED_KEYS { -+ if !configured_hosts.contains(*patterns) { -+ let key = base64::decode(key).unwrap(); -+ known_hosts.push(KnownHost { -+ location: KnownHostLocation::Bundled, -+ patterns: patterns.to_string(), -+ key_type: key_type.to_string(), -+ key, -+ }); -+ } -+ } -+ -+ for known_host in known_hosts { -+ // The key type from libgit2 needs to match the key type from the host file. -+ if known_host.key_type != remote_key_type.name() { -+ continue; -+ } -+ let key_matches = known_host.key == remote_host_key; -+ if !known_host.host_matches(host) { -+ // `name` can be None for hashed hostnames (which libgit2 does not expose). -+ if key_matches { -+ other_hosts.push(known_host.clone()); -+ } -+ continue; -+ } -+ if key_matches { -+ return Ok(()); -+ } -+ // The host and key type matched, but the key itself did not. -+ // This indicates the key has changed. -+ // This is only reported as an error if no subsequent lines have a -+ // correct key. -+ changed_key = Some(known_host.clone()); -+ } -+ // Older versions of OpenSSH (before 6.8, March 2015) showed MD5 -+ // fingerprints (see FingerprintHash ssh config option). Here we only -+ // support SHA256. -+ let mut remote_fingerprint = cargo_util::Sha256::new(); -+ remote_fingerprint.update(remote_host_key); -+ let remote_fingerprint = -+ base64::encode_config(remote_fingerprint.finish(), base64::STANDARD_NO_PAD); -+ let remote_host_key = base64::encode(remote_host_key); -+ // FIXME: Ideally the error message should include the IP address of the -+ // remote host (to help the user validate that they are connecting to the -+ // host they were expecting to). However, I don't see a way to obtain that -+ // information from libgit2. -+ match changed_key { -+ Some(old_known_host) => Err(KnownHostError::HostKeyHasChanged { -+ hostname: host.to_string(), -+ key_type: remote_key_type, -+ old_known_host, -+ remote_host_key, -+ remote_fingerprint, -+ }), -+ None => Err(KnownHostError::HostKeyNotFound { -+ hostname: host.to_string(), -+ key_type: remote_key_type, -+ remote_host_key, -+ remote_fingerprint, -+ other_hosts, -+ }), -+ } -+} -+ -+/// Returns a list of files to try loading OpenSSH-formatted known hosts. 
-+fn known_host_files() -> Vec { -+ let mut result = Vec::new(); -+ if cfg!(unix) { -+ result.push(PathBuf::from("/etc/ssh/ssh_known_hosts")); -+ } else if cfg!(windows) { -+ // The msys/cygwin version of OpenSSH uses `/etc` from the posix root -+ // filesystem there (such as `C:\msys64\etc\ssh\ssh_known_hosts`). -+ // However, I do not know of a way to obtain that location from -+ // Windows-land. The ProgramData version here is what the PowerShell -+ // port of OpenSSH does. -+ if let Some(progdata) = std::env::var_os("ProgramData") { -+ let mut progdata = PathBuf::from(progdata); -+ progdata.push("ssh"); -+ progdata.push("ssh_known_hosts"); -+ result.push(progdata) -+ } -+ } -+ result.extend(user_known_host_location()); -+ result -+} -+ -+/// The location of the user's known_hosts file. -+fn user_known_host_location() -> Option { -+ // NOTE: This is a potentially inaccurate prediction of what the user -+ // actually wants. The actual location depends on several factors: -+ // -+ // - Windows OpenSSH Powershell version: I believe this looks up the home -+ // directory via ProfileImagePath in the registry, falling back to -+ // `GetWindowsDirectoryW` if that fails. -+ // - OpenSSH Portable (under msys): This is very complicated. I got lost -+ // after following it through some ldap/active directory stuff. -+ // - OpenSSH (most unix platforms): Uses `pw->pw_dir` from `getpwuid()`. -+ // -+ // This doesn't do anything close to that. home_dir's behavior is: -+ // - Windows: $USERPROFILE, or SHGetFolderPathW() -+ // - Unix: $HOME, or getpwuid_r() -+ // -+ // Since there is a mismatch here, the location returned here might be -+ // different than what the user's `ssh` CLI command uses. We may want to -+ // consider trying to align it better. -+ home::home_dir().map(|mut home| { -+ home.push(".ssh"); -+ home.push("known_hosts"); -+ home -+ }) -+} -+ -+/// The location to display in an error message instructing the user where to -+/// add the new key. -+fn user_known_host_location_to_add() -> String { -+ // Note that we don't bother with the legacy known_hosts2 files. -+ match user_known_host_location() { -+ Some(path) => path.to_str().expect("utf-8 home").to_string(), -+ None => "~/.ssh/known_hosts".to_string(), -+ } -+} -+ -+/// A single known host entry. -+#[derive(Clone)] -+struct KnownHost { -+ location: KnownHostLocation, -+ /// The hostname. May be comma separated to match multiple hosts. -+ patterns: String, -+ key_type: String, -+ key: Vec, -+} -+ -+impl KnownHost { -+ /// Returns whether or not the given host matches this known host entry. -+ fn host_matches(&self, host: &str) -> bool { -+ let mut match_found = false; -+ let host = host.to_lowercase(); -+ // FIXME: support hashed hostnames -+ for pattern in self.patterns.split(',') { -+ let pattern = pattern.to_lowercase(); -+ // FIXME: support * and ? wildcards -+ if let Some(pattern) = pattern.strip_prefix('!') { -+ if pattern == host { -+ return false; -+ } -+ } else { -+ match_found = pattern == host; -+ } -+ } -+ match_found -+ } -+} -+ -+/// Loads an OpenSSH known_hosts file. 
-+fn load_hostfile(path: &Path) -> Result, anyhow::Error> { -+ let contents = cargo_util::paths::read(path)?; -+ let entries = contents -+ .lines() -+ .enumerate() -+ .filter_map(|(lineno, line)| { -+ let location = KnownHostLocation::File { -+ path: path.to_path_buf(), -+ lineno: lineno as u32 + 1, -+ }; -+ parse_known_hosts_line(line, location) -+ }) -+ .collect(); -+ Ok(entries) -+} -+ -+fn parse_known_hosts_line(line: &str, location: KnownHostLocation) -> Option { -+ let line = line.trim(); -+ // FIXME: @revoked and @cert-authority is currently not supported. -+ if line.is_empty() || line.starts_with('#') || line.starts_with('@') { -+ return None; -+ } -+ let mut parts = line.split([' ', '\t']).filter(|s| !s.is_empty()); -+ let Some(patterns) = parts.next() else { return None }; -+ let Some(key_type) = parts.next() else { return None }; -+ let Some(key) = parts.next() else { return None }; -+ let Ok(key) = base64::decode(key) else { return None }; -+ Some(KnownHost { -+ location, -+ patterns: patterns.to_string(), -+ key_type: key_type.to_string(), -+ key, -+ }) -+} -Index: cargo/src/cargo/sources/git/mod.rs -=================================================================== ---- cargo.orig/src/cargo/sources/git/mod.rs -+++ cargo/src/cargo/sources/git/mod.rs -@@ -1,4 +1,5 @@ - pub use self::source::GitSource; - pub use self::utils::{fetch, GitCheckout, GitDatabase, GitRemote}; -+mod known_hosts; - mod source; - mod utils; -Index: cargo/src/cargo/sources/git/utils.rs -=================================================================== ---- cargo.orig/src/cargo/sources/git/utils.rs -+++ cargo/src/cargo/sources/git/utils.rs -@@ -647,7 +647,6 @@ where - | ErrorClass::Submodule - | ErrorClass::FetchHead - | ErrorClass::Ssh -- | ErrorClass::Callback - | ErrorClass::Http => { - let mut msg = "network failure seems to have happened\n".to_string(); - msg.push_str( -@@ -658,6 +657,13 @@ where - ); - err = err.context(msg); - } -+ ErrorClass::Callback => { -+ // This unwraps the git2 error. We're using the callback error -+ // specifically to convey errors from Rust land through the C -+ // callback interface. We don't need the `; class=Callback -+ // (26)` that gets tacked on to the git2 error message. -+ err = anyhow::format_err!("{}", e.message()); -+ } - _ => {} - } - } -@@ -686,12 +692,16 @@ pub fn with_fetch_options( - let mut progress = Progress::new("Fetch", config); - network::with_retry(config, || { - with_authentication(url, git_config, |f| { -+ let port = Url::parse(url).ok().and_then(|url| url.port()); - let mut last_update = Instant::now(); - let mut rcb = git2::RemoteCallbacks::new(); - // We choose `N=10` here to make a `300ms * 10slots ~= 3000ms` - // sliding window for tracking the data transfer rate (in bytes/s). - let mut counter = MetricsCounter::<10>::new(0, last_update); - rcb.credentials(f); -+ rcb.certificate_check(|cert, host| { -+ super::known_hosts::certificate_check(cert, host, port) -+ }); - rcb.transfer_progress(|stats| { - let indexed_deltas = stats.indexed_deltas(); - let msg = if indexed_deltas > 0 { -Index: cargo/src/doc/src/appendix/git-authentication.md -=================================================================== ---- cargo.orig/src/doc/src/appendix/git-authentication.md -+++ cargo/src/doc/src/appendix/git-authentication.md -@@ -58,9 +58,32 @@ on how to start `ssh-agent` and to add k - > used by Cargo's built-in SSH library. More advanced requirements should use - > [`net.git-fetch-with-cli`]. 
- -+### SSH Known Hosts -+ -+When connecting to an SSH host, Cargo must verify the identity of the host -+using "known hosts", which are a list of host keys. Cargo can look for these -+known hosts in OpenSSH-style `known_hosts` files located in their standard -+locations (`.ssh/known_hosts` in your home directory, or -+`/etc/ssh/ssh_known_hosts` on Unix-like platforms or -+`%PROGRAMDATA%\ssh\ssh_known_hosts` on Windows). More information about these -+files can be found in the [sshd man page]. -+ -+When connecting to an SSH host before the known hosts has been configured, -+Cargo will display an error message instructing you how to add the host key. -+This also includes a "fingerprint", which is a smaller hash of the host key, -+which should be easier to visually verify. The server administrator can get -+the fingerprint by running `ssh-keygen` against the public key (for example, -+`ssh-keygen -l -f /etc/ssh/ssh_host_ecdsa_key.pub`). Well-known sites may -+publish their fingerprints on the web; for example GitHub posts theirs at -+. -+ -+Cargo comes with the host keys for [github.com](https://github.com) built-in. -+If those ever change, you can add the new keys to your known_hosts file. -+ - [`credential.helper`]: https://git-scm.com/book/en/v2/Git-Tools-Credential-Storage - [`net.git-fetch-with-cli`]: ../reference/config.md#netgit-fetch-with-cli - [GCM]: https://github.com/microsoft/Git-Credential-Manager-Core/ - [PuTTY]: https://www.chiark.greenend.org.uk/~sgtatham/putty/ - [Microsoft installation documentation]: https://docs.microsoft.com/en-us/windows-server/administration/openssh/openssh_install_firstuse - [key management]: https://docs.microsoft.com/en-us/windows-server/administration/openssh/openssh_keymanagement -+[sshd man page]: https://man.openbsd.org/sshd#SSH_KNOWN_HOSTS_FILE_FORMAT ---- rust-cargo-0.66.0.orig/Cargo.toml -+++ rust-cargo-0.66.0/Cargo.toml -@@ -17,6 +17,7 @@ path = "src/cargo/lib.rs" - - [dependencies] - atty = "0.2" -+base64 = "0.13" - bytesize = "1.0" - cargo-platform = { path = "crates/cargo-platform", version = "0.1.2" } - cargo-util = { path = "crates/cargo-util", version = "0.2.1" } -@@ -28,8 +29,8 @@ pretty_env_logger = { version = "0.4", o - anyhow = "1.0" - filetime = "0.2.9" - flate2 = { version = "1.0.3", default-features = false, features = ["zlib"] } --git2 = "0.15.0" --git2-curl = "0.16.0" -+git2 = "0.16.0" -+git2-curl = "0.17.0" - glob = "0.3.0" - hex = "0.4" - home = "0.5" -@@ -41,7 +42,7 @@ jobserver = "0.1.24" - lazycell = "1.2.0" - libc = "0.2" - log = "0.4.6" --libgit2-sys = "0.14.0" -+libgit2-sys = "0.14.1" - memchr = "2.1.3" - opener = "0.5" - os_info = "3.5.0" - ---- cargo-0.66/crates/cargo-test-support/Cargo.toml.orig 2023-01-11 11:33:00.584077593 +0100 -+++ cargo-0.66/crates/cargo-test-support/Cargo.toml 2023-01-11 11:33:12.564917363 +0100 -@@ -14,7 +14,7 @@ cargo-util = { path = "../cargo-util" } - snapbox = { version = "0.3.0", features = ["diff", "path"] } - filetime = "0.2" - flate2 = { version = "1.0", default-features = false, features = ["zlib"] } --git2 = "0.15.0" -+git2 = "0.16.0" - glob = "0.3" - itertools = "0.10.0" - lazy_static = "1.0" - diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/debian/patches/cve/CVE-2022-46176-02-add-support-for-deserializing-vec-value-string.patch cargo-0.67.1+ds0ubuntu0.libgit2/debian/patches/cve/CVE-2022-46176-02-add-support-for-deserializing-vec-value-string.patch --- cargo-0.66.0+ds0ubuntu0.libgit2/debian/patches/cve/CVE-2022-46176-02-add-support-for-deserializing-vec-value-string.patch 2023-01-17 
14:05:25.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/debian/patches/cve/CVE-2022-46176-02-add-support-for-deserializing-vec-value-string.patch 1970-01-01 00:00:00.000000000 +0000 @@ -1,157 +0,0 @@ -commit 9f62f8440e9e542f27d60c75be38ac51186c6c32 -Author: Eric Huss -Date: Fri Dec 9 20:03:27 2022 -0800 - - Add support for deserializing Vec> in config. - - This adds the ability to track the definition location of a string - in a TOML array. - -diff --git a/src/cargo/util/config/de.rs b/src/cargo/util/config/de.rs -index 6fddc7e71f..1408f15b57 100644 ---- a/src/cargo/util/config/de.rs -+++ b/src/cargo/util/config/de.rs -@@ -384,7 +384,12 @@ impl<'de> de::SeqAccess<'de> for ConfigSeqAccess { - { - match self.list_iter.next() { - // TODO: add `def` to error? -- Some((value, _def)) => seed.deserialize(value.into_deserializer()).map(Some), -+ Some((value, def)) => { -+ // This might be a String or a Value. -+ // ValueDeserializer will handle figuring out which one it is. -+ let maybe_value_de = ValueDeserializer::new_with_string(value, def); -+ seed.deserialize(maybe_value_de).map(Some) -+ } - None => Ok(None), - } - } -@@ -400,7 +405,17 @@ impl<'de> de::SeqAccess<'de> for ConfigSeqAccess { - struct ValueDeserializer<'config> { - hits: u32, - definition: Definition, -- de: Deserializer<'config>, -+ /// The deserializer, used to actually deserialize a Value struct. -+ /// This is `None` if deserializing a string. -+ de: Option>, -+ /// A string value to deserialize. -+ /// -+ /// This is used for situations where you can't address a string via a -+ /// TOML key, such as a string inside an array. The `ConfigSeqAccess` -+ /// doesn't know if the type it should deserialize to is a `String` or -+ /// `Value`, so `ValueDeserializer` needs to be able to handle -+ /// both. -+ str_value: Option, - } - - impl<'config> ValueDeserializer<'config> { -@@ -428,9 +443,19 @@ impl<'config> ValueDeserializer<'config> { - Ok(ValueDeserializer { - hits: 0, - definition, -- de, -+ de: Some(de), -+ str_value: None, - }) - } -+ -+ fn new_with_string(s: String, definition: Definition) -> ValueDeserializer<'config> { -+ ValueDeserializer { -+ hits: 0, -+ definition, -+ de: None, -+ str_value: Some(s), -+ } -+ } - } - - impl<'de, 'config> de::MapAccess<'de> for ValueDeserializer<'config> { -@@ -459,9 +484,14 @@ impl<'de, 'config> de::MapAccess<'de> for ValueDeserializer<'config> { - // If this is the first time around we deserialize the `value` field - // which is the actual deserializer - if self.hits == 1 { -- return seed -- .deserialize(self.de.clone()) -- .map_err(|e| e.with_key_context(&self.de.key, self.definition.clone())); -+ if let Some(de) = &self.de { -+ return seed -+ .deserialize(de.clone()) -+ .map_err(|e| e.with_key_context(&de.key, self.definition.clone())); -+ } else { -+ return seed -+ .deserialize(self.str_value.as_ref().unwrap().clone().into_deserializer()); -+ } - } - - // ... otherwise we're deserializing the `definition` field, so we need -@@ -484,6 +514,71 @@ impl<'de, 'config> de::MapAccess<'de> for ValueDeserializer<'config> { - } - } - -+// Deserializer is only implemented to handle deserializing a String inside a -+// sequence (like `Vec` or `Vec>`). `Value` is -+// handled by deserialize_struct, and the plain `String` is handled by all the -+// other functions here. 
-+impl<'de, 'config> de::Deserializer<'de> for ValueDeserializer<'config> { -+ type Error = ConfigError; -+ -+ fn deserialize_str(self, visitor: V) -> Result -+ where -+ V: de::Visitor<'de>, -+ { -+ visitor.visit_str(&self.str_value.expect("string expected")) -+ } -+ -+ fn deserialize_string(self, visitor: V) -> Result -+ where -+ V: de::Visitor<'de>, -+ { -+ visitor.visit_string(self.str_value.expect("string expected")) -+ } -+ -+ fn deserialize_struct( -+ self, -+ name: &'static str, -+ fields: &'static [&'static str], -+ visitor: V, -+ ) -> Result -+ where -+ V: de::Visitor<'de>, -+ { -+ // Match on the magical struct name/field names that are passed in to -+ // detect when we're deserializing `Value`. -+ // -+ // See more comments in `value.rs` for the protocol used here. -+ if name == value::NAME && fields == value::FIELDS { -+ return visitor.visit_map(self); -+ } -+ unimplemented!("only strings and Value can be deserialized from a sequence"); -+ } -+ -+ fn deserialize_any(self, visitor: V) -> Result -+ where -+ V: de::Visitor<'de>, -+ { -+ visitor.visit_string(self.str_value.expect("string expected")) -+ } -+ -+ fn deserialize_ignored_any(self, visitor: V) -> Result -+ where -+ V: de::Visitor<'de>, -+ { -+ visitor.visit_unit() -+ } -+ -+ serde::forward_to_deserialize_any! { -+ i8 i16 i32 i64 -+ u8 u16 u32 u64 -+ option -+ newtype_struct seq tuple tuple_struct map enum bool -+ f32 f64 char bytes -+ byte_buf unit unit_struct -+ identifier -+ } -+} -+ - /// A deserializer which takes two values and deserializes into a tuple of those - /// two values. This is similar to types like `StrDeserializer` in upstream - /// serde itself. diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/debian/patches/cve/CVE-2022-46176-03-support-configuring-ssh-known-hosts.patch cargo-0.67.1+ds0ubuntu0.libgit2/debian/patches/cve/CVE-2022-46176-03-support-configuring-ssh-known-hosts.patch --- cargo-0.66.0+ds0ubuntu0.libgit2/debian/patches/cve/CVE-2022-46176-03-support-configuring-ssh-known-hosts.patch 2023-01-17 14:05:25.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/debian/patches/cve/CVE-2022-46176-03-support-configuring-ssh-known-hosts.patch 1970-01-01 00:00:00.000000000 +0000 @@ -1,299 +0,0 @@ -commit 026bda3fb5eddac0df111ee150706f756558a7b3 -Author: Eric Huss -Date: Fri Dec 9 20:38:12 2022 -0800 - - Support configuring ssh known-hosts via cargo config. - -diff --git a/src/cargo/sources/git/known_hosts.rs b/src/cargo/sources/git/known_hosts.rs -index 875dcf63f3..7efea43c3b 100644 ---- a/src/cargo/sources/git/known_hosts.rs -+++ b/src/cargo/sources/git/known_hosts.rs -@@ -20,6 +20,7 @@ - //! hostname patterns, and revoked markers. See "FIXME" comments littered in - //! this file. - -+use crate::util::config::{Definition, Value}; - use git2::cert::Cert; - use git2::CertificateCheckStatus; - use std::collections::HashSet; -@@ -74,6 +75,8 @@ impl From for KnownHostError { - enum KnownHostLocation { - /// Loaded from a file from disk. - File { path: PathBuf, lineno: u32 }, -+ /// Loaded from cargo's config system. -+ Config { definition: Definition }, - /// Part of the hard-coded bundled keys in Cargo. 
- Bundled, - } -@@ -83,6 +86,8 @@ pub fn certificate_check( - cert: &Cert<'_>, - host: &str, - port: Option, -+ config_known_hosts: Option<&Vec>>, -+ diagnostic_home_config: &str, - ) -> Result { - let Some(host_key) = cert.as_hostkey() else { - // Return passthrough for TLS X509 certificates to use whatever validation -@@ -96,7 +101,7 @@ pub fn certificate_check( - _ => host.to_string(), - }; - // The error message must be constructed as a string to pass through the libgit2 C API. -- let err_msg = match check_ssh_known_hosts(host_key, &host_maybe_port) { -+ let err_msg = match check_ssh_known_hosts(host_key, &host_maybe_port, config_known_hosts) { - Ok(()) => { - return Ok(CertificateCheckStatus::CertificateOk); - } -@@ -113,13 +118,13 @@ pub fn certificate_check( - // Try checking without the port. - if port.is_some() - && !matches!(port, Some(22)) -- && check_ssh_known_hosts(host_key, host).is_ok() -+ && check_ssh_known_hosts(host_key, host, config_known_hosts).is_ok() - { - return Ok(CertificateCheckStatus::CertificateOk); - } - let key_type_short_name = key_type.short_name(); - let key_type_name = key_type.name(); -- let known_hosts_location = user_known_host_location_to_add(); -+ let known_hosts_location = user_known_host_location_to_add(diagnostic_home_config); - let other_hosts_message = if other_hosts.is_empty() { - String::new() - } else { -@@ -132,6 +137,9 @@ pub fn certificate_check( - KnownHostLocation::File { path, lineno } => { - format!("{} line {lineno}", path.display()) - } -+ KnownHostLocation::Config { definition } => { -+ format!("config value from {definition}") -+ } - KnownHostLocation::Bundled => format!("bundled with cargo"), - }; - write!(msg, " {loc}: {}\n", known_host.patterns).unwrap(); -@@ -163,7 +171,7 @@ pub fn certificate_check( - }) => { - let key_type_short_name = key_type.short_name(); - let key_type_name = key_type.name(); -- let known_hosts_location = user_known_host_location_to_add(); -+ let known_hosts_location = user_known_host_location_to_add(diagnostic_home_config); - let old_key_resolution = match old_known_host.location { - KnownHostLocation::File { path, lineno } => { - let old_key_location = path.display(); -@@ -173,6 +181,13 @@ pub fn certificate_check( - and adding the new key to {known_hosts_location}", - ) - } -+ KnownHostLocation::Config { definition } => { -+ format!( -+ "removing the old {key_type_name} key for `{hostname}` \ -+ loaded from Cargo's config at {definition}, \ -+ and adding the new key to {known_hosts_location}" -+ ) -+ } - KnownHostLocation::Bundled => { - format!( - "adding the new key to {known_hosts_location}\n\ -@@ -217,6 +232,7 @@ pub fn certificate_check( - fn check_ssh_known_hosts( - cert_host_key: &git2::cert::CertHostkey<'_>, - host: &str, -+ config_known_hosts: Option<&Vec>>, - ) -> Result<(), KnownHostError> { - let Some(remote_host_key) = cert_host_key.hostkey() else { - return Err(anyhow::format_err!("remote host key is not available").into()); -@@ -237,6 +253,23 @@ fn check_ssh_known_hosts( - let hosts = load_hostfile(&path)?; - known_hosts.extend(hosts); - } -+ if let Some(config_known_hosts) = config_known_hosts { -+ // Format errors aren't an error in case the format needs to change in -+ // the future, to retain forwards compatibility. 
-+ for line_value in config_known_hosts { -+ let location = KnownHostLocation::Config { -+ definition: line_value.definition.clone(), -+ }; -+ match parse_known_hosts_line(&line_value.val, location) { -+ Some(known_host) => known_hosts.push(known_host), -+ None => log::warn!( -+ "failed to parse known host {} from {}", -+ line_value.val, -+ line_value.definition -+ ), -+ } -+ } -+ } - // Load the bundled keys. Don't add keys for hosts that the user has - // configured, which gives them the option to override them. This could be - // useful if the keys are ever revoked. -@@ -363,12 +396,18 @@ fn user_known_host_location() -> Option { - - /// The location to display in an error message instructing the user where to - /// add the new key. --fn user_known_host_location_to_add() -> String { -+fn user_known_host_location_to_add(diagnostic_home_config: &str) -> String { - // Note that we don't bother with the legacy known_hosts2 files. -- match user_known_host_location() { -- Some(path) => path.to_str().expect("utf-8 home").to_string(), -- None => "~/.ssh/known_hosts".to_string(), -- } -+ let user = user_known_host_location(); -+ let openssh_loc = match &user { -+ Some(path) => path.to_str().expect("utf-8 home"), -+ None => "~/.ssh/known_hosts", -+ }; -+ format!( -+ "the `net.ssh.known-hosts` array in your Cargo configuration \ -+ (such as {diagnostic_home_config}) \ -+ or in your OpenSSH known_hosts file at {openssh_loc}" -+ ) - } - - /// A single known host entry. -diff --git a/src/cargo/sources/git/utils.rs b/src/cargo/sources/git/utils.rs -index 831c43be6b..457c97c5bb 100644 ---- a/src/cargo/sources/git/utils.rs -+++ b/src/cargo/sources/git/utils.rs -@@ -726,6 +726,9 @@ pub fn with_fetch_options( - cb: &mut dyn FnMut(git2::FetchOptions<'_>) -> CargoResult<()>, - ) -> CargoResult<()> { - let mut progress = Progress::new("Fetch", config); -+ let ssh_config = config.net_config()?.ssh.as_ref(); -+ let config_known_hosts = ssh_config.and_then(|ssh| ssh.known_hosts.as_ref()); -+ let diagnostic_home_config = config.diagnostic_home_config(); - network::with_retry(config, || { - with_authentication(url, git_config, |f| { - let port = Url::parse(url).ok().and_then(|url| url.port()); -@@ -736,7 +739,13 @@ pub fn with_fetch_options( - let mut counter = MetricsCounter::<10>::new(0, last_update); - rcb.credentials(f); - rcb.certificate_check(|cert, host| { -- super::known_hosts::certificate_check(cert, host, port) -+ super::known_hosts::certificate_check( -+ cert, -+ host, -+ port, -+ config_known_hosts, -+ &diagnostic_home_config, -+ ) - }); - rcb.transfer_progress(|stats| { - let indexed_deltas = stats.indexed_deltas(); -diff --git a/src/cargo/util/config/mod.rs b/src/cargo/util/config/mod.rs -index d30e094413..d9ab142c4e 100644 ---- a/src/cargo/util/config/mod.rs -+++ b/src/cargo/util/config/mod.rs -@@ -356,6 +356,18 @@ impl Config { - &self.home_path - } - -+ /// Returns a path to display to the user with the location of their home -+ /// config file (to only be used for displaying a diagnostics suggestion, -+ /// such as recommending where to add a config value). -+ pub fn diagnostic_home_config(&self) -> String { -+ let home = self.home_path.as_path_unlocked(); -+ let path = match self.get_file_path(home, "config", false) { -+ Ok(Some(existing_path)) => existing_path, -+ _ => home.join("config.toml"), -+ }; -+ path.to_string_lossy().to_string() -+ } -+ - /// Gets the Cargo Git directory (`/git`). 
- pub fn git_path(&self) -> Filesystem { - self.home_path.join("git") -@@ -2356,6 +2368,13 @@ pub struct CargoNetConfig { - pub retry: Option<u32>, - pub offline: Option<bool>, - pub git_fetch_with_cli: Option<bool>, -+ pub ssh: Option<CargoSshConfig>, -+} -+ -+#[derive(Debug, Deserialize)] -+#[serde(rename_all = "kebab-case")] -+pub struct CargoSshConfig { -+ pub known_hosts: Option<Vec<Value<String>>>, - } - - #[derive(Debug, Deserialize)] -diff --git a/src/doc/src/appendix/git-authentication.md b/src/doc/src/appendix/git-authentication.md -index a7db1ac7f1..f46a6535c6 100644 ---- a/src/doc/src/appendix/git-authentication.md -+++ b/src/doc/src/appendix/git-authentication.md -@@ -66,7 +66,8 @@ known hosts in OpenSSH-style `known_hosts` files located in their standard - locations (`.ssh/known_hosts` in your home directory, or - `/etc/ssh/ssh_known_hosts` on Unix-like platforms or - `%PROGRAMDATA%\ssh\ssh_known_hosts` on Windows). More information about these --files can be found in the [sshd man page]. -+files can be found in the [sshd man page]. Alternatively, keys may be -+configured in a Cargo configuration file with [`net.ssh.known-hosts`]. - - When connecting to an SSH host before the known hosts has been configured, - Cargo will display an error message instructing you how to add the host key. -@@ -78,10 +79,11 @@ publish their fingerprints on the web; for example GitHub posts theirs at - <https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/githubs-ssh-key-fingerprints>. - - Cargo comes with the host keys for [github.com](https://github.com) built-in. --If those ever change, you can add the new keys to your known_hosts file. -+If those ever change, you can add the new keys to the config or known_hosts file. - - [`credential.helper`]: https://git-scm.com/book/en/v2/Git-Tools-Credential-Storage - [`net.git-fetch-with-cli`]: ../reference/config.md#netgit-fetch-with-cli -+[`net.ssh.known-hosts`]: ../reference/config.md#netsshknown-hosts - [GCM]: https://github.com/microsoft/Git-Credential-Manager-Core/ - [PuTTY]: https://www.chiark.greenend.org.uk/~sgtatham/putty/ - [Microsoft installation documentation]: https://docs.microsoft.com/en-us/windows-server/administration/openssh/openssh_install_firstuse -diff --git a/src/doc/src/reference/config.md b/src/doc/src/reference/config.md -index 1e50648797..f804ceebea 100644 ---- a/src/doc/src/reference/config.md -+++ b/src/doc/src/reference/config.md -@@ -114,6 +114,9 @@ retry = 2 # network retries - git-fetch-with-cli = true # use the `git` executable for git operations - offline = true # do not access the network - -+[net.ssh] -+known-hosts = ["..."] # known SSH host keys -+ - [patch.<registry>] - # Same keys as for [patch] in Cargo.toml - -@@ -750,6 +753,41 @@ needed, and generate an error if it encounters a network error. - - Can be overridden with the `--offline` command-line option. - -+##### `net.ssh` -+ -+The `[net.ssh]` table contains settings for SSH connections. -+ -+##### `net.ssh.known-hosts` -+* Type: array of strings -+* Default: see description -+* Environment: not supported -+ -+The `known-hosts` array contains a list of SSH host keys that should be -+accepted as valid when connecting to an SSH server (such as for SSH git -+dependencies). Each entry should be a string in a format similar to OpenSSH -+`known_hosts` files. Each string should start with one or more hostnames -+separated by commas, a space, the key type name, a space, and the -+base64-encoded key.
For example: -+ -+```toml -+[net.ssh] -+known-hosts = [ -+ "example.com ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIFO4Q5T0UV0SQevair9PFwoxY9dl4pQl3u5phoqJH3cF" -+] -+``` -+ -+Cargo will attempt to load known hosts keys from common locations supported in -+OpenSSH, and will join those with any listed in a Cargo configuration file. -+If any matching entry has the correct key, the connection will be allowed. -+ -+Cargo comes with the host keys for [github.com][github-keys] built-in. If -+those ever change, you can add the new keys to the config or known_hosts file. -+ -+See [Git Authentication](../appendix/git-authentication.md#ssh-known-hosts) -+for more details. -+ -+[github-keys]: https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/githubs-ssh-key-fingerprints -+ - #### `[patch]` - - Just as you can override dependencies using [`[patch]` in diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/debian/patches/cve/CVE-2022-46176-04-add-some-known-hosts-tests-and-fix-comma-bug.patch cargo-0.67.1+ds0ubuntu0.libgit2/debian/patches/cve/CVE-2022-46176-04-add-some-known-hosts-tests-and-fix-comma-bug.patch --- cargo-0.66.0+ds0ubuntu0.libgit2/debian/patches/cve/CVE-2022-46176-04-add-some-known-hosts-tests-and-fix-comma-bug.patch 2023-01-17 14:05:25.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/debian/patches/cve/CVE-2022-46176-04-add-some-known-hosts-tests-and-fix-comma-bug.patch 1970-01-01 00:00:00.000000000 +0000 @@ -1,244 +0,0 @@ -commit 302a543ddf3b7621c2f10623862029d35fae7e3c -Author: Eric Huss -Date: Mon Dec 12 20:14:23 2022 -0800 - - Add some known_hosts tests. - - This also fixes a bug with the host matching when there are comma-separated hosts. - -diff --git a/src/cargo/sources/git/known_hosts.rs b/src/cargo/sources/git/known_hosts.rs -index 7efea43c3b..58e64e7913 100644 ---- a/src/cargo/sources/git/known_hosts.rs -+++ b/src/cargo/sources/git/known_hosts.rs -@@ -21,7 +21,7 @@ - //! this file. - - use crate::util::config::{Definition, Value}; --use git2::cert::Cert; -+use git2::cert::{Cert, SshHostKeyType}; - use git2::CertificateCheckStatus; - use std::collections::HashSet; - use std::fmt::Write; -@@ -49,7 +49,7 @@ enum KnownHostError { - /// The host key was not found. - HostKeyNotFound { - hostname: String, -- key_type: git2::cert::SshHostKeyType, -+ key_type: SshHostKeyType, - remote_host_key: String, - remote_fingerprint: String, - other_hosts: Vec, -@@ -57,7 +57,7 @@ enum KnownHostError { - /// The host key was found, but does not match the remote's key. - HostKeyHasChanged { - hostname: String, -- key_type: git2::cert::SshHostKeyType, -+ key_type: SshHostKeyType, - old_known_host: KnownHost, - remote_host_key: String, - remote_fingerprint: String, -@@ -238,11 +238,6 @@ fn check_ssh_known_hosts( - return Err(anyhow::format_err!("remote host key is not available").into()); - }; - let remote_key_type = cert_host_key.hostkey_type().unwrap(); -- // `changed_key` keeps track of any entries where the key has changed. -- let mut changed_key = None; -- // `other_hosts` keeps track of any entries that have an identical key, -- // but a different hostname. -- let mut other_hosts = Vec::new(); - - // Collect all the known host entries from disk. - let mut known_hosts = Vec::new(); -@@ -293,6 +288,21 @@ fn check_ssh_known_hosts( - }); - } - } -+ check_ssh_known_hosts_loaded(&known_hosts, host, remote_key_type, remote_host_key) -+} -+ -+/// Checks a host key against a loaded set of known hosts. 
-+fn check_ssh_known_hosts_loaded( -+ known_hosts: &[KnownHost], -+ host: &str, -+ remote_key_type: SshHostKeyType, -+ remote_host_key: &[u8], -+) -> Result<(), KnownHostError> { -+ // `changed_key` keeps track of any entries where the key has changed. -+ let mut changed_key = None; -+ // `other_hosts` keeps track of any entries that have an identical key, -+ // but a different hostname. -+ let mut other_hosts = Vec::new(); - - for known_host in known_hosts { - // The key type from libgit2 needs to match the key type from the host file. -@@ -301,7 +311,6 @@ fn check_ssh_known_hosts( - } - let key_matches = known_host.key == remote_host_key; - if !known_host.host_matches(host) { -- // `name` can be None for hashed hostnames (which libgit2 does not expose). - if key_matches { - other_hosts.push(known_host.clone()); - } -@@ -434,7 +443,7 @@ impl KnownHost { - return false; - } - } else { -- match_found = pattern == host; -+ match_found |= pattern == host; - } - } - match_found -@@ -444,6 +453,10 @@ impl KnownHost { - /// Loads an OpenSSH known_hosts file. - fn load_hostfile(path: &Path) -> Result, anyhow::Error> { - let contents = cargo_util::paths::read(path)?; -+ Ok(load_hostfile_contents(path, &contents)) -+} -+ -+fn load_hostfile_contents(path: &Path, contents: &str) -> Vec { - let entries = contents - .lines() - .enumerate() -@@ -455,13 +468,13 @@ fn load_hostfile(path: &Path) -> Result, anyhow::Error> { - parse_known_hosts_line(line, location) - }) - .collect(); -- Ok(entries) -+ entries - } - - fn parse_known_hosts_line(line: &str, location: KnownHostLocation) -> Option { - let line = line.trim(); - // FIXME: @revoked and @cert-authority is currently not supported. -- if line.is_empty() || line.starts_with('#') || line.starts_with('@') { -+ if line.is_empty() || line.starts_with(['#', '@', '|']) { - return None; - } - let mut parts = line.split([' ', '\t']).filter(|s| !s.is_empty()); -@@ -476,3 +489,126 @@ fn parse_known_hosts_line(line: &str, location: KnownHostLocation) -> Option { -+ assert_eq!(path, kh_path); -+ assert_eq!(*lineno, 4); -+ } -+ _ => panic!("unexpected"), -+ } -+ assert_eq!(khs[0].patterns, "example.com,rust-lang.org"); -+ assert_eq!(khs[0].key_type, "ssh-rsa"); -+ assert_eq!(khs[0].key.len(), 407); -+ assert_eq!(&khs[0].key[..30], b"\x00\x00\x00\x07ssh-rsa\x00\x00\x00\x03\x01\x00\x01\x00\x00\x01\x81\x00\xb935\x88\xa5\x9c)"); -+ match &khs[1].location { -+ KnownHostLocation::File { path, lineno } => { -+ assert_eq!(path, kh_path); -+ assert_eq!(*lineno, 5); -+ } -+ _ => panic!("unexpected"), -+ } -+ assert_eq!(khs[2].patterns, "[example.net]:2222"); -+ assert_eq!(khs[3].patterns, "nistp256.example.org"); -+ assert_eq!(khs[7].patterns, "192.168.42.12"); -+ } -+ -+ #[test] -+ fn host_matches() { -+ let kh_path = Path::new("/home/abc/.known_hosts"); -+ let khs = load_hostfile_contents(kh_path, COMMON_CONTENTS); -+ assert!(khs[0].host_matches("example.com")); -+ assert!(khs[0].host_matches("rust-lang.org")); -+ assert!(khs[0].host_matches("EXAMPLE.COM")); -+ assert!(khs[1].host_matches("example.net")); -+ assert!(!khs[0].host_matches("example.net")); -+ assert!(khs[2].host_matches("[example.net]:2222")); -+ assert!(!khs[2].host_matches("example.net")); -+ assert!(!khs[8].host_matches("neg.example.com")); -+ } -+ -+ #[test] -+ fn check_match() { -+ let kh_path = Path::new("/home/abc/.known_hosts"); -+ let khs = load_hostfile_contents(kh_path, COMMON_CONTENTS); -+ -+ assert!(check_ssh_known_hosts_loaded( -+ &khs, -+ "example.com", -+ SshHostKeyType::Rsa, -+ &khs[0].key -+ 
) -+ .is_ok()); -+ -+ match check_ssh_known_hosts_loaded(&khs, "example.com", SshHostKeyType::Dss, &khs[0].key) { -+ Err(KnownHostError::HostKeyNotFound { -+ hostname, -+ remote_fingerprint, -+ other_hosts, -+ .. -+ }) => { -+ assert_eq!( -+ remote_fingerprint, -+ "yn+pONDn0EcgdOCVptgB4RZd/wqmsVKrPnQMLtrvhw8" -+ ); -+ assert_eq!(hostname, "example.com"); -+ assert_eq!(other_hosts.len(), 0); -+ } -+ _ => panic!("unexpected"), -+ } -+ -+ match check_ssh_known_hosts_loaded( -+ &khs, -+ "foo.example.com", -+ SshHostKeyType::Rsa, -+ &khs[0].key, -+ ) { -+ Err(KnownHostError::HostKeyNotFound { other_hosts, .. }) => { -+ assert_eq!(other_hosts.len(), 1); -+ assert_eq!(other_hosts[0].patterns, "example.com,rust-lang.org"); -+ } -+ _ => panic!("unexpected"), -+ } -+ -+ let mut modified_key = khs[0].key.clone(); -+ modified_key[0] = 1; -+ match check_ssh_known_hosts_loaded(&khs, "example.com", SshHostKeyType::Rsa, &modified_key) -+ { -+ Err(KnownHostError::HostKeyHasChanged { old_known_host, .. }) => { -+ assert!(matches!( -+ old_known_host.location, -+ KnownHostLocation::File { lineno: 4, .. } -+ )); -+ } -+ _ => panic!("unexpected"), -+ } -+ } -+} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/debian/patches/cve/CVE-2022-46176-05-remove-let-else.patch cargo-0.67.1+ds0ubuntu0.libgit2/debian/patches/cve/CVE-2022-46176-05-remove-let-else.patch --- cargo-0.66.0+ds0ubuntu0.libgit2/debian/patches/cve/CVE-2022-46176-05-remove-let-else.patch 2023-01-17 14:05:25.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/debian/patches/cve/CVE-2022-46176-05-remove-let-else.patch 1970-01-01 00:00:00.000000000 +0000 @@ -1,26 +0,0 @@ -commit cf716fc3c2b0785013b321f08d6cf9e277f89c84 -Author: Eric Huss -Date: Tue Dec 13 08:14:59 2022 -0800 - - Remove let-else, just use ? propagation. - - Co-authored-by: Weihang Lo - -diff --git a/src/cargo/sources/git/known_hosts.rs b/src/cargo/sources/git/known_hosts.rs -index 58e64e7913..f272195306 100644 ---- a/src/cargo/sources/git/known_hosts.rs -+++ b/src/cargo/sources/git/known_hosts.rs -@@ -478,10 +478,9 @@ fn parse_known_hosts_line(line: &str, location: KnownHostLocation) -> Option -Date: Wed Dec 14 19:01:40 2022 -0800 - - Add test for config Value in TOML array. - -diff --git a/tests/testsuite/config.rs b/tests/testsuite/config.rs -index b1d07bb405..d1487833f7 100644 ---- a/tests/testsuite/config.rs -+++ b/tests/testsuite/config.rs -@@ -1,7 +1,7 @@ - //! Tests for config settings. 
- - use cargo::core::{PackageIdSpec, Shell}; --use cargo::util::config::{self, Config, SslVersionConfig, StringList}; -+use cargo::util::config::{self, Config, Definition, SslVersionConfig, StringList}; - use cargo::util::interning::InternedString; - use cargo::util::toml::{self, VecStringOrBool as VSOB}; - use cargo::CargoResult; -@@ -1508,3 +1508,59 @@ fn all_profile_options() { - let roundtrip_toml = toml_edit::easy::to_string(&roundtrip).unwrap(); - compare::assert_match_exact(&profile_toml, &roundtrip_toml); - } -+ -+#[cargo_test] -+fn value_in_array() { -+ // Value in an array should work -+ let root_path = paths::root().join(".cargo/config.toml"); -+ write_config_at( -+ &root_path, -+ "\ -+[net.ssh] -+known-hosts = [ -+ \"example.com ...\", -+ \"example.net ...\", -+] -+", -+ ); -+ -+ let foo_path = paths::root().join("foo/.cargo/config.toml"); -+ write_config_at( -+ &foo_path, -+ "\ -+[net.ssh] -+known-hosts = [ -+ \"example.org ...\", -+] -+", -+ ); -+ -+ let config = ConfigBuilder::new() -+ .cwd("foo") -+ // environment variables don't actually work for known-hosts due to -+ // space splitting, but this is included here just to validate that -+ // they work (particularly if other Vec config vars are added -+ // in the future). -+ .env("CARGO_NET_SSH_KNOWN_HOSTS", "env-example") -+ .build(); -+ let net_config = config.net_config().unwrap(); -+ let kh = net_config -+ .ssh -+ .as_ref() -+ .unwrap() -+ .known_hosts -+ .as_ref() -+ .unwrap(); -+ assert_eq!(kh.len(), 4); -+ assert_eq!(kh[0].val, "example.org ..."); -+ assert_eq!(kh[0].definition, Definition::Path(foo_path.clone())); -+ assert_eq!(kh[1].val, "example.com ..."); -+ assert_eq!(kh[1].definition, Definition::Path(root_path.clone())); -+ assert_eq!(kh[2].val, "example.net ..."); -+ assert_eq!(kh[2].definition, Definition::Path(root_path.clone())); -+ assert_eq!(kh[3].val, "env-example"); -+ assert_eq!( -+ kh[3].definition, -+ Definition::Environment("CARGO_NET_SSH_KNOWN_HOSTS".to_string()) -+ ); -+} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/debian/patches/cve/CVE-2022-46176-07-support-hashed-hostnames.patch cargo-0.67.1+ds0ubuntu0.libgit2/debian/patches/cve/CVE-2022-46176-07-support-hashed-hostnames.patch --- cargo-0.66.0+ds0ubuntu0.libgit2/debian/patches/cve/CVE-2022-46176-07-support-hashed-hostnames.patch 2023-01-17 14:05:25.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/debian/patches/cve/CVE-2022-46176-07-support-hashed-hostnames.patch 1970-01-01 00:00:00.000000000 +0000 @@ -1,125 +0,0 @@ -This patch is based on the upstream commit described below, adapted for use -in the Debian package by Peter Michael Green. - -commit 67ae2dcafea5955824b1f390568a5fa109424987 -Author: Eric Huss -Date: Wed Dec 28 15:52:10 2022 -0800 - - ssh known_hosts: support hashed hostnames - -Index: cargo/src/cargo/sources/git/known_hosts.rs -=================================================================== ---- cargo.orig/src/cargo/sources/git/known_hosts.rs -+++ cargo/src/cargo/sources/git/known_hosts.rs -@@ -16,13 +16,13 @@ - //! - `VerifyHostKeyDNS` — Uses SSHFP DNS records to fetch a host key. - //! - //! There's also a number of things that aren't supported but could be easily --//! added (it just adds a little complexity). For example, hashed hostnames, --//! hostname patterns, and revoked markers. See "FIXME" comments littered in --//! this file. -+//! added (it just adds a little complexity). For example, hostname patterns, -+//! and revoked markers. See "FIXME" comments littered in this file. 
- - use crate::util::config::{Definition, Value}; - use git2::cert::{Cert, SshHostKeyType}; - use git2::CertificateCheckStatus; -+use hmac::Mac; - use std::collections::HashSet; - use std::fmt::Write; - use std::path::{Path, PathBuf}; -@@ -419,6 +419,8 @@ fn user_known_host_location_to_add(diagn - ) - } - -+const HASH_HOSTNAME_PREFIX: &str = "|1|"; -+ - /// A single known host entry. - #[derive(Clone)] - struct KnownHost { -@@ -434,7 +436,9 @@ impl KnownHost { - fn host_matches(&self, host: &str) -> bool { - let mut match_found = false; - let host = host.to_lowercase(); -- // FIXME: support hashed hostnames -+ if let Some(hashed) = self.patterns.strip_prefix(HASH_HOSTNAME_PREFIX) { -+ return hashed_hostname_matches(&host, hashed); -+ } - for pattern in self.patterns.split(',') { - let pattern = pattern.to_lowercase(); - // FIXME: support * and ? wildcards -@@ -450,6 +454,16 @@ impl KnownHost { - } - } - -+fn hashed_hostname_matches(host: &str, hashed: &str) -> bool { -+ let Some((b64_salt, b64_host)) = hashed.split_once('|') else { return false; }; -+ let Ok(salt) = base64::decode(b64_salt) else { return false; }; -+ let Ok(hashed_host) = base64::decode(b64_host) else { return false; }; -+ let Ok(mut mac) = hmac::Hmac::::new_from_slice(&salt) else { return false; }; -+ mac.update(host.as_bytes()); -+ let result = mac.finalize().into_bytes(); -+ hashed_host == &result[..] -+} -+ - /// Loads an OpenSSH known_hosts file. - fn load_hostfile(path: &Path) -> Result, anyhow::Error> { - let contents = cargo_util::paths::read(path)?; -@@ -474,7 +488,7 @@ fn load_hostfile_contents(path: &Path, c - fn parse_known_hosts_line(line: &str, location: KnownHostLocation) -> Option { - let line = line.trim(); - // FIXME: @revoked and @cert-authority is currently not supported. -- if line.is_empty() || line.starts_with(['#', '@', '|']) { -+ if line.is_empty() || line.starts_with(['#', '@']) { - return None; - } - let mut parts = line.split([' ', '\t']).filter(|s| !s.is_empty()); -@@ -506,8 +520,7 @@ mod tests { - @revoked * ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIKtQsi+KPYispwm2rkMidQf30fG1Niy8XNkvASfePoca eric@host - example.com ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIAWkjI6XT2SZh3xNk5NhisA3o3sGzWR+VAKMSqHtI0aY eric@host - 192.168.42.12 ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIKVYJpa0yUGaNk0NXQTPWa0tHjqRpx+7hl2diReH6DtR eric@host -- # Hash not yet supported. -- |1|7CMSYgzdwruFLRhwowMtKx0maIE=|Tlff1GFqc3Ao+fUWxMEVG8mJiyk= ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIIHgN3O21U4LWtP5OzjTzPnUnSDmCNDvyvlaj6Hi65JC eric@host -+ |1|QxzZoTXIWLhUsuHAXjuDMIV3FjQ=|M6NCOIkjiWdCWqkh5+Q+/uFLGjs= ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIIHgN3O21U4LWtP5OzjTzPnUnSDmCNDvyvlaj6Hi65JC eric@host - # Negation isn't terribly useful without globs. 
- neg.example.com,!neg.example.com ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIOXfUnaAHTlo1Qi//rNk26OcmHikmkns1Z6WW/UuuS3K eric@host - "#; -@@ -516,7 +529,7 @@ mod tests { - fn known_hosts_parse() { - let kh_path = Path::new("/home/abc/.known_hosts"); - let khs = load_hostfile_contents(kh_path, COMMON_CONTENTS); -- assert_eq!(khs.len(), 9); -+ assert_eq!(khs.len(), 10); - match &khs[0].location { - KnownHostLocation::File { path, lineno } => { - assert_eq!(path, kh_path); -@@ -551,7 +564,9 @@ mod tests { - assert!(!khs[0].host_matches("example.net")); - assert!(khs[2].host_matches("[example.net]:2222")); - assert!(!khs[2].host_matches("example.net")); -- assert!(!khs[8].host_matches("neg.example.com")); -+ assert!(khs[8].host_matches("hashed.example.com")); -+ assert!(!khs[8].host_matches("example.com")); -+ assert!(!khs[9].host_matches("neg.example.com")); - } - - #[test] ---- rust-cargo-0.66.0.orig/Cargo.toml -+++ rust-cargo-0.66.0/Cargo.toml -@@ -33,6 +33,7 @@ git2 = "0.16.0" - git2-curl = "0.17.0" - glob = "0.3.0" - hex = "0.4" -+hmac = "0.12.1" - home = "0.5" - humantime = "2.0.0" - indexmap = "1" -@@ -53,6 +54,7 @@ semver = { version = "1.0.3", features = - serde = { version = "1.0.123", features = ["derive"] } - serde_ignored = "0.1.0" - serde_json = { version = "1.0.30", features = ["raw_value"] } -+sha1 = "0.10.1" - shell-escape = "0.1.4" - strip-ansi-escapes = "0.1.0" - tar = { version = "0.4.38", default-features = false } - diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/debian/patches/cve/CVE-2022-46176-08-eliminate-let-else.patch cargo-0.67.1+ds0ubuntu0.libgit2/debian/patches/cve/CVE-2022-46176-08-eliminate-let-else.patch --- cargo-0.66.0+ds0ubuntu0.libgit2/debian/patches/cve/CVE-2022-46176-08-eliminate-let-else.patch 2023-01-17 14:05:25.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/debian/patches/cve/CVE-2022-46176-08-eliminate-let-else.patch 1970-01-01 00:00:00.000000000 +0000 @@ -1,61 +0,0 @@ -This patch eliminates let-else usage in the code introduced -to fix CVE-2022-46176 as that construct is not stabalised in -the version of rustc currently in Debian. - -It was written specifical for Debian by Peter Michael Green. - -Index: cargo/src/cargo/sources/git/known_hosts.rs -=================================================================== ---- cargo.orig/src/cargo/sources/git/known_hosts.rs -+++ cargo/src/cargo/sources/git/known_hosts.rs -@@ -89,11 +89,13 @@ pub fn certificate_check( - config_known_hosts: Option<&Vec>>, - diagnostic_home_config: &str, - ) -> Result { -- let Some(host_key) = cert.as_hostkey() else { -+ let host_key = cert.as_hostkey(); -+ if host_key.is_none() { - // Return passthrough for TLS X509 certificates to use whatever validation - // was done in git2. - return Ok(CertificateCheckStatus::CertificatePassthrough) - }; -+ let host_key = host_key.unwrap(); - // If a nonstandard port is in use, check for that first. - // The fallback to check without a port is handled in the HostKeyNotFound handler. - let host_maybe_port = match port { -@@ -234,9 +236,11 @@ fn check_ssh_known_hosts( - host: &str, - config_known_hosts: Option<&Vec>>, - ) -> Result<(), KnownHostError> { -- let Some(remote_host_key) = cert_host_key.hostkey() else { -+ let remote_host_key = cert_host_key.hostkey(); -+ if remote_host_key.is_none() { - return Err(anyhow::format_err!("remote host key is not available").into()); - }; -+ let remote_host_key = remote_host_key.unwrap(); - let remote_key_type = cert_host_key.hostkey_type().unwrap(); - - // Collect all the known host entries from disk. 
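Not part of the debdiff: a minimal standalone sketch, for illustration only, of the `let`-`else` rewrite that the CVE-2022-46176-08 patch applies. The `let`-`else` construct was stabilized in Rust 1.65, so it is unavailable to the older rustc the Debian patch targets; the `lookup` helper below is hypothetical and merely stands in for the real fallible steps (base64 decode, HMAC construction, and so on).

```rust
// Illustration only; `lookup` is a hypothetical stand-in for a fallible step.
fn lookup(key: &str) -> Option<String> {
    if key.is_empty() { None } else { Some(key.to_uppercase()) }
}

// Upstream style (requires rustc >= 1.65):
//     let Some(value) = lookup(key) else { return false; };
// Debian rewrite to the equivalent stable form, as in the hunks above and below:
fn matches(key: &str) -> bool {
    let value = lookup(key);
    if value.is_none() {
        return false;
    }
    let value = value.unwrap();
    value == "SSH"
}

fn main() {
    assert!(!matches(""));    // lookup fails, early return
    assert!(matches("ssh"));  // lookup succeeds, comparison runs
}
```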
-@@ -455,10 +459,18 @@ impl KnownHost { - } - - fn hashed_hostname_matches(host: &str, hashed: &str) -> bool { -- let Some((b64_salt, b64_host)) = hashed.split_once('|') else { return false; }; -- let Ok(salt) = base64::decode(b64_salt) else { return false; }; -- let Ok(hashed_host) = base64::decode(b64_host) else { return false; }; -- let Ok(mut mac) = hmac::Hmac::::new_from_slice(&salt) else { return false; }; -+ let hostandsalt = hashed.split_once('|'); -+ if hostandsalt.is_none() { return false; }; -+ let (b64_salt, b64_host) = hostandsalt.unwrap(); -+ let salt = base64::decode(b64_salt); -+ if salt.is_err() { return false; }; -+ let salt = salt.unwrap(); -+ let hashed_host = base64::decode(b64_host); -+ if hashed_host.is_err() { return false; }; -+ let hashed_host = hashed_host.unwrap(); -+ let mac = hmac::Hmac::::new_from_slice(&salt); -+ if mac.is_err() { return false; }; -+ let mut mac = mac.unwrap(); - mac.update(host.as_bytes()); - let result = mac.finalize().into_bytes(); - hashed_host == &result[..] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/debian/patches/series cargo-0.67.1+ds0ubuntu0.libgit2/debian/patches/series --- cargo-0.66.0+ds0ubuntu0.libgit2/debian/patches/series 2023-01-18 20:42:34.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/debian/patches/series 2023-02-26 01:05:50.000000000 +0000 @@ -5,15 +5,6 @@ disable-fs-specific-test.patch 0003-tests-add-missing-cross-disabled-checks.patch -cve/CVE-2022-46176-01-validate-ssh-host.keys.patch -cve/CVE-2022-46176-02-add-support-for-deserializing-vec-value-string.patch -cve/CVE-2022-46176-03-support-configuring-ssh-known-hosts.patch -cve/CVE-2022-46176-04-add-some-known-hosts-tests-and-fix-comma-bug.patch -cve/CVE-2022-46176-05-remove-let-else.patch -cve/CVE-2022-46176-06-add-test-for-config-value-in-toml-array.patch -cve/CVE-2022-46176-07-support-hashed-hostnames.patch -cve/CVE-2022-46176-08-eliminate-let-else.patch - remove-badges.patch proxy-skip-tests.patch i386-crossbuild-tests.patch diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/debian/vendor-tarball-unsuspicious.txt cargo-0.67.1+ds0ubuntu0.libgit2/debian/vendor-tarball-unsuspicious.txt --- cargo-0.66.0+ds0ubuntu0.libgit2/debian/vendor-tarball-unsuspicious.txt 2023-01-17 14:05:25.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/debian/vendor-tarball-unsuspicious.txt 2023-02-26 01:05:50.000000000 +0000 @@ -44,7 +44,6 @@ # ideally should be autogenerated, but too difficult today bstr/src/unicode/fsm/*.dfa -bstr-0.2.17/src/unicode/fsm/*.dfa regex-syntax/src/unicode_tables/*.rs # "verylongtext" but OK source code, manually audited: diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/.github/workflows/contrib.yml cargo-0.67.1+ds0ubuntu0.libgit2/.github/workflows/contrib.yml --- cargo-0.66.0+ds0ubuntu0.libgit2/.github/workflows/contrib.yml 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/.github/workflows/contrib.yml 2023-01-10 13:41:19.000000000 +0000 @@ -21,6 +21,8 @@ mkdir mdbook curl -Lf https://github.com/rust-lang/mdBook/releases/download/v0.4.9/mdbook-v0.4.9-x86_64-unknown-linux-gnu.tar.gz | tar -xz --directory=./mdbook echo `pwd`/mdbook >> $GITHUB_PATH + - name: Update toolchain + run: rustup update --no-self-update stable && rustup default stable - name: Build API doc run: | cargo doc --document-private-items --no-deps diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/.github/workflows/main.yml cargo-0.67.1+ds0ubuntu0.libgit2/.github/workflows/main.yml --- cargo-0.66.0+ds0ubuntu0.libgit2/.github/workflows/main.yml 2022-10-20 06:00:42.000000000 +0000 +++ 
cargo-0.67.1+ds0ubuntu0.libgit2/.github/workflows/main.yml 2023-01-10 13:41:19.000000000 +0000 @@ -80,6 +80,13 @@ # Deny warnings on CI to avoid warnings getting into the codebase. - run: cargo test --features 'deny-warnings' + # The testsuite generates a huge amount of data, and fetch-smoke-test was + # running out of disk space. + - name: Clear test output + run: | + df -h + rm -rf target/tmp + df -h - name: Check operability of rustc invocation with argfile env: __CARGO_TEST_FORCE_ARGFILE: 1 @@ -111,7 +118,7 @@ cargo check --manifest-path benches/capture/Cargo.toml # The testsuite generates a huge amount of data, and fetch-smoke-test was # running out of disk space. - - name: Clear test output + - name: Clear benchmark output run: | df -h rm -rf target/tmp diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/bin/cargo/cli.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/bin/cargo/cli.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/bin/cargo/cli.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/bin/cargo/cli.rs 2023-01-10 13:41:19.000000000 +0000 @@ -2,9 +2,11 @@ use cargo::core::shell::Shell; use cargo::core::{features, CliUnstable}; use cargo::{self, drop_print, drop_println, CliResult, Config}; -use clap::{AppSettings, Arg, ArgMatches}; +use clap::{Arg, ArgMatches}; use itertools::Itertools; use std::collections::HashMap; +use std::ffi::OsStr; +use std::ffi::OsString; use std::fmt::Write; use super::commands; @@ -30,9 +32,6 @@ // the [alias] table). let config = config.get_mut(); - // Global args need to be extracted before expanding aliases because the - // clap code for extracting a subcommand discards global options - // (appearing before the subcommand). let (expanded_args, global_args) = expand_aliases(config, args, vec![])?; if expanded_args @@ -70,7 +69,7 @@ {} -Run with 'cargo -Z [FLAG] [SUBCOMMAND]'", +Run with 'cargo -Z [FLAG] [COMMAND]'", joined ); if !config.nightly_features_allowed { @@ -150,7 +149,7 @@ } }; config_configure(config, &expanded_args, subcommand_args, global_args)?; - super::init_git_transports(config); + super::init_git(config); execute_subcommand(config, cmd, subcommand_args) } @@ -222,41 +221,54 @@ } } +/// Expands aliases recursively to collect all the command line arguments. +/// +/// [`GlobalArgs`] need to be extracted before expanding aliases because the +/// clap code for extracting a subcommand discards global options +/// (appearing before the subcommand). fn expand_aliases( config: &mut Config, args: ArgMatches, mut already_expanded: Vec, ) -> Result<(ArgMatches, GlobalArgs), CliError> { if let Some((cmd, args)) = args.subcommand() { - match ( - commands::builtin_exec(cmd), - super::aliased_command(config, cmd)?, - ) { - (Some(_), Some(_)) => { + let exec = commands::builtin_exec(cmd); + let aliased_cmd = super::aliased_command(config, cmd); + + match (exec, aliased_cmd) { + (Some(_), Ok(Some(_))) => { // User alias conflicts with a built-in subcommand config.shell().warn(format!( "user-defined alias `{}` is ignored, because it is shadowed by a built-in command", cmd, ))?; } - (Some(_), None) => { - // Command is built-in and is not conflicting with alias, but contains ignored values. - if let Some(mut values) = args.get_many::("") { - config.shell().warn(format!( - "trailing arguments after built-in command `{}` are ignored: `{}`", + (Some(_), Ok(None) | Err(_)) => { + // Here we ignore errors from aliasing as we already favor built-in command, + // and alias doesn't involve in this context. 
+ + if let Some(values) = args.get_many::("") { + // Command is built-in and is not conflicting with alias, but contains ignored values. + return Err(anyhow::format_err!( + "\ +trailing arguments after built-in command `{}` are unsupported: `{}` + +To pass the arguments to the subcommand, remove `--`", cmd, - values.join(" "), - ))?; + values.map(|s| s.to_string_lossy()).join(" "), + ) + .into()); } } - (None, None) => {} - (_, Some(mut alias)) => { - // Check if this alias is shadowing an external subcommand + (None, Ok(None)) => {} + (None, Ok(Some(alias))) => { + // Check if a user-defined alias is shadowing an external subcommand // (binary of the form `cargo-`) // Currently this is only a warning, but after a transition period this will become // a hard error. - if let Some(path) = super::find_external_subcommand(config, cmd) { - config.shell().warn(format!( + if super::builtin_aliases_execs(cmd).is_none() { + if let Some(path) = super::find_external_subcommand(config, cmd) { + config.shell().warn(format!( "\ user-defined alias `{}` is shadowing an external subcommand found at: `{}` This was previously accepted but is being phased out; it will become a hard error in a future release. @@ -264,9 +276,14 @@ cmd, path.display(), ))?; + } } - alias.extend(args.get_many::("").unwrap_or_default().cloned()); + let mut alias = alias + .into_iter() + .map(|s| OsString::from(s)) + .collect::>(); + alias.extend(args.get_many::("").unwrap_or_default().cloned()); // new_args strips out everything before the subcommand, so // capture those global options now. // Note that an alias to an external command will not receive @@ -290,6 +307,7 @@ let (expanded_args, _) = expand_aliases(config, new_args, already_expanded)?; return Ok((expanded_args, global_args)); } + (None, Err(e)) => return Err(e.into()), } }; @@ -342,12 +360,12 @@ return exec(config, subcommand_args); } - let mut ext_args: Vec<&str> = vec![cmd]; + let mut ext_args: Vec<&OsStr> = vec![OsStr::new(cmd)]; ext_args.extend( subcommand_args - .get_many::("") + .get_many::("") .unwrap_or_default() - .map(String::as_str), + .map(OsString::as_os_str), ); super::execute_external_subcommand(config, cmd, &ext_args) } @@ -387,16 +405,15 @@ } } -pub fn cli() -> App { +pub fn cli() -> Command { let is_rustup = std::env::var_os("RUSTUP_HOME").is_some(); let usage = if is_rustup { - "cargo [+toolchain] [OPTIONS] [SUBCOMMAND]" + "cargo [+toolchain] [OPTIONS] [COMMAND]" } else { - "cargo [OPTIONS] [SUBCOMMAND]" + "cargo [OPTIONS] [COMMAND]" }; - App::new("cargo") + Command::new("cargo") .allow_external_subcommands(true) - .setting(AppSettings::DeriveDisplayOrder) // Doesn't mix well with our list of common cargo commands. 
See clap-rs/clap#3108 for // opening clap up to allow us to style our help template .disable_colored_help(true) @@ -407,10 +424,9 @@ "\ Rust's package manager -USAGE: - {usage} +Usage: {usage} -OPTIONS: +Options: {options} Some common cargo commands are (see all commands with --list): @@ -421,6 +437,7 @@ new Create a new cargo package init Create a new cargo package in an existing directory add Add dependencies to a manifest file + remove Remove dependencies from a manifest file run, r Run a binary or example of the local package test, t Run the tests bench Run the benchmarks diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/bin/cargo/commands/add.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/bin/cargo/commands/add.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/bin/cargo/commands/add.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/bin/cargo/commands/add.rs 2023-01-10 13:41:19.000000000 +0000 @@ -1,3 +1,4 @@ +use cargo::sources::CRATES_IO_REGISTRY; use indexmap::IndexMap; use indexmap::IndexSet; @@ -6,29 +7,27 @@ use cargo::ops::cargo_add::add; use cargo::ops::cargo_add::AddOptions; use cargo::ops::cargo_add::DepOp; -use cargo::ops::cargo_add::DepTable; use cargo::ops::resolve_ws; use cargo::util::command_prelude::*; use cargo::util::interning::InternedString; +use cargo::util::toml_mut::manifest::DepTable; use cargo::CargoResult; -pub fn cli() -> clap::Command<'static> { +pub fn cli() -> Command { clap::Command::new("add") - .setting(clap::AppSettings::DeriveDisplayOrder) .about("Add dependencies to a Cargo.toml manifest file") .override_usage( "\ - cargo add [OPTIONS] [@] ... - cargo add [OPTIONS] --path ... - cargo add [OPTIONS] --git ..." + cargo add [OPTIONS] [@] ... + cargo add [OPTIONS] --path ... + cargo add [OPTIONS] --git ..." ) .after_help("Run `cargo help add` for more detailed information.\n") .group(clap::ArgGroup::new("selected").multiple(true).required(true)) .args([ clap::Arg::new("crates") - .takes_value(true) .value_name("DEP_ID") - .multiple_values(true) + .num_args(0..) 
.help("Reference to a package to add as a dependency") .long_help( "Reference to a package to add as a dependency @@ -46,7 +45,6 @@ clap::Arg::new("features") .short('F') .long("features") - .takes_value(true) .value_name("FEATURES") .action(ArgAction::Append) .help("Space or comma separated list of features to activate"), @@ -65,7 +63,7 @@ .overrides_with("optional"), clap::Arg::new("rename") .long("rename") - .takes_value(true) + .action(ArgAction::Set) .value_name("NAME") .help("Rename the dependency") .long_help("Rename the dependency @@ -79,24 +77,24 @@ clap::Arg::new("package") .short('p') .long("package") - .takes_value(true) + .action(ArgAction::Set) .value_name("SPEC") .help("Package to modify"), ]) .arg_quiet() .arg_dry_run("Don't actually write the manifest") - .next_help_heading("SOURCE") + .next_help_heading("Source") .args([ clap::Arg::new("path") .long("path") - .takes_value(true) + .action(ArgAction::Set) .value_name("PATH") .help("Filesystem path to local crate to add") .group("selected") .conflicts_with("git"), clap::Arg::new("git") .long("git") - .takes_value(true) + .action(ArgAction::Set) .value_name("URI") .help("Git repository location") .long_help("Git repository location @@ -105,21 +103,21 @@ .group("selected"), clap::Arg::new("branch") .long("branch") - .takes_value(true) + .action(ArgAction::Set) .value_name("BRANCH") .help("Git branch to download the crate from") .requires("git") .group("git-ref"), clap::Arg::new("tag") .long("tag") - .takes_value(true) + .action(ArgAction::Set) .value_name("TAG") .help("Git tag to download the crate from") .requires("git") .group("git-ref"), clap::Arg::new("rev") .long("rev") - .takes_value(true) + .action(ArgAction::Set) .value_name("REV") .help("Git reference to download the crate from") .long_help("Git reference to download the crate from @@ -129,11 +127,11 @@ .group("git-ref"), clap::Arg::new("registry") .long("registry") - .takes_value(true) + .action(ArgAction::Set) .value_name("NAME") .help("Package registry for this dependency"), ]) - .next_help_heading("SECTION") + .next_help_heading("Section") .args([ flag("dev", "Add as development dependency") @@ -151,7 +149,7 @@ .group("section"), clap::Arg::new("target") .long("target") - .takes_value(true) + .action(ArgAction::Set) .value_name("TARGET") .value_parser(clap::builder::NonEmptyStringValueParser::new()) .help("Add as dependency to the given target platform") @@ -210,7 +208,10 @@ let rev = matches.get_one::("rev"); let tag = matches.get_one::("tag"); let rename = matches.get_one::("rename"); - let registry = matches.registry(config)?; + let registry = match matches.registry(config)? 
{ + Some(reg) if reg == CRATES_IO_REGISTRY => None, + reg => reg, + }; let default_features = default_features(matches); let optional = optional(matches); diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/bin/cargo/commands/bench.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/bin/cargo/commands/bench.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/bin/cargo/commands/bench.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/bin/cargo/commands/bench.rs 2023-01-10 13:41:19.000000000 +0000 @@ -1,19 +1,19 @@ use crate::command_prelude::*; use cargo::ops::{self, TestOptions}; -pub fn cli() -> App { +pub fn cli() -> Command { subcommand("bench") - .trailing_var_arg(true) .about("Execute all benchmarks of a local package") .arg_quiet() .arg( Arg::new("BENCHNAME") + .action(ArgAction::Set) .help("If specified, only run benches containing this string in their names"), ) .arg( Arg::new("args") .help("Arguments for the bench binary") - .multiple_values(true) + .num_args(0..) .last(true), ) .arg_targets_all( diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/bin/cargo/commands/build.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/bin/cargo/commands/build.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/bin/cargo/commands/build.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/bin/cargo/commands/build.rs 2023-01-10 13:41:19.000000000 +0000 @@ -2,7 +2,7 @@ use cargo::ops; -pub fn cli() -> App { +pub fn cli() -> Command { subcommand("build") // subcommand aliases are handled in aliased_command() // .alias("b") diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/bin/cargo/commands/check.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/bin/cargo/commands/check.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/bin/cargo/commands/check.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/bin/cargo/commands/check.rs 2023-01-10 13:41:19.000000000 +0000 @@ -2,7 +2,7 @@ use cargo::ops; -pub fn cli() -> App { +pub fn cli() -> Command { subcommand("check") // subcommand aliases are handled in aliased_command() // .alias("c") diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/bin/cargo/commands/clean.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/bin/cargo/commands/clean.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/bin/cargo/commands/clean.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/bin/cargo/commands/clean.rs 2023-01-10 13:41:19.000000000 +0000 @@ -3,7 +3,7 @@ use cargo::ops::{self, CleanOptions}; use cargo::util::print_available_packages; -pub fn cli() -> App { +pub fn cli() -> Command { subcommand("clean") .about("Remove artifacts that cargo has generated in the past") .arg_quiet() diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/bin/cargo/commands/config.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/bin/cargo/commands/config.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/bin/cargo/commands/config.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/bin/cargo/commands/config.rs 2023-01-10 13:41:19.000000000 +0000 @@ -1,7 +1,7 @@ use crate::command_prelude::*; use cargo::ops::cargo_config; -pub fn cli() -> App { +pub fn cli() -> Command { subcommand("config") .about("Inspect configuration values") .after_help("Run `cargo help config` for more detailed information.\n") @@ -9,7 +9,11 @@ .arg_required_else_help(true) .subcommand( subcommand("get") - .arg(Arg::new("key").help("The config key to display")) + .arg( + Arg::new("key") + .action(ArgAction::Set) + .help("The config key to display"), + ) .arg( opt("format", "Display format") 
.value_parser(cargo_config::ConfigFormat::POSSIBLE_VALUES) diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/bin/cargo/commands/doc.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/bin/cargo/commands/doc.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/bin/cargo/commands/doc.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/bin/cargo/commands/doc.rs 2023-01-10 13:41:19.000000000 +0000 @@ -2,7 +2,7 @@ use cargo::ops::{self, DocOptions}; -pub fn cli() -> App { +pub fn cli() -> Command { subcommand("doc") // subcommand aliases are handled in aliased_command() // .alias("d") diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/bin/cargo/commands/fetch.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/bin/cargo/commands/fetch.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/bin/cargo/commands/fetch.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/bin/cargo/commands/fetch.rs 2023-01-10 13:41:19.000000000 +0000 @@ -3,7 +3,7 @@ use cargo::ops; use cargo::ops::FetchOptions; -pub fn cli() -> App { +pub fn cli() -> Command { subcommand("fetch") .about("Fetch dependencies of a package from the network") .arg_quiet() diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/bin/cargo/commands/fix.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/bin/cargo/commands/fix.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/bin/cargo/commands/fix.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/bin/cargo/commands/fix.rs 2023-01-10 13:41:19.000000000 +0000 @@ -2,7 +2,7 @@ use cargo::ops; -pub fn cli() -> App { +pub fn cli() -> Command { subcommand("fix") .about("Automatically fix lint warnings reported by rustc") .arg_quiet() diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/bin/cargo/commands/generate_lockfile.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/bin/cargo/commands/generate_lockfile.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/bin/cargo/commands/generate_lockfile.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/bin/cargo/commands/generate_lockfile.rs 2023-01-10 13:41:19.000000000 +0000 @@ -2,7 +2,7 @@ use cargo::ops; -pub fn cli() -> App { +pub fn cli() -> Command { subcommand("generate-lockfile") .about("Generate the lockfile for a package") .arg_quiet() diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/bin/cargo/commands/git_checkout.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/bin/cargo/commands/git_checkout.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/bin/cargo/commands/git_checkout.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/bin/cargo/commands/git_checkout.rs 2023-01-10 13:41:19.000000000 +0000 @@ -1,10 +1,10 @@ use crate::command_prelude::*; -const REMOVED: &str = "The `git-checkout` subcommand has been removed."; +const REMOVED: &str = "The `git-checkout` command has been removed."; -pub fn cli() -> App { +pub fn cli() -> Command { subcommand("git-checkout") - .about("This subcommand has been removed") + .about("This command has been removed") .hide(true) .override_help(REMOVED) } diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/bin/cargo/commands/help.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/bin/cargo/commands/help.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/bin/cargo/commands/help.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/bin/cargo/commands/help.rs 2023-01-10 13:41:19.000000000 +0000 @@ -4,6 +4,7 @@ use cargo::{drop_println, Config}; use cargo_util::paths::resolve_executable; use flate2::read::GzDecoder; +use std::ffi::OsStr; use std::ffi::OsString; use std::io::Read; use std::io::Write; @@ -11,17 +12,21 @@ 
const COMPRESSED_MAN: &[u8] = include_bytes!(concat!(env!("OUT_DIR"), "/man.tgz")); -pub fn cli() -> App { +pub fn cli() -> Command { subcommand("help") .about("Displays help for a cargo subcommand") - .arg(Arg::new("SUBCOMMAND")) + .arg(Arg::new("COMMAND").action(ArgAction::Set)) } pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { - let subcommand = args.get_one::("SUBCOMMAND"); + let subcommand = args.get_one::("COMMAND"); if let Some(subcommand) = subcommand { if !try_help(config, subcommand)? { - crate::execute_external_subcommand(config, subcommand, &[subcommand, "--help"])?; + crate::execute_external_subcommand( + config, + subcommand, + &[OsStr::new(subcommand), OsStr::new("--help")], + )?; } } else { let mut cmd = crate::cli::cli(); diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/bin/cargo/commands/init.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/bin/cargo/commands/init.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/bin/cargo/commands/init.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/bin/cargo/commands/init.rs 2023-01-10 13:41:19.000000000 +0000 @@ -2,11 +2,11 @@ use cargo::ops; -pub fn cli() -> App { +pub fn cli() -> Command { subcommand("init") .about("Create a new cargo package in an existing directory") .arg_quiet() - .arg(Arg::new("path").default_value(".")) + .arg(Arg::new("path").action(ArgAction::Set).default_value(".")) .arg(opt("registry", "Registry to use").value_name("REGISTRY")) .arg_new_opts() .after_help("Run `cargo help init` for more detailed information.\n") diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/bin/cargo/commands/install.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/bin/cargo/commands/install.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/bin/cargo/commands/install.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/bin/cargo/commands/install.rs 2023-01-10 13:41:19.000000000 +0000 @@ -6,14 +6,14 @@ use cargo_util::paths; -pub fn cli() -> App { +pub fn cli() -> Command { subcommand("install") .about("Install a Rust binary. Default location is $HOME/.cargo/bin") .arg_quiet() .arg( Arg::new("crate") .value_parser(clap::builder::NonEmptyStringValueParser::new()) - .multiple_values(true), + .num_args(0..), ) .arg( opt("version", "Specify a version to install") diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/bin/cargo/commands/locate_project.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/bin/cargo/commands/locate_project.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/bin/cargo/commands/locate_project.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/bin/cargo/commands/locate_project.rs 2023-01-10 13:41:19.000000000 +0000 @@ -3,7 +3,7 @@ use cargo::{drop_println, CargoResult}; use serde::Serialize; -pub fn cli() -> App { +pub fn cli() -> Command { subcommand("locate-project") .about("Print a JSON representation of a Cargo.toml file's location") .arg_quiet() diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/bin/cargo/commands/login.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/bin/cargo/commands/login.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/bin/cargo/commands/login.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/bin/cargo/commands/login.rs 2023-01-10 13:41:19.000000000 +0000 @@ -2,14 +2,14 @@ use cargo::ops; -pub fn cli() -> App { +pub fn cli() -> Command { subcommand("login") .about( "Save an api token from the registry locally. 
\ If token is not specified, it will be read from stdin.", ) .arg_quiet() - .arg(Arg::new("token")) + .arg(Arg::new("token").action(ArgAction::Set)) .arg(opt("registry", "Registry to use").value_name("REGISTRY")) .after_help("Run `cargo help login` for more detailed information.\n") } diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/bin/cargo/commands/logout.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/bin/cargo/commands/logout.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/bin/cargo/commands/logout.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/bin/cargo/commands/logout.rs 2023-01-10 13:41:19.000000000 +0000 @@ -1,7 +1,7 @@ use crate::command_prelude::*; use cargo::ops; -pub fn cli() -> App { +pub fn cli() -> Command { subcommand("logout") .about("Remove an API token from the registry locally") .arg_quiet() diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/bin/cargo/commands/metadata.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/bin/cargo/commands/metadata.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/bin/cargo/commands/metadata.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/bin/cargo/commands/metadata.rs 2023-01-10 13:41:19.000000000 +0000 @@ -1,7 +1,7 @@ use crate::command_prelude::*; use cargo::ops::{self, OutputMetadataOptions}; -pub fn cli() -> App { +pub fn cli() -> Command { subcommand("metadata") .about( "Output the resolved dependencies of a package, \ diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/bin/cargo/commands/mod.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/bin/cargo/commands/mod.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/bin/cargo/commands/mod.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/bin/cargo/commands/mod.rs 2023-01-10 13:41:19.000000000 +0000 @@ -1,6 +1,6 @@ use crate::command_prelude::*; -pub fn builtin() -> Vec { +pub fn builtin() -> Vec { vec![ add::cli(), bench::cli(), @@ -26,6 +26,7 @@ pkgid::cli(), publish::cli(), read_manifest::cli(), + remove::cli(), report::cli(), run::cli(), rustc::cli(), @@ -68,6 +69,7 @@ "pkgid" => pkgid::exec, "publish" => publish::exec, "read-manifest" => read_manifest::exec, + "remove" => remove::exec, "report" => report::exec, "run" => run::exec, "rustc" => rustc::exec, @@ -110,6 +112,7 @@ pub mod pkgid; pub mod publish; pub mod read_manifest; +pub mod remove; pub mod report; pub mod run; pub mod rustc; diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/bin/cargo/commands/new.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/bin/cargo/commands/new.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/bin/cargo/commands/new.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/bin/cargo/commands/new.rs 2023-01-10 13:41:19.000000000 +0000 @@ -2,11 +2,11 @@ use cargo::ops; -pub fn cli() -> App { +pub fn cli() -> Command { subcommand("new") .about("Create a new cargo package at ") .arg_quiet() - .arg(Arg::new("path").required(true)) + .arg(Arg::new("path").action(ArgAction::Set).required(true)) .arg(opt("registry", "Registry to use").value_name("REGISTRY")) .arg_new_opts() .after_help("Run `cargo help new` for more detailed information.\n") diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/bin/cargo/commands/owner.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/bin/cargo/commands/owner.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/bin/cargo/commands/owner.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/bin/cargo/commands/owner.rs 2023-01-10 13:41:19.000000000 +0000 @@ -2,11 +2,11 @@ use cargo::ops::{self, OwnersOptions}; -pub fn cli() -> App { +pub fn cli() -> 
Command { subcommand("owner") .about("Manage the owners of a crate on the registry") .arg_quiet() - .arg(Arg::new("crate")) + .arg(Arg::new("crate").action(ArgAction::Set)) .arg( multi_opt( "add", diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/bin/cargo/commands/package.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/bin/cargo/commands/package.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/bin/cargo/commands/package.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/bin/cargo/commands/package.rs 2023-01-10 13:41:19.000000000 +0000 @@ -2,7 +2,7 @@ use cargo::ops::{self, PackageOpts}; -pub fn cli() -> App { +pub fn cli() -> Command { subcommand("package") .about("Assemble the local package into a distributable tarball") .arg_quiet() diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/bin/cargo/commands/pkgid.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/bin/cargo/commands/pkgid.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/bin/cargo/commands/pkgid.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/bin/cargo/commands/pkgid.rs 2023-01-10 13:41:19.000000000 +0000 @@ -3,11 +3,11 @@ use cargo::ops; use cargo::util::print_available_packages; -pub fn cli() -> App { +pub fn cli() -> Command { subcommand("pkgid") .about("Print a fully qualified package specification") .arg_quiet() - .arg(Arg::new("spec")) + .arg(Arg::new("spec").action(ArgAction::Set)) .arg_package("Argument to get the package ID specifier for") .arg_manifest_path() .after_help("Run `cargo help pkgid` for more detailed information.\n") diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/bin/cargo/commands/publish.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/bin/cargo/commands/publish.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/bin/cargo/commands/publish.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/bin/cargo/commands/publish.rs 2023-01-10 13:41:19.000000000 +0000 @@ -2,7 +2,7 @@ use cargo::ops::{self, PublishOpts}; -pub fn cli() -> App { +pub fn cli() -> Command { subcommand("publish") .about("Upload a package to the registry") .arg_quiet() diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/bin/cargo/commands/read_manifest.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/bin/cargo/commands/read_manifest.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/bin/cargo/commands/read_manifest.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/bin/cargo/commands/read_manifest.rs 2023-01-10 13:41:19.000000000 +0000 @@ -1,6 +1,6 @@ use crate::command_prelude::*; -pub fn cli() -> App { +pub fn cli() -> Command { subcommand("read-manifest") .about( "\ diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/bin/cargo/commands/remove.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/bin/cargo/commands/remove.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/bin/cargo/commands/remove.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/bin/cargo/commands/remove.rs 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,116 @@ +use cargo::core::dependency::DepKind; +use cargo::ops::cargo_remove::remove; +use cargo::ops::cargo_remove::RemoveOptions; +use cargo::ops::resolve_ws; +use cargo::util::command_prelude::*; +use cargo::util::toml_mut::manifest::DepTable; + +pub fn cli() -> clap::Command { + clap::Command::new("remove") + // Subcommand aliases are handled in `aliased_command()`. + // .alias("rm") + .about("Remove dependencies from a Cargo.toml manifest file") + .args([clap::Arg::new("dependencies") + .action(clap::ArgAction::Append) + .required(true) + .num_args(1..) 
+ .value_name("DEP_ID") + .help("Dependencies to be removed")]) + .arg_package("Package to remove from") + .arg_manifest_path() + .arg_quiet() + .arg_dry_run("Don't actually write the manifest") + .next_help_heading("Section") + .args([ + clap::Arg::new("dev") + .long("dev") + .conflicts_with("build") + .action(clap::ArgAction::SetTrue) + .group("section") + .help("Remove as development dependency"), + clap::Arg::new("build") + .long("build") + .conflicts_with("dev") + .action(clap::ArgAction::SetTrue) + .group("section") + .help("Remove as build dependency"), + clap::Arg::new("target") + .long("target") + .num_args(1) + .value_name("TARGET") + .value_parser(clap::builder::NonEmptyStringValueParser::new()) + .help("Remove as dependency from the given target platform"), + ]) +} + +pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { + let dry_run = args.dry_run(); + + let workspace = args.workspace(config)?; + let packages = args.packages_from_flags()?; + let packages = packages.get_packages(&workspace)?; + let spec = match packages.len() { + 0 => { + return Err(CliError::new( + anyhow::format_err!("no packages selected. Please specify one with `-p `"), + 101, + )); + } + 1 => packages[0], + len => { + return Err(CliError::new( + anyhow::format_err!( + "{len} packages selected. Please specify one with `-p `", + ), + 101, + )); + } + }; + + let dependencies = args + .get_many::("dependencies") + .expect("required(true)") + .cloned() + .collect(); + + let section = parse_section(args); + + let options = RemoveOptions { + config, + spec, + dependencies, + section, + dry_run, + }; + remove(&options)?; + + if !dry_run { + // Reload the workspace since we've changed dependencies + let ws = args.workspace(config)?; + resolve_ws(&ws)?; + } + + Ok(()) +} + +fn parse_section(args: &ArgMatches) -> DepTable { + let dev = args.flag("dev"); + let build = args.flag("build"); + + let kind = if dev { + DepKind::Development + } else if build { + DepKind::Build + } else { + DepKind::Normal + }; + + let mut table = DepTable::new().set_kind(kind); + + if let Some(target) = args.get_one::("target") { + assert!(!target.is_empty(), "Target specification may not be empty"); + table = table.set_target(target); + } + + table +} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/bin/cargo/commands/report.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/bin/cargo/commands/report.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/bin/cargo/commands/report.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/bin/cargo/commands/report.rs 2023-01-10 13:41:19.000000000 +0000 @@ -2,7 +2,7 @@ use cargo::core::compiler::future_incompat::{OnDiskReports, REPORT_PREAMBLE}; use cargo::drop_println; -pub fn cli() -> App { +pub fn cli() -> Command { subcommand("report") .about("Generate and display various kinds of reports") .after_help("Run `cargo help report` for more detailed information.\n") diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/bin/cargo/commands/run.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/bin/cargo/commands/run.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/bin/cargo/commands/run.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/bin/cargo/commands/run.rs 2023-01-10 13:41:19.000000000 +0000 @@ -4,17 +4,17 @@ use cargo::ops::{self, CompileFilter, Packages}; use cargo_util::ProcessError; -pub fn cli() -> App { +pub fn cli() -> Command { subcommand("run") // subcommand aliases are handled in aliased_command() // .alias("r") - .trailing_var_arg(true) .about("Run a binary or example 
of the local package") .arg_quiet() .arg( Arg::new("args") .value_parser(value_parser!(std::ffi::OsString)) - .multiple_values(true), + .num_args(0..) + .trailing_var_arg(true), ) .arg_targets_bin_example( "Name of the bin target to run", diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/bin/cargo/commands/rustc.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/bin/cargo/commands/rustc.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/bin/cargo/commands/rustc.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/bin/cargo/commands/rustc.rs 2023-01-10 13:41:19.000000000 +0000 @@ -5,12 +5,16 @@ const PRINT_ARG_NAME: &str = "print"; const CRATE_TYPE_ARG_NAME: &str = "crate-type"; -pub fn cli() -> App { +pub fn cli() -> Command { subcommand("rustc") - .trailing_var_arg(true) .about("Compile a package, and pass extra options to the compiler") .arg_quiet() - .arg(Arg::new("args").multiple_values(true).help("Rustc flags")) + .arg( + Arg::new("args") + .num_args(0..) + .help("Rustc flags") + .trailing_var_arg(true), + ) .arg_package("Package to build") .arg_jobs() .arg_targets_all( diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/bin/cargo/commands/rustdoc.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/bin/cargo/commands/rustdoc.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/bin/cargo/commands/rustdoc.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/bin/cargo/commands/rustdoc.rs 2023-01-10 13:41:19.000000000 +0000 @@ -2,12 +2,11 @@ use crate::command_prelude::*; -pub fn cli() -> App { +pub fn cli() -> Command { subcommand("rustdoc") - .trailing_var_arg(true) .about("Build a package's documentation, using specified custom flags.") .arg_quiet() - .arg(Arg::new("args").multiple_values(true)) + .arg(Arg::new("args").num_args(0..).trailing_var_arg(true)) .arg(flag( "open", "Opens the docs in a browser after the operation", diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/bin/cargo/commands/search.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/bin/cargo/commands/search.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/bin/cargo/commands/search.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/bin/cargo/commands/search.rs 2023-01-10 13:41:19.000000000 +0000 @@ -4,11 +4,11 @@ use cargo::ops; -pub fn cli() -> App { +pub fn cli() -> Command { subcommand("search") .about("Search packages in crates.io") .arg_quiet() - .arg(Arg::new("query").multiple_values(true)) + .arg(Arg::new("query").num_args(0..)) .arg_index() .arg( opt( diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/bin/cargo/commands/test.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/bin/cargo/commands/test.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/bin/cargo/commands/test.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/bin/cargo/commands/test.rs 2023-01-10 13:41:19.000000000 +0000 @@ -1,20 +1,20 @@ use crate::command_prelude::*; use cargo::ops; -pub fn cli() -> App { +pub fn cli() -> Command { subcommand("test") // Subcommand aliases are handled in `aliased_command()`. // .alias("t") - .trailing_var_arg(true) .about("Execute all unit and integration tests and build examples of a local package") .arg( Arg::new("TESTNAME") + .action(ArgAction::Set) .help("If specified, only run tests containing this string in their names"), ) .arg( Arg::new("args") .help("Arguments for the test binary") - .multiple_values(true) + .num_args(0..) 
.last(true), ) .arg( diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/bin/cargo/commands/tree.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/bin/cargo/commands/tree.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/bin/cargo/commands/tree.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/bin/cargo/commands/tree.rs 2023-01-10 13:41:19.000000000 +0000 @@ -9,7 +9,7 @@ use std::collections::HashSet; use std::str::FromStr; -pub fn cli() -> App { +pub fn cli() -> Command { subcommand("tree") .about("Display a tree visualization of a dependency graph") .arg_quiet() diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/bin/cargo/commands/uninstall.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/bin/cargo/commands/uninstall.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/bin/cargo/commands/uninstall.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/bin/cargo/commands/uninstall.rs 2023-01-10 13:41:19.000000000 +0000 @@ -2,11 +2,11 @@ use cargo::ops; -pub fn cli() -> App { +pub fn cli() -> Command { subcommand("uninstall") .about("Remove a Rust binary") .arg_quiet() - .arg(Arg::new("spec").multiple_values(true)) + .arg(Arg::new("spec").num_args(0..)) .arg_package_spec_simple("Package to uninstall") .arg(multi_opt("bin", "NAME", "Only uninstall the binary NAME")) .arg(opt("root", "Directory to uninstall packages from").value_name("DIR")) diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/bin/cargo/commands/update.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/bin/cargo/commands/update.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/bin/cargo/commands/update.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/bin/cargo/commands/update.rs 2023-01-10 13:41:19.000000000 +0000 @@ -3,7 +3,7 @@ use cargo::ops::{self, UpdateOptions}; use cargo::util::print_available_packages; -pub fn cli() -> App { +pub fn cli() -> Command { subcommand("update") .about("Update dependencies as recorded in the local lock file") .arg_quiet() diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/bin/cargo/commands/vendor.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/bin/cargo/commands/vendor.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/bin/cargo/commands/vendor.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/bin/cargo/commands/vendor.rs 2023-01-10 13:41:19.000000000 +0000 @@ -2,13 +2,14 @@ use cargo::ops; use std::path::PathBuf; -pub fn cli() -> App { +pub fn cli() -> Command { subcommand("vendor") .about("Vendor all dependencies for a project locally") .arg_quiet() .arg_manifest_path() .arg( Arg::new("path") + .action(ArgAction::Set) .value_parser(clap::value_parser!(PathBuf)) .help("Where to vendor crates (`vendor` by default)"), ) diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/bin/cargo/commands/verify_project.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/bin/cargo/commands/verify_project.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/bin/cargo/commands/verify_project.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/bin/cargo/commands/verify_project.rs 2023-01-10 13:41:19.000000000 +0000 @@ -3,7 +3,7 @@ use std::collections::HashMap; use std::process; -pub fn cli() -> App { +pub fn cli() -> Command { subcommand("verify-project") .about("Check correctness of crate manifest") .arg_quiet() diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/bin/cargo/commands/version.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/bin/cargo/commands/version.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/bin/cargo/commands/version.rs 2022-10-20 06:00:42.000000000 +0000 +++ 
cargo-0.67.1+ds0ubuntu0.libgit2/src/bin/cargo/commands/version.rs 2023-01-10 13:41:19.000000000 +0000 @@ -1,7 +1,7 @@ use crate::cli; use crate::command_prelude::*; -pub fn cli() -> App { +pub fn cli() -> Command { subcommand("version") .about("Show version information") .arg_quiet() diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/bin/cargo/commands/yank.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/bin/cargo/commands/yank.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/bin/cargo/commands/yank.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/bin/cargo/commands/yank.rs 2023-01-10 13:41:19.000000000 +0000 @@ -2,11 +2,11 @@ use cargo::ops; -pub fn cli() -> App { +pub fn cli() -> Command { subcommand("yank") .about("Remove a pushed crate from the index") .arg_quiet() - .arg(Arg::new("crate")) + .arg(Arg::new("crate").action(ArgAction::Set)) .arg( opt("version", "The version to yank or un-yank") .alias("vers") diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/bin/cargo/main.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/bin/cargo/main.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/bin/cargo/main.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/bin/cargo/main.rs 2023-01-10 13:41:19.000000000 +0000 @@ -7,6 +7,7 @@ use cargo_util::{ProcessBuilder, ProcessError}; use std::collections::BTreeMap; use std::env; +use std::ffi::OsStr; use std::fs; use std::path::{Path, PathBuf}; @@ -38,12 +39,13 @@ /// Table for defining the aliases which come builtin in `Cargo`. /// The contents are structured as: `(alias, aliased_command, description)`. -const BUILTIN_ALIASES: [(&str, &str, &str); 5] = [ +const BUILTIN_ALIASES: [(&str, &str, &str); 6] = [ ("b", "build", "alias: build"), ("c", "check", "alias: check"), ("d", "doc", "alias: doc"), ("r", "run", "alias: run"), ("t", "test", "alias: test"), + ("rm", "remove", "alias: remove"), ]; /// Function which contains the list of all of the builtin aliases and it's @@ -52,6 +54,14 @@ BUILTIN_ALIASES.iter().find(|alias| alias.0 == cmd) } +/// Resolve the aliased command from the [`Config`] with a given command string. +/// +/// The search fallback chain is: +/// +/// 1. Get the aliased command as a string. +/// 2. If an `Err` occurs (missing key, type mismatch, or any possible error), +/// try to get it as an array again. +/// 3. If still cannot find any, finds one insides [`BUILTIN_ALIASES`]. fn aliased_command(config: &Config, command: &str) -> CargoResult>> { let alias_name = format!("alias.{}", command); let user_alias = match config.get_string(&alias_name) { @@ -152,14 +162,14 @@ .find(|file| is_executable(file)) } -fn execute_external_subcommand(config: &Config, cmd: &str, args: &[&str]) -> CliResult { +fn execute_external_subcommand(config: &Config, cmd: &str, args: &[&OsStr]) -> CliResult { let path = find_external_subcommand(config, cmd); let command = match path { Some(command) => command, None => { let err = if cmd.starts_with('+') { anyhow::format_err!( - "no such subcommand: `{}`\n\n\t\ + "no such command: `{}`\n\n\t\ Cargo does not handle `+toolchain` directives.\n\t\ Did you mean to invoke `cargo` through `rustup` instead?", cmd @@ -169,7 +179,7 @@ let did_you_mean = closest_msg(cmd, suggestions.keys(), |c| c); anyhow::format_err!( - "no such subcommand: `{}`{}\n\n\t\ + "no such command: `{}`{}\n\n\t\ View all installed commands with `cargo --list`", cmd, did_you_mean @@ -236,6 +246,38 @@ path_dirs } +/// Initialize libgit2. 
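// Illustrative, self-contained sketch (not Cargo's actual implementation) of the
// three-step fallback documented above for `aliased_command`: prefer the string
// form of `[alias]`, then the array form, then the built-in alias table. The
// table and function names below are examples only.
const BUILTIN_ALIASES_SKETCH: &[(&str, &str)] = &[
    ("b", "build"),
    ("c", "check"),
    ("d", "doc"),
    ("r", "run"),
    ("t", "test"),
    ("rm", "remove"),
];

fn resolve_alias_sketch(
    cmd: &str,
    string_alias: Option<&str>,     // e.g. `alias.foo = "build --release"`
    array_alias: Option<&[String]>, // e.g. `alias.foo = ["build", "--release"]`
) -> Option<Vec<String>> {
    // 1. Prefer the string form and split it on whitespace.
    string_alias
        .map(|s| s.split_whitespace().map(str::to_string).collect())
        // 2. Otherwise fall back to the array form, taken as-is.
        .or_else(|| array_alias.map(|a| a.to_vec()))
        // 3. Finally, look the command up in the built-in alias table.
        .or_else(|| {
            BUILTIN_ALIASES_SKETCH
                .iter()
                .find(|(alias, _)| *alias == cmd)
                .map(|(_, target)| vec![target.to_string()])
        })
}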
+fn init_git(config: &Config) { + // Disabling the owner validation in git can, in theory, lead to code execution + // vulnerabilities. However, libgit2 does not launch executables, which is the foundation of + // the original security issue. Meanwhile, issues with refusing to load git repos in + // `CARGO_HOME` for example will likely be very frustrating for users. So, we disable the + // validation. + // + // For further discussion of Cargo's current interactions with git, see + // + // https://github.com/rust-lang/rfcs/pull/3279 + // + // and in particular the subsection on "Git support". + // + // Note that we only disable this when Cargo is run as a binary. If Cargo is used as a library, + // this code won't be invoked. Instead, developers will need to explicitly disable the + // validation in their code. This is inconvenient, but won't accidentally open consuming + // applications up to security issues if they use git2 to open repositories elsewhere in their + // code. + unsafe { + git2::opts::set_verify_owner_validation(false) + .expect("set_verify_owner_validation should never fail"); + } + + init_git_transports(config); +} + +/// Configure libgit2 to use libcurl if necessary. +/// +/// If the user has a non-default network configuration, then libgit2 will be +/// configured to use libcurl instead of the built-in networking support so +/// that those configuration settings can be used. fn init_git_transports(config: &Config) { // Only use a custom transport if any HTTP options are specified, // such as proxies or custom certificate authorities. The custom @@ -264,27 +306,4 @@ unsafe { git2_curl::register(handle); } - - // Disabling the owner validation in git can, in theory, lead to code execution - // vulnerabilities. However, libgit2 does not launch executables, which is the foundation of - // the original security issue. Meanwhile, issues with refusing to load git repos in - // `CARGO_HOME` for example will likely be very frustrating for users. So, we disable the - // validation. - // - // For further discussion of Cargo's current interactions with git, see - // - // https://github.com/rust-lang/rfcs/pull/3279 - // - // and in particular the subsection on "Git support". - // - // Note that we only disable this when Cargo is run as a binary. If Cargo is used as a library, - // this code won't be invoked. Instead, developers will need to explicitly disable the - // validation in their code. This is inconvenient, but won't accidentally open consuming - // applications up to security issues if they use git2 to open repositories elsewhere in their - // code. - unsafe { - if git2::opts::set_verify_owner_validation(false).is_err() { - return; - } - } } diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/core/compiler/build_context/mod.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/core/compiler/build_context/mod.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/core/compiler/build_context/mod.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/core/compiler/build_context/mod.rs 2023-01-10 13:41:19.000000000 +0000 @@ -1,3 +1,5 @@ +//! [`BuildContext`] is a (mostly) static information about a build task. + use crate::core::compiler::unit_graph::UnitGraph; use crate::core::compiler::{BuildConfig, CompileKind, Unit}; use crate::core::profiles::Profiles; @@ -15,19 +17,42 @@ FileFlavor, FileType, RustDocFingerprint, RustcTargetData, TargetInfo, }; -/// The build context, containing all information about a build task. 
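// A minimal sketch of the opt-out described in the `init_git` comment above: a
// tool that uses cargo as a library (where `init_git` never runs) would have to
// relax libgit2's owner validation itself before touching any repositories. The
// function name here is hypothetical; only the `git2` call is taken from the code above.
fn relax_git_owner_validation_sketch() {
    // Process-global libgit2 option; call it once, early, before any other git2 use.
    unsafe {
        git2::opts::set_verify_owner_validation(false)
            .expect("set_verify_owner_validation should never fail");
    }
}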
+/// The build context, containing complete information needed for a build task
+/// before it gets started.
 ///
 /// It is intended that this is mostly static information. Stuff that mutates
-/// during the build can be found in the parent `Context`. (I say mostly,
+/// during the build can be found in the parent [`Context`]. (I say mostly,
 /// because this has internal caching, but nothing that should be observable
 /// or require &mut.)
+///
+/// As a result, almost every field on `BuildContext` is public, including
+///
+/// * a resolved [`UnitGraph`] of your dependencies,
+/// * a [`Profiles`] containing compiler flags presets,
+/// * a [`RustcTargetData`] containing host and target platform information,
+/// * and a [`PackageSet`] for further package downloads,
+///
+/// just to name a few. Learn more in each one's own documentation.
+///
+/// # How to use
+///
+/// To prepare a build task, you may not want to use [`BuildContext::new`] directly,
+/// since it is often too low-level.
+/// Instead, [`ops::create_bcx`] is usually what you are looking for.
+///
+/// [`Context`]: crate::core::compiler::Context
+/// [`ops::create_bcx`]: crate::ops::create_bcx
 pub struct BuildContext<'a, 'cfg> {
     /// The workspace the build is for.
     pub ws: &'a Workspace<'cfg>,
     /// The cargo configuration.
     pub config: &'cfg Config,
+
+    /// This contains a collection of compiler flags presets.
     pub profiles: Profiles,
+
+    /// Configuration information for a rustc build.
     pub build_config: &'a BuildConfig,

     /// Extra compiler args for either `rustc` or `rustdoc`.
@@ -47,7 +72,7 @@
     /// The dependency graph of units to compile.
     pub unit_graph: UnitGraph,

-    /// Reverse-dependencies of documented units, used by the rustdoc --scrape-examples flag.
+    /// Reverse-dependencies of documented units, used by the `rustdoc --scrape-examples` flag.
     pub scrape_units: Vec<Unit>,

     /// The list of all kinds that are involved in this build
@@ -88,6 +113,7 @@
         })
     }

+    /// Information about the `rustc` this build task will use.
     pub fn rustc(&self) -> &Rustc {
         &self.target_data.rustc
     }
@@ -116,14 +142,36 @@
         self.build_config.jobs
     }

+    /// Extra compiler flags to pass to `rustc` for a given unit.
+    ///
+    /// Although it depends on the caller, in the current Cargo implementation,
+    /// these flags take precedence over those from [`BuildContext::extra_args_for`].
+    ///
+    /// As of now, these flags come from environment variables and configurations.
+    /// See [`TargetInfo.rustflags`] for more on how Cargo collects them.
+    ///
+    /// [`TargetInfo.rustflags`]: TargetInfo::rustflags
     pub fn rustflags_args(&self, unit: &Unit) -> &[String] {
         &self.target_data.info(unit.kind).rustflags
     }

+    /// Extra compiler flags to pass to `rustdoc` for a given unit.
+    ///
+    /// Although it depends on the caller, in the current Cargo implementation,
+    /// these flags take precedence over those from [`BuildContext::extra_args_for`].
+    ///
+    /// As of now, these flags come from environment variables and configurations.
+    /// See [`TargetInfo.rustdocflags`] for more on how Cargo collects them.
+    ///
+    /// [`TargetInfo.rustdocflags`]: TargetInfo::rustdocflags
     pub fn rustdocflags_args(&self, unit: &Unit) -> &[String] {
         &self.target_data.info(unit.kind).rustdocflags
     }

+    /// Extra compiler args for either `rustc` or `rustdoc`.
+    ///
+    /// As of now, these flags come from the trailing args of either
+    /// `cargo rustc` or `cargo rustdoc`.
     pub fn extra_args_for(&self, unit: &Unit) -> Option<&Vec<String>> {
         self.extra_compiler_args.get(unit)
     }
diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/core/compiler/build_context/target_info.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/core/compiler/build_context/target_info.rs
--- cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/core/compiler/build_context/target_info.rs 2022-10-20 06:00:42.000000000 +0000
+++ cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/core/compiler/build_context/target_info.rs 2023-01-10 13:41:19.000000000 +0000
@@ -1,3 +1,12 @@
+//! This module contains types storing information about target platforms.
+//!
+//! Normally, call [`RustcTargetData::new`] to construct all the target
+//! platforms once, and then query info on demand. For example,
+//!
+//! * [`RustcTargetData::dep_platform_activated`] to check if a platform is activated.
+//! * [`RustcTargetData::info`] to get a [`TargetInfo`] for an in-depth query.
+//! * [`TargetInfo::rustc_outputs`] to get a list of supported file types.
+
 use crate::core::compiler::{
     BuildOutput, CompileKind, CompileMode, CompileTarget, Context, CrateType,
 };
@@ -16,8 +25,9 @@
 /// Information about the platform target gleaned from querying rustc.
 ///
-/// `RustcTargetData` keeps two of these, one for the host and one for the
-/// target. If no target is specified, it uses a clone from the host.
+/// [`RustcTargetData`] keeps several of these, one for the host and the others
+/// for other specified targets. If no target is specified, it uses a clone from
+/// the host.
 #[derive(Clone)]
 pub struct TargetInfo {
     /// A base process builder for discovering crate type information. In
@@ -41,9 +51,9 @@
     /// Path to the "lib" directory in the sysroot which rustc uses for linking
     /// target libraries.
     pub sysroot_target_libdir: PathBuf,
-    /// Extra flags to pass to `rustc`, see `env_args`.
+    /// Extra flags to pass to `rustc`, see [`extra_args`].
     pub rustflags: Vec<String>,
-    /// Extra flags to pass to `rustdoc`, see `env_args`.
+    /// Extra flags to pass to `rustdoc`, see [`extra_args`].
     pub rustdocflags: Vec<String>,
     /// Whether or not rustc supports the `-Csplit-debuginfo` flag.
     pub supports_split_debuginfo: bool,
@@ -132,13 +142,20 @@
 }

 impl TargetInfo {
+    /// Learns the information about the target platform from `rustc` invocation(s).
+    ///
+    /// Generally, the first time calling this function is expensive, as it may
+    /// query `rustc` several times. To reduce the cost, output of each `rustc`
+    /// invocation is cached by [`Rustc::cached_output`].
+    ///
+    /// Search `Tricky` to learn why querying `rustc` several times is needed.
pub fn new( config: &Config, requested_kinds: &[CompileKind], rustc: &Rustc, kind: CompileKind, ) -> CargoResult { - let rustflags = env_args( + let mut rustflags = extra_args( config, requested_kinds, &rustc.host, @@ -146,114 +163,149 @@ kind, Flags::Rust, )?; - let extra_fingerprint = kind.fingerprint_hash(); - let mut process = rustc.workspace_process(); - process - .arg("-") - .arg("--crate-name") - .arg("___") - .arg("--print=file-names") - .args(&rustflags) - .env_remove("RUSTC_LOG"); - - if let CompileKind::Target(target) = kind { - process.arg("--target").arg(target.rustc_target()); - } - - let crate_type_process = process.clone(); - const KNOWN_CRATE_TYPES: &[CrateType] = &[ - CrateType::Bin, - CrateType::Rlib, - CrateType::Dylib, - CrateType::Cdylib, - CrateType::Staticlib, - CrateType::ProcMacro, - ]; - for crate_type in KNOWN_CRATE_TYPES.iter() { - process.arg("--crate-type").arg(crate_type.as_str()); - } - let supports_split_debuginfo = rustc - .cached_output( - process.clone().arg("-Csplit-debuginfo=packed"), - extra_fingerprint, - ) - .is_ok(); + let mut turn = 0; + loop { + let extra_fingerprint = kind.fingerprint_hash(); + + // Query rustc for several kinds of info from each line of output: + // 0) file-names (to determine output file prefix/suffix for given crate type) + // 1) sysroot + // 2) cfg + // + // Search `--print` to see what we query so far. + let mut process = rustc.workspace_process(); + process + .arg("-") + .arg("--crate-name") + .arg("___") + .arg("--print=file-names") + .args(&rustflags) + .env_remove("RUSTC_LOG"); - process.arg("--print=sysroot"); - process.arg("--print=cfg"); + if let CompileKind::Target(target) = kind { + process.arg("--target").arg(target.rustc_target()); + } - let (output, error) = rustc - .cached_output(&process, extra_fingerprint) - .with_context(|| "failed to run `rustc` to learn about target-specific information")?; - - let mut lines = output.lines(); - let mut map = HashMap::new(); - for crate_type in KNOWN_CRATE_TYPES { - let out = parse_crate_type(crate_type, &process, &output, &error, &mut lines)?; - map.insert(crate_type.clone(), out); - } - - let line = match lines.next() { - Some(line) => line, - None => anyhow::bail!( - "output of --print=sysroot missing when learning about \ - target-specific information from rustc\n{}", - output_err_info(&process, &output, &error) - ), - }; - let sysroot = PathBuf::from(line); - let sysroot_host_libdir = if cfg!(windows) { - sysroot.join("bin") - } else { - sysroot.join("lib") - }; - let mut sysroot_target_libdir = sysroot.clone(); - sysroot_target_libdir.push("lib"); - sysroot_target_libdir.push("rustlib"); - sysroot_target_libdir.push(match &kind { - CompileKind::Host => rustc.host.as_str(), - CompileKind::Target(target) => target.short_name(), - }); - sysroot_target_libdir.push("lib"); - - let cfg = lines - .map(|line| Ok(Cfg::from_str(line)?)) - .filter(TargetInfo::not_user_specific_cfg) - .collect::>>() - .with_context(|| { - format!( - "failed to parse the cfg from `rustc --print=cfg`, got:\n{}", - output + let crate_type_process = process.clone(); + const KNOWN_CRATE_TYPES: &[CrateType] = &[ + CrateType::Bin, + CrateType::Rlib, + CrateType::Dylib, + CrateType::Cdylib, + CrateType::Staticlib, + CrateType::ProcMacro, + ]; + for crate_type in KNOWN_CRATE_TYPES.iter() { + process.arg("--crate-type").arg(crate_type.as_str()); + } + + // An extra `rustc` call to determine `-Csplit-debuginfo=packed` support. 
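// Stand-alone sketch of the probe assembled above (illustrative only; the real
// code goes through `rustc.workspace_process()`, appends RUSTFLAGS, and caches
// the output under a fingerprint):
fn probe_rustc_sketch(target: Option<&str>) -> std::io::Result<std::process::Output> {
    let mut cmd = std::process::Command::new("rustc");
    cmd.arg("-")
        .arg("--crate-name")
        .arg("___")
        .arg("--print=file-names")
        .env_remove("RUSTC_LOG");
    if let Some(triple) = target {
        cmd.arg("--target").arg(triple);
    }
    for crate_type in ["bin", "rlib", "dylib", "cdylib", "staticlib", "proc-macro"] {
        cmd.arg("--crate-type").arg(crate_type);
    }
    // stdout then carries: one line of file names per crate type, the sysroot,
    // and finally the `cfg` values, in that order.
    cmd.arg("--print=sysroot").arg("--print=cfg");
    cmd.output()
}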
+ let supports_split_debuginfo = rustc + .cached_output( + process.clone().arg("-Csplit-debuginfo=packed"), + extra_fingerprint, ) - })?; + .is_ok(); + + process.arg("--print=sysroot"); + process.arg("--print=cfg"); + + let (output, error) = rustc + .cached_output(&process, extra_fingerprint) + .with_context(|| { + "failed to run `rustc` to learn about target-specific information" + })?; + + let mut lines = output.lines(); + let mut map = HashMap::new(); + for crate_type in KNOWN_CRATE_TYPES { + let out = parse_crate_type(crate_type, &process, &output, &error, &mut lines)?; + map.insert(crate_type.clone(), out); + } + + let line = match lines.next() { + Some(line) => line, + None => anyhow::bail!( + "output of --print=sysroot missing when learning about \ + target-specific information from rustc\n{}", + output_err_info(&process, &output, &error) + ), + }; + let sysroot = PathBuf::from(line); + let sysroot_host_libdir = if cfg!(windows) { + sysroot.join("bin") + } else { + sysroot.join("lib") + }; + let mut sysroot_target_libdir = sysroot.clone(); + sysroot_target_libdir.push("lib"); + sysroot_target_libdir.push("rustlib"); + sysroot_target_libdir.push(match &kind { + CompileKind::Host => rustc.host.as_str(), + CompileKind::Target(target) => target.short_name(), + }); + sysroot_target_libdir.push("lib"); + + let cfg = lines + .map(|line| Ok(Cfg::from_str(line)?)) + .filter(TargetInfo::not_user_specific_cfg) + .collect::>>() + .with_context(|| { + format!( + "failed to parse the cfg from `rustc --print=cfg`, got:\n{}", + output + ) + })?; - Ok(TargetInfo { - crate_type_process, - crate_types: RefCell::new(map), - sysroot, - sysroot_host_libdir, - sysroot_target_libdir, // recalculate `rustflags` from above now that we have `cfg` // information - rustflags: env_args( + let new_flags = extra_args( config, requested_kinds, &rustc.host, Some(&cfg), kind, Flags::Rust, - )?, - rustdocflags: env_args( - config, - requested_kinds, - &rustc.host, - Some(&cfg), - kind, - Flags::Rustdoc, - )?, - cfg, - supports_split_debuginfo, - }) + )?; + + // Tricky: `RUSTFLAGS` defines the set of active `cfg` flags, active + // `cfg` flags define which `.cargo/config` sections apply, and they + // in turn can affect `RUSTFLAGS`! This is a bona fide mutual + // dependency, and it can even diverge (see `cfg_paradox` test). + // + // So what we do here is running at most *two* iterations of + // fixed-point iteration, which should be enough to cover + // practically useful cases, and warn if that's not enough for + // convergence. + let reached_fixed_point = new_flags == rustflags; + if !reached_fixed_point && turn == 0 { + turn += 1; + rustflags = new_flags; + continue; + } + if !reached_fixed_point { + config.shell().warn("non-trivial mutual dependency between target-specific configuration and RUSTFLAGS")?; + } + + return Ok(TargetInfo { + crate_type_process, + crate_types: RefCell::new(map), + sysroot, + sysroot_host_libdir, + sysroot_target_libdir, + rustflags, + rustdocflags: extra_args( + config, + requested_kinds, + &rustc.host, + Some(&cfg), + kind, + Flags::Rustdoc, + )?, + cfg, + supports_split_debuginfo, + }); + } } fn not_user_specific_cfg(cfg: &CargoResult) -> bool { @@ -268,7 +320,7 @@ true } - /// All the target `cfg` settings. + /// All the target [`Cfg`] settings. pub fn cfg(&self) -> &[Cfg] { &self.cfg } @@ -554,6 +606,7 @@ result } +/// Compiler flags for either rustc or rustdoc. 
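// A self-contained sketch of the bounded fixed-point iteration performed in
// `TargetInfo::new` above: recompute the flags at most twice, then report
// whether they converged so the caller can warn. Names and the exact return
// shape are illustrative, not Cargo's actual API.
fn converge_flags<F>(mut flags: Vec<String>, recompute: F) -> (Vec<String>, bool)
where
    F: Fn(&[String]) -> Vec<String>,
{
    // Turn 0 and turn 1, mirroring the `turn` counter above.
    for _ in 0..2 {
        let next = recompute(&flags);
        if next == flags {
            return (flags, true); // reached a fixed point
        }
        flags = next;
    }
    // Still changing after two turns: a non-trivial mutual dependency between
    // target-specific configuration and RUSTFLAGS; the caller should warn.
    (flags, false)
}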
#[derive(Debug, Copy, Clone)] enum Flags { Rust, @@ -588,6 +641,7 @@ /// - `target.*.rustflags` from the config (.cargo/config) /// - `target.cfg(..).rustflags` from the config /// - `host.*.rustflags` from the config if compiling a host artifact or without `--target` +/// (requires `-Zhost-config`) /// /// then if none of those were found /// @@ -598,7 +652,7 @@ /// For those artifacts, _only_ `host.*.rustflags` is respected, and no other configuration /// sources, _regardless of the value of `target-applies-to-host`_. This is counterintuitive, but /// necessary to retain backwards compatibility with older versions of Cargo. -fn env_args( +fn extra_args( config: &Config, requested_kinds: &[CompileKind], host_triple: &str, @@ -643,6 +697,8 @@ } } +/// Gets compiler flags from environment variables. +/// See [`extra_args`] for more. fn rustflags_from_env(flags: Flags) -> Option> { // First try CARGO_ENCODED_RUSTFLAGS from the environment. // Prefer this over RUSTFLAGS since it's less prone to encoding errors. @@ -667,6 +723,8 @@ None } +/// Gets compiler flags from `[target]` section in the config. +/// See [`extra_args`] for more. fn rustflags_from_target( config: &Config, host_triple: &str, @@ -708,6 +766,8 @@ } } +/// Gets compiler flags from `[host]` section in the config. +/// See [`extra_args`] for more. fn rustflags_from_host( config: &Config, flag: Flags, @@ -724,6 +784,8 @@ Ok(list.as_ref().map(|l| l.val.as_slice().to_vec())) } +/// Gets compiler flags from `[build]` section in the config. +/// See [`extra_args`] for more. fn rustflags_from_build(config: &Config, flag: Flags) -> CargoResult>> { // Then the `build.rustflags` value. let build = config.build_config()?; @@ -747,11 +809,12 @@ /// `rustc` is invoked without a `--target` flag. This is used for /// procedural macros, build scripts, etc. host_config: TargetConfig, + /// Information about the host platform. host_info: TargetInfo, - /// Build information for targets that we're building for. This will be - /// empty if the `--target` flag is not passed. + /// Build information for targets that we're building for. target_config: HashMap, + /// Information about the target platform that we're building for. target_info: HashMap, } diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/core/compiler/fingerprint.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/core/compiler/fingerprint.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/core/compiler/fingerprint.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/core/compiler/fingerprint.rs 2023-01-10 13:41:19.000000000 +0000 @@ -559,10 +559,12 @@ } /// Indication of the status on the filesystem for a particular unit. +#[derive(Default)] enum FsStatus { /// This unit is to be considered stale, even if hash information all /// matches. The filesystem inputs have changed (or are missing) and the /// unit needs to subsequently be recompiled. + #[default] Stale, /// This unit is up-to-date. 
All outputs and their corresponding mtime are @@ -579,12 +581,6 @@ } } -impl Default for FsStatus { - fn default() -> FsStatus { - FsStatus::Stale - } -} - impl Serialize for DepFingerprint { fn serialize(&self, ser: S) -> Result where @@ -1198,7 +1194,7 @@ } => { info!("stale: changed {:?}", stale); info!(" (vs) {:?}", reference); - info!(" {:?} != {:?}", reference_mtime, stale_mtime); + info!(" {:?} < {:?}", reference_mtime, stale_mtime); } StaleItem::ChangedEnv { var, @@ -1497,7 +1493,7 @@ // figure out a better scheme where a package fingerprint // may be a string (like for a registry) or a list of files // (like for a path dependency). Those list of files would - // be stored here rather than the the mtime of them. + // be stored here rather than the mtime of them. Some(f) => { let s = f()?; debug!( diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/core/compiler/mod.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/core/compiler/mod.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/core/compiler/mod.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/core/compiler/mod.rs 2023-01-10 13:41:19.000000000 +0000 @@ -305,6 +305,19 @@ paths::remove_file(&dst)?; } } + + // Some linkers do not remove the executable, but truncate and modify it. + // That results in the old hard-link being modified even after renamed. + // We delete the old artifact here to prevent this behavior from confusing users. + // See rust-lang/cargo#8348. + if output.hardlink.is_some() && output.path.exists() { + _ = paths::remove_file(&output.path).map_err(|e| { + log::debug!( + "failed to delete previous output file `{:?}`: {e:?}", + output.path + ); + }); + } } fn verbose_if_simple_exit_code(err: Error) -> Error { @@ -995,7 +1008,7 @@ .env("RUSTC_BOOTSTRAP", "1"); } - // Add `CARGO_BIN_` environment variables for building tests. + // Add `CARGO_BIN_EXE_` environment variables for building tests. if unit.target.is_test() || unit.target.is_bench() { for bin_target in unit .pkg @@ -1423,7 +1436,7 @@ let rendered = if options.color { msg.rendered } else { - // Strip only fails if the the Writer fails, which is Cursor + // Strip only fails if the Writer fails, which is Cursor // on a Vec, which should never fail. strip_ansi_escapes::strip(&msg.rendered) .map(|v| String::from_utf8(v).expect("utf8")) diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/core/compiler/rustdoc.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/core/compiler/rustdoc.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/core/compiler/rustdoc.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/core/compiler/rustdoc.rs 2023-01-10 13:41:19.000000000 +0000 @@ -128,7 +128,7 @@ if !sid.is_registry() { return false; } - if sid.is_default_registry() { + if sid.is_crates_io() { return registry == CRATES_IO_REGISTRY; } if let Some(index_url) = name2url.get(registry) { diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/core/compiler/standard_lib.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/core/compiler/standard_lib.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/core/compiler/standard_lib.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/core/compiler/standard_lib.rs 2023-01-10 13:41:19.000000000 +0000 @@ -193,10 +193,7 @@ // in time is minimal, and the difference in caching is // significant. 
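The `compiler/mod.rs` hunk above works around linkers that edit the output file in place, which would also mutate any old hard link still pointing at it. A rough stand-alone equivalent of the clean-up it performs (paths and logging are placeholders, not Cargo's helpers):

    use std::path::Path;

    /// Best-effort removal of a previously hard-linked artifact so that an
    /// in-place-editing linker cannot corrupt the old copy (cf. rust-lang/cargo#8348).
    fn remove_previous_output(path: &Path) {
        if path.exists() {
            if let Err(e) = std::fs::remove_file(path) {
                // Failure here is deliberately non-fatal.
                eprintln!("failed to delete previous output file {:?}: {e:?}", path);
            }
        }
    }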
let mode = CompileMode::Build; - let features = std_features.activated_features( - pkg.package_id(), - FeaturesFor::NormalOrDevOrArtifactTarget(None), - ); + let features = std_features.activated_features(pkg.package_id(), FeaturesFor::NormalOrDev); for kind in kinds { let list = ret.entry(*kind).or_insert_with(Vec::new); let unit_for = UnitFor::new_normal(*kind); diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/core/compiler/timings.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/core/compiler/timings.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/core/compiler/timings.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/core/compiler/timings.rs 2023-01-10 13:41:19.000000000 +0000 @@ -272,7 +272,7 @@ Some(state) => state, None => return, }; - // Don't take samples too too frequently, even if requested. + // Don't take samples too frequently, even if requested. let now = Instant::now(); if self.last_cpu_recording.elapsed() < Duration::from_millis(100) { return; diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/core/compiler/unit_dependencies.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/core/compiler/unit_dependencies.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/core/compiler/unit_dependencies.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/core/compiler/unit_dependencies.rs 2023-01-10 13:41:19.000000000 +0000 @@ -1,8 +1,8 @@ -//! Constructs the dependency graph for compilation. +//! # Constructs the dependency graph for compilation //! //! Rust code is typically organized as a set of Cargo packages. The //! dependencies between the packages themselves are stored in the -//! `Resolve` struct. However, we can't use that information as is for +//! [`Resolve`] struct. However, we can't use that information as is for //! compilation! A package typically contains several targets, or crates, //! and these targets has inter-dependencies. For example, you need to //! compile the `lib` target before the `bin` one, and you need to compile @@ -13,7 +13,7 @@ //! is exactly what this module is doing! Well, almost exactly: another //! complication is that we might want to compile the same target several times //! (for example, with and without tests), so we actually build a dependency -//! graph of `Unit`s, which capture these properties. +//! graph of [`Unit`]s, which capture these properties. use std::collections::{HashMap, HashSet}; @@ -35,23 +35,27 @@ const IS_NO_ARTIFACT_DEP: Option<&'static Artifact> = None; -/// Collection of stuff used while creating the `UnitGraph`. +/// Collection of stuff used while creating the [`UnitGraph`]. struct State<'a, 'cfg> { ws: &'a Workspace<'cfg>, config: &'cfg Config, + /// Stores the result of building the [`UnitGraph`]. unit_dependencies: UnitGraph, package_set: &'a PackageSet<'cfg>, usr_resolve: &'a Resolve, usr_features: &'a ResolvedFeatures, + /// Like `usr_resolve` but for building standard library (`-Zbuild-std`). std_resolve: Option<&'a Resolve>, + /// Like `usr_features` but for building standard library (`-Zbuild-std`). std_features: Option<&'a ResolvedFeatures>, - /// This flag is `true` while generating the dependencies for the standard - /// library. + /// `true` while generating the dependencies for the standard library. is_std: bool, + /// The mode we are compiling in. Used for preventing from building lib thrice. global_mode: CompileMode, target_data: &'a RustcTargetData<'cfg>, profiles: &'a Profiles, interner: &'a UnitInterner, + // Units for `-Zrustdoc-scrape-examples`. 
scrape_units: &'a [Unit], /// A set of edges in `unit_dependencies` where (a, b) means that the @@ -73,6 +77,9 @@ } } +/// Then entry point for building a dependency graph of compilation units. +/// +/// You can find some information for arguments from doc of [`State`]. pub fn build_unit_dependencies<'a, 'cfg>( ws: &'a Workspace<'cfg>, package_set: &'a PackageSet<'cfg>, @@ -1015,6 +1022,7 @@ } impl<'a, 'cfg> State<'a, 'cfg> { + /// Gets `std_resolve` during building std, otherwise `usr_resolve`. fn resolve(&self) -> &'a Resolve { if self.is_std { self.std_resolve.unwrap() @@ -1023,6 +1031,7 @@ } } + /// Gets `std_features` during building std, otherwise `usr_features`. fn features(&self) -> &'a ResolvedFeatures { if self.is_std { self.std_features.unwrap() diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/core/features.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/core/features.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/core/features.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/core/features.rs 2023-01-10 13:41:19.000000000 +0000 @@ -34,7 +34,7 @@ //! The steps for adding new Cargo.toml syntax are: //! //! 1. Add the cargo-features unstable gate. Search below for "look here" to -//! find the `features!` macro and add your feature to the list. +//! find the [`features!`] macro invocation and add your feature to the list. //! //! 2. Update the Cargo.toml parsing code to handle your new feature. //! @@ -62,12 +62,11 @@ //! //! 1. Add the option to the [`CliUnstable`] struct below. Flags can take an //! optional value if you want. -//! 2. Update the [`CliUnstable::add`][CliUnstable] function to parse the flag. +//! 2. Update the [`CliUnstable::add`] function to parse the flag. //! 3. Wherever the new functionality is implemented, call -//! [`Config::cli_unstable`][crate::util::config::Config::cli_unstable] to -//! get an instance of `CliUnstable` and check if the option has been -//! enabled on the `CliUnstable` instance. Nightly gating is already -//! handled, so no need to worry about that. +//! [`Config::cli_unstable`] to get an instance of [`CliUnstable`] +//! and check if the option has been enabled on the [`CliUnstable`] instance. +//! Nightly gating is already handled, so no need to worry about that. //! //! ## Stabilization //! @@ -77,13 +76,14 @@ //! The steps for stabilizing are roughly: //! //! 1. Update the feature to be stable, based on the kind of feature: -//! 1. `cargo-features`: Change the feature to `stable` in the `features!` -//! macro below, and include the version and a URL for the documentation. -//! 2. `-Z unstable-options`: Find the call to `fail_if_stable_opt` and +//! 1. `cargo-features`: Change the feature to `stable` in the [`features!`] +//! macro invocation below, and include the version and a URL for the +//! documentation. +//! 2. `-Z unstable-options`: Find the call to [`fail_if_stable_opt`] and //! remove it. Be sure to update the man pages if necessary. -//! 3. `-Z` flag: Change the parsing code in [`CliUnstable::add`][CliUnstable] -//! to call `stabilized_warn` or `stabilized_err` and remove the field from -//! `CliUnstable. Remove the `(unstable)` note in the clap help text if +//! 3. `-Z` flag: Change the parsing code in [`CliUnstable::add`] to call +//! `stabilized_warn` or `stabilized_err` and remove the field from +//! [`CliUnstable`]. Remove the `(unstable)` note in the clap help text if //! necessary. //! 2. Remove `masquerade_as_nightly_cargo` from any tests, and remove //! 
`cargo-features` from `Cargo.toml` test files if any. You can @@ -92,6 +92,10 @@ //! 3. Update the docs in unstable.md to move the section to the bottom //! and summarize it similar to the other entries. Update the rest of the //! documentation to add the new feature. +//! +//! [`Config::cli_unstable`]: crate::util::config::Config::cli_unstable +//! [`fail_if_stable_opt`]: CliUnstable::fail_if_stable_opt +//! [`features!`]: macro.features.html use std::collections::BTreeSet; use std::env; @@ -112,7 +116,47 @@ "See https://doc.rust-lang.org/book/appendix-07-nightly-rust.html for more information \ about Rust release channels."; -/// The edition of the compiler (RFC 2052) +/// The edition of the compiler ([RFC 2052]) +/// +/// The following sections will guide you how to add and stabilize an edition. +/// +/// ## Adding a new edition +/// +/// - Add the next edition to the enum. +/// - Update every match expression that now fails to compile. +/// - Update the [`FromStr`] impl. +/// - Update [`CLI_VALUES`] to include the new edition. +/// - Set [`LATEST_UNSTABLE`] to Some with the new edition. +/// - Add an unstable feature to the [`features!`] macro invocation below for the new edition. +/// - Gate on that new feature in [`TomlManifest::to_real_manifest`]. +/// - Update the shell completion files. +/// - Update any failing tests (hopefully there are very few). +/// - Update unstable.md to add a new section for this new edition (see [this example]). +/// +/// ## Stabilization instructions +/// +/// - Set [`LATEST_UNSTABLE`] to None. +/// - Set [`LATEST_STABLE`] to the new version. +/// - Update [`is_stable`] to `true`. +/// - Set the editionNNNN feature to stable in the [`features!`] macro invocation below. +/// - Update any tests that are affected. +/// - Update the man page for the `--edition` flag. +/// - Update unstable.md to move the edition section to the bottom. +/// - Update the documentation: +/// - Update any features impacted by the edition. +/// - Update manifest.md#the-edition-field. +/// - Update the `--edition` flag (options-new.md). +/// - Rebuild man pages. +/// +/// [RFC 2052]: https://rust-lang.github.io/rfcs/2052-epochs.html +/// [`FromStr`]: Edition::from_str +/// [`CLI_VALUES`]: Edition::CLI_VALUES +/// [`LATEST_UNSTABLE`]: Edition::LATEST_UNSTABLE +/// [`LATEST_STABLE`]: Edition::LATEST_STABLE +/// [this example]: https://github.com/rust-lang/cargo/blob/3ebb5f15a940810f250b68821149387af583a79e/src/doc/src/reference/unstable.md?plain=1#L1238-L1264 +/// [`is_stable`]: Edition::is_stable +/// [`TomlManifest::to_real_manifest`]: crate::util::toml::TomlManifest::to_real_manifest +/// [`features!`]: macro.features.html #[derive(Clone, Copy, Debug, Hash, PartialOrd, Ord, Eq, PartialEq, Serialize, Deserialize)] pub enum Edition { /// The 2015 edition @@ -123,33 +167,6 @@ Edition2021, } -// Adding a new edition: -// - Add the next edition to the enum. -// - Update every match expression that now fails to compile. -// - Update the `FromStr` impl. -// - Update CLI_VALUES to include the new edition. -// - Set LATEST_UNSTABLE to Some with the new edition. -// - Add an unstable feature to the features! macro below for the new edition. -// - Gate on that new feature in TomlManifest::to_real_manifest. -// - Update the shell completion files. -// - Update any failing tests (hopefully there are very few). 
-// - Update unstable.md to add a new section for this new edition (see -// https://github.com/rust-lang/cargo/blob/3ebb5f15a940810f250b68821149387af583a79e/src/doc/src/reference/unstable.md?plain=1#L1238-L1264 -// as an example). -// -// Stabilization instructions: -// - Set LATEST_UNSTABLE to None. -// - Set LATEST_STABLE to the new version. -// - Update `is_stable` to `true`. -// - Set the editionNNNN feature to stable in the features macro below. -// - Update any tests that are affected. -// - Update the man page for the --edition flag. -// - Update unstable.md to move the edition section to the bottom. -// - Update the documentation: -// - Update any features impacted by the edition. -// - Update manifest.md#the-edition-field. -// - Update the --edition flag (options-new.md). -// - Rebuild man pages. impl Edition { /// The latest edition that is unstable. /// @@ -661,7 +678,6 @@ jobserver_per_rustc: bool = (HIDDEN), minimal_versions: bool = ("Resolve minimal dependency versions instead of maximum"), mtime_on_use: bool = ("Configure Cargo to update the mtime of used files"), - multitarget: bool = ("Allow passing multiple `--target` flags to the cargo subcommand selected"), no_index_update: bool = ("Do not update the registry index even if the cache is outdated"), panic_abort_tests: bool = ("Enable support to run tests with -Cpanic=abort"), host_config: bool = ("Enable the [host] section in the .cargo/config.toml file"), @@ -670,6 +686,7 @@ rustdoc_map: bool = ("Allow passing external documentation mappings to rustdoc"), separate_nightlies: bool = (HIDDEN), terminal_width: Option> = ("Provide a terminal width to rustc for error truncation"), + publish_timeout: bool = ("Enable the `publish.timeout` key in .cargo/config.toml file"), unstable_options: bool = ("Allow the usage of unstable options"), // TODO(wcrichto): move scrape example configuration into Cargo.toml before stabilization // See: https://github.com/rust-lang/cargo/pull/9525#discussion_r728470927 @@ -914,11 +931,15 @@ "jobserver-per-rustc" => self.jobserver_per_rustc = parse_empty(k, v)?, "host-config" => self.host_config = parse_empty(k, v)?, "target-applies-to-host" => self.target_applies_to_host = parse_empty(k, v)?, + "publish-timeout" => self.publish_timeout = parse_empty(k, v)?, "features" => { - // For now this is still allowed (there are still some - // unstable options like "compare"). This should be removed at - // some point, and migrate to a new -Z flag for any future - // things. + // `-Z features` has been stabilized since 1.51, + // but `-Z features=compare` is still allowed for convenience + // to validate that the feature resolver resolves features + // in the same way as the dependency resolver, + // until we feel confident to remove entirely. 
+ // + // See rust-lang/cargo#11168 let feats = parse_features(v); let stab_is_not_empty = feats.iter().any(|feat| { matches!( diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/core/package_id.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/core/package_id.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/core/package_id.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/core/package_id.rs 2023-01-10 13:41:19.000000000 +0000 @@ -211,7 +211,7 @@ fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { write!(f, "{} v{}", self.inner.name, self.inner.version)?; - if !self.inner.source_id.is_default_registry() { + if !self.inner.source_id.is_crates_io() { write!(f, " ({})", self.inner.source_id)?; } diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/core/package.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/core/package.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/core/package.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/core/package.rs 2023-01-10 13:41:19.000000000 +0000 @@ -27,7 +27,7 @@ use crate::core::{SourceMap, Summary, Workspace}; use crate::ops; use crate::util::config::PackageCacheLock; -use crate::util::errors::{CargoResult, HttpNot200}; +use crate::util::errors::{CargoResult, HttpNotSuccessful}; use crate::util::interning::InternedString; use crate::util::network::Retry; use crate::util::{self, internal, Config, Progress, ProgressStyle}; @@ -868,18 +868,19 @@ let code = handle.response_code()?; if code != 200 && code != 0 { let url = handle.effective_url()?.unwrap_or(url); - return Err(HttpNot200 { + return Err(HttpNotSuccessful { code, url: url.to_string(), + body: data, } .into()); } - Ok(()) + Ok(data) }) .with_context(|| format!("failed to download from `{}`", dl.url))? }; match ret { - Some(()) => break (dl, data), + Some(data) => break (dl, data), None => { self.pending_ids.insert(dl.id); self.enqueue(dl, handle)? diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/core/profiles.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/core/profiles.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/core/profiles.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/core/profiles.rs 2023-01-10 13:41:19.000000000 +0000 @@ -1,3 +1,26 @@ +//! # Profiles: built-in and customizable compiler flag presets +//! +//! [`Profiles`] is a collections of built-in profiles, and profiles defined +//! in the root manifest and configurations. +//! +//! To start using a profile, most of the time you start from [`Profiles::new`], +//! which does the followings: +//! +//! - Create a `Profiles` by merging profiles from configs onto the profile +//! from root mainfest (see [`merge_config_profiles`]). +//! - Add built-in profiles onto it (see [`Profiles::add_root_profiles`]). +//! - Process profile inheritance for each profiles. (see [`Profiles::add_maker`]). +//! +//! Then you can query a [`Profile`] via [`Profiles::get_profile`], which respects +//! the profile overriden hierarchy described in below. The [`Profile`] you get +//! is basically an immutable struct containing the compiler flag presets. +//! +//! ## Profile overridden hierarchy +//! +//! Profile settings can be overridden for specific packages and build-time crates. +//! The precedence is explained in [`ProfileMaker`]. +//! The algorithm happens within [`ProfileMaker::get_profile`]. 
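The override hierarchy described in the new `profiles.rs` module docs above is essentially a first-match-wins lookup across the configured layers. A simplified sketch of that idea, with plain `Option`s standing in for the real config layers (so not Cargo's API, and omitting the `.cargo/config` layer that takes precedence over all of these):

    /// First-match-wins resolution across the profile override layers,
    /// here reduced to a single numeric setting such as `opt-level`.
    fn resolve_setting(
        package_override: Option<u32>,  // [profile.dev.package.name]
        wildcard_override: Option<u32>, // [profile.dev.package."*"]
        build_override: Option<u32>,    // [profile.dev.build-override]
        base_profile: Option<u32>,      // [profile.dev]
        hard_coded_default: u32,
    ) -> u32 {
        package_override
            .or(wildcard_override)
            .or(build_override)
            .or(base_profile)
            .unwrap_or(hard_coded_default)
    }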
+ use crate::core::compiler::{CompileKind, CompileTarget, Unit}; use crate::core::dependency::Artifact; use crate::core::resolver::features::FeaturesFor; @@ -11,6 +34,10 @@ use std::{cmp, env, fmt, hash}; /// Collection of all profiles. +/// +/// To get a specific [`Profile`], you usually create this and call [`get_profile`] then. +/// +/// [`get_profile`]: Profiles::get_profile #[derive(Clone, Debug)] pub struct Profiles { /// Incremental compilation can be overridden globally via: @@ -355,12 +382,13 @@ /// An object used for handling the profile hierarchy. /// /// The precedence of profiles are (first one wins): +/// /// - Profiles in `.cargo/config` files (using same order as below). -/// - [profile.dev.package.name] -- a named package. -/// - [profile.dev.package."*"] -- this cannot apply to workspace members. -/// - [profile.dev.build-override] -- this can only apply to `build.rs` scripts +/// - `[profile.dev.package.name]` -- a named package. +/// - `[profile.dev.package."*"]` -- this cannot apply to workspace members. +/// - `[profile.dev.build-override]` -- this can only apply to `build.rs` scripts /// and their dependencies. -/// - [profile.dev] +/// - `[profile.dev]` /// - Default (hard-coded) values. #[derive(Debug, Clone)] struct ProfileMaker { @@ -636,6 +664,7 @@ } impl Profile { + /// Returns a built-in `dev` profile. fn default_dev() -> Profile { Profile { name: InternedString::new("dev"), @@ -648,6 +677,7 @@ } } + /// Returns a built-in `release` profile. fn default_release() -> Profile { Profile { name: InternedString::new("release"), @@ -797,9 +827,7 @@ /// build.rs` is HOST=true, HOST_FEATURES=false for the same reasons that /// foo's build script is set that way. host_features: bool, - /// How Cargo processes the `panic` setting or profiles. This is done to - /// handle test/benches inheriting from dev/release, as well as forcing - /// `for_host` units to always unwind. + /// How Cargo processes the `panic` setting or profiles. panic_setting: PanicSetting, /// The compile kind of the root unit for which artifact dependencies are built. @@ -821,6 +849,13 @@ artifact_target_for_features: Option, } +/// How Cargo processes the `panic` setting or profiles. +/// +/// This is done to handle test/benches inheriting from dev/release, +/// as well as forcing `for_host` units to always unwind. +/// It also interacts with [`-Z panic-abort-tests`]. 
+/// +/// [`-Z panic-abort-tests`]: https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#panic-abort-tests #[derive(Copy, Clone, Debug, Eq, PartialEq, Hash, Ord, PartialOrd)] enum PanicSetting { /// Used to force a unit to always be compiled with the `panic=unwind` diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/core/registry.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/core/registry.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/core/registry.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/core/registry.rs 2023-01-10 13:41:19.000000000 +0000 @@ -1,5 +1,5 @@ use std::collections::{HashMap, HashSet}; -use std::task::Poll; +use std::task::{ready, Poll}; use crate::core::PackageSet; use crate::core::{Dependency, PackageId, QueryKind, Source, SourceId, SourceMap, Summary}; @@ -482,10 +482,7 @@ for &s in self.overrides.iter() { let src = self.sources.get_mut(s).unwrap(); let dep = Dependency::new_override(dep.package_name(), s); - let mut results = match src.query_vec(&dep, QueryKind::Exact) { - Poll::Ready(results) => results?, - Poll::Pending => return Poll::Pending, - }; + let mut results = ready!(src.query_vec(&dep, QueryKind::Exact))?; if !results.is_empty() { return Poll::Ready(Ok(Some(results.remove(0)))); } @@ -580,10 +577,7 @@ assert!(self.patches_locked); let (override_summary, n, to_warn) = { // Look for an override and get ready to query the real source. - let override_summary = match self.query_overrides(dep) { - Poll::Ready(override_summary) => override_summary?, - Poll::Pending => return Poll::Pending, - }; + let override_summary = ready!(self.query_overrides(dep))?; // Next up on our list of candidates is to check the `[patch]` // section of the manifest. Here we look through all patches @@ -880,23 +874,17 @@ // No summaries found, try to help the user figure out what is wrong. if let Some(locked) = locked { // Since the locked patch did not match anything, try the unlocked one. - let orig_matches = match source.query_vec(orig_patch, QueryKind::Exact) { - Poll::Pending => return Poll::Pending, - Poll::Ready(deps) => deps, - } - .unwrap_or_else(|e| { - log::warn!( - "could not determine unlocked summaries for dep {:?}: {:?}", - orig_patch, - e - ); - Vec::new() - }); + let orig_matches = + ready!(source.query_vec(orig_patch, QueryKind::Exact)).unwrap_or_else(|e| { + log::warn!( + "could not determine unlocked summaries for dep {:?}: {:?}", + orig_patch, + e + ); + Vec::new() + }); - let summary = match summary_for_patch(orig_patch, &None, orig_matches, source) { - Poll::Pending => return Poll::Pending, - Poll::Ready(summary) => summary?, - }; + let summary = ready!(summary_for_patch(orig_patch, &None, orig_matches, source))?; // The unlocked version found a match. This returns a value to // indicate that this entry should be unlocked. @@ -905,18 +893,15 @@ // Try checking if there are *any* packages that match this by name. 
let name_only_dep = Dependency::new_override(orig_patch.package_name(), orig_patch.source_id()); - let name_summaries = match source.query_vec(&name_only_dep, QueryKind::Exact) { - Poll::Pending => return Poll::Pending, - Poll::Ready(deps) => deps, - } - .unwrap_or_else(|e| { - log::warn!( - "failed to do name-only summary query for {:?}: {:?}", - name_only_dep, - e - ); - Vec::new() - }); + let name_summaries = + ready!(source.query_vec(&name_only_dep, QueryKind::Exact)).unwrap_or_else(|e| { + log::warn!( + "failed to do name-only summary query for {:?}: {:?}", + name_only_dep, + e + ); + Vec::new() + }); let mut vers = name_summaries .iter() .map(|summary| summary.version()) diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/core/resolver/dep_cache.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/core/resolver/dep_cache.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/core/resolver/dep_cache.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/core/resolver/dep_cache.rs 2023-01-10 13:41:19.000000000 +0000 @@ -109,7 +109,7 @@ self.registry_cache.insert(dep.clone(), Poll::Pending); return Poll::Pending; } - for summary in ret.iter_mut() { + for summary in ret.iter() { let mut potential_matches = self .replacements .iter() diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/core/resolver/features.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/core/resolver/features.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/core/resolver/features.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/core/resolver/features.rs 2023-01-10 13:41:19.000000000 +0000 @@ -1,17 +1,16 @@ -//! Feature resolver. +//! # Feature resolver //! -//! This is a new feature resolver that runs independently of the main +//! This is a [new feature resolver] that runs independently of the main //! dependency resolver. It has several options which can enable new feature //! resolution behavior. //! //! One of its key characteristics is that it can avoid unifying features for -//! shared dependencies in some situations. See `FeatureOpts` for the +//! shared dependencies in some situations. See [`FeatureOpts`] for the //! different behaviors that can be enabled. If no extra options are enabled, //! then it should behave exactly the same as the dependency resolver's //! feature resolution. //! -//! The preferred way to engage this new resolver is via -//! `resolve_ws_with_opts`. +//! The preferred way to engage this new resolver is via [`resolve_ws_with_opts`]. //! //! This does not *replace* feature resolution in the dependency resolver, but //! instead acts as a second pass which can *narrow* the features selected in @@ -24,11 +23,19 @@ //! we could experiment with that, but it seems unlikely to work or be all //! that helpful. //! -//! There are many assumptions made about the dependency resolver. This -//! feature resolver assumes validation has already been done on the feature -//! maps, and doesn't do any validation itself. It assumes dev-dependencies -//! within a dependency have been removed. There are probably other -//! assumptions that I am forgetting. +//! ## Assumptions +//! +//! There are many assumptions made about the dependency resolver: +//! +//! * Assumes feature validation has already been done during the construction +//! of feature maps, so the feature resolver doesn't do that validation at all. +//! * Assumes `dev-dependencies` within a dependency have been removed +//! in the given [`Resolve`]. +//! +//! 
There are probably other assumptions that I am forgetting. +//! +//! [new feature resolver]: https://doc.rust-lang.org/nightly/cargo/reference/resolver.html#feature-resolver-version-2 +//! [`resolve_ws_with_opts`]: crate::ops::resolve_ws_with_opts use crate::core::compiler::{CompileKind, CompileTarget, RustcTargetData}; use crate::core::dependency::{ArtifactTarget, DepKind, Dependency}; @@ -42,12 +49,9 @@ use std::rc::Rc; /// The key used in various places to store features for a particular dependency. -/// The actual discrimination happens with the `FeaturesFor` type. +/// The actual discrimination happens with the [`FeaturesFor`] type. type PackageFeaturesKey = (PackageId, FeaturesFor); /// Map of activated features. -/// -/// The key is `(PackageId, bool)` where the bool is `true` if these -/// are features for a build dependency or proc-macro. type ActivateMap = HashMap>; /// Set of all activated features for all packages in the resolve graph. @@ -63,7 +67,7 @@ /// Options for how the feature resolver works. #[derive(Default)] pub struct FeatureOpts { - /// Build deps and proc-macros will not share share features with other dep kinds, + /// Build deps and proc-macros will not share features with other dep kinds, /// and so won't artifact targets. /// In other terms, if true, features associated with certain kinds of dependencies /// will only be unified together. @@ -98,30 +102,29 @@ No, } -/// Flag to indicate if features are requested for a build dependency or not. -#[derive(Copy, Clone, Debug, PartialEq, Eq, Ord, PartialOrd, Hash)] +/// Flag to indicate if features are requested for a certain type of dependency. +/// +/// This is primarily used for constructing a [`PackageFeaturesKey`] to decouple +/// activated features of the same package with different types of dependency. +#[derive(Default, Copy, Clone, Debug, PartialEq, Eq, Ord, PartialOrd, Hash)] pub enum FeaturesFor { - /// If `Some(target)` is present, we represent an artifact target. - /// Otherwise any other normal or dev dependency. - NormalOrDevOrArtifactTarget(Option), + /// Normal or dev dependency. + #[default] + NormalOrDev, /// Build dependency or proc-macro. HostDep, -} - -impl Default for FeaturesFor { - fn default() -> Self { - FeaturesFor::NormalOrDevOrArtifactTarget(None) - } + /// Any dependency with both artifact and target specified. + /// + /// That is, `dep = { …, artifact = , target = }` + ArtifactDep(CompileTarget), } impl std::fmt::Display for FeaturesFor { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { FeaturesFor::HostDep => f.write_str("host"), - FeaturesFor::NormalOrDevOrArtifactTarget(Some(target)) => { - f.write_str(&target.rustc_target()) - } - FeaturesFor::NormalOrDevOrArtifactTarget(None) => Ok(()), + FeaturesFor::ArtifactDep(target) => f.write_str(&target.rustc_target()), + FeaturesFor::NormalOrDev => Ok(()), } } } @@ -131,7 +134,7 @@ if for_host { FeaturesFor::HostDep } else { - FeaturesFor::NormalOrDevOrArtifactTarget(None) + FeaturesFor::NormalOrDev } } @@ -140,12 +143,12 @@ artifact_target: Option, ) -> FeaturesFor { match artifact_target { - Some(target) => FeaturesFor::NormalOrDevOrArtifactTarget(Some(target)), + Some(target) => FeaturesFor::ArtifactDep(target), None => { if for_host { FeaturesFor::HostDep } else { - FeaturesFor::NormalOrDevOrArtifactTarget(None) + FeaturesFor::NormalOrDev } } } @@ -396,6 +399,12 @@ /// Key is `(pkg_id, for_host)`. Value is a set of features or dependencies removed. 
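The `FeaturesFor` rework above replaces the `NormalOrDevOrArtifactTarget(Option<...>)` variant with three named variants, so the key construction reads directly. A condensed sketch of the new shape, with a placeholder target type since Cargo's real `CompileTarget` lives elsewhere:

    #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
    struct CompileTarget; // placeholder for Cargo's real type

    #[derive(Debug, Default, Clone, Copy, PartialEq, Eq, Hash)]
    enum FeaturesFor {
        /// Normal or dev dependency.
        #[default]
        NormalOrDev,
        /// Build dependency or proc-macro.
        HostDep,
        /// Dependency with both `artifact` and `target` specified.
        ArtifactDep(CompileTarget),
    }

    fn features_for(for_host: bool, artifact_target: Option<CompileTarget>) -> FeaturesFor {
        match (artifact_target, for_host) {
            (Some(target), _) => FeaturesFor::ArtifactDep(target),
            (None, true) => FeaturesFor::HostDep,
            (None, false) => FeaturesFor::NormalOrDev,
        }
    }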
pub type DiffMap = BTreeMap>; +/// The new feature resolver that [`resolve`]s your project. +/// +/// For more information, please see the [module-level documentation]. +/// +/// [`resolve`]: Self::resolve +/// [module-level documentation]: crate::core::resolver::features pub struct FeatureResolver<'a, 'cfg> { ws: &'a Workspace<'cfg>, target_data: &'a RustcTargetData<'cfg>, @@ -431,7 +440,7 @@ } impl<'a, 'cfg> FeatureResolver<'a, 'cfg> { - /// Runs the resolution algorithm and returns a new `ResolvedFeatures` + /// Runs the resolution algorithm and returns a new [`ResolvedFeatures`] /// with the result. pub fn resolve( ws: &Workspace<'cfg>, @@ -498,6 +507,10 @@ Ok(()) } + /// Activates [`FeatureValue`]s on the given package. + /// + /// This is the main entrance into the recursion of feature activation + /// for a package. fn activate_pkg( &mut self, pkg_id: PackageId, @@ -772,11 +785,11 @@ self.target_data .dep_platform_activated(dep, CompileKind::Host) } - (_, FeaturesFor::NormalOrDevOrArtifactTarget(None)) => self + (_, FeaturesFor::NormalOrDev) => self .requested_targets .iter() .any(|kind| self.target_data.dep_platform_activated(dep, *kind)), - (_, FeaturesFor::NormalOrDevOrArtifactTarget(Some(target))) => self + (_, FeaturesFor::ArtifactDep(target)) => self .target_data .dep_platform_activated(dep, CompileKind::Target(target)), } @@ -835,7 +848,7 @@ artifact.is_lib(), artifact.target().map(|target| match target { ArtifactTarget::Force(target) => { - vec![FeaturesFor::NormalOrDevOrArtifactTarget(Some(target))] + vec![FeaturesFor::ArtifactDep(target)] } ArtifactTarget::BuildDependencyAssumeTarget => self .requested_targets @@ -843,9 +856,7 @@ .filter_map(|kind| match kind { CompileKind::Host => None, CompileKind::Target(target) => { - Some(FeaturesFor::NormalOrDevOrArtifactTarget( - Some(*target), - )) + Some(FeaturesFor::ArtifactDep(*target)) } }) .collect(), diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/core/resolver/mod.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/core/resolver/mod.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/core/resolver/mod.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/core/resolver/mod.rs 2023-01-10 13:41:19.000000000 +0000 @@ -416,7 +416,7 @@ // global cache which lists sets of packages where, when // activated, the dependency is unresolvable. // - // If any our our frame's dependencies fit in that bucket, + // If any our frame's dependencies fit in that bucket, // aka known unresolvable, then we extend our own set of // conflicting activations with theirs. We can do this // because the set of conflicts we found implies the diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/core/resolver/resolve.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/core/resolver/resolve.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/core/resolver/resolve.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/core/resolver/resolve.rs 2023-01-10 13:41:19.000000000 +0000 @@ -46,25 +46,39 @@ summaries: HashMap, } -/// A version to indicate how a `Cargo.lock` should be serialized. Currently -/// V2 is the default when creating a new lockfile. If a V1 lockfile already -/// exists, it will stay as V1. +/// A version to indicate how a `Cargo.lock` should be serialized. +/// +/// When creating a new lockfile, the version with `#[default]` is used. +/// If an old version of lockfile already exists, it will stay as-is. 
+/// +/// It's important that if a new version is added that this is not updated +/// until *at least* the support for the version is in the stable release of Rust. +/// +/// This resolve version will be used for all new lock files, for example +/// those generated by `cargo update` (update everything) or building after +/// a `cargo new` (where no lock file previously existed). This is also used +/// for *updated* lock files such as when a dependency is added or when a +/// version requirement changes. In this situation Cargo's updating the lock +/// file anyway so it takes the opportunity to bump the lock file version +/// forward. /// /// It's theorized that we can add more here over time to track larger changes /// to the `Cargo.lock` format, but we've yet to see how that strategy pans out. -#[derive(PartialEq, Eq, Clone, Copy, Debug, PartialOrd, Ord)] +#[derive(Default, PartialEq, Eq, Clone, Copy, Debug, PartialOrd, Ord)] pub enum ResolveVersion { /// Historical baseline for when this abstraction was added. V1, /// A more compact format, more amenable to avoiding source-control merge /// conflicts. The `dependencies` arrays are compressed and checksums are /// listed inline. Introduced in 2019 in version 1.38. New lockfiles use - /// V2 by default starting in 1.41. + /// V2 by default from 1.41 to 1.52. V2, /// A format that explicitly lists a `version` at the top of the file as /// well as changing how git dependencies are encoded. Dependencies with /// `branch = "master"` are no longer encoded the same way as those without - /// branch specifiers. + /// branch specifiers. Introduced in 2020 in version 1.47. New lockfiles use + /// V3 by default staring in 1.53. + #[default] V3, } @@ -391,22 +405,3 @@ write!(fmt, "}}") } } - -impl Default for ResolveVersion { - /// The default way to encode new or updated `Cargo.lock` files. - /// - /// It's important that if a new version of `ResolveVersion` is added that - /// this is not updated until *at least* the support for the version is in - /// the stable release of Rust. - /// - /// This resolve version will be used for all new lock files, for example - /// those generated by `cargo update` (update everything) or building after - /// a `cargo new` (where no lock file previously existed). This is also used - /// for *updated* lock files such as when a dependency is added or when a - /// version requirement changes. In this situation Cargo's updating the lock - /// file anyway so it takes the opportunity to bump the lock file version - /// forward. - fn default() -> ResolveVersion { - ResolveVersion::V3 - } -} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/core/source/source_id.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/core/source/source_id.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/core/source/source_id.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/core/source/source_id.rs 2023-01-10 13:41:19.000000000 +0000 @@ -2,7 +2,7 @@ use crate::sources::registry::CRATES_IO_HTTP_INDEX; use crate::sources::{DirectorySource, CRATES_IO_DOMAIN, CRATES_IO_INDEX, CRATES_IO_REGISTRY}; use crate::sources::{GitSource, PathSource, RegistrySource}; -use crate::util::{CanonicalUrl, CargoResult, Config, IntoUrl}; +use crate::util::{config, CanonicalUrl, CargoResult, Config, IntoUrl}; use log::trace; use serde::de; use serde::ser; @@ -39,6 +39,10 @@ /// WARNING: this is not always set for alt-registries when the name is /// not known. name: Option, + /// Name of the alt registry in the `[registries]` table. 
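Both `FsStatus` (earlier in this patch) and `ResolveVersion` drop their hand-written `impl Default` in favour of the derive with a `#[default]` variant, available since Rust 1.62. A self-contained example of the pattern (the enum name here is illustrative, not Cargo's):

    #[allow(dead_code)]
    #[derive(Debug, Default, PartialEq, Eq, Clone, Copy)]
    enum LockfileVersion {
        V1,
        V2,
        /// New and updated lock files use this version.
        #[default]
        V3,
    }

    fn main() {
        assert_eq!(LockfileVersion::default(), LockfileVersion::V3);
    }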
+ /// WARNING: this is not always set for alt-registries when the name is + /// not known. + alt_registry_key: Option, } /// The possible kinds of code source. Along with `SourceIdInner`, this fully defines the @@ -51,6 +55,8 @@ Path, /// A remote registry. Registry, + /// A sparse registry. + SparseRegistry, /// A local filesystem-based registry. LocalRegistry, /// A directory-based registry. @@ -81,6 +87,7 @@ url, precise: None, name: name.map(|n| n.into()), + alt_registry_key: None, }); Ok(source_id) } @@ -95,6 +102,20 @@ SourceId { inner } } + fn remote_source_kind(url: &Url) -> (SourceKind, Url) { + if url.as_str().starts_with("sparse+") { + let url = url + .to_string() + .strip_prefix("sparse+") + .expect("we just found that prefix") + .into_url() + .expect("a valid url without a protocol specifier should still be valid"); + (SourceKind::SparseRegistry, url) + } else { + (SourceKind::Registry, url.to_owned()) + } + } + /// Parses a source URL and returns the corresponding ID. /// /// ## Example @@ -137,8 +158,8 @@ .with_precise(Some("locked".to_string()))) } "sparse" => { - let url = string.into_url()?; - Ok(SourceId::new(SourceKind::Registry, url, None)? + let url = url.into_url()?; + Ok(SourceId::new(SourceKind::SparseRegistry, url, None)? .with_precise(Some("locked".to_string()))) } "path" => { @@ -175,12 +196,14 @@ /// Use [`SourceId::for_alt_registry`] if a name can provided, which /// generates better messages for cargo. pub fn for_registry(url: &Url) -> CargoResult { - SourceId::new(SourceKind::Registry, url.clone(), None) + let (kind, url) = Self::remote_source_kind(url); + SourceId::new(kind, url, None) } /// Creates a `SourceId` from a remote registry URL with given name. pub fn for_alt_registry(url: &Url, name: &str) -> CargoResult { - SourceId::new(SourceKind::Registry, url.clone(), Some(name)) + let (kind, url) = Self::remote_source_kind(url); + SourceId::new(kind, url, Some(name)) } /// Creates a SourceId from a local registry path. @@ -210,24 +233,44 @@ /// Returns the `SourceId` corresponding to the main repository, using the /// sparse HTTP index if allowed. pub fn crates_io_maybe_sparse_http(config: &Config) -> CargoResult { - if config.cli_unstable().sparse_registry { + if Self::crates_io_is_sparse(config)? { config.check_registry_index_not_set()?; let url = CRATES_IO_HTTP_INDEX.into_url().unwrap(); - SourceId::new(SourceKind::Registry, url, Some(CRATES_IO_REGISTRY)) + SourceId::new(SourceKind::SparseRegistry, url, Some(CRATES_IO_REGISTRY)) } else { Self::crates_io(config) } } + /// Returns whether to access crates.io over the sparse protocol. + pub fn crates_io_is_sparse(config: &Config) -> CargoResult { + let proto: Option> = config.get("registries.crates-io.protocol")?; + let is_sparse = match proto.as_ref().map(|v| v.val.as_str()) { + Some("sparse") => true, + Some("git") => false, + Some(unknown) => anyhow::bail!( + "unsupported registry protocol `{unknown}` (defined in {})", + proto.as_ref().unwrap().definition + ), + None => config.cli_unstable().sparse_registry, + }; + Ok(is_sparse) + } + /// Gets the `SourceId` associated with given name of the remote registry. 
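`crates_io_is_sparse` above decides the protocol from the `registries.crates-io.protocol` config key, falling back to the `-Z sparse-registry` flag when the key is absent. Roughly, the decision reduces to the following (error handling simplified, not Cargo's signatures):

    /// Maps the configured protocol string onto "use the sparse index or not".
    fn use_sparse_index(protocol: Option<&str>, z_sparse_registry: bool) -> Result<bool, String> {
        match protocol {
            Some("sparse") => Ok(true),
            Some("git") => Ok(false),
            Some(other) => Err(format!("unsupported registry protocol `{other}`")),
            None => Ok(z_sparse_registry),
        }
    }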
pub fn alt_registry(config: &Config, key: &str) -> CargoResult { + if key == CRATES_IO_REGISTRY { + return Self::crates_io(config); + } let url = config.get_registry_index(key)?; + let (kind, url) = Self::remote_source_kind(&url); Ok(SourceId::wrap(SourceIdInner { - kind: SourceKind::Registry, + kind, canonical_url: CanonicalUrl::new(&url)?, url, precise: None, name: Some(key.to_string()), + alt_registry_key: Some(key.to_string()), })) } @@ -243,7 +286,7 @@ } pub fn display_index(self) -> String { - if self.is_default_registry() { + if self.is_crates_io() { format!("{} index", CRATES_IO_DOMAIN) } else { format!("`{}` index", self.display_registry_name()) @@ -251,7 +294,7 @@ } pub fn display_registry_name(self) -> String { - if self.is_default_registry() { + if self.is_crates_io() { CRATES_IO_REGISTRY.to_string() } else if let Some(name) = &self.inner.name { name.clone() @@ -264,6 +307,13 @@ } } + /// Gets the name of the remote registry as defined in the `[registries]` table. + /// WARNING: alt registries that come from Cargo.lock, or --index will + /// not have a name. + pub fn alt_registry_key(&self) -> Option<&str> { + self.inner.alt_registry_key.as_deref() + } + /// Returns `true` if this source is from a filesystem path. pub fn is_path(self) -> bool { self.inner.kind == SourceKind::Path @@ -282,16 +332,24 @@ pub fn is_registry(self) -> bool { matches!( self.inner.kind, - SourceKind::Registry | SourceKind::LocalRegistry + SourceKind::Registry | SourceKind::SparseRegistry | SourceKind::LocalRegistry ) } + /// Returns `true` if this source is from a sparse registry. + pub fn is_sparse(self) -> bool { + matches!(self.inner.kind, SourceKind::SparseRegistry) + } + /// Returns `true` if this source is a "remote" registry. /// /// "remote" may also mean a file URL to a git index, so it is not /// necessarily "remote". This just means it is not `local-registry`. pub fn is_remote_registry(self) -> bool { - matches!(self.inner.kind, SourceKind::Registry) + matches!( + self.inner.kind, + SourceKind::Registry | SourceKind::SparseRegistry + ) } /// Returns `true` if this source from a Git repository. @@ -315,11 +373,9 @@ }; Ok(Box::new(PathSource::new(&path, self, config))) } - SourceKind::Registry => Ok(Box::new(RegistrySource::remote( - self, - yanked_whitelist, - config, - )?)), + SourceKind::Registry | SourceKind::SparseRegistry => Ok(Box::new( + RegistrySource::remote(self, yanked_whitelist, config)?, + )), SourceKind::LocalRegistry => { let path = match self.inner.url.to_file_path() { Ok(p) => p, @@ -364,13 +420,18 @@ } /// Returns `true` if the remote registry is the standard . - pub fn is_default_registry(self) -> bool { + pub fn is_crates_io(self) -> bool { match self.inner.kind { - SourceKind::Registry => {} + SourceKind::Registry | SourceKind::SparseRegistry => {} _ => return false, } let url = self.inner.url.as_str(); - url == CRATES_IO_INDEX || url == CRATES_IO_HTTP_INDEX + url == CRATES_IO_INDEX + || url == CRATES_IO_HTTP_INDEX + || std::env::var("__CARGO_TEST_CRATES_IO_URL_DO_NOT_USE_THIS") + .as_deref() + .map(|u| u.trim_start_matches("sparse+")) + == Ok(url) } /// Hashes `self`. 
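The new `SourceKind::SparseRegistry` above is chosen purely from the URL prefix: `remote_source_kind` strips `sparse+` and treats everything else as a plain git-index registry. The essence of that split, using a bare string rather than a parsed `Url`:

    #[derive(Debug, PartialEq, Eq)]
    enum RegistryKind {
        Registry,       // git-based index
        SparseRegistry, // HTTP sparse index
    }

    fn remote_source_kind(url: &str) -> (RegistryKind, &str) {
        match url.strip_prefix("sparse+") {
            Some(rest) => (RegistryKind::SparseRegistry, rest),
            None => (RegistryKind::Registry, url),
        }
    }

    fn main() {
        assert_eq!(
            remote_source_kind("sparse+https://index.crates.io/"),
            (RegistryKind::SparseRegistry, "https://index.crates.io/")
        );
    }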
@@ -496,7 +557,9 @@ Ok(()) } SourceKind::Path => write!(f, "{}", url_display(&self.inner.url)), - SourceKind::Registry => write!(f, "registry `{}`", self.display_registry_name()), + SourceKind::Registry | SourceKind::SparseRegistry => { + write!(f, "registry `{}`", self.display_registry_name()) + } SourceKind::LocalRegistry => write!(f, "registry `{}`", url_display(&self.inner.url)), SourceKind::Directory => write!(f, "dir {}", url_display(&self.inner.url)), } @@ -610,6 +673,10 @@ (SourceKind::Registry, _) => Ordering::Less, (_, SourceKind::Registry) => Ordering::Greater, + (SourceKind::SparseRegistry, SourceKind::SparseRegistry) => Ordering::Equal, + (SourceKind::SparseRegistry, _) => Ordering::Less, + (_, SourceKind::SparseRegistry) => Ordering::Greater, + (SourceKind::LocalRegistry, SourceKind::LocalRegistry) => Ordering::Equal, (SourceKind::LocalRegistry, _) => Ordering::Less, (_, SourceKind::LocalRegistry) => Ordering::Greater, @@ -680,7 +747,16 @@ kind: SourceKind::Registry, ref url, .. - } => write!(f, "registry+{}", url), + } => { + write!(f, "registry+{url}") + } + SourceIdInner { + kind: SourceKind::SparseRegistry, + ref url, + .. + } => { + write!(f, "sparse+{url}") + } SourceIdInner { kind: SourceKind::LocalRegistry, ref url, diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/core/summary.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/core/summary.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/core/summary.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/core/summary.rs 2023-01-10 13:41:19.000000000 +0000 @@ -277,6 +277,36 @@ feature ); } + + // dep: cannot be combined with / + if let Some(stripped_dep) = dep_name.strip_prefix("dep:") { + let has_other_dep = explicitly_listed.contains(stripped_dep); + let is_optional = dep_map + .get(stripped_dep) + .iter() + .flat_map(|d| d.iter()) + .any(|d| d.is_optional()); + let extra_help = if *weak || has_other_dep || !is_optional { + // In this case, the user should just remove dep:. + // Note that "hiding" an optional dependency + // wouldn't work with just a single `dep:foo?/bar` + // because there would not be any way to enable + // `foo`. + String::new() + } else { + format!( + "\nIf the intent is to avoid creating an implicit feature \ + `{stripped_dep}` for an optional dependency, \ + then consider replacing this with two values:\n \ + \"dep:{stripped_dep}\", \"{stripped_dep}/{dep_feature}\"" + ) + }; + bail!( + "feature `{feature}` includes `{fv}` with both `dep:` and `/`\n\ + To fix this, remove the `dep:` prefix.{extra_help}" + ) + } + // Validation of the feature name will be performed in the resolver. 
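The new check in `summary.rs` above rejects feature values that combine the `dep:` prefix with a `/` dependency-feature, e.g. "dep:serde/derive". A simplified form of the rule, string handling only and without the optional-dependency hinting in the real error message:

    /// Rejects values such as "dep:foo/bar"; "dep:foo" and "foo/bar" stay valid.
    fn validate_feature_value(fv: &str) -> Result<(), String> {
        if let Some((dep_name, dep_feature)) = fv.split_once('/') {
            if let Some(stripped) = dep_name.strip_prefix("dep:") {
                return Err(format!(
                    "feature value `{fv}` uses both `dep:` and `/`; \
                     use `dep:{stripped}` and `{stripped}/{dep_feature}` as two separate values \
                     or drop the `dep:` prefix"
                ));
            }
        }
        Ok(())
    }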
if !is_any_dep { bail!( diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/ops/cargo_add/crate_spec.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/ops/cargo_add/crate_spec.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/ops/cargo_add/crate_spec.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/ops/cargo_add/crate_spec.rs 2023-01-10 13:41:19.000000000 +0000 @@ -3,7 +3,7 @@ use anyhow::Context as _; use super::Dependency; -use super::RegistrySource; +use crate::util::toml_mut::dependency::RegistrySource; use crate::util::validate_package_name; use crate::CargoResult; diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/ops/cargo_add/dependency.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/ops/cargo_add/dependency.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/ops/cargo_add/dependency.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/ops/cargo_add/dependency.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,1127 +0,0 @@ -use std::fmt::{Display, Formatter}; -use std::path::{Path, PathBuf}; - -use indexmap::IndexSet; -use toml_edit::KeyMut; - -use super::manifest::str_or_1_len_table; -use crate::core::GitReference; -use crate::core::SourceId; -use crate::core::Summary; -use crate::CargoResult; -use crate::Config; - -/// A dependency handled by Cargo -/// -/// `None` means the field will be blank in TOML -#[derive(Debug, PartialEq, Eq, Clone)] -#[non_exhaustive] -pub struct Dependency { - /// The name of the dependency (as it is set in its `Cargo.toml` and known to crates.io) - pub name: String, - /// Whether the dependency is opted-in with a feature flag - pub optional: Option, - - /// List of features to add (or None to keep features unchanged). - pub features: Option>, - /// Whether default features are enabled - pub default_features: Option, - /// List of features inherited from a workspace dependency - pub inherited_features: Option>, - - /// Where the dependency comes from - pub source: Option, - /// Non-default registry - pub registry: Option, - - /// If the dependency is renamed, this is the new name for the dependency - /// as a string. None if it is not renamed. 
- pub rename: Option, -} - -impl Dependency { - /// Create a new dependency with a name - pub fn new(name: &str) -> Self { - Self { - name: name.into(), - optional: None, - features: None, - default_features: None, - inherited_features: None, - source: None, - registry: None, - rename: None, - } - } - - /// Set dependency to a given version - pub fn set_source(mut self, source: impl Into) -> Self { - self.source = Some(source.into()); - self - } - - /// Remove the existing version requirement - pub fn clear_version(mut self) -> Self { - match &mut self.source { - Some(Source::Registry(_)) => { - self.source = None; - } - Some(Source::Path(path)) => { - path.version = None; - } - Some(Source::Git(git)) => { - git.version = None; - } - Some(Source::Workspace(_workspace)) => {} - None => {} - } - self - } - - /// Set whether the dependency is optional - #[allow(dead_code)] - pub fn set_optional(mut self, opt: bool) -> Self { - self.optional = Some(opt); - self - } - - /// Set features as an array of string (does some basic parsing) - #[allow(dead_code)] - pub fn set_features(mut self, features: IndexSet) -> Self { - self.features = Some(features); - self - } - /// Set features as an array of string (does some basic parsing) - pub fn extend_features(mut self, features: impl IntoIterator) -> Self { - self.features - .get_or_insert_with(Default::default) - .extend(features); - self - } - - /// Set the value of default-features for the dependency - #[allow(dead_code)] - pub fn set_default_features(mut self, default_features: bool) -> Self { - self.default_features = Some(default_features); - self - } - - /// Set the alias for the dependency - pub fn set_rename(mut self, rename: &str) -> Self { - self.rename = Some(rename.into()); - self - } - - /// Set the value of registry for the dependency - pub fn set_registry(mut self, registry: impl Into) -> Self { - self.registry = Some(registry.into()); - self - } - - /// Set features as an array of string (does some basic parsing) - pub fn set_inherited_features(mut self, features: IndexSet) -> Self { - self.inherited_features = Some(features); - self - } - - /// Get the dependency source - pub fn source(&self) -> Option<&Source> { - self.source.as_ref() - } - - /// Get version of dependency - pub fn version(&self) -> Option<&str> { - match self.source()? 
{ - Source::Registry(src) => Some(src.version.as_str()), - Source::Path(src) => src.version.as_deref(), - Source::Git(src) => src.version.as_deref(), - Source::Workspace(_) => None, - } - } - - /// Get registry of the dependency - pub fn registry(&self) -> Option<&str> { - self.registry.as_deref() - } - - /// Get the alias for the dependency (if any) - pub fn rename(&self) -> Option<&str> { - self.rename.as_deref() - } - - /// Whether default features are activated - pub fn default_features(&self) -> Option { - self.default_features - } - - /// Get whether the dep is optional - pub fn optional(&self) -> Option { - self.optional - } - - /// Get the SourceID for this dependency - pub fn source_id(&self, config: &Config) -> CargoResult> { - match &self.source.as_ref() { - Some(Source::Registry(_)) | None => { - if let Some(r) = self.registry() { - let source_id = SourceId::alt_registry(config, r)?; - Ok(MaybeWorkspace::Other(source_id)) - } else { - let source_id = SourceId::crates_io(config)?; - Ok(MaybeWorkspace::Other(source_id)) - } - } - Some(Source::Path(source)) => Ok(MaybeWorkspace::Other(source.source_id()?)), - Some(Source::Git(source)) => Ok(MaybeWorkspace::Other(source.source_id()?)), - Some(Source::Workspace(workspace)) => Ok(MaybeWorkspace::Workspace(workspace.clone())), - } - } - - /// Query to find this dependency - pub fn query( - &self, - config: &Config, - ) -> CargoResult> { - let source_id = self.source_id(config)?; - match source_id { - MaybeWorkspace::Workspace(workspace) => Ok(MaybeWorkspace::Workspace(workspace)), - MaybeWorkspace::Other(source_id) => Ok(MaybeWorkspace::Other( - crate::core::dependency::Dependency::parse( - self.name.as_str(), - self.version(), - source_id, - )?, - )), - } - } -} - -pub enum MaybeWorkspace { - Workspace(WorkspaceSource), - Other(T), -} - -impl Dependency { - /// Create a dependency from a TOML table entry - pub fn from_toml(crate_root: &Path, key: &str, item: &toml_edit::Item) -> CargoResult { - if let Some(version) = item.as_str() { - let dep = Self::new(key).set_source(RegistrySource::new(version)); - Ok(dep) - } else if let Some(table) = item.as_table_like() { - let (name, rename) = if let Some(value) = table.get("package") { - ( - value - .as_str() - .ok_or_else(|| invalid_type(key, "package", value.type_name(), "string"))? 
- .to_owned(), - Some(key.to_owned()), - ) - } else { - (key.to_owned(), None) - }; - - let source: Source = - if let Some(git) = table.get("git") { - let mut src = GitSource::new( - git.as_str() - .ok_or_else(|| invalid_type(key, "git", git.type_name(), "string"))?, - ); - if let Some(value) = table.get("branch") { - src = src.set_branch(value.as_str().ok_or_else(|| { - invalid_type(key, "branch", value.type_name(), "string") - })?); - } - if let Some(value) = table.get("tag") { - src = src.set_tag(value.as_str().ok_or_else(|| { - invalid_type(key, "tag", value.type_name(), "string") - })?); - } - if let Some(value) = table.get("rev") { - src = src.set_rev(value.as_str().ok_or_else(|| { - invalid_type(key, "rev", value.type_name(), "string") - })?); - } - if let Some(value) = table.get("version") { - src = src.set_version(value.as_str().ok_or_else(|| { - invalid_type(key, "version", value.type_name(), "string") - })?); - } - src.into() - } else if let Some(path) = table.get("path") { - let path = crate_root - .join(path.as_str().ok_or_else(|| { - invalid_type(key, "path", path.type_name(), "string") - })?); - let mut src = PathSource::new(path); - if let Some(value) = table.get("version") { - src = src.set_version(value.as_str().ok_or_else(|| { - invalid_type(key, "version", value.type_name(), "string") - })?); - } - src.into() - } else if let Some(version) = table.get("version") { - let src = RegistrySource::new(version.as_str().ok_or_else(|| { - invalid_type(key, "version", version.type_name(), "string") - })?); - src.into() - } else if let Some(workspace) = table.get("workspace") { - let workspace_bool = workspace.as_bool().ok_or_else(|| { - invalid_type(key, "workspace", workspace.type_name(), "bool") - })?; - if !workspace_bool { - anyhow::bail!("`{key}.workspace = false` is unsupported") - } - let src = WorkspaceSource::new(); - src.into() - } else { - anyhow::bail!("Unrecognized dependency source for `{key}`"); - }; - let registry = if let Some(value) = table.get("registry") { - Some( - value - .as_str() - .ok_or_else(|| invalid_type(key, "registry", value.type_name(), "string"))? - .to_owned(), - ) - } else { - None - }; - - let default_features = table.get("default-features").and_then(|v| v.as_bool()); - if table.contains_key("default_features") { - anyhow::bail!("Use of `default_features` in `{key}` is unsupported, please switch to `default-features`"); - } - - let features = if let Some(value) = table.get("features") { - Some( - value - .as_array() - .ok_or_else(|| invalid_type(key, "features", value.type_name(), "array"))? - .iter() - .map(|v| { - v.as_str().map(|s| s.to_owned()).ok_or_else(|| { - invalid_type(key, "features", v.type_name(), "string") - }) - }) - .collect::>>()?, - ) - } else { - None - }; - - let optional = table.get("optional").and_then(|v| v.as_bool()); - - let dep = Self { - name, - rename, - source: Some(source), - registry, - default_features, - features, - optional, - inherited_features: None, - }; - Ok(dep) - } else { - anyhow::bail!("Unrecognized` dependency entry format for `{key}"); - } - } - - /// Get the dependency name as defined in the manifest, - /// that is, either the alias (rename field if Some), - /// or the official package name (name field). - pub fn toml_key(&self) -> &str { - self.rename().unwrap_or(&self.name) - } - - /// Convert dependency to TOML - /// - /// Returns a tuple with the dependency's name and either the version as a `String` - /// or the path/git repository as an `InlineTable`. 
- /// (If the dependency is set as `optional` or `default-features` is set to `false`, - /// an `InlineTable` is returned in any case.) - /// - /// # Panic - /// - /// Panics if the path is relative - pub fn to_toml(&self, crate_root: &Path) -> toml_edit::Item { - assert!( - crate_root.is_absolute(), - "Absolute path needed, got: {}", - crate_root.display() - ); - let table: toml_edit::Item = match ( - self.optional.unwrap_or(false), - self.features.as_ref(), - self.default_features.unwrap_or(true), - self.source.as_ref(), - self.registry.as_ref(), - self.rename.as_ref(), - ) { - // Extra short when version flag only - ( - false, - None, - true, - Some(Source::Registry(RegistrySource { version: v })), - None, - None, - ) => toml_edit::value(v), - (false, None, true, Some(Source::Workspace(WorkspaceSource {})), None, None) => { - let mut table = toml_edit::InlineTable::default(); - table.set_dotted(true); - table.insert("workspace", true.into()); - toml_edit::value(toml_edit::Value::InlineTable(table)) - } - // Other cases are represented as an inline table - (_, _, _, _, _, _) => { - let mut table = toml_edit::InlineTable::default(); - - match &self.source { - Some(Source::Registry(src)) => { - table.insert("version", src.version.as_str().into()); - } - Some(Source::Path(src)) => { - let relpath = path_field(crate_root, &src.path); - if let Some(r) = src.version.as_deref() { - table.insert("version", r.into()); - } - table.insert("path", relpath.into()); - } - Some(Source::Git(src)) => { - table.insert("git", src.git.as_str().into()); - if let Some(branch) = src.branch.as_deref() { - table.insert("branch", branch.into()); - } - if let Some(tag) = src.tag.as_deref() { - table.insert("tag", tag.into()); - } - if let Some(rev) = src.rev.as_deref() { - table.insert("rev", rev.into()); - } - if let Some(r) = src.version.as_deref() { - table.insert("version", r.into()); - } - } - Some(Source::Workspace(_)) => { - table.insert("workspace", true.into()); - } - None => {} - } - if table.contains_key("version") { - if let Some(r) = self.registry.as_deref() { - table.insert("registry", r.into()); - } - } - - if self.rename.is_some() { - table.insert("package", self.name.as_str().into()); - } - if let Some(v) = self.default_features { - table.insert("default-features", v.into()); - } - if let Some(features) = self.features.as_ref() { - let features: toml_edit::Value = features.iter().cloned().collect(); - table.insert("features", features); - } - if let Some(v) = self.optional { - table.insert("optional", v.into()); - } - - toml_edit::value(toml_edit::Value::InlineTable(table)) - } - }; - - table - } - - /// Modify existing entry to match this dependency - pub fn update_toml<'k>( - &self, - crate_root: &Path, - key: &mut KeyMut<'k>, - item: &mut toml_edit::Item, - ) { - if str_or_1_len_table(item) { - // Nothing to preserve - *item = self.to_toml(crate_root); - key.fmt(); - } else if let Some(table) = item.as_table_like_mut() { - match &self.source { - Some(Source::Registry(src)) => { - table.insert("version", toml_edit::value(src.version.as_str())); - - for key in ["path", "git", "branch", "tag", "rev", "workspace"] { - table.remove(key); - } - } - Some(Source::Path(src)) => { - let relpath = path_field(crate_root, &src.path); - table.insert("path", toml_edit::value(relpath)); - if let Some(r) = src.version.as_deref() { - table.insert("version", toml_edit::value(r)); - } else { - table.remove("version"); - } - - for key in ["git", "branch", "tag", "rev", "workspace"] { - table.remove(key); - } - } - 
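// ===========================================================================
// Illustrative sketch, not part of this patch: driving `update_toml` above.
// It reuses the types from this module (`Dependency`, `RegistrySource`) and
// the `toml_edit` calls already shown in `LocalManifest::insert_into_table`;
// the manifest snippet and the "1.0" requirement are hypothetical.
#[test]
fn update_toml_registry_source_sketch() {
    let crate_root = std::env::current_dir().unwrap();
    // An existing path dependency with an extra key, so it is not rewritten
    // wholesale by the `str_or_1_len_table` shortcut above.
    let mut doc: toml_edit::Document =
        r#"dep = { path = "../bar", features = ["extra"] }"#.parse().unwrap();
    let (mut key, item) = doc
        .as_item_mut()
        .as_table_like_mut()
        .unwrap()
        .get_key_value_mut("dep")
        .unwrap();

    // Re-point the entry at a registry requirement.
    let dep = Dependency::new("dep").set_source(RegistrySource::new("1.0"));
    dep.update_toml(&crate_root, &mut key, item);

    // The `Source::Registry` arm inserts `version` and strips the path/git
    // keys; `features` is dropped because the new dependency declares none.
    let table = item.as_inline_table().unwrap();
    assert_eq!(table.get("version").and_then(|v| v.as_str()), Some("1.0"));
    assert!(table.get("path").is_none());
}
// ===========================================================================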
Some(Source::Git(src)) => { - table.insert("git", toml_edit::value(src.git.as_str())); - if let Some(branch) = src.branch.as_deref() { - table.insert("branch", toml_edit::value(branch)); - } else { - table.remove("branch"); - } - if let Some(tag) = src.tag.as_deref() { - table.insert("tag", toml_edit::value(tag)); - } else { - table.remove("tag"); - } - if let Some(rev) = src.rev.as_deref() { - table.insert("rev", toml_edit::value(rev)); - } else { - table.remove("rev"); - } - if let Some(r) = src.version.as_deref() { - table.insert("version", toml_edit::value(r)); - } else { - table.remove("version"); - } - - for key in ["path", "workspace"] { - table.remove(key); - } - } - Some(Source::Workspace(_)) => { - table.insert("workspace", toml_edit::value(true)); - table.set_dotted(true); - key.fmt(); - for key in [ - "version", - "registry", - "registry-index", - "path", - "git", - "branch", - "tag", - "rev", - "package", - "default-features", - ] { - table.remove(key); - } - } - None => {} - } - if table.contains_key("version") { - if let Some(r) = self.registry.as_deref() { - table.insert("registry", toml_edit::value(r)); - } else { - table.remove("registry"); - } - } else { - table.remove("registry"); - } - - if self.rename.is_some() { - table.insert("package", toml_edit::value(self.name.as_str())); - } - match self.default_features { - Some(v) => { - table.insert("default-features", toml_edit::value(v)); - } - None => { - table.remove("default-features"); - } - } - if let Some(new_features) = self.features.as_ref() { - let mut features = table - .get("features") - .and_then(|i| i.as_value()) - .and_then(|v| v.as_array()) - .and_then(|a| { - a.iter() - .map(|v| v.as_str()) - .collect::>>() - }) - .unwrap_or_default(); - features.extend(new_features.iter().map(|s| s.as_str())); - let features = toml_edit::value(features.into_iter().collect::()); - table.set_dotted(false); - table.insert("features", features); - } else { - table.remove("features"); - } - match self.optional { - Some(v) => { - table.set_dotted(false); - table.insert("optional", toml_edit::value(v)); - } - None => { - table.remove("optional"); - } - } - - table.fmt(); - } else { - unreachable!("Invalid dependency type: {}", item.type_name()); - } - } -} - -fn invalid_type(dep: &str, key: &str, actual: &str, expected: &str) -> anyhow::Error { - anyhow::format_err!("Found {actual} for {key} when {expected} was expected for {dep}") -} - -impl std::fmt::Display for Dependency { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - if let Some(source) = self.source() { - write!(f, "{}@{}", self.name, source) - } else { - self.toml_key().fmt(f) - } - } -} - -impl<'s> From<&'s Summary> for Dependency { - fn from(other: &'s Summary) -> Self { - let source: Source = if let Some(path) = other.source_id().local_path() { - PathSource::new(path) - .set_version(other.version().to_string()) - .into() - } else if let Some(git_ref) = other.source_id().git_reference() { - let mut src = GitSource::new(other.source_id().url().to_string()) - .set_version(other.version().to_string()); - match git_ref { - GitReference::Branch(branch) => src = src.set_branch(branch), - GitReference::Tag(tag) => src = src.set_tag(tag), - GitReference::Rev(rev) => src = src.set_rev(rev), - GitReference::DefaultBranch => {} - } - src.into() - } else { - RegistrySource::new(other.version().to_string()).into() - }; - Dependency::new(other.name().as_str()).set_source(source) - } -} - -impl From for Dependency { - fn from(other: Summary) -> Self { - 
(&other).into() - } -} - -fn path_field(crate_root: &Path, abs_path: &Path) -> String { - let relpath = pathdiff::diff_paths(abs_path, crate_root).expect("both paths are absolute"); - let relpath = relpath.to_str().unwrap().replace('\\', "/"); - relpath -} - -/// Primary location of a dependency -#[derive(Debug, Hash, PartialEq, Eq, Clone)] -pub enum Source { - /// Dependency from a registry - Registry(RegistrySource), - /// Dependency from a local path - Path(PathSource), - /// Dependency from a git repo - Git(GitSource), - /// Dependency from a workspace - Workspace(WorkspaceSource), -} - -impl Source { - /// Access the registry source, if present - pub fn as_registry(&self) -> Option<&RegistrySource> { - match self { - Self::Registry(src) => Some(src), - _ => None, - } - } - - /// Access the path source, if present - #[allow(dead_code)] - pub fn as_path(&self) -> Option<&PathSource> { - match self { - Self::Path(src) => Some(src), - _ => None, - } - } - - /// Access the git source, if present - #[allow(dead_code)] - pub fn as_git(&self) -> Option<&GitSource> { - match self { - Self::Git(src) => Some(src), - _ => None, - } - } - - /// Access the workspace source, if present - #[allow(dead_code)] - pub fn as_workspace(&self) -> Option<&WorkspaceSource> { - match self { - Self::Workspace(src) => Some(src), - _ => None, - } - } -} - -impl std::fmt::Display for Source { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - Self::Registry(src) => src.fmt(f), - Self::Path(src) => src.fmt(f), - Self::Git(src) => src.fmt(f), - Self::Workspace(src) => src.fmt(f), - } - } -} - -impl<'s> From<&'s Source> for Source { - fn from(inner: &'s Source) -> Self { - inner.clone() - } -} - -impl From for Source { - fn from(inner: RegistrySource) -> Self { - Self::Registry(inner) - } -} - -impl From for Source { - fn from(inner: PathSource) -> Self { - Self::Path(inner) - } -} - -impl From for Source { - fn from(inner: GitSource) -> Self { - Self::Git(inner) - } -} - -impl From for Source { - fn from(inner: WorkspaceSource) -> Self { - Self::Workspace(inner) - } -} - -/// Dependency from a registry -#[derive(Debug, Hash, PartialEq, Eq, Clone)] -#[non_exhaustive] -pub struct RegistrySource { - /// Version requirement - pub version: String, -} - -impl RegistrySource { - /// Specify dependency by version requirement - pub fn new(version: impl AsRef) -> Self { - // versions might have semver metadata appended which we do not want to - // store in the cargo toml files. This would cause a warning upon compilation - // ("version requirement […] includes semver metadata which will be ignored") - let version = version.as_ref().split('+').next().unwrap(); - Self { - version: version.to_owned(), - } - } -} - -impl std::fmt::Display for RegistrySource { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - self.version.fmt(f) - } -} - -/// Dependency from a local path -#[derive(Debug, Hash, PartialEq, Eq, Clone)] -#[non_exhaustive] -pub struct PathSource { - /// Local, absolute path - pub path: PathBuf, - /// Version requirement for when published - pub version: Option, -} - -impl PathSource { - /// Specify dependency from a path - pub fn new(path: impl Into) -> Self { - Self { - path: path.into(), - version: None, - } - } - - /// Set an optional version requirement - pub fn set_version(mut self, version: impl AsRef) -> Self { - // versions might have semver metadata appended which we do not want to - // store in the cargo toml files. 
This would cause a warning upon compilation - // ("version requirement […] includes semver metadata which will be ignored") - let version = version.as_ref().split('+').next().unwrap(); - self.version = Some(version.to_owned()); - self - } - - /// Get the SourceID for this dependency - pub fn source_id(&self) -> CargoResult { - SourceId::for_path(&self.path) - } -} - -impl std::fmt::Display for PathSource { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - self.path.display().fmt(f) - } -} - -/// Dependency from a git repo -#[derive(Debug, Hash, PartialEq, Eq, Clone)] -#[non_exhaustive] -pub struct GitSource { - /// Repo URL - pub git: String, - /// Select specific branch - pub branch: Option, - /// Select specific tag - pub tag: Option, - /// Select specific rev - pub rev: Option, - /// Version requirement for when published - pub version: Option, -} - -impl GitSource { - /// Specify dependency from a git repo - pub fn new(git: impl Into) -> Self { - Self { - git: git.into(), - branch: None, - tag: None, - rev: None, - version: None, - } - } - - /// Specify an optional branch - pub fn set_branch(mut self, branch: impl Into) -> Self { - self.branch = Some(branch.into()); - self.tag = None; - self.rev = None; - self - } - - /// Specify an optional tag - pub fn set_tag(mut self, tag: impl Into) -> Self { - self.branch = None; - self.tag = Some(tag.into()); - self.rev = None; - self - } - - /// Specify an optional rev - pub fn set_rev(mut self, rev: impl Into) -> Self { - self.branch = None; - self.tag = None; - self.rev = Some(rev.into()); - self - } - - /// Get the SourceID for this dependency - pub fn source_id(&self) -> CargoResult { - let git_url = self.git.parse::()?; - let git_ref = self.git_ref(); - SourceId::for_git(&git_url, git_ref) - } - - fn git_ref(&self) -> GitReference { - match ( - self.branch.as_deref(), - self.tag.as_deref(), - self.rev.as_deref(), - ) { - (Some(branch), _, _) => GitReference::Branch(branch.to_owned()), - (_, Some(tag), _) => GitReference::Tag(tag.to_owned()), - (_, _, Some(rev)) => GitReference::Rev(rev.to_owned()), - _ => GitReference::DefaultBranch, - } - } - - /// Set an optional version requirement - pub fn set_version(mut self, version: impl AsRef) -> Self { - // versions might have semver metadata appended which we do not want to - // store in the cargo toml files. 
This would cause a warning upon compilation - // ("version requirement […] includes semver metadata which will be ignored") - let version = version.as_ref().split('+').next().unwrap(); - self.version = Some(version.to_owned()); - self - } -} - -impl std::fmt::Display for GitSource { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - let git_ref = self.git_ref(); - if let Some(pretty_ref) = git_ref.pretty_ref() { - write!(f, "{}?{}", self.git, pretty_ref) - } else { - write!(f, "{}", self.git) - } - } -} - -/// Dependency from a workspace -#[derive(Debug, Hash, PartialEq, Eq, Clone)] -#[non_exhaustive] -pub struct WorkspaceSource; - -impl WorkspaceSource { - pub fn new() -> Self { - Self - } -} - -impl Display for WorkspaceSource { - fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { - "workspace".fmt(f) - } -} - -#[cfg(test)] -mod tests { - use std::path::Path; - - use crate::ops::cargo_add::manifest::LocalManifest; - use cargo_util::paths; - - use super::*; - - #[test] - fn to_toml_simple_dep() { - let crate_root = - paths::normalize_path(&std::env::current_dir().unwrap().join(Path::new("/"))); - let dep = Dependency::new("dep").set_source(RegistrySource::new("1.0")); - let key = dep.toml_key(); - let item = dep.to_toml(&crate_root); - - assert_eq!(key, "dep".to_owned()); - - verify_roundtrip(&crate_root, key, &item); - } - - #[test] - fn to_toml_simple_dep_with_version() { - let crate_root = - paths::normalize_path(&std::env::current_dir().unwrap().join(Path::new("/"))); - let dep = Dependency::new("dep").set_source(RegistrySource::new("1.0")); - let key = dep.toml_key(); - let item = dep.to_toml(&crate_root); - - assert_eq!(key, "dep".to_owned()); - assert_eq!(item.as_str(), Some("1.0")); - - verify_roundtrip(&crate_root, key, &item); - } - - #[test] - fn to_toml_optional_dep() { - let crate_root = - paths::normalize_path(&std::env::current_dir().unwrap().join(Path::new("/"))); - let dep = Dependency::new("dep") - .set_source(RegistrySource::new("1.0")) - .set_optional(true); - let key = dep.toml_key(); - let item = dep.to_toml(&crate_root); - - assert_eq!(key, "dep".to_owned()); - assert!(item.is_inline_table()); - - let dep = item.as_inline_table().unwrap(); - assert_eq!(dep.get("optional").unwrap().as_bool(), Some(true)); - - verify_roundtrip(&crate_root, key, &item); - } - - #[test] - fn to_toml_dep_without_default_features() { - let crate_root = - paths::normalize_path(&std::env::current_dir().unwrap().join(Path::new("/"))); - let dep = Dependency::new("dep") - .set_source(RegistrySource::new("1.0")) - .set_default_features(false); - let key = dep.toml_key(); - let item = dep.to_toml(&crate_root); - - assert_eq!(key, "dep".to_owned()); - assert!(item.is_inline_table()); - - let dep = item.as_inline_table().unwrap(); - assert_eq!(dep.get("default-features").unwrap().as_bool(), Some(false)); - - verify_roundtrip(&crate_root, key, &item); - } - - #[test] - fn to_toml_dep_with_path_source() { - let root = paths::normalize_path(&std::env::current_dir().unwrap().join(Path::new("/"))); - let crate_root = root.join("foo"); - let dep = Dependency::new("dep").set_source(PathSource::new(root.join("bar"))); - let key = dep.toml_key(); - let item = dep.to_toml(&crate_root); - - assert_eq!(key, "dep".to_owned()); - assert!(item.is_inline_table()); - - let dep = item.as_inline_table().unwrap(); - assert_eq!(dep.get("path").unwrap().as_str(), Some("../bar")); - - verify_roundtrip(&crate_root, key, &item); - } - - #[test] - fn to_toml_dep_with_git_source() { - let 
crate_root = - paths::normalize_path(&std::env::current_dir().unwrap().join(Path::new("/"))); - let dep = Dependency::new("dep").set_source(GitSource::new("https://foor/bar.git")); - let key = dep.toml_key(); - let item = dep.to_toml(&crate_root); - - assert_eq!(key, "dep".to_owned()); - assert!(item.is_inline_table()); - - let dep = item.as_inline_table().unwrap(); - assert_eq!( - dep.get("git").unwrap().as_str(), - Some("https://foor/bar.git") - ); - - verify_roundtrip(&crate_root, key, &item); - } - - #[test] - fn to_toml_renamed_dep() { - let crate_root = - paths::normalize_path(&std::env::current_dir().unwrap().join(Path::new("/"))); - let dep = Dependency::new("dep") - .set_source(RegistrySource::new("1.0")) - .set_rename("d"); - let key = dep.toml_key(); - let item = dep.to_toml(&crate_root); - - assert_eq!(key, "d".to_owned()); - assert!(item.is_inline_table()); - - let dep = item.as_inline_table().unwrap(); - assert_eq!(dep.get("package").unwrap().as_str(), Some("dep")); - - verify_roundtrip(&crate_root, key, &item); - } - - #[test] - fn to_toml_dep_from_alt_registry() { - let crate_root = - paths::normalize_path(&std::env::current_dir().unwrap().join(Path::new("/"))); - let dep = Dependency::new("dep") - .set_source(RegistrySource::new("1.0")) - .set_registry("alternative"); - let key = dep.toml_key(); - let item = dep.to_toml(&crate_root); - - assert_eq!(key, "dep".to_owned()); - assert!(item.is_inline_table()); - - let dep = item.as_inline_table().unwrap(); - assert_eq!(dep.get("registry").unwrap().as_str(), Some("alternative")); - - verify_roundtrip(&crate_root, key, &item); - } - - #[test] - fn to_toml_complex_dep() { - let crate_root = - paths::normalize_path(&std::env::current_dir().unwrap().join(Path::new("/"))); - let dep = Dependency::new("dep") - .set_source(RegistrySource::new("1.0")) - .set_default_features(false) - .set_rename("d"); - let key = dep.toml_key(); - let item = dep.to_toml(&crate_root); - - assert_eq!(key, "d".to_owned()); - assert!(item.is_inline_table()); - - let dep = item.as_inline_table().unwrap(); - assert_eq!(dep.get("package").unwrap().as_str(), Some("dep")); - assert_eq!(dep.get("version").unwrap().as_str(), Some("1.0")); - assert_eq!(dep.get("default-features").unwrap().as_bool(), Some(false)); - - verify_roundtrip(&crate_root, key, &item); - } - - #[test] - fn paths_with_forward_slashes_are_left_as_is() { - let crate_root = - paths::normalize_path(&std::env::current_dir().unwrap().join(Path::new("/"))); - let path = crate_root.join("sibling/crate"); - let relpath = "sibling/crate"; - let dep = Dependency::new("dep").set_source(PathSource::new(path)); - let key = dep.toml_key(); - let item = dep.to_toml(&crate_root); - - let table = item.as_inline_table().unwrap(); - let got = table.get("path").unwrap().as_str().unwrap(); - assert_eq!(got, relpath); - - verify_roundtrip(&crate_root, key, &item); - } - - #[test] - fn overwrite_with_workspace_source_fmt_key() { - let crate_root = - paths::normalize_path(&std::env::current_dir().unwrap().join(Path::new("./"))); - let toml = "dep = \"1.0\"\n"; - let manifest = toml.parse().unwrap(); - let mut local = LocalManifest { - path: crate_root.clone(), - manifest, - }; - assert_eq!(local.manifest.to_string(), toml); - for (key, item) in local.data.clone().iter() { - let dep = Dependency::from_toml(&crate_root, key, item).unwrap(); - let dep = dep.set_source(WorkspaceSource::new()); - local.insert_into_table(&vec![], &dep).unwrap(); - assert_eq!(local.data.to_string(), "dep.workspace = true\n"); - } - } - - 
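// ===========================================================================
// Illustrative sketch, not part of this patch: the parsing direction of the
// round-trip exercised above.  `Dependency::from_toml` accepts the same table
// shape that `to_toml` emits; the git URL, tag and `package` rename are
// hypothetical values.
#[test]
fn from_toml_git_entry_sketch() {
    let crate_root = std::env::current_dir().unwrap();
    let doc: toml_edit::Document =
        r#"dep = { git = "https://example.com/dep.git", tag = "v1.0.0", package = "dep-impl" }"#
            .parse()
            .unwrap();
    let item = doc.as_table().get("dep").unwrap();

    let dep = Dependency::from_toml(&crate_root, "dep", item).unwrap();
    // `package = "dep-impl"` makes "dep" the alias and "dep-impl" the real name.
    assert_eq!(dep.toml_key(), "dep");
    assert_eq!(dep.name, "dep-impl");
    match dep.source() {
        Some(Source::Git(git)) => assert_eq!(git.tag.as_deref(), Some("v1.0.0")),
        other => panic!("expected a git source, got {other:?}"),
    }
}
// ===========================================================================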
#[test] - #[cfg(windows)] - fn normalise_windows_style_paths() { - let crate_root = - paths::normalize_path(&std::env::current_dir().unwrap().join(Path::new("/"))); - let original = crate_root.join(r"sibling\crate"); - let should_be = "sibling/crate"; - let dep = Dependency::new("dep").set_source(PathSource::new(original)); - let key = dep.toml_key(); - let item = dep.to_toml(&crate_root); - - let table = item.as_inline_table().unwrap(); - let got = table.get("path").unwrap().as_str().unwrap(); - assert_eq!(got, should_be); - - verify_roundtrip(&crate_root, key, &item); - } - - #[track_caller] - fn verify_roundtrip(crate_root: &Path, key: &str, item: &toml_edit::Item) { - let roundtrip = Dependency::from_toml(crate_root, key, item).unwrap(); - let round_key = roundtrip.toml_key(); - let round_item = roundtrip.to_toml(crate_root); - assert_eq!(key, round_key); - assert_eq!(item.to_string(), round_item.to_string()); - } -} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/ops/cargo_add/manifest.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/ops/cargo_add/manifest.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/ops/cargo_add/manifest.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/ops/cargo_add/manifest.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,516 +0,0 @@ -use std::ops::{Deref, DerefMut}; -use std::path::{Path, PathBuf}; -use std::str; - -use anyhow::Context as _; - -use super::dependency::Dependency; -use crate::core::dependency::DepKind; -use crate::core::FeatureValue; -use crate::util::interning::InternedString; -use crate::CargoResult; - -/// Dependency table to add dep to -#[derive(Clone, Debug, PartialEq, Eq)] -pub struct DepTable { - kind: DepKind, - target: Option, -} - -impl DepTable { - const KINDS: &'static [Self] = &[ - Self::new().set_kind(DepKind::Normal), - Self::new().set_kind(DepKind::Development), - Self::new().set_kind(DepKind::Build), - ]; - - /// Reference to a Dependency Table - pub const fn new() -> Self { - Self { - kind: DepKind::Normal, - target: None, - } - } - - /// Choose the type of dependency - pub const fn set_kind(mut self, kind: DepKind) -> Self { - self.kind = kind; - self - } - - /// Choose the platform for the dependency - pub fn set_target(mut self, target: impl Into) -> Self { - self.target = Some(target.into()); - self - } - - /// Type of dependency - pub fn kind(&self) -> DepKind { - self.kind - } - - /// Platform for the dependency - pub fn target(&self) -> Option<&str> { - self.target.as_deref() - } - - /// Keys to the table - pub fn to_table(&self) -> Vec<&str> { - if let Some(target) = &self.target { - vec!["target", target, self.kind_table()] - } else { - vec![self.kind_table()] - } - } - - fn kind_table(&self) -> &str { - match self.kind { - DepKind::Normal => "dependencies", - DepKind::Development => "dev-dependencies", - DepKind::Build => "build-dependencies", - } - } -} - -impl Default for DepTable { - fn default() -> Self { - Self::new() - } -} - -impl From for DepTable { - fn from(other: DepKind) -> Self { - Self::new().set_kind(other) - } -} - -/// A Cargo manifest -#[derive(Debug, Clone)] -pub struct Manifest { - /// Manifest contents as TOML data - pub data: toml_edit::Document, -} - -impl Manifest { - /// Get the manifest's package name - pub fn package_name(&self) -> CargoResult<&str> { - self.data - .as_table() - .get("package") - .and_then(|m| m.get("name")) - .and_then(|m| m.as_str()) - .ok_or_else(parse_manifest_err) - } - - /// Get the specified table from the manifest. 
- pub fn get_table<'a>(&'a self, table_path: &[String]) -> CargoResult<&'a toml_edit::Item> { - /// Descend into a manifest until the required table is found. - fn descend<'a>( - input: &'a toml_edit::Item, - path: &[String], - ) -> CargoResult<&'a toml_edit::Item> { - if let Some(segment) = path.get(0) { - let value = input - .get(&segment) - .ok_or_else(|| non_existent_table_err(segment))?; - - if value.is_table_like() { - descend(value, &path[1..]) - } else { - Err(non_existent_table_err(segment)) - } - } else { - Ok(input) - } - } - - descend(self.data.as_item(), table_path) - } - - /// Get the specified table from the manifest. - pub fn get_table_mut<'a>( - &'a mut self, - table_path: &[String], - ) -> CargoResult<&'a mut toml_edit::Item> { - /// Descend into a manifest until the required table is found. - fn descend<'a>( - input: &'a mut toml_edit::Item, - path: &[String], - ) -> CargoResult<&'a mut toml_edit::Item> { - if let Some(segment) = path.get(0) { - let mut default_table = toml_edit::Table::new(); - default_table.set_implicit(true); - let value = input[&segment].or_insert(toml_edit::Item::Table(default_table)); - - if value.is_table_like() { - descend(value, &path[1..]) - } else { - Err(non_existent_table_err(segment)) - } - } else { - Ok(input) - } - } - - descend(self.data.as_item_mut(), table_path) - } - - /// Get all sections in the manifest that exist and might contain dependencies. - /// The returned items are always `Table` or `InlineTable`. - pub fn get_sections(&self) -> Vec<(DepTable, toml_edit::Item)> { - let mut sections = Vec::new(); - - for table in DepTable::KINDS { - let dependency_type = table.kind_table(); - // Dependencies can be in the three standard sections... - if self - .data - .get(dependency_type) - .map(|t| t.is_table_like()) - .unwrap_or(false) - { - sections.push((table.clone(), self.data[dependency_type].clone())) - } - - // ... and in `target..(build-/dev-)dependencies`. - let target_sections = self - .data - .as_table() - .get("target") - .and_then(toml_edit::Item::as_table_like) - .into_iter() - .flat_map(toml_edit::TableLike::iter) - .filter_map(|(target_name, target_table)| { - let dependency_table = target_table.get(dependency_type)?; - dependency_table.as_table_like().map(|_| { - ( - table.clone().set_target(target_name), - dependency_table.clone(), - ) - }) - }); - - sections.extend(target_sections); - } - - sections - } - - pub fn get_legacy_sections(&self) -> Vec { - let mut result = Vec::new(); - - for dependency_type in ["dev_dependencies", "build_dependencies"] { - if self.data.contains_key(dependency_type) { - result.push(dependency_type.to_owned()); - } - - // ... and in `target..(build-/dev-)dependencies`. 
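// ===========================================================================
// Illustrative sketch, not part of this patch: how a `DepTable` names one of
// these sections and how `get_table` descends to it.  Relies on the `FromStr`
// impl for `Manifest` further below; the `cfg(unix)`/`libc` snippet is
// hypothetical.
#[test]
fn dep_table_path_sketch() {
    let dev_on_unix = DepTable::new()
        .set_kind(DepKind::Development)
        .set_target("cfg(unix)");
    assert_eq!(
        dev_on_unix.to_table(),
        vec!["target", "cfg(unix)", "dev-dependencies"]
    );

    let manifest: Manifest = r#"
[target."cfg(unix)".dev-dependencies]
libc = "0.2"
"#
    .parse()
    .unwrap();
    let table_path: Vec<String> = dev_on_unix
        .to_table()
        .iter()
        .map(|s| s.to_string())
        .collect();
    let item = manifest.get_table(&table_path).unwrap();
    assert!(item.as_table_like().unwrap().contains_key("libc"));
}
// ===========================================================================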
- result.extend( - self.data - .as_table() - .get("target") - .and_then(toml_edit::Item::as_table_like) - .into_iter() - .flat_map(toml_edit::TableLike::iter) - .filter_map(|(target_name, target_table)| { - if target_table.as_table_like()?.contains_key(dependency_type) { - Some(format!("target.{target_name}.{dependency_type}")) - } else { - None - } - }), - ); - } - result - } -} - -impl str::FromStr for Manifest { - type Err = anyhow::Error; - - /// Read manifest data from string - fn from_str(input: &str) -> ::std::result::Result { - let d: toml_edit::Document = input.parse().context("Manifest not valid TOML")?; - - Ok(Manifest { data: d }) - } -} - -impl std::fmt::Display for Manifest { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - self.data.fmt(f) - } -} - -/// A Cargo manifest that is available locally. -#[derive(Debug)] -pub struct LocalManifest { - /// Path to the manifest - pub path: PathBuf, - /// Manifest contents - pub manifest: Manifest, -} - -impl Deref for LocalManifest { - type Target = Manifest; - - fn deref(&self) -> &Manifest { - &self.manifest - } -} - -impl DerefMut for LocalManifest { - fn deref_mut(&mut self) -> &mut Manifest { - &mut self.manifest - } -} - -impl LocalManifest { - /// Construct the `LocalManifest` corresponding to the `Path` provided. - pub fn try_new(path: &Path) -> CargoResult { - if !path.is_absolute() { - anyhow::bail!("can only edit absolute paths, got {}", path.display()); - } - let data = cargo_util::paths::read(&path)?; - let manifest = data.parse().context("Unable to parse Cargo.toml")?; - Ok(LocalManifest { - manifest, - path: path.to_owned(), - }) - } - - /// Write changes back to the file - pub fn write(&self) -> CargoResult<()> { - if !self.manifest.data.contains_key("package") - && !self.manifest.data.contains_key("project") - { - if self.manifest.data.contains_key("workspace") { - anyhow::bail!( - "found virtual manifest at {}, but this command requires running against an \ - actual package in this workspace.", - self.path.display() - ); - } else { - anyhow::bail!( - "missing expected `package` or `project` fields in {}", - self.path.display() - ); - } - } - - let s = self.manifest.data.to_string(); - let new_contents_bytes = s.as_bytes(); - - cargo_util::paths::write(&self.path, new_contents_bytes) - } - - /// Lookup a dependency - pub fn get_dependency_versions<'s>( - &'s self, - dep_key: &'s str, - ) -> impl Iterator)> + 's { - let crate_root = self.path.parent().expect("manifest path is absolute"); - self.get_sections() - .into_iter() - .filter_map(move |(table_path, table)| { - let table = table.into_table().ok()?; - Some( - table - .into_iter() - .filter_map(|(key, item)| { - if key.as_str() == dep_key { - Some((table_path.clone(), key, item)) - } else { - None - } - }) - .collect::>(), - ) - }) - .flatten() - .map(move |(table_path, dep_key, dep_item)| { - let dep = Dependency::from_toml(crate_root, &dep_key, &dep_item); - (table_path, dep) - }) - } - - /// Add entry to a Cargo.toml. 
- pub fn insert_into_table( - &mut self, - table_path: &[String], - dep: &Dependency, - ) -> CargoResult<()> { - let crate_root = self - .path - .parent() - .expect("manifest path is absolute") - .to_owned(); - let dep_key = dep.toml_key(); - - let table = self.get_table_mut(table_path)?; - if let Some((mut dep_key, dep_item)) = table - .as_table_like_mut() - .unwrap() - .get_key_value_mut(dep_key) - { - dep.update_toml(&crate_root, &mut dep_key, dep_item); - } else { - let new_dependency = dep.to_toml(&crate_root); - table[dep_key] = new_dependency; - } - if let Some(t) = table.as_inline_table_mut() { - t.fmt() - } - - Ok(()) - } - - /// Remove references to `dep_key` if its no longer present - pub fn gc_dep(&mut self, dep_key: &str) { - let explicit_dep_activation = self.is_explicit_dep_activation(dep_key); - let status = self.dep_status(dep_key); - - if let Some(toml_edit::Item::Table(feature_table)) = - self.data.as_table_mut().get_mut("features") - { - for (_feature, mut feature_values) in feature_table.iter_mut() { - if let toml_edit::Item::Value(toml_edit::Value::Array(feature_values)) = - &mut feature_values - { - fix_feature_activations( - feature_values, - dep_key, - status, - explicit_dep_activation, - ); - } - } - } - } - - fn is_explicit_dep_activation(&self, dep_key: &str) -> bool { - if let Some(toml_edit::Item::Table(feature_table)) = self.data.as_table().get("features") { - for values in feature_table - .iter() - .map(|(_, a)| a) - .filter_map(|i| i.as_value()) - .filter_map(|v| v.as_array()) - { - for value in values.iter().filter_map(|v| v.as_str()) { - let value = FeatureValue::new(InternedString::new(value)); - if let FeatureValue::Dep { dep_name } = &value { - if dep_name.as_str() == dep_key { - return true; - } - } - } - } - } - - false - } - - fn dep_status(&self, dep_key: &str) -> DependencyStatus { - let mut status = DependencyStatus::None; - for (_, tbl) in self.get_sections() { - if let toml_edit::Item::Table(tbl) = tbl { - if let Some(dep_item) = tbl.get(dep_key) { - let optional = dep_item - .get("optional") - .and_then(|i| i.as_value()) - .and_then(|i| i.as_bool()) - .unwrap_or(false); - if optional { - return DependencyStatus::Optional; - } else { - status = DependencyStatus::Required; - } - } - } - } - status - } -} - -impl std::fmt::Display for LocalManifest { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - self.manifest.fmt(f) - } -} - -#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord)] -enum DependencyStatus { - None, - Optional, - Required, -} - -fn fix_feature_activations( - feature_values: &mut toml_edit::Array, - dep_key: &str, - status: DependencyStatus, - explicit_dep_activation: bool, -) { - let remove_list: Vec = feature_values - .iter() - .enumerate() - .filter_map(|(idx, value)| value.as_str().map(|s| (idx, s))) - .filter_map(|(idx, value)| { - let parsed_value = FeatureValue::new(InternedString::new(value)); - match status { - DependencyStatus::None => match (parsed_value, explicit_dep_activation) { - (FeatureValue::Feature(dep_name), false) - | (FeatureValue::Dep { dep_name }, _) - | (FeatureValue::DepFeature { dep_name, .. }, _) => dep_name == dep_key, - _ => false, - }, - DependencyStatus::Optional => false, - DependencyStatus::Required => match (parsed_value, explicit_dep_activation) { - (FeatureValue::Feature(dep_name), false) - | (FeatureValue::Dep { dep_name }, _) => dep_name == dep_key, - (FeatureValue::Feature(_), true) | (FeatureValue::DepFeature { .. 
}, _) => { - false - } - }, - } - .then(|| idx) - }) - .collect(); - - // Remove found idx in revers order so we don't invalidate the idx. - for idx in remove_list.iter().rev() { - feature_values.remove(*idx); - } - - if status == DependencyStatus::Required { - for value in feature_values.iter_mut() { - let parsed_value = if let Some(value) = value.as_str() { - FeatureValue::new(InternedString::new(value)) - } else { - continue; - }; - if let FeatureValue::DepFeature { - dep_name, - dep_feature, - weak, - } = parsed_value - { - if dep_name == dep_key && weak { - *value = format!("{dep_name}/{dep_feature}").into(); - } - } - } - } -} - -pub fn str_or_1_len_table(item: &toml_edit::Item) -> bool { - item.is_str() || item.as_table_like().map(|t| t.len() == 1).unwrap_or(false) -} - -fn parse_manifest_err() -> anyhow::Error { - anyhow::format_err!("unable to parse external Cargo.toml") -} - -fn non_existent_table_err(table: impl std::fmt::Display) -> anyhow::Error { - anyhow::format_err!("the table `{table}` could not be found.") -} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/ops/cargo_add/mod.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/ops/cargo_add/mod.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/ops/cargo_add/mod.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/ops/cargo_add/mod.rs 2023-01-10 13:41:19.000000000 +0000 @@ -1,17 +1,17 @@ //! Core of cargo-add command mod crate_spec; -mod dependency; -mod manifest; use std::collections::BTreeMap; use std::collections::BTreeSet; use std::collections::VecDeque; +use std::fmt::Write; use std::path::Path; use anyhow::Context as _; use cargo_util::paths; use indexmap::IndexSet; +use itertools::Itertools; use termcolor::Color::Green; use termcolor::Color::Red; use termcolor::ColorSpec; @@ -26,18 +26,17 @@ use crate::core::Shell; use crate::core::Summary; use crate::core::Workspace; +use crate::util::toml_mut::dependency::Dependency; +use crate::util::toml_mut::dependency::GitSource; +use crate::util::toml_mut::dependency::MaybeWorkspace; +use crate::util::toml_mut::dependency::PathSource; +use crate::util::toml_mut::dependency::Source; +use crate::util::toml_mut::dependency::WorkspaceSource; +use crate::util::toml_mut::manifest::DepTable; +use crate::util::toml_mut::manifest::LocalManifest; use crate::CargoResult; use crate::Config; use crate_spec::CrateSpec; -use dependency::Dependency; -use dependency::GitSource; -use dependency::PathSource; -use dependency::RegistrySource; -use dependency::Source; -use manifest::LocalManifest; - -use crate::ops::cargo_add::dependency::{MaybeWorkspace, WorkspaceSource}; -pub use manifest::DepTable; /// Information on what dependencies should be added #[derive(Clone, Debug)] @@ -102,7 +101,7 @@ table_option.map_or(true, |table| is_sorted(table.iter().map(|(name, _)| name))) }); for dep in deps { - print_msg(&mut options.config.shell(), &dep, &dep_table)?; + print_action_msg(&mut options.config.shell(), &dep, &dep_table)?; if let Some(Source::Path(src)) = dep.source() { if src.path == manifest.path.parent().unwrap_or_else(|| Path::new("")) { anyhow::bail!( @@ -127,11 +126,63 @@ inherited_features.iter().map(|s| s.as_str()).collect(); unknown_features.extend(inherited_features.difference(&available_features).copied()); } + unknown_features.sort(); + if !unknown_features.is_empty() { - anyhow::bail!("unrecognized features: {unknown_features:?}"); + let (mut activated, mut deactivated) = dep.features(); + // Since the unknown features have been added to the DependencyUI we 
need to remove + // them to present the "correct" features that can be specified for the crate. + deactivated.retain(|f| !unknown_features.contains(f)); + activated.retain(|f| !unknown_features.contains(f)); + + let mut message = format!( + "unrecognized feature{} for crate {}: {}\n", + if unknown_features.len() == 1 { "" } else { "s" }, + dep.name, + unknown_features.iter().format(", "), + ); + if activated.is_empty() && deactivated.is_empty() { + write!(message, "no features available for crate {}", dep.name)?; + } else { + if !deactivated.is_empty() { + writeln!( + message, + "disabled features:\n {}", + deactivated + .iter() + .map(|s| s.to_string()) + .coalesce(|x, y| if x.len() + y.len() < 78 { + Ok(format!("{x}, {y}")) + } else { + Err((x, y)) + }) + .into_iter() + .format("\n ") + )? + } + if !activated.is_empty() { + writeln!( + message, + "enabled features:\n {}", + activated + .iter() + .map(|s| s.to_string()) + .coalesce(|x, y| if x.len() + y.len() < 78 { + Ok(format!("{x}, {y}")) + } else { + Err((x, y)) + }) + .into_iter() + .format("\n ") + )? + } + } + anyhow::bail!(message.trim().to_owned()); } + print_dep_table_msg(&mut options.config.shell(), &dep)?; + manifest.insert_into_table(&dep_table, &dep)?; manifest.gc_dep(dep.toml_key()); } @@ -637,6 +688,42 @@ }) .collect(); } + + fn features(&self) -> (IndexSet<&str>, IndexSet<&str>) { + let mut activated: IndexSet<_> = + self.features.iter().flatten().map(|s| s.as_str()).collect(); + if self.default_features().unwrap_or(true) { + activated.insert("default"); + } + activated.extend(self.inherited_features.iter().flatten().map(|s| s.as_str())); + let mut walk: VecDeque<_> = activated.iter().cloned().collect(); + while let Some(next) = walk.pop_front() { + walk.extend( + self.available_features + .get(next) + .into_iter() + .flatten() + .map(|s| s.as_str()), + ); + activated.extend( + self.available_features + .get(next) + .into_iter() + .flatten() + .map(|s| s.as_str()), + ); + } + activated.remove("default"); + activated.sort(); + let mut deactivated = self + .available_features + .keys() + .filter(|f| !activated.contains(f.as_str()) && *f != "default") + .map(|f| f.as_str()) + .collect::>(); + deactivated.sort(); + (activated, deactivated) + } } impl<'s> From<&'s Summary> for DependencyUI { @@ -700,9 +787,7 @@ Ok(dependency) } -fn print_msg(shell: &mut Shell, dep: &DependencyUI, section: &[String]) -> CargoResult<()> { - use std::fmt::Write; - +fn print_action_msg(shell: &mut Shell, dep: &DependencyUI, section: &[String]) -> CargoResult<()> { if matches!(shell.verbosity(), crate::core::shell::Verbosity::Quiet) { return Ok(()); } @@ -739,38 +824,14 @@ }; write!(message, " {section}")?; write!(message, ".")?; - shell.status("Adding", message)?; + shell.status("Adding", message) +} - let mut activated: IndexSet<_> = dep.features.iter().flatten().map(|s| s.as_str()).collect(); - if dep.default_features().unwrap_or(true) { - activated.insert("default"); - } - activated.extend(dep.inherited_features.iter().flatten().map(|s| s.as_str())); - let mut walk: VecDeque<_> = activated.iter().cloned().collect(); - while let Some(next) = walk.pop_front() { - walk.extend( - dep.available_features - .get(next) - .into_iter() - .flatten() - .map(|s| s.as_str()), - ); - activated.extend( - dep.available_features - .get(next) - .into_iter() - .flatten() - .map(|s| s.as_str()), - ); +fn print_dep_table_msg(shell: &mut Shell, dep: &DependencyUI) -> CargoResult<()> { + if matches!(shell.verbosity(), crate::core::shell::Verbosity::Quiet) { + return 
Ok(()); } - activated.remove("default"); - activated.sort(); - let mut deactivated = dep - .available_features - .keys() - .filter(|f| !activated.contains(f.as_str()) && *f != "default") - .collect::>(); - deactivated.sort(); + let (activated, deactivated) = dep.features(); if !activated.is_empty() || !deactivated.is_empty() { let prefix = format!("{:>13}", " "); let suffix = if let Some(version) = &dep.available_version { diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/ops/cargo_compile/compile_filter.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/ops/cargo_compile/compile_filter.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/ops/cargo_compile/compile_filter.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/ops/cargo_compile/compile_filter.rs 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,302 @@ +//! Filters and their rules to select which Cargo targets will be built. + +use crate::core::compiler::CompileMode; +use crate::core::{Target, TargetKind}; +use crate::util::restricted_names::is_glob_pattern; + +#[derive(Debug, PartialEq, Eq)] +/// Indicates whether or not the library target gets included. +pub enum LibRule { + /// Include the library, fail if not present + True, + /// Include the library if present + Default, + /// Exclude the library + False, +} + +#[derive(Debug)] +/// Indicates which Cargo targets will be selected to be built. +pub enum FilterRule { + /// All included. + All, + /// Just a subset of Cargo targets based on names given. + Just(Vec), +} + +/// Filter to apply to the root package to select which Cargo targets will be built. +/// (examples, bins, benches, tests, ...) +/// +/// The actual filter process happens inside [`generate_targets`]. +/// +/// Not to be confused with [`Packages`], which opts in packages to be built. +/// +/// [`generate_targets`]: super::generate_targets +/// [`Packages`]: crate::ops::Packages +#[derive(Debug)] +pub enum CompileFilter { + /// The default set of Cargo targets. + Default { + /// Flag whether targets can be safely skipped when required-features are not satisfied. + required_features_filterable: bool, + }, + /// Only includes a subset of all Cargo targets. + Only { + /// Include all Cargo targets. + all_targets: bool, + lib: LibRule, + bins: FilterRule, + examples: FilterRule, + tests: FilterRule, + benches: FilterRule, + }, +} + +impl FilterRule { + pub fn new(targets: Vec, all: bool) -> FilterRule { + if all { + FilterRule::All + } else { + FilterRule::Just(targets) + } + } + + /// Creates a filter with no rule. + /// + /// In the current Cargo implementation, filter without a rule implies + /// Cargo will follows the default behaviour to filter targets. + pub fn none() -> FilterRule { + FilterRule::Just(Vec::new()) + } + + /// Checks if a target definition matches this filter rule. + fn matches(&self, target: &Target) -> bool { + match *self { + FilterRule::All => true, + FilterRule::Just(ref targets) => targets.iter().any(|x| *x == target.name()), + } + } + + /// Check if a filter is specific. + /// + /// Only filters without rules are considered as not specific. + fn is_specific(&self) -> bool { + match *self { + FilterRule::All => true, + FilterRule::Just(ref targets) => !targets.is_empty(), + } + } + + /// Checks if any specified target name contains glob patterns. 
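// ===========================================================================
// Illustrative sketch, not part of this patch: how the constructors below put
// these rules together.  When no rule is specific, `CompileFilter::new`
// collapses to the `Default` behaviour; any explicit selection (here `--lib`)
// yields `CompileFilter::Only`.
#[test]
fn compile_filter_collapse_sketch() {
    let nothing_selected = CompileFilter::new(
        LibRule::False,
        FilterRule::none(),
        FilterRule::none(),
        FilterRule::none(),
        FilterRule::none(),
    );
    assert!(matches!(nothing_selected, CompileFilter::Default { .. }));

    let lib_selected = CompileFilter::new(
        LibRule::True,
        FilterRule::none(),
        FilterRule::none(),
        FilterRule::none(),
        FilterRule::none(),
    );
    assert!(matches!(lib_selected, CompileFilter::Only { .. }));
}
// ===========================================================================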
+ pub(crate) fn contains_glob_patterns(&self) -> bool { + match self { + FilterRule::All => false, + FilterRule::Just(targets) => targets.iter().any(is_glob_pattern), + } + } +} + +impl CompileFilter { + /// Constructs a filter from raw command line arguments. + pub fn from_raw_arguments( + lib_only: bool, + bins: Vec, + all_bins: bool, + tsts: Vec, + all_tsts: bool, + exms: Vec, + all_exms: bool, + bens: Vec, + all_bens: bool, + all_targets: bool, + ) -> CompileFilter { + if all_targets { + return CompileFilter::new_all_targets(); + } + let rule_lib = if lib_only { + LibRule::True + } else { + LibRule::False + }; + let rule_bins = FilterRule::new(bins, all_bins); + let rule_tsts = FilterRule::new(tsts, all_tsts); + let rule_exms = FilterRule::new(exms, all_exms); + let rule_bens = FilterRule::new(bens, all_bens); + + CompileFilter::new(rule_lib, rule_bins, rule_tsts, rule_exms, rule_bens) + } + + /// Constructs a filter from underlying primitives. + pub fn new( + rule_lib: LibRule, + rule_bins: FilterRule, + rule_tsts: FilterRule, + rule_exms: FilterRule, + rule_bens: FilterRule, + ) -> CompileFilter { + if rule_lib == LibRule::True + || rule_bins.is_specific() + || rule_tsts.is_specific() + || rule_exms.is_specific() + || rule_bens.is_specific() + { + CompileFilter::Only { + all_targets: false, + lib: rule_lib, + bins: rule_bins, + examples: rule_exms, + benches: rule_bens, + tests: rule_tsts, + } + } else { + CompileFilter::Default { + required_features_filterable: true, + } + } + } + + /// Constructs a filter that includes all targets. + pub fn new_all_targets() -> CompileFilter { + CompileFilter::Only { + all_targets: true, + lib: LibRule::Default, + bins: FilterRule::All, + examples: FilterRule::All, + benches: FilterRule::All, + tests: FilterRule::All, + } + } + + /// Constructs a filter that includes all test targets. + /// + /// Being different from the behavior of [`CompileFilter::Default`], this + /// function only recognizes test targets, which means cargo might compile + /// all targets with `tested` flag on, whereas [`CompileFilter::Default`] + /// may include additional example targets to ensure they can be compiled. + /// + /// Note that the actual behavior is subject to `filter_default_targets` + /// and `generate_targets` though. + pub fn all_test_targets() -> Self { + Self::Only { + all_targets: false, + lib: LibRule::Default, + bins: FilterRule::none(), + examples: FilterRule::none(), + tests: FilterRule::All, + benches: FilterRule::none(), + } + } + + /// Constructs a filter that includes lib target only. + pub fn lib_only() -> Self { + Self::Only { + all_targets: false, + lib: LibRule::True, + bins: FilterRule::none(), + examples: FilterRule::none(), + tests: FilterRule::none(), + benches: FilterRule::none(), + } + } + + /// Constructs a filter that includes the given binary. No more. No less. + pub fn single_bin(bin: String) -> Self { + Self::Only { + all_targets: false, + lib: LibRule::False, + bins: FilterRule::new(vec![bin], false), + examples: FilterRule::none(), + tests: FilterRule::none(), + benches: FilterRule::none(), + } + } + + /// Indicates if Cargo needs to build any dev dependency. + pub fn need_dev_deps(&self, mode: CompileMode) -> bool { + match mode { + CompileMode::Test | CompileMode::Doctest | CompileMode::Bench => true, + CompileMode::Check { test: true } => true, + CompileMode::Build + | CompileMode::Doc { .. } + | CompileMode::Docscrape + | CompileMode::Check { test: false } => match *self { + CompileFilter::Default { .. 
} => false, + CompileFilter::Only { + ref examples, + ref tests, + ref benches, + .. + } => examples.is_specific() || tests.is_specific() || benches.is_specific(), + }, + CompileMode::RunCustomBuild => panic!("Invalid mode"), + } + } + + /// Selects targets for "cargo run". for logic to select targets for other + /// subcommands, see `generate_targets` and `filter_default_targets`. + pub fn target_run(&self, target: &Target) -> bool { + match *self { + CompileFilter::Default { .. } => true, + CompileFilter::Only { + ref lib, + ref bins, + ref examples, + ref tests, + ref benches, + .. + } => { + let rule = match *target.kind() { + TargetKind::Bin => bins, + TargetKind::Test => tests, + TargetKind::Bench => benches, + TargetKind::ExampleBin | TargetKind::ExampleLib(..) => examples, + TargetKind::Lib(..) => { + return match *lib { + LibRule::True => true, + LibRule::Default => true, + LibRule::False => false, + }; + } + TargetKind::CustomBuild => return false, + }; + rule.matches(target) + } + } + } + + pub fn is_specific(&self) -> bool { + match *self { + CompileFilter::Default { .. } => false, + CompileFilter::Only { .. } => true, + } + } + + pub fn is_all_targets(&self) -> bool { + matches!( + *self, + CompileFilter::Only { + all_targets: true, + .. + } + ) + } + + /// Checks if any specified target name contains glob patterns. + pub(crate) fn contains_glob_patterns(&self) -> bool { + match self { + CompileFilter::Default { .. } => false, + CompileFilter::Only { + bins, + examples, + tests, + benches, + .. + } => { + bins.contains_glob_patterns() + || examples.contains_glob_patterns() + || tests.contains_glob_patterns() + || benches.contains_glob_patterns() + } + } + } +} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/ops/cargo_compile/mod.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/ops/cargo_compile/mod.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/ops/cargo_compile/mod.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/ops/cargo_compile/mod.rs 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,1460 @@ +//! # The Cargo "compile" operation +//! +//! This module contains the entry point for starting the compilation process +//! for commands like `build`, `test`, `doc`, `rustc`, etc. +//! +//! The [`compile`] function will do all the work to compile a workspace. A +//! rough outline is: +//! +//! - Resolve the dependency graph (see [`ops::resolve`]). +//! - Download any packages needed (see [`PackageSet`]). +//! - Generate a list of top-level "units" of work for the targets the user +//! requested on the command-line. Each [`Unit`] corresponds to a compiler +//! invocation. This is done in this module ([`generate_targets`]). +//! - Build the graph of `Unit` dependencies (see [`unit_dependencies`]). +//! - Create a [`Context`] which will perform the following steps: +//! - Prepare the `target` directory (see [`Layout`]). +//! - Create a job queue (see `JobQueue`). The queue checks the +//! fingerprint of each `Unit` to determine if it should run or be +//! skipped. +//! - Execute the queue. Each leaf in the queue's dependency graph is +//! executed, and then removed from the graph when finished. This +//! repeats until the queue is empty. +//! +//! **Note**: "target" inside this module generally refers to ["Cargo Target"], +//! which corresponds to artifact that will be built in a package. Not to be +//! confused with target-triple or target architecture. +//! +//! [`unit_dependencies`]: crate::core::compiler::unit_dependencies +//! 
[`Layout`]: crate::core::compiler::Layout +//! ["Cargo Target"]: https://doc.rust-lang.org/nightly/cargo/reference/cargo-targets.html + +use std::collections::{HashMap, HashSet}; +use std::fmt::Write; +use std::hash::{Hash, Hasher}; +use std::sync::Arc; + +use crate::core::compiler::unit_dependencies::{build_unit_dependencies, IsArtifact}; +use crate::core::compiler::unit_graph::{self, UnitDep, UnitGraph}; +use crate::core::compiler::{standard_lib, CrateType, TargetInfo}; +use crate::core::compiler::{BuildConfig, BuildContext, Compilation, Context}; +use crate::core::compiler::{CompileKind, CompileMode, CompileTarget, RustcTargetData, Unit}; +use crate::core::compiler::{DefaultExecutor, Executor, UnitInterner}; +use crate::core::profiles::{Profiles, UnitFor}; +use crate::core::resolver::features::{self, CliFeatures, FeaturesFor}; +use crate::core::resolver::{HasDevUnits, Resolve}; +use crate::core::{FeatureValue, Package, PackageSet, Shell, Summary, Target}; +use crate::core::{PackageId, SourceId, TargetKind, Workspace}; +use crate::drop_println; +use crate::ops; +use crate::ops::resolve::WorkspaceResolve; +use crate::util::config::Config; +use crate::util::interning::InternedString; +use crate::util::restricted_names::is_glob_pattern; +use crate::util::{closest_msg, profile, CargoResult, StableHasher}; + +mod compile_filter; +pub use compile_filter::{CompileFilter, FilterRule, LibRule}; + +mod packages; +use packages::build_glob; +pub use packages::Packages; + +/// Contains information about how a package should be compiled. +/// +/// Note on distinction between `CompileOptions` and [`BuildConfig`]: +/// `BuildConfig` contains values that need to be retained after +/// [`BuildContext`] is created. The other fields are no longer necessary. Think +/// of it as `CompileOptions` are high-level settings requested on the +/// command-line, and `BuildConfig` are low-level settings for actually +/// driving `rustc`. +#[derive(Debug)] +pub struct CompileOptions { + /// Configuration information for a rustc build + pub build_config: BuildConfig, + /// Feature flags requested by the user. + pub cli_features: CliFeatures, + /// A set of packages to build. + pub spec: Packages, + /// Filter to apply to the root package to select which targets will be + /// built. + pub filter: CompileFilter, + /// Extra arguments to be passed to rustdoc (single target only) + pub target_rustdoc_args: Option>, + /// The specified target will be compiled with all the available arguments, + /// note that this only accounts for the *final* invocation of rustc + pub target_rustc_args: Option>, + /// Crate types to be passed to rustc (single target only) + pub target_rustc_crate_types: Option>, + /// Whether the `--document-private-items` flags was specified and should + /// be forwarded to `rustdoc`. + pub rustdoc_document_private_items: bool, + /// Whether the build process should check the minimum Rust version + /// defined in the cargo metadata for a crate. 
+ pub honor_rust_version: bool, +} + +impl CompileOptions { + pub fn new(config: &Config, mode: CompileMode) -> CargoResult { + let jobs = None; + let keep_going = false; + Ok(CompileOptions { + build_config: BuildConfig::new(config, jobs, keep_going, &[], mode)?, + cli_features: CliFeatures::new_all(false), + spec: ops::Packages::Packages(Vec::new()), + filter: CompileFilter::Default { + required_features_filterable: false, + }, + target_rustdoc_args: None, + target_rustc_args: None, + target_rustc_crate_types: None, + rustdoc_document_private_items: false, + honor_rust_version: true, + }) + } +} + +/// Compiles! +/// +/// This uses the [`DefaultExecutor`]. To use a custom [`Executor`], see [`compile_with_exec`]. +pub fn compile<'a>(ws: &Workspace<'a>, options: &CompileOptions) -> CargoResult> { + let exec: Arc = Arc::new(DefaultExecutor); + compile_with_exec(ws, options, &exec) +} + +/// Like [`compile`] but allows specifying a custom [`Executor`] +/// that will be able to intercept build calls and add custom logic. +/// +/// [`compile`] uses [`DefaultExecutor`] which just passes calls through. +pub fn compile_with_exec<'a>( + ws: &Workspace<'a>, + options: &CompileOptions, + exec: &Arc, +) -> CargoResult> { + ws.emit_warnings()?; + compile_ws(ws, options, exec) +} + +/// Like [`compile_with_exec`] but without warnings from manifest parsing. +pub fn compile_ws<'a>( + ws: &Workspace<'a>, + options: &CompileOptions, + exec: &Arc, +) -> CargoResult> { + let interner = UnitInterner::new(); + let bcx = create_bcx(ws, options, &interner)?; + if options.build_config.unit_graph { + unit_graph::emit_serialized_unit_graph(&bcx.roots, &bcx.unit_graph, ws.config())?; + return Compilation::new(&bcx); + } + let _p = profile::start("compiling"); + let cx = Context::new(&bcx)?; + cx.compile(exec) +} + +/// Executes `rustc --print `. +/// +/// * `print_opt_value` is the VALUE passed through. +pub fn print<'a>( + ws: &Workspace<'a>, + options: &CompileOptions, + print_opt_value: &str, +) -> CargoResult<()> { + let CompileOptions { + ref build_config, + ref target_rustc_args, + .. + } = *options; + let config = ws.config(); + let rustc = config.load_global_rustc(Some(ws))?; + for (index, kind) in build_config.requested_kinds.iter().enumerate() { + if index != 0 { + drop_println!(config); + } + let target_info = TargetInfo::new(config, &build_config.requested_kinds, &rustc, *kind)?; + let mut process = rustc.process(); + process.args(&target_info.rustflags); + if let Some(args) = target_rustc_args { + process.args(args); + } + if let CompileKind::Target(t) = kind { + process.arg("--target").arg(t.short_name()); + } + process.arg("--print").arg(print_opt_value); + process.exec()?; + } + Ok(()) +} + +/// Prepares all required information for the actual compilation. +/// +/// For how it works and what data it collects, +/// please see the [module-level documentation](self). +pub fn create_bcx<'a, 'cfg>( + ws: &'a Workspace<'cfg>, + options: &'a CompileOptions, + interner: &'a UnitInterner, +) -> CargoResult> { + let CompileOptions { + ref build_config, + ref spec, + ref cli_features, + ref filter, + ref target_rustdoc_args, + ref target_rustc_args, + ref target_rustc_crate_types, + rustdoc_document_private_items, + honor_rust_version, + } = *options; + let config = ws.config(); + + // Perform some pre-flight validation. + match build_config.mode { + CompileMode::Test + | CompileMode::Build + | CompileMode::Check { .. 
} + | CompileMode::Bench + | CompileMode::RunCustomBuild => { + if std::env::var("RUST_FLAGS").is_ok() { + config.shell().warn( + "Cargo does not read `RUST_FLAGS` environment variable. Did you mean `RUSTFLAGS`?", + )?; + } + } + CompileMode::Doc { .. } | CompileMode::Doctest | CompileMode::Docscrape => { + if std::env::var("RUSTDOC_FLAGS").is_ok() { + config.shell().warn( + "Cargo does not read `RUSTDOC_FLAGS` environment variable. Did you mean `RUSTDOCFLAGS`?" + )?; + } + } + } + config.validate_term_config()?; + + let target_data = RustcTargetData::new(ws, &build_config.requested_kinds)?; + + let all_packages = &Packages::All; + let rustdoc_scrape_examples = &config.cli_unstable().rustdoc_scrape_examples; + let need_reverse_dependencies = rustdoc_scrape_examples.is_some(); + let full_specs = if need_reverse_dependencies { + all_packages + } else { + spec + }; + + let resolve_specs = full_specs.to_package_id_specs(ws)?; + let has_dev_units = if filter.need_dev_deps(build_config.mode) || need_reverse_dependencies { + HasDevUnits::Yes + } else { + HasDevUnits::No + }; + let resolve = ops::resolve_ws_with_opts( + ws, + &target_data, + &build_config.requested_kinds, + cli_features, + &resolve_specs, + has_dev_units, + crate::core::resolver::features::ForceAllTargets::No, + )?; + let WorkspaceResolve { + mut pkg_set, + workspace_resolve, + targeted_resolve: resolve, + resolved_features, + } = resolve; + + let std_resolve_features = if let Some(crates) = &config.cli_unstable().build_std { + let (std_package_set, std_resolve, std_features) = + standard_lib::resolve_std(ws, &target_data, &build_config, crates)?; + pkg_set.add_set(std_package_set); + Some((std_resolve, std_features)) + } else { + None + }; + + // Find the packages in the resolver that the user wants to build (those + // passed in with `-p` or the defaults from the workspace), and convert + // Vec to a Vec. + let specs = if need_reverse_dependencies { + spec.to_package_id_specs(ws)? + } else { + resolve_specs.clone() + }; + let to_build_ids = resolve.specs_to_ids(&specs)?; + // Now get the `Package` for each `PackageId`. This may trigger a download + // if the user specified `-p` for a dependency that is not downloaded. + // Dependencies will be downloaded during build_unit_dependencies. + let mut to_builds = pkg_set.get_many(to_build_ids)?; + + // The ordering here affects some error messages coming out of cargo, so + // let's be test and CLI friendly by always printing in the same order if + // there's an error. 
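// ===========================================================================
// Illustrative sketch, not part of this patch: the filter/mode combinations
// that feed the `has_dev_units` decision above, using the constructors from
// `compile_filter.rs`.
#[test]
fn need_dev_deps_sketch() {
    // Test-like modes always need dev-dependencies.
    assert!(CompileFilter::lib_only().need_dev_deps(CompileMode::Test));
    // A plain build needs them only if tests/examples/benches are selected.
    assert!(CompileFilter::all_test_targets().need_dev_deps(CompileMode::Build));
    assert!(!CompileFilter::lib_only().need_dev_deps(CompileMode::Build));
}
// ===========================================================================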
+ to_builds.sort_by_key(|p| p.package_id()); + + for pkg in to_builds.iter() { + pkg.manifest().print_teapot(config); + + if build_config.mode.is_any_test() + && !ws.is_member(pkg) + && pkg.dependencies().iter().any(|dep| !dep.is_transitive()) + { + anyhow::bail!( + "package `{}` cannot be tested because it requires dev-dependencies \ + and is not a member of the workspace", + pkg.name() + ); + } + } + + let (extra_args, extra_args_name) = match (target_rustc_args, target_rustdoc_args) { + (&Some(ref args), _) => (Some(args.clone()), "rustc"), + (_, &Some(ref args)) => (Some(args.clone()), "rustdoc"), + _ => (None, ""), + }; + + if extra_args.is_some() && to_builds.len() != 1 { + panic!( + "`{}` should not accept multiple `-p` flags", + extra_args_name + ); + } + + let profiles = Profiles::new(ws, build_config.requested_profile)?; + profiles.validate_packages( + ws.profiles(), + &mut config.shell(), + workspace_resolve.as_ref().unwrap_or(&resolve), + )?; + + // If `--target` has not been specified, then the unit graph is built + // assuming `--target $HOST` was specified. See + // `rebuild_unit_graph_shared` for more on why this is done. + let explicit_host_kind = CompileKind::Target(CompileTarget::new(&target_data.rustc.host)?); + let explicit_host_kinds: Vec<_> = build_config + .requested_kinds + .iter() + .map(|kind| match kind { + CompileKind::Host => explicit_host_kind, + CompileKind::Target(t) => CompileKind::Target(*t), + }) + .collect(); + + // Passing `build_config.requested_kinds` instead of + // `explicit_host_kinds` here so that `generate_targets` can do + // its own special handling of `CompileKind::Host`. It will + // internally replace the host kind by the `explicit_host_kind` + // before setting as a unit. + let mut units = generate_targets( + ws, + &to_builds, + filter, + &build_config.requested_kinds, + explicit_host_kind, + build_config.mode, + &resolve, + &workspace_resolve, + &resolved_features, + &pkg_set, + &profiles, + interner, + )?; + + if let Some(args) = target_rustc_crate_types { + override_rustc_crate_types(&mut units, args, interner)?; + } + + let mut scrape_units = match rustdoc_scrape_examples { + Some(arg) => { + let filter = match arg.as_str() { + "all" => CompileFilter::new_all_targets(), + "examples" => CompileFilter::new( + LibRule::False, + FilterRule::none(), + FilterRule::none(), + FilterRule::All, + FilterRule::none(), + ), + _ => { + anyhow::bail!( + r#"-Z rustdoc-scrape-examples must take "all" or "examples" as an argument"# + ) + } + }; + let to_build_ids = resolve.specs_to_ids(&resolve_specs)?; + let to_builds = pkg_set.get_many(to_build_ids)?; + let mode = CompileMode::Docscrape; + + generate_targets( + ws, + &to_builds, + &filter, + &build_config.requested_kinds, + explicit_host_kind, + mode, + &resolve, + &workspace_resolve, + &resolved_features, + &pkg_set, + &profiles, + interner, + )? + .into_iter() + // Proc macros should not be scraped for functions, since they only export macros + .filter(|unit| !unit.target.proc_macro()) + .collect::>() + } + None => Vec::new(), + }; + + let std_roots = if let Some(crates) = standard_lib::std_crates(config, Some(&units)) { + let (std_resolve, std_features) = std_resolve_features.as_ref().unwrap(); + standard_lib::generate_std_roots( + &crates, + std_resolve, + std_features, + &explicit_host_kinds, + &pkg_set, + interner, + &profiles, + )? 
+ } else { + Default::default() + }; + + let mut unit_graph = build_unit_dependencies( + ws, + &pkg_set, + &resolve, + &resolved_features, + std_resolve_features.as_ref(), + &units, + &scrape_units, + &std_roots, + build_config.mode, + &target_data, + &profiles, + interner, + )?; + + // TODO: In theory, Cargo should also dedupe the roots, but I'm uncertain + // what heuristics to use in that case. + if build_config.mode == (CompileMode::Doc { deps: true }) { + remove_duplicate_doc(build_config, &units, &mut unit_graph); + } + + if build_config + .requested_kinds + .iter() + .any(CompileKind::is_host) + { + // Rebuild the unit graph, replacing the explicit host targets with + // CompileKind::Host, merging any dependencies shared with build + // dependencies. + let new_graph = rebuild_unit_graph_shared( + interner, + unit_graph, + &units, + &scrape_units, + explicit_host_kind, + ); + // This would be nicer with destructuring assignment. + units = new_graph.0; + scrape_units = new_graph.1; + unit_graph = new_graph.2; + } + + let mut extra_compiler_args = HashMap::new(); + if let Some(args) = extra_args { + if units.len() != 1 { + anyhow::bail!( + "extra arguments to `{}` can only be passed to one \ + target, consider filtering\nthe package by passing, \ + e.g., `--lib` or `--bin NAME` to specify a single target", + extra_args_name + ); + } + extra_compiler_args.insert(units[0].clone(), args); + } + + for unit in units + .iter() + .filter(|unit| unit.mode.is_doc() || unit.mode.is_doc_test()) + .filter(|unit| rustdoc_document_private_items || unit.target.is_bin()) + { + // Add `--document-private-items` rustdoc flag if requested or if + // the target is a binary. Binary crates get their private items + // documented by default. + let mut args = vec!["--document-private-items".into()]; + if unit.target.is_bin() { + // This warning only makes sense if it's possible to document private items + // sometimes and ignore them at other times. But cargo consistently passes + // `--document-private-items`, so the warning isn't useful. 
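The comment above, together with the push that follows it, implements a small rule: doc units for binaries always receive `--document-private-items`, and when they do, the private-intra-doc-links lint is silenced because the warning carries no signal. Restated as a standalone helper (the function name is invented, not part of the patch):

```rust
/// Extra rustdoc flags for a doc unit, mirroring the rule in `create_bcx` above.
fn rustdoc_privacy_args(target_is_bin: bool, document_private_requested: bool) -> Vec<String> {
    let mut args = Vec::new();
    if document_private_requested || target_is_bin {
        // Binaries document their private items by default.
        args.push("--document-private-items".to_string());
        if target_is_bin {
            // Always passed for binaries, so the lint would only produce noise.
            args.push("-Arustdoc::private-intra-doc-links".to_string());
        }
    }
    args
}
```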
+ args.push("-Arustdoc::private-intra-doc-links".into()); + } + extra_compiler_args + .entry(unit.clone()) + .or_default() + .extend(args); + } + + if honor_rust_version { + // Remove any pre-release identifiers for easier comparison + let current_version = &target_data.rustc.version; + let untagged_version = semver::Version::new( + current_version.major, + current_version.minor, + current_version.patch, + ); + + for unit in unit_graph.keys() { + let version = match unit.pkg.rust_version() { + Some(v) => v, + None => continue, + }; + + let req = semver::VersionReq::parse(version).unwrap(); + if req.matches(&untagged_version) { + continue; + } + + let guidance = if ws.is_ephemeral() { + if ws.ignore_lock() { + "Try re-running cargo install with `--locked`".to_string() + } else { + String::new() + } + } else if !unit.is_local() { + format!( + "Either upgrade to rustc {} or newer, or use\n\ + cargo update -p {}@{} --precise ver\n\ + where `ver` is the latest version of `{}` supporting rustc {}", + version, + unit.pkg.name(), + unit.pkg.version(), + unit.pkg.name(), + current_version, + ) + } else { + String::new() + }; + + anyhow::bail!( + "package `{}` cannot be built because it requires rustc {} or newer, \ + while the currently active rustc version is {}\n{}", + unit.pkg, + version, + current_version, + guidance, + ); + } + } + + let bcx = BuildContext::new( + ws, + pkg_set, + build_config, + profiles, + extra_compiler_args, + target_data, + units, + unit_graph, + scrape_units, + )?; + + Ok(bcx) +} + +/// A proposed target. +/// +/// Proposed targets are later filtered into actual `Unit`s based on whether or +/// not the target requires its features to be present. +#[derive(Debug)] +struct Proposal<'a> { + pkg: &'a Package, + target: &'a Target, + /// Indicates whether or not all required features *must* be present. If + /// false, and the features are not available, then it will be silently + /// skipped. Generally, targets specified by name (`--bin foo`) are + /// required, all others can be silently skipped if features are missing. + requires_features: bool, + mode: CompileMode, +} + +/// Generates all the base targets for the packages the user has requested to +/// compile. Dependencies for these targets are computed later in `unit_dependencies`. +fn generate_targets( + ws: &Workspace<'_>, + packages: &[&Package], + filter: &CompileFilter, + requested_kinds: &[CompileKind], + explicit_host_kind: CompileKind, + mode: CompileMode, + resolve: &Resolve, + workspace_resolve: &Option, + resolved_features: &features::ResolvedFeatures, + package_set: &PackageSet<'_>, + profiles: &Profiles, + interner: &UnitInterner, +) -> CargoResult> { + let config = ws.config(); + // Helper for creating a list of `Unit` structures + let new_unit = |units: &mut HashSet, + pkg: &Package, + target: &Target, + initial_target_mode: CompileMode| { + // Custom build units are added in `build_unit_dependencies`. + assert!(!target.is_custom_build()); + let target_mode = match initial_target_mode { + CompileMode::Test => { + if target.is_example() && !filter.is_specific() && !target.tested() { + // Examples are included as regular binaries to verify + // that they compile. + CompileMode::Build + } else { + CompileMode::Test + } + } + CompileMode::Build => match *target.kind() { + TargetKind::Test => CompileMode::Test, + TargetKind::Bench => CompileMode::Bench, + _ => CompileMode::Build, + }, + // `CompileMode::Bench` is only used to inform `filter_default_targets` + // which command is being used (`cargo bench`). 
Afterwards, tests + // and benches are treated identically. Switching the mode allows + // de-duplication of units that are essentially identical. For + // example, `cargo build --all-targets --release` creates the units + // (lib profile:bench, mode:test) and (lib profile:bench, mode:bench) + // and since these are the same, we want them to be de-duplicated in + // `unit_dependencies`. + CompileMode::Bench => CompileMode::Test, + _ => initial_target_mode, + }; + + let is_local = pkg.package_id().source_id().is_path(); + + // No need to worry about build-dependencies, roots are never build dependencies. + let features_for = FeaturesFor::from_for_host(target.proc_macro()); + let features = resolved_features.activated_features(pkg.package_id(), features_for); + + // If `--target` has not been specified, then the unit + // graph is built almost like if `--target $HOST` was + // specified. See `rebuild_unit_graph_shared` for more on + // why this is done. However, if the package has its own + // `package.target` key, then this gets used instead of + // `$HOST` + let explicit_kinds = if let Some(k) = pkg.manifest().forced_kind() { + vec![k] + } else { + requested_kinds + .iter() + .map(|kind| match kind { + CompileKind::Host => { + pkg.manifest().default_kind().unwrap_or(explicit_host_kind) + } + CompileKind::Target(t) => CompileKind::Target(*t), + }) + .collect() + }; + + for kind in explicit_kinds.iter() { + let unit_for = if initial_target_mode.is_any_test() { + // NOTE: the `UnitFor` here is subtle. If you have a profile + // with `panic` set, the `panic` flag is cleared for + // tests/benchmarks and their dependencies. If this + // was `normal`, then the lib would get compiled three + // times (once with panic, once without, and once with + // `--test`). + // + // This would cause a problem for doc tests, which would fail + // because `rustdoc` would attempt to link with both libraries + // at the same time. Also, it's probably not important (or + // even desirable?) for rustdoc to link with a lib with + // `panic` set. + // + // As a consequence, Examples and Binaries get compiled + // without `panic` set. This probably isn't a bad deal. + // + // Forcing the lib to be compiled three times during `cargo + // test` is probably also not desirable. + UnitFor::new_test(config, *kind) + } else if target.for_host() { + // Proc macro / plugin should not have `panic` set. + UnitFor::new_compiler(*kind) + } else { + UnitFor::new_normal(*kind) + }; + let profile = profiles.get_profile( + pkg.package_id(), + ws.is_member(pkg), + is_local, + unit_for, + *kind, + ); + let unit = interner.intern( + pkg, + target, + profile, + kind.for_target(target), + target_mode, + features.clone(), + /*is_std*/ false, + /*dep_hash*/ 0, + IsArtifact::No, + ); + units.insert(unit); + } + }; + + // Create a list of proposed targets. 
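Stepping back to the `honor_rust_version` check in `create_bcx` above: it strips pre-release identifiers from the running rustc and then asks semver whether the package's `rust-version` requirement is satisfied. A minimal sketch with the `semver` crate (the version numbers are invented):

```rust
use semver::{Version, VersionReq};

fn rust_version_satisfied(rust_version: &str, rustc: &Version) -> bool {
    // Drop pre-release identifiers (e.g. `1.67.0-nightly` -> `1.67.0`) so that a
    // nightly toolchain still satisfies `rust-version = "1.67"`; a pre-release
    // version would otherwise never match a plain requirement.
    let untagged = Version::new(rustc.major, rustc.minor, rustc.patch);
    // `rust-version = "1.64"` parses as the requirement `^1.64`.
    VersionReq::parse(rust_version).map_or(false, |req| req.matches(&untagged))
}

fn main() {
    let rustc = Version::parse("1.67.0-nightly").unwrap();
    assert!(rust_version_satisfied("1.64", &rustc));
    assert!(!rust_version_satisfied("1.70", &rustc));
}
```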
+ let mut proposals: Vec> = Vec::new(); + + match *filter { + CompileFilter::Default { + required_features_filterable, + } => { + for pkg in packages { + let default = filter_default_targets(pkg.targets(), mode); + proposals.extend(default.into_iter().map(|target| Proposal { + pkg, + target, + requires_features: !required_features_filterable, + mode, + })); + if mode == CompileMode::Test { + if let Some(t) = pkg + .targets() + .iter() + .find(|t| t.is_lib() && t.doctested() && t.doctestable()) + { + proposals.push(Proposal { + pkg, + target: t, + requires_features: false, + mode: CompileMode::Doctest, + }); + } + } + } + } + CompileFilter::Only { + all_targets, + ref lib, + ref bins, + ref examples, + ref tests, + ref benches, + } => { + if *lib != LibRule::False { + let mut libs = Vec::new(); + for proposal in filter_targets(packages, Target::is_lib, false, mode) { + let Proposal { target, pkg, .. } = proposal; + if mode.is_doc_test() && !target.doctestable() { + let types = target.rustc_crate_types(); + let types_str: Vec<&str> = types.iter().map(|t| t.as_str()).collect(); + ws.config().shell().warn(format!( + "doc tests are not supported for crate type(s) `{}` in package `{}`", + types_str.join(", "), + pkg.name() + ))?; + } else { + libs.push(proposal) + } + } + if !all_targets && libs.is_empty() && *lib == LibRule::True { + let names = packages.iter().map(|pkg| pkg.name()).collect::>(); + if names.len() == 1 { + anyhow::bail!("no library targets found in package `{}`", names[0]); + } else { + anyhow::bail!("no library targets found in packages: {}", names.join(", ")); + } + } + proposals.extend(libs); + } + + // If `--tests` was specified, add all targets that would be + // generated by `cargo test`. + let test_filter = match tests { + FilterRule::All => Target::tested, + FilterRule::Just(_) => Target::is_test, + }; + let test_mode = match mode { + CompileMode::Build => CompileMode::Test, + CompileMode::Check { .. } => CompileMode::Check { test: true }, + _ => mode, + }; + // If `--benches` was specified, add all targets that would be + // generated by `cargo bench`. + let bench_filter = match benches { + FilterRule::All => Target::benched, + FilterRule::Just(_) => Target::is_bench, + }; + let bench_mode = match mode { + CompileMode::Build => CompileMode::Bench, + CompileMode::Check { .. } => CompileMode::Check { test: true }, + _ => mode, + }; + + proposals.extend(list_rule_targets( + packages, + bins, + "bin", + Target::is_bin, + mode, + )?); + proposals.extend(list_rule_targets( + packages, + examples, + "example", + Target::is_example, + mode, + )?); + proposals.extend(list_rule_targets( + packages, + tests, + "test", + test_filter, + test_mode, + )?); + proposals.extend(list_rule_targets( + packages, + benches, + "bench", + bench_filter, + bench_mode, + )?); + } + } + + // Only include targets that are libraries or have all required + // features available. + // + // `features_map` is a map of &Package -> enabled_features + // It is computed by the set of enabled features for the package plus + // every enabled feature of every enabled dependency. + let mut features_map = HashMap::new(); + // This needs to be a set to de-duplicate units. Due to the way the + // targets are filtered, it is possible to have duplicate proposals for + // the same thing. 
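The filtering the surrounding comments describe — keep a target if it is a library, or if every entry in its `required-features` is enabled, with dependency features spelled `dep_name/feat_name` — is ultimately a set check. A self-contained sketch with invented feature names:

```rust
use std::collections::HashSet;

/// Mirrors the gating described above: libraries always build, other targets
/// build only when every entry in `required-features` is in the enabled set.
fn should_build(is_lib: bool, required: &[&str], enabled: &HashSet<&str>) -> bool {
    is_lib || required.iter().all(|f| enabled.contains(f))
}

fn main() {
    let enabled: HashSet<&str> = ["cli", "serde/derive"].into_iter().collect();
    assert!(should_build(false, &["cli"], &enabled));
    // Dependency features use the `dep_name/feat_name` spelling produced by
    // `resolve_all_features`.
    assert!(should_build(false, &["serde/derive"], &enabled));
    assert!(!should_build(false, &["vendored-openssl"], &enabled));
}
```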
+ let mut units = HashSet::new(); + for Proposal { + pkg, + target, + requires_features, + mode, + } in proposals + { + let unavailable_features = match target.required_features() { + Some(rf) => { + validate_required_features( + workspace_resolve, + target.name(), + rf, + pkg.summary(), + &mut config.shell(), + )?; + + let features = features_map.entry(pkg).or_insert_with(|| { + resolve_all_features(resolve, resolved_features, package_set, pkg.package_id()) + }); + rf.iter().filter(|f| !features.contains(*f)).collect() + } + None => Vec::new(), + }; + if target.is_lib() || unavailable_features.is_empty() { + new_unit(&mut units, pkg, target, mode); + } else if requires_features { + let required_features = target.required_features().unwrap(); + let quoted_required_features: Vec = required_features + .iter() + .map(|s| format!("`{}`", s)) + .collect(); + anyhow::bail!( + "target `{}` in package `{}` requires the features: {}\n\ + Consider enabling them by passing, e.g., `--features=\"{}\"`", + target.name(), + pkg.name(), + quoted_required_features.join(", "), + required_features.join(" ") + ); + } + // else, silently skip target. + } + let mut units: Vec<_> = units.into_iter().collect(); + unmatched_target_filters(&units, filter, &mut ws.config().shell())?; + + // Keep the roots in a consistent order, which helps with checking test output. + units.sort_unstable(); + Ok(units) +} + +/// Checks if the unit list is empty and the user has passed any combination of +/// --tests, --examples, --benches or --bins, and we didn't match on any targets. +/// We want to emit a warning to make sure the user knows that this run is a no-op, +/// and their code remains unchecked despite cargo not returning any errors +fn unmatched_target_filters( + units: &[Unit], + filter: &CompileFilter, + shell: &mut Shell, +) -> CargoResult<()> { + if let CompileFilter::Only { + all_targets, + lib: _, + ref bins, + ref examples, + ref tests, + ref benches, + } = *filter + { + if units.is_empty() { + let mut filters = String::new(); + let mut miss_count = 0; + + let mut append = |t: &FilterRule, s| { + if let FilterRule::All = *t { + miss_count += 1; + filters.push_str(s); + } + }; + + if all_targets { + filters.push_str(" `all-targets`"); + } else { + append(bins, " `bins`,"); + append(tests, " `tests`,"); + append(examples, " `examples`,"); + append(benches, " `benches`,"); + filters.pop(); + } + + return shell.warn(format!( + "Target {}{} specified, but no targets matched. This is a no-op", + if miss_count > 1 { "filters" } else { "filter" }, + filters, + )); + } + } + + Ok(()) +} + +/// Warns if a target's required-features references a feature that doesn't exist. +/// +/// This is a warning because historically this was not validated, and it +/// would cause too much breakage to make it an error. +fn validate_required_features( + resolve: &Option, + target_name: &str, + required_features: &[String], + summary: &Summary, + shell: &mut Shell, +) -> CargoResult<()> { + let resolve = match resolve { + None => return Ok(()), + Some(resolve) => resolve, + }; + + for feature in required_features { + let fv = FeatureValue::new(feature.into()); + match &fv { + FeatureValue::Feature(f) => { + if !summary.features().contains_key(f) { + shell.warn(format!( + "invalid feature `{}` in required-features of target `{}`: \ + `{}` is not present in [features] section", + fv, target_name, fv + ))?; + } + } + FeatureValue::Dep { .. 
} => { + anyhow::bail!( + "invalid feature `{}` in required-features of target `{}`: \ + `dep:` prefixed feature values are not allowed in required-features", + fv, + target_name + ); + } + FeatureValue::DepFeature { weak: true, .. } => { + anyhow::bail!( + "invalid feature `{}` in required-features of target `{}`: \ + optional dependency with `?` is not allowed in required-features", + fv, + target_name + ); + } + // Handling of dependent_crate/dependent_crate_feature syntax + FeatureValue::DepFeature { + dep_name, + dep_feature, + weak: false, + } => { + match resolve + .deps(summary.package_id()) + .find(|(_dep_id, deps)| deps.iter().any(|dep| dep.name_in_toml() == *dep_name)) + { + Some((dep_id, _deps)) => { + let dep_summary = resolve.summary(dep_id); + if !dep_summary.features().contains_key(dep_feature) + && !dep_summary + .dependencies() + .iter() + .any(|dep| dep.name_in_toml() == *dep_feature && dep.is_optional()) + { + shell.warn(format!( + "invalid feature `{}` in required-features of target `{}`: \ + feature `{}` does not exist in package `{}`", + fv, target_name, dep_feature, dep_id + ))?; + } + } + None => { + shell.warn(format!( + "invalid feature `{}` in required-features of target `{}`: \ + dependency `{}` does not exist", + fv, target_name, dep_name + ))?; + } + } + } + } + } + Ok(()) +} + +/// Gets all of the features enabled for a package, plus its dependencies' +/// features. +/// +/// Dependencies are added as `dep_name/feat_name` because `required-features` +/// wants to support that syntax. +pub fn resolve_all_features( + resolve_with_overrides: &Resolve, + resolved_features: &features::ResolvedFeatures, + package_set: &PackageSet<'_>, + package_id: PackageId, +) -> HashSet { + let mut features: HashSet = resolved_features + .activated_features(package_id, FeaturesFor::NormalOrDev) + .iter() + .map(|s| s.to_string()) + .collect(); + + // Include features enabled for use by dependencies so targets can also use them with the + // required-features field when deciding whether to be built or skipped. + for (dep_id, deps) in resolve_with_overrides.deps(package_id) { + let is_proc_macro = package_set + .get_one(dep_id) + .expect("packages downloaded") + .proc_macro(); + for dep in deps { + let features_for = FeaturesFor::from_for_host(is_proc_macro || dep.is_build()); + for feature in resolved_features + .activated_features_unverified(dep_id, features_for) + .unwrap_or_default() + { + features.insert(format!("{}/{}", dep.name_in_toml(), feature)); + } + } + } + + features +} + +/// Given a list of all targets for a package, filters out only the targets +/// that are automatically included when the user doesn't specify any targets. +fn filter_default_targets(targets: &[Target], mode: CompileMode) -> Vec<&Target> { + match mode { + CompileMode::Bench => targets.iter().filter(|t| t.benched()).collect(), + CompileMode::Test => targets + .iter() + .filter(|t| t.tested() || t.is_example()) + .collect(), + CompileMode::Build | CompileMode::Check { .. } => targets + .iter() + .filter(|t| t.is_bin() || t.is_lib()) + .collect(), + CompileMode::Doc { .. } => { + // `doc` does lib and bins (bin with same name as lib is skipped). 
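`validate_required_features` above accepts plain features and `dep/feature` references, but hard-errors on the `dep:` prefix and on weak `?/` references. The same rules restated over raw strings (standalone; cargo's real check works on its `FeatureValue` type):

```rust
/// Classify a `required-features` entry the way `validate_required_features`
/// above does, but over a raw string instead of cargo's `FeatureValue`.
fn required_feature_allowed(entry: &str) -> Result<(), String> {
    if entry.starts_with("dep:") {
        return Err("`dep:` prefixed feature values are not allowed in required-features".into());
    }
    if let Some((dep, _feat)) = entry.split_once('/') {
        if dep.ends_with('?') {
            return Err("optional dependency with `?` is not allowed in required-features".into());
        }
    }
    Ok(())
}

fn main() {
    assert!(required_feature_allowed("serde_support").is_ok());
    assert!(required_feature_allowed("serde/derive").is_ok());
    assert!(required_feature_allowed("dep:serde").is_err());
    assert!(required_feature_allowed("serde?/derive").is_err());
}
```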
+ targets + .iter() + .filter(|t| { + t.documented() + && (!t.is_bin() + || !targets.iter().any(|l| l.is_lib() && l.name() == t.name())) + }) + .collect() + } + CompileMode::Doctest | CompileMode::Docscrape | CompileMode::RunCustomBuild => { + panic!("Invalid mode {:?}", mode) + } + } +} + +/// Returns a list of proposed targets based on command-line target selection flags. +fn list_rule_targets<'a>( + packages: &[&'a Package], + rule: &FilterRule, + target_desc: &'static str, + is_expected_kind: fn(&Target) -> bool, + mode: CompileMode, +) -> CargoResult>> { + let mut proposals = Vec::new(); + match rule { + FilterRule::All => { + proposals.extend(filter_targets(packages, is_expected_kind, false, mode)) + } + FilterRule::Just(names) => { + for name in names { + proposals.extend(find_named_targets( + packages, + name, + target_desc, + is_expected_kind, + mode, + )?); + } + } + } + Ok(proposals) +} + +/// Finds the targets for a specifically named target. +fn find_named_targets<'a>( + packages: &[&'a Package], + target_name: &str, + target_desc: &'static str, + is_expected_kind: fn(&Target) -> bool, + mode: CompileMode, +) -> CargoResult>> { + let is_glob = is_glob_pattern(target_name); + let proposals = if is_glob { + let pattern = build_glob(target_name)?; + let filter = |t: &Target| is_expected_kind(t) && pattern.matches(t.name()); + filter_targets(packages, filter, true, mode) + } else { + let filter = |t: &Target| t.name() == target_name && is_expected_kind(t); + filter_targets(packages, filter, true, mode) + }; + + if proposals.is_empty() { + let targets = packages + .iter() + .flat_map(|pkg| { + pkg.targets() + .iter() + .filter(|target| is_expected_kind(target)) + }) + .collect::>(); + let suggestion = closest_msg(target_name, targets.iter(), |t| t.name()); + if !suggestion.is_empty() { + anyhow::bail!( + "no {} target {} `{}`{}", + target_desc, + if is_glob { "matches pattern" } else { "named" }, + target_name, + suggestion + ); + } else { + let mut msg = String::new(); + writeln!( + msg, + "no {} target {} `{}`.", + target_desc, + if is_glob { "matches pattern" } else { "named" }, + target_name, + )?; + if !targets.is_empty() { + writeln!(msg, "Available {} targets:", target_desc)?; + for target in targets { + writeln!(msg, " {}", target.name())?; + } + } + anyhow::bail!(msg); + } + } + Ok(proposals) +} + +fn filter_targets<'a>( + packages: &[&'a Package], + predicate: impl Fn(&Target) -> bool, + requires_features: bool, + mode: CompileMode, +) -> Vec> { + let mut proposals = Vec::new(); + for pkg in packages { + for target in pkg.targets().iter().filter(|t| predicate(t)) { + proposals.push(Proposal { + pkg, + target, + requires_features, + mode, + }); + } + } + proposals +} + +/// This is used to rebuild the unit graph, sharing host dependencies if possible. +/// +/// This will translate any unit's `CompileKind::Target(host)` to +/// `CompileKind::Host` if the kind is equal to `to_host`. This also handles +/// generating the unit `dep_hash`, and merging shared units if possible. +/// +/// This is necessary because if normal dependencies used `CompileKind::Host`, +/// there would be no way to distinguish those units from build-dependency +/// units. This can cause a problem if a shared normal/build dependency needs +/// to link to another dependency whose features differ based on whether or +/// not it is a normal or build dependency. 
If both units used +/// `CompileKind::Host`, then they would end up being identical, causing a +/// collision in the `UnitGraph`, and Cargo would end up randomly choosing one +/// value or the other. +/// +/// The solution is to keep normal and build dependencies separate when +/// building the unit graph, and then run this second pass which will try to +/// combine shared dependencies safely. By adding a hash of the dependencies +/// to the `Unit`, this allows the `CompileKind` to be changed back to `Host` +/// without fear of an unwanted collision. +fn rebuild_unit_graph_shared( + interner: &UnitInterner, + unit_graph: UnitGraph, + roots: &[Unit], + scrape_units: &[Unit], + to_host: CompileKind, +) -> (Vec, Vec, UnitGraph) { + let mut result = UnitGraph::new(); + // Map of the old unit to the new unit, used to avoid recursing into units + // that have already been computed to improve performance. + let mut memo = HashMap::new(); + let new_roots = roots + .iter() + .map(|root| { + traverse_and_share(interner, &mut memo, &mut result, &unit_graph, root, to_host) + }) + .collect(); + let new_scrape_units = scrape_units + .iter() + .map(|unit| memo.get(unit).unwrap().clone()) + .collect(); + (new_roots, new_scrape_units, result) +} + +/// Recursive function for rebuilding the graph. +/// +/// This walks `unit_graph`, starting at the given `unit`. It inserts the new +/// units into `new_graph`, and returns a new updated version of the given +/// unit (`dep_hash` is filled in, and `kind` switched if necessary). +fn traverse_and_share( + interner: &UnitInterner, + memo: &mut HashMap, + new_graph: &mut UnitGraph, + unit_graph: &UnitGraph, + unit: &Unit, + to_host: CompileKind, +) -> Unit { + if let Some(new_unit) = memo.get(unit) { + // Already computed, no need to recompute. + return new_unit.clone(); + } + let mut dep_hash = StableHasher::new(); + let new_deps: Vec<_> = unit_graph[unit] + .iter() + .map(|dep| { + let new_dep_unit = + traverse_and_share(interner, memo, new_graph, unit_graph, &dep.unit, to_host); + new_dep_unit.hash(&mut dep_hash); + UnitDep { + unit: new_dep_unit, + ..dep.clone() + } + }) + .collect(); + let new_dep_hash = dep_hash.finish(); + let new_kind = if unit.kind == to_host { + CompileKind::Host + } else { + unit.kind + }; + let new_unit = interner.intern( + &unit.pkg, + &unit.target, + unit.profile.clone(), + new_kind, + unit.mode, + unit.features.clone(), + unit.is_std, + new_dep_hash, + unit.artifact, + ); + assert!(memo.insert(unit.clone(), new_unit.clone()).is_none()); + new_graph.entry(new_unit.clone()).or_insert(new_deps); + new_unit +} + +/// Removes duplicate CompileMode::Doc units that would cause problems with +/// filename collisions. +/// +/// Rustdoc only separates units by crate name in the file directory +/// structure. If any two units with the same crate name exist, this would +/// cause a filename collision, causing different rustdoc invocations to stomp +/// on one another's files. +/// +/// Unfortunately this does not remove all duplicates, as some of them are +/// either user error, or difficult to remove. Cases that I can think of: +/// +/// - Same target name in different packages. See the `collision_doc` test. +/// - Different sources. See `collision_doc_sources` test. +/// +/// Ideally this would not be necessary. +fn remove_duplicate_doc( + build_config: &BuildConfig, + root_units: &[Unit], + unit_graph: &mut UnitGraph, +) { + // First, create a mapping of crate_name -> Unit so we can see where the + // duplicates are. 
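The `rebuild_unit_graph_shared`/`traverse_and_share` comments above describe folding a hash of each unit's (already rewritten) dependencies into the unit itself, so that collapsing kinds back to `Host` cannot make two units with different dependency sets collide. A toy illustration, with std's `DefaultHasher` standing in for cargo's `StableHasher` and an invented `MiniUnit` type:

```rust
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

/// Invented stand-in for cargo's `Unit`: same crate, but the hash of its
/// dependency list keeps otherwise-identical units distinct.
#[derive(Hash)]
struct MiniUnit {
    crate_name: &'static str,
    dep_hash: u64,
}

/// Mirror of the idea in `traverse_and_share`: hash the rewritten dependencies
/// and store the result on the parent unit.
fn dep_hash(deps: &[MiniUnit]) -> u64 {
    let mut hasher = DefaultHasher::new();
    for dep in deps {
        dep.hash(&mut hasher);
    }
    hasher.finish()
}

fn main() {
    // A parent that depends only on `log` vs. one that also pulls in `cc`:
    // their dependency hashes differ, so merging both kinds into `Host`
    // cannot collapse them into a single colliding unit.
    let only_log = dep_hash(&[MiniUnit { crate_name: "log", dep_hash: 0 }]);
    let log_and_cc = dep_hash(&[
        MiniUnit { crate_name: "log", dep_hash: 0 },
        MiniUnit { crate_name: "cc", dep_hash: 0 },
    ]);
    assert_ne!(only_log, log_and_cc);
}
```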
+ let mut all_docs: HashMap> = HashMap::new(); + for unit in unit_graph.keys() { + if unit.mode.is_doc() { + all_docs + .entry(unit.target.crate_name()) + .or_default() + .push(unit.clone()); + } + } + // Keep track of units to remove so that they can be efficiently removed + // from the unit_deps. + let mut removed_units: HashSet = HashSet::new(); + let mut remove = |units: Vec, reason: &str, cb: &dyn Fn(&Unit) -> bool| -> Vec { + let (to_remove, remaining_units): (Vec, Vec) = units + .into_iter() + .partition(|unit| cb(unit) && !root_units.contains(unit)); + for unit in to_remove { + log::debug!( + "removing duplicate doc due to {} for package {} target `{}`", + reason, + unit.pkg, + unit.target.name() + ); + unit_graph.remove(&unit); + removed_units.insert(unit); + } + remaining_units + }; + // Iterate over the duplicates and try to remove them from unit_graph. + for (_crate_name, mut units) in all_docs { + if units.len() == 1 { + continue; + } + // Prefer target over host if --target was not specified. + if build_config + .requested_kinds + .iter() + .all(CompileKind::is_host) + { + // Note these duplicates may not be real duplicates, since they + // might get merged in rebuild_unit_graph_shared. Either way, it + // shouldn't hurt to remove them early (although the report in the + // log might be confusing). + units = remove(units, "host/target merger", &|unit| unit.kind.is_host()); + if units.len() == 1 { + continue; + } + } + // Prefer newer versions over older. + let mut source_map: HashMap<(InternedString, SourceId, CompileKind), Vec> = + HashMap::new(); + for unit in units { + let pkg_id = unit.pkg.package_id(); + // Note, this does not detect duplicates from different sources. + source_map + .entry((pkg_id.name(), pkg_id.source_id(), unit.kind)) + .or_default() + .push(unit); + } + let mut remaining_units = Vec::new(); + for (_key, mut units) in source_map { + if units.len() > 1 { + units.sort_by(|a, b| a.pkg.version().partial_cmp(b.pkg.version()).unwrap()); + // Remove any entries with version < newest. + let newest_version = units.last().unwrap().pkg.version().clone(); + let keep_units = remove(units, "older version", &|unit| { + unit.pkg.version() < &newest_version + }); + remaining_units.extend(keep_units); + } else { + remaining_units.extend(units); + } + } + if remaining_units.len() == 1 { + continue; + } + // Are there other heuristics to remove duplicates that would make + // sense? Maybe prefer path sources over all others? + } + // Also remove units from the unit_deps so there aren't any dangling edges. + for unit_deps in unit_graph.values_mut() { + unit_deps.retain(|unit_dep| !removed_units.contains(&unit_dep.unit)); + } + // Remove any orphan units that were detached from the graph. + let mut visited = HashSet::new(); + fn visit(unit: &Unit, graph: &UnitGraph, visited: &mut HashSet) { + if !visited.insert(unit.clone()) { + return; + } + for dep in &graph[unit] { + visit(&dep.unit, graph, visited); + } + } + for unit in root_units { + visit(unit, unit_graph, &mut visited); + } + unit_graph.retain(|unit, _| visited.contains(unit)); +} + +/// Override crate types for given units. +/// +/// This is primarily used by `cargo rustc --crate-type`. 
+fn override_rustc_crate_types( + units: &mut [Unit], + args: &[String], + interner: &UnitInterner, +) -> CargoResult<()> { + if units.len() != 1 { + anyhow::bail!( + "crate types to rustc can only be passed to one \ + target, consider filtering\nthe package by passing, \ + e.g., `--lib` or `--example` to specify a single target" + ); + } + + let unit = &units[0]; + let override_unit = |f: fn(Vec) -> TargetKind| { + let crate_types = args.iter().map(|s| s.into()).collect(); + let mut target = unit.target.clone(); + target.set_kind(f(crate_types)); + interner.intern( + &unit.pkg, + &target, + unit.profile.clone(), + unit.kind, + unit.mode, + unit.features.clone(), + unit.is_std, + unit.dep_hash, + unit.artifact, + ) + }; + units[0] = match unit.target.kind() { + TargetKind::Lib(_) => override_unit(TargetKind::Lib), + TargetKind::ExampleLib(_) => override_unit(TargetKind::ExampleLib), + _ => { + anyhow::bail!( + "crate types can only be specified for libraries and example libraries.\n\ + Binaries, tests, and benchmarks are always the `bin` crate type" + ); + } + }; + + Ok(()) +} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/ops/cargo_compile/packages.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/ops/cargo_compile/packages.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/ops/cargo_compile/packages.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/ops/cargo_compile/packages.rs 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,220 @@ +//! See [`Packages`]. + +use std::collections::BTreeSet; + +use crate::core::Package; +use crate::core::{PackageIdSpec, Workspace}; +use crate::util::restricted_names::is_glob_pattern; +use crate::util::CargoResult; + +use anyhow::{bail, Context as _}; + +/// Represents the selected pacakges that will be built. +/// +/// Generally, it represents the combination of all `-p` flag. When working within +/// a workspace, `--exclude` and `--workspace` flags also contribute to it. +#[derive(PartialEq, Eq, Debug)] +pub enum Packages { + /// Pacakges selected by default. Ususally means no flag provided. + Default, + /// Opt in all packages. + /// + /// As of the time of this writing, it only works on opting in all workspace members. + All, + /// Opt out of packages passed in. + /// + /// As of the time of this writing, it only works on opting out workspace members. + OptOut(Vec), + /// A sequence of hand-picked packages that will be built. Normally done by `-p` flag. + Packages(Vec), +} + +impl Packages { + /// Creates a `Packages` from flags which are generally equivalent to command line flags. + pub fn from_flags(all: bool, exclude: Vec, package: Vec) -> CargoResult { + Ok(match (all, exclude.len(), package.len()) { + (false, 0, 0) => Packages::Default, + (false, 0, _) => Packages::Packages(package), + (false, _, _) => anyhow::bail!("--exclude can only be used together with --workspace"), + (true, 0, _) => Packages::All, + (true, _, _) => Packages::OptOut(exclude), + }) + } + + /// Converts selected packages to [`PackageIdSpec`]s. 
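`Packages::from_flags` above maps the familiar `--workspace`/`--exclude`/`-p` flag combinations onto the enum. A short usage sketch; it needs cargo-the-library to compile, so treat it as illustrative rather than a drop-in test:

```rust
use cargo::ops::Packages;

fn main() -> cargo::util::CargoResult<()> {
    // No flags at all: build the default members.
    assert_eq!(Packages::from_flags(false, vec![], vec![])?, Packages::Default);
    // `-p foo`: an explicit opt-in list.
    assert_eq!(
        Packages::from_flags(false, vec![], vec!["foo".to_string()])?,
        Packages::Packages(vec!["foo".to_string()])
    );
    // `--workspace --exclude foo`: everything except the excluded names/patterns.
    assert_eq!(
        Packages::from_flags(true, vec!["foo".to_string()], vec![])?,
        Packages::OptOut(vec!["foo".to_string()])
    );
    // `--exclude` without `--workspace` is rejected.
    assert!(Packages::from_flags(false, vec!["foo".to_string()], vec![]).is_err());
    Ok(())
}
```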
+ pub fn to_package_id_specs(&self, ws: &Workspace<'_>) -> CargoResult> { + let specs = match self { + Packages::All => ws + .members() + .map(Package::package_id) + .map(PackageIdSpec::from_package_id) + .collect(), + Packages::OptOut(opt_out) => { + let (mut patterns, mut names) = opt_patterns_and_names(opt_out)?; + let specs = ws + .members() + .filter(|pkg| { + !names.remove(pkg.name().as_str()) && !match_patterns(pkg, &mut patterns) + }) + .map(Package::package_id) + .map(PackageIdSpec::from_package_id) + .collect(); + let warn = |e| ws.config().shell().warn(e); + emit_package_not_found(ws, names, true).or_else(warn)?; + emit_pattern_not_found(ws, patterns, true).or_else(warn)?; + specs + } + Packages::Packages(packages) if packages.is_empty() => { + vec![PackageIdSpec::from_package_id(ws.current()?.package_id())] + } + Packages::Packages(opt_in) => { + let (mut patterns, packages) = opt_patterns_and_names(opt_in)?; + let mut specs = packages + .iter() + .map(|p| PackageIdSpec::parse(p)) + .collect::>>()?; + if !patterns.is_empty() { + let matched_pkgs = ws + .members() + .filter(|pkg| match_patterns(pkg, &mut patterns)) + .map(Package::package_id) + .map(PackageIdSpec::from_package_id); + specs.extend(matched_pkgs); + } + emit_pattern_not_found(ws, patterns, false)?; + specs + } + Packages::Default => ws + .default_members() + .map(Package::package_id) + .map(PackageIdSpec::from_package_id) + .collect(), + }; + if specs.is_empty() { + if ws.is_virtual() { + bail!( + "manifest path `{}` contains no package: The manifest is virtual, \ + and the workspace has no members.", + ws.root().display() + ) + } + bail!("no packages to compile") + } + Ok(specs) + } + + /// Gets a list of selected [`Package`]s. + pub fn get_packages<'ws>(&self, ws: &'ws Workspace<'_>) -> CargoResult> { + let packages: Vec<_> = match self { + Packages::Default => ws.default_members().collect(), + Packages::All => ws.members().collect(), + Packages::OptOut(opt_out) => { + let (mut patterns, mut names) = opt_patterns_and_names(opt_out)?; + let packages = ws + .members() + .filter(|pkg| { + !names.remove(pkg.name().as_str()) && !match_patterns(pkg, &mut patterns) + }) + .collect(); + emit_package_not_found(ws, names, true)?; + emit_pattern_not_found(ws, patterns, true)?; + packages + } + Packages::Packages(opt_in) => { + let (mut patterns, mut names) = opt_patterns_and_names(opt_in)?; + let packages = ws + .members() + .filter(|pkg| { + names.remove(pkg.name().as_str()) || match_patterns(pkg, &mut patterns) + }) + .collect(); + emit_package_not_found(ws, names, false)?; + emit_pattern_not_found(ws, patterns, false)?; + packages + } + }; + Ok(packages) + } + + /// Returns whether or not the user needs to pass a `-p` flag to target a + /// specific package in the workspace. + pub fn needs_spec_flag(&self, ws: &Workspace<'_>) -> bool { + match self { + Packages::Default => ws.default_members().count() > 1, + Packages::All => ws.members().count() > 1, + Packages::Packages(_) => true, + Packages::OptOut(_) => true, + } + } +} + +/// Emits "package not found" error. +fn emit_package_not_found( + ws: &Workspace<'_>, + opt_names: BTreeSet<&str>, + opt_out: bool, +) -> CargoResult<()> { + if !opt_names.is_empty() { + anyhow::bail!( + "{}package(s) `{}` not found in workspace `{}`", + if opt_out { "excluded " } else { "" }, + opt_names.into_iter().collect::>().join(", "), + ws.root().display(), + ) + } + Ok(()) +} + +/// Emits "glob pattern not found" error. 
+fn emit_pattern_not_found( + ws: &Workspace<'_>, + opt_patterns: Vec<(glob::Pattern, bool)>, + opt_out: bool, +) -> CargoResult<()> { + let not_matched = opt_patterns + .iter() + .filter(|(_, matched)| !*matched) + .map(|(pat, _)| pat.as_str()) + .collect::>(); + if !not_matched.is_empty() { + anyhow::bail!( + "{}package pattern(s) `{}` not found in workspace `{}`", + if opt_out { "excluded " } else { "" }, + not_matched.join(", "), + ws.root().display(), + ) + } + Ok(()) +} + +/// Given a list opt-in or opt-out package selection strings, generates two +/// collections that represent glob patterns and package names respectively. +fn opt_patterns_and_names( + opt: &[String], +) -> CargoResult<(Vec<(glob::Pattern, bool)>, BTreeSet<&str>)> { + let mut opt_patterns = Vec::new(); + let mut opt_names = BTreeSet::new(); + for x in opt.iter() { + if is_glob_pattern(x) { + opt_patterns.push((build_glob(x)?, false)); + } else { + opt_names.insert(String::as_str(x)); + } + } + Ok((opt_patterns, opt_names)) +} + +/// Checks whether a package matches any of a list of glob patterns generated +/// from `opt_patterns_and_names`. +fn match_patterns(pkg: &Package, patterns: &mut Vec<(glob::Pattern, bool)>) -> bool { + patterns.iter_mut().any(|(m, matched)| { + let is_matched = m.matches(pkg.name().as_str()); + *matched |= is_matched; + is_matched + }) +} + +/// Build [`glob::Pattern`] with informative context. +pub fn build_glob(pat: &str) -> CargoResult { + glob::Pattern::new(pat).with_context(|| format!("cannot build glob pattern from `{}`", pat)) +} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/ops/cargo_compile.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/ops/cargo_compile.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/ops/cargo_compile.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/ops/cargo_compile.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,1933 +0,0 @@ -//! The Cargo "compile" operation. -//! -//! This module contains the entry point for starting the compilation process -//! for commands like `build`, `test`, `doc`, `rustc`, etc. -//! -//! The `compile` function will do all the work to compile a workspace. A -//! rough outline is: -//! -//! - Resolve the dependency graph (see `ops::resolve`). -//! - Download any packages needed (see `PackageSet`). -//! - Generate a list of top-level "units" of work for the targets the user -//! requested on the command-line. Each `Unit` corresponds to a compiler -//! invocation. This is done in this module (`generate_targets`). -//! - Build the graph of `Unit` dependencies (see -//! `core::compiler::context::unit_dependencies`). -//! - Create a `Context` which will perform the following steps: -//! - Prepare the `target` directory (see `Layout`). -//! - Create a job queue (see `JobQueue`). The queue checks the -//! fingerprint of each `Unit` to determine if it should run or be -//! skipped. -//! - Execute the queue. Each leaf in the queue's dependency graph is -//! executed, and then removed from the graph when finished. This -//! repeats until the queue is empty. 
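The selection helpers above in packages.rs (`opt_patterns_and_names`, `match_patterns`, `build_glob`) lean on the `glob` crate so that `-p 'cargo-*'` style arguments work. A standalone sketch of the same matching, with invented package names:

```rust
use glob::Pattern;

fn main() -> Result<(), glob::PatternError> {
    // Split `-p` arguments into glob patterns and literal names, like
    // `opt_patterns_and_names` does using `is_glob_pattern`.
    let args = ["cargo-*", "serde"];
    let (patterns, names): (Vec<&str>, Vec<&str>) =
        args.iter().partition(|a| a.contains(&['*', '?', '[', ']'][..]));

    let compiled: Vec<Pattern> = patterns
        .iter()
        .map(|p| Pattern::new(p))
        .collect::<Result<_, _>>()?;

    // A member is selected if it is named literally or matches any pattern,
    // mirroring `match_patterns`.
    let members = ["cargo-util", "cargo-platform", "serde", "rand"];
    let selected: Vec<&str> = members
        .iter()
        .copied()
        .filter(|m| names.contains(m) || compiled.iter().any(|p| p.matches(m)))
        .collect();
    assert_eq!(selected, ["cargo-util", "cargo-platform", "serde"]);
    Ok(())
}
```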
- -use std::collections::{BTreeSet, HashMap, HashSet}; -use std::fmt::Write; -use std::hash::{Hash, Hasher}; -use std::sync::Arc; - -use crate::core::compiler::unit_dependencies::{build_unit_dependencies, IsArtifact}; -use crate::core::compiler::unit_graph::{self, UnitDep, UnitGraph}; -use crate::core::compiler::{standard_lib, CrateType, TargetInfo}; -use crate::core::compiler::{BuildConfig, BuildContext, Compilation, Context}; -use crate::core::compiler::{CompileKind, CompileMode, CompileTarget, RustcTargetData, Unit}; -use crate::core::compiler::{DefaultExecutor, Executor, UnitInterner}; -use crate::core::profiles::{Profiles, UnitFor}; -use crate::core::resolver::features::{self, CliFeatures, FeaturesFor}; -use crate::core::resolver::{HasDevUnits, Resolve}; -use crate::core::{FeatureValue, Package, PackageSet, Shell, Summary, Target}; -use crate::core::{PackageId, PackageIdSpec, SourceId, TargetKind, Workspace}; -use crate::drop_println; -use crate::ops; -use crate::ops::resolve::WorkspaceResolve; -use crate::util::config::Config; -use crate::util::interning::InternedString; -use crate::util::restricted_names::is_glob_pattern; -use crate::util::{closest_msg, profile, CargoResult, StableHasher}; - -use anyhow::{bail, Context as _}; - -/// Contains information about how a package should be compiled. -/// -/// Note on distinction between `CompileOptions` and `BuildConfig`: -/// `BuildConfig` contains values that need to be retained after -/// `BuildContext` is created. The other fields are no longer necessary. Think -/// of it as `CompileOptions` are high-level settings requested on the -/// command-line, and `BuildConfig` are low-level settings for actually -/// driving `rustc`. -#[derive(Debug)] -pub struct CompileOptions { - /// Configuration information for a rustc build - pub build_config: BuildConfig, - /// Feature flags requested by the user. - pub cli_features: CliFeatures, - /// A set of packages to build. - pub spec: Packages, - /// Filter to apply to the root package to select which targets will be - /// built. - pub filter: CompileFilter, - /// Extra arguments to be passed to rustdoc (single target only) - pub target_rustdoc_args: Option>, - /// The specified target will be compiled with all the available arguments, - /// note that this only accounts for the *final* invocation of rustc - pub target_rustc_args: Option>, - /// Crate types to be passed to rustc (single target only) - pub target_rustc_crate_types: Option>, - /// Extra arguments passed to all selected targets for rustdoc. - pub local_rustdoc_args: Option>, - /// Whether the `--document-private-items` flags was specified and should - /// be forwarded to `rustdoc`. - pub rustdoc_document_private_items: bool, - /// Whether the build process should check the minimum Rust version - /// defined in the cargo metadata for a crate. 
- pub honor_rust_version: bool, -} - -impl CompileOptions { - pub fn new(config: &Config, mode: CompileMode) -> CargoResult { - let jobs = None; - let keep_going = false; - Ok(CompileOptions { - build_config: BuildConfig::new(config, jobs, keep_going, &[], mode)?, - cli_features: CliFeatures::new_all(false), - spec: ops::Packages::Packages(Vec::new()), - filter: CompileFilter::Default { - required_features_filterable: false, - }, - target_rustdoc_args: None, - target_rustc_args: None, - target_rustc_crate_types: None, - local_rustdoc_args: None, - rustdoc_document_private_items: false, - honor_rust_version: true, - }) - } -} - -#[derive(PartialEq, Eq, Debug)] -pub enum Packages { - Default, - All, - OptOut(Vec), - Packages(Vec), -} - -impl Packages { - pub fn from_flags(all: bool, exclude: Vec, package: Vec) -> CargoResult { - Ok(match (all, exclude.len(), package.len()) { - (false, 0, 0) => Packages::Default, - (false, 0, _) => Packages::Packages(package), - (false, _, _) => anyhow::bail!("--exclude can only be used together with --workspace"), - (true, 0, _) => Packages::All, - (true, _, _) => Packages::OptOut(exclude), - }) - } - - /// Converts selected packages from a workspace to `PackageIdSpec`s. - pub fn to_package_id_specs(&self, ws: &Workspace<'_>) -> CargoResult> { - let specs = match self { - Packages::All => ws - .members() - .map(Package::package_id) - .map(PackageIdSpec::from_package_id) - .collect(), - Packages::OptOut(opt_out) => { - let (mut patterns, mut names) = opt_patterns_and_names(opt_out)?; - let specs = ws - .members() - .filter(|pkg| { - !names.remove(pkg.name().as_str()) && !match_patterns(pkg, &mut patterns) - }) - .map(Package::package_id) - .map(PackageIdSpec::from_package_id) - .collect(); - let warn = |e| ws.config().shell().warn(e); - emit_package_not_found(ws, names, true).or_else(warn)?; - emit_pattern_not_found(ws, patterns, true).or_else(warn)?; - specs - } - Packages::Packages(packages) if packages.is_empty() => { - vec![PackageIdSpec::from_package_id(ws.current()?.package_id())] - } - Packages::Packages(opt_in) => { - let (mut patterns, packages) = opt_patterns_and_names(opt_in)?; - let mut specs = packages - .iter() - .map(|p| PackageIdSpec::parse(p)) - .collect::>>()?; - if !patterns.is_empty() { - let matched_pkgs = ws - .members() - .filter(|pkg| match_patterns(pkg, &mut patterns)) - .map(Package::package_id) - .map(PackageIdSpec::from_package_id); - specs.extend(matched_pkgs); - } - emit_pattern_not_found(ws, patterns, false)?; - specs - } - Packages::Default => ws - .default_members() - .map(Package::package_id) - .map(PackageIdSpec::from_package_id) - .collect(), - }; - if specs.is_empty() { - if ws.is_virtual() { - bail!( - "manifest path `{}` contains no package: The manifest is virtual, \ - and the workspace has no members.", - ws.root().display() - ) - } - bail!("no packages to compile") - } - Ok(specs) - } - - /// Gets a list of selected packages from a workspace. 
- pub fn get_packages<'ws>(&self, ws: &'ws Workspace<'_>) -> CargoResult> { - let packages: Vec<_> = match self { - Packages::Default => ws.default_members().collect(), - Packages::All => ws.members().collect(), - Packages::OptOut(opt_out) => { - let (mut patterns, mut names) = opt_patterns_and_names(opt_out)?; - let packages = ws - .members() - .filter(|pkg| { - !names.remove(pkg.name().as_str()) && !match_patterns(pkg, &mut patterns) - }) - .collect(); - emit_package_not_found(ws, names, true)?; - emit_pattern_not_found(ws, patterns, true)?; - packages - } - Packages::Packages(opt_in) => { - let (mut patterns, mut names) = opt_patterns_and_names(opt_in)?; - let packages = ws - .members() - .filter(|pkg| { - names.remove(pkg.name().as_str()) || match_patterns(pkg, &mut patterns) - }) - .collect(); - emit_package_not_found(ws, names, false)?; - emit_pattern_not_found(ws, patterns, false)?; - packages - } - }; - Ok(packages) - } - - /// Returns whether or not the user needs to pass a `-p` flag to target a - /// specific package in the workspace. - pub fn needs_spec_flag(&self, ws: &Workspace<'_>) -> bool { - match self { - Packages::Default => ws.default_members().count() > 1, - Packages::All => ws.members().count() > 1, - Packages::Packages(_) => true, - Packages::OptOut(_) => true, - } - } -} - -#[derive(Debug, PartialEq, Eq)] -pub enum LibRule { - /// Include the library, fail if not present - True, - /// Include the library if present - Default, - /// Exclude the library - False, -} - -#[derive(Debug)] -pub enum FilterRule { - All, - Just(Vec), -} - -#[derive(Debug)] -pub enum CompileFilter { - Default { - /// Flag whether targets can be safely skipped when required-features are not satisfied. - required_features_filterable: bool, - }, - Only { - all_targets: bool, - lib: LibRule, - bins: FilterRule, - examples: FilterRule, - tests: FilterRule, - benches: FilterRule, - }, -} - -pub fn compile<'a>(ws: &Workspace<'a>, options: &CompileOptions) -> CargoResult> { - let exec: Arc = Arc::new(DefaultExecutor); - compile_with_exec(ws, options, &exec) -} - -/// Like `compile` but allows specifying a custom `Executor` that will be able to intercept build -/// calls and add custom logic. `compile` uses `DefaultExecutor` which just passes calls through. -pub fn compile_with_exec<'a>( - ws: &Workspace<'a>, - options: &CompileOptions, - exec: &Arc, -) -> CargoResult> { - ws.emit_warnings()?; - compile_ws(ws, options, exec) -} - -pub fn compile_ws<'a>( - ws: &Workspace<'a>, - options: &CompileOptions, - exec: &Arc, -) -> CargoResult> { - let interner = UnitInterner::new(); - let bcx = create_bcx(ws, options, &interner)?; - if options.build_config.unit_graph { - unit_graph::emit_serialized_unit_graph(&bcx.roots, &bcx.unit_graph, ws.config())?; - return Compilation::new(&bcx); - } - let _p = profile::start("compiling"); - let cx = Context::new(&bcx)?; - cx.compile(exec) -} - -pub fn print<'a>( - ws: &Workspace<'a>, - options: &CompileOptions, - print_opt_value: &str, -) -> CargoResult<()> { - let CompileOptions { - ref build_config, - ref target_rustc_args, - .. 
- } = *options; - let config = ws.config(); - let rustc = config.load_global_rustc(Some(ws))?; - for (index, kind) in build_config.requested_kinds.iter().enumerate() { - if index != 0 { - drop_println!(config); - } - let target_info = TargetInfo::new(config, &build_config.requested_kinds, &rustc, *kind)?; - let mut process = rustc.process(); - process.args(&target_info.rustflags); - if let Some(args) = target_rustc_args { - process.args(args); - } - if let CompileKind::Target(t) = kind { - process.arg("--target").arg(t.short_name()); - } - process.arg("--print").arg(print_opt_value); - process.exec()?; - } - Ok(()) -} - -pub fn create_bcx<'a, 'cfg>( - ws: &'a Workspace<'cfg>, - options: &'a CompileOptions, - interner: &'a UnitInterner, -) -> CargoResult> { - let CompileOptions { - ref build_config, - ref spec, - ref cli_features, - ref filter, - ref target_rustdoc_args, - ref target_rustc_args, - ref target_rustc_crate_types, - ref local_rustdoc_args, - rustdoc_document_private_items, - honor_rust_version, - } = *options; - let config = ws.config(); - - // Perform some pre-flight validation. - match build_config.mode { - CompileMode::Test - | CompileMode::Build - | CompileMode::Check { .. } - | CompileMode::Bench - | CompileMode::RunCustomBuild => { - if std::env::var("RUST_FLAGS").is_ok() { - config.shell().warn( - "Cargo does not read `RUST_FLAGS` environment variable. Did you mean `RUSTFLAGS`?", - )?; - } - } - CompileMode::Doc { .. } | CompileMode::Doctest | CompileMode::Docscrape => { - if std::env::var("RUSTDOC_FLAGS").is_ok() { - config.shell().warn( - "Cargo does not read `RUSTDOC_FLAGS` environment variable. Did you mean `RUSTDOCFLAGS`?" - )?; - } - } - } - config.validate_term_config()?; - - let target_data = RustcTargetData::new(ws, &build_config.requested_kinds)?; - - let all_packages = &Packages::All; - let rustdoc_scrape_examples = &config.cli_unstable().rustdoc_scrape_examples; - let need_reverse_dependencies = rustdoc_scrape_examples.is_some(); - let full_specs = if need_reverse_dependencies { - all_packages - } else { - spec - }; - - let resolve_specs = full_specs.to_package_id_specs(ws)?; - let has_dev_units = if filter.need_dev_deps(build_config.mode) || need_reverse_dependencies { - HasDevUnits::Yes - } else { - HasDevUnits::No - }; - let resolve = ops::resolve_ws_with_opts( - ws, - &target_data, - &build_config.requested_kinds, - cli_features, - &resolve_specs, - has_dev_units, - crate::core::resolver::features::ForceAllTargets::No, - )?; - let WorkspaceResolve { - mut pkg_set, - workspace_resolve, - targeted_resolve: resolve, - resolved_features, - } = resolve; - - let std_resolve_features = if let Some(crates) = &config.cli_unstable().build_std { - let (std_package_set, std_resolve, std_features) = - standard_lib::resolve_std(ws, &target_data, &build_config, crates)?; - pkg_set.add_set(std_package_set); - Some((std_resolve, std_features)) - } else { - None - }; - - // Find the packages in the resolver that the user wants to build (those - // passed in with `-p` or the defaults from the workspace), and convert - // Vec to a Vec. - let specs = if need_reverse_dependencies { - spec.to_package_id_specs(ws)? - } else { - resolve_specs.clone() - }; - let to_build_ids = resolve.specs_to_ids(&specs)?; - // Now get the `Package` for each `PackageId`. This may trigger a download - // if the user specified `-p` for a dependency that is not downloaded. - // Dependencies will be downloaded during build_unit_dependencies. 
- let mut to_builds = pkg_set.get_many(to_build_ids)?; - - // The ordering here affects some error messages coming out of cargo, so - // let's be test and CLI friendly by always printing in the same order if - // there's an error. - to_builds.sort_by_key(|p| p.package_id()); - - for pkg in to_builds.iter() { - pkg.manifest().print_teapot(config); - - if build_config.mode.is_any_test() - && !ws.is_member(pkg) - && pkg.dependencies().iter().any(|dep| !dep.is_transitive()) - { - anyhow::bail!( - "package `{}` cannot be tested because it requires dev-dependencies \ - and is not a member of the workspace", - pkg.name() - ); - } - } - - let (extra_args, extra_args_name) = match (target_rustc_args, target_rustdoc_args) { - (&Some(ref args), _) => (Some(args.clone()), "rustc"), - (_, &Some(ref args)) => (Some(args.clone()), "rustdoc"), - _ => (None, ""), - }; - - if extra_args.is_some() && to_builds.len() != 1 { - panic!( - "`{}` should not accept multiple `-p` flags", - extra_args_name - ); - } - - let profiles = Profiles::new(ws, build_config.requested_profile)?; - profiles.validate_packages( - ws.profiles(), - &mut config.shell(), - workspace_resolve.as_ref().unwrap_or(&resolve), - )?; - - // If `--target` has not been specified, then the unit graph is built - // assuming `--target $HOST` was specified. See - // `rebuild_unit_graph_shared` for more on why this is done. - let explicit_host_kind = CompileKind::Target(CompileTarget::new(&target_data.rustc.host)?); - let explicit_host_kinds: Vec<_> = build_config - .requested_kinds - .iter() - .map(|kind| match kind { - CompileKind::Host => explicit_host_kind, - CompileKind::Target(t) => CompileKind::Target(*t), - }) - .collect(); - - // Passing `build_config.requested_kinds` instead of - // `explicit_host_kinds` here so that `generate_targets` can do - // its own special handling of `CompileKind::Host`. It will - // internally replace the host kind by the `explicit_host_kind` - // before setting as a unit. - let mut units = generate_targets( - ws, - &to_builds, - filter, - &build_config.requested_kinds, - explicit_host_kind, - build_config.mode, - &resolve, - &workspace_resolve, - &resolved_features, - &pkg_set, - &profiles, - interner, - )?; - - if let Some(args) = target_rustc_crate_types { - override_rustc_crate_types(&mut units, args, interner)?; - } - - let mut scrape_units = match rustdoc_scrape_examples { - Some(arg) => { - let filter = match arg.as_str() { - "all" => CompileFilter::new_all_targets(), - "examples" => CompileFilter::new( - LibRule::False, - FilterRule::none(), - FilterRule::none(), - FilterRule::All, - FilterRule::none(), - ), - _ => { - bail!( - r#"-Z rustdoc-scrape-examples must take "all" or "examples" as an argument"# - ) - } - }; - let to_build_ids = resolve.specs_to_ids(&resolve_specs)?; - let to_builds = pkg_set.get_many(to_build_ids)?; - let mode = CompileMode::Docscrape; - - generate_targets( - ws, - &to_builds, - &filter, - &build_config.requested_kinds, - explicit_host_kind, - mode, - &resolve, - &workspace_resolve, - &resolved_features, - &pkg_set, - &profiles, - interner, - )? 
- .into_iter() - // Proc macros should not be scraped for functions, since they only export macros - .filter(|unit| !unit.target.proc_macro()) - .collect::>() - } - None => Vec::new(), - }; - - let std_roots = if let Some(crates) = standard_lib::std_crates(config, Some(&units)) { - let (std_resolve, std_features) = std_resolve_features.as_ref().unwrap(); - standard_lib::generate_std_roots( - &crates, - std_resolve, - std_features, - &explicit_host_kinds, - &pkg_set, - interner, - &profiles, - )? - } else { - Default::default() - }; - - let mut unit_graph = build_unit_dependencies( - ws, - &pkg_set, - &resolve, - &resolved_features, - std_resolve_features.as_ref(), - &units, - &scrape_units, - &std_roots, - build_config.mode, - &target_data, - &profiles, - interner, - )?; - - // TODO: In theory, Cargo should also dedupe the roots, but I'm uncertain - // what heuristics to use in that case. - if build_config.mode == (CompileMode::Doc { deps: true }) { - remove_duplicate_doc(build_config, &units, &mut unit_graph); - } - - if build_config - .requested_kinds - .iter() - .any(CompileKind::is_host) - { - // Rebuild the unit graph, replacing the explicit host targets with - // CompileKind::Host, merging any dependencies shared with build - // dependencies. - let new_graph = rebuild_unit_graph_shared( - interner, - unit_graph, - &units, - &scrape_units, - explicit_host_kind, - ); - // This would be nicer with destructuring assignment. - units = new_graph.0; - scrape_units = new_graph.1; - unit_graph = new_graph.2; - } - - let mut extra_compiler_args = HashMap::new(); - if let Some(args) = extra_args { - if units.len() != 1 { - anyhow::bail!( - "extra arguments to `{}` can only be passed to one \ - target, consider filtering\nthe package by passing, \ - e.g., `--lib` or `--bin NAME` to specify a single target", - extra_args_name - ); - } - extra_compiler_args.insert(units[0].clone(), args); - } - - for unit in &units { - if unit.mode.is_doc() || unit.mode.is_doc_test() { - let mut extra_args = local_rustdoc_args.clone(); - - // Add `--document-private-items` rustdoc flag if requested or if - // the target is a binary. Binary crates get their private items - // documented by default. - if rustdoc_document_private_items || unit.target.is_bin() { - let mut args = extra_args.take().unwrap_or_default(); - args.push("--document-private-items".into()); - if unit.target.is_bin() { - // This warning only makes sense if it's possible to document private items - // sometimes and ignore them at other times. But cargo consistently passes - // `--document-private-items`, so the warning isn't useful. 
- args.push("-Arustdoc::private-intra-doc-links".into()); - } - extra_args = Some(args); - } - - if let Some(args) = extra_args { - extra_compiler_args - .entry(unit.clone()) - .or_default() - .extend(args); - } - } - } - - if honor_rust_version { - // Remove any pre-release identifiers for easier comparison - let current_version = &target_data.rustc.version; - let untagged_version = semver::Version::new( - current_version.major, - current_version.minor, - current_version.patch, - ); - - for unit in unit_graph.keys() { - let version = match unit.pkg.rust_version() { - Some(v) => v, - None => continue, - }; - - let req = semver::VersionReq::parse(version).unwrap(); - if req.matches(&untagged_version) { - continue; - } - - let guidance = if ws.is_ephemeral() { - if ws.ignore_lock() { - "Try re-running cargo install with `--locked`".to_string() - } else { - String::new() - } - } else if !unit.is_local() { - format!( - "Either upgrade to rustc {} or newer, or use\n\ - cargo update -p {}@{} --precise ver\n\ - where `ver` is the latest version of `{}` supporting rustc {}", - version, - unit.pkg.name(), - unit.pkg.version(), - unit.pkg.name(), - current_version, - ) - } else { - String::new() - }; - - anyhow::bail!( - "package `{}` cannot be built because it requires rustc {} or newer, \ - while the currently active rustc version is {}\n{}", - unit.pkg, - version, - current_version, - guidance, - ); - } - } - - let bcx = BuildContext::new( - ws, - pkg_set, - build_config, - profiles, - extra_compiler_args, - target_data, - units, - unit_graph, - scrape_units, - )?; - - Ok(bcx) -} - -impl FilterRule { - pub fn new(targets: Vec, all: bool) -> FilterRule { - if all { - FilterRule::All - } else { - FilterRule::Just(targets) - } - } - - pub fn none() -> FilterRule { - FilterRule::Just(Vec::new()) - } - - fn matches(&self, target: &Target) -> bool { - match *self { - FilterRule::All => true, - FilterRule::Just(ref targets) => targets.iter().any(|x| *x == target.name()), - } - } - - fn is_specific(&self) -> bool { - match *self { - FilterRule::All => true, - FilterRule::Just(ref targets) => !targets.is_empty(), - } - } - - pub fn try_collect(&self) -> Option> { - match *self { - FilterRule::All => None, - FilterRule::Just(ref targets) => Some(targets.clone()), - } - } - - pub(crate) fn contains_glob_patterns(&self) -> bool { - match self { - FilterRule::All => false, - FilterRule::Just(targets) => targets.iter().any(is_glob_pattern), - } - } -} - -impl CompileFilter { - /// Constructs a filter from raw command line arguments. - pub fn from_raw_arguments( - lib_only: bool, - bins: Vec, - all_bins: bool, - tsts: Vec, - all_tsts: bool, - exms: Vec, - all_exms: bool, - bens: Vec, - all_bens: bool, - all_targets: bool, - ) -> CompileFilter { - if all_targets { - return CompileFilter::new_all_targets(); - } - let rule_lib = if lib_only { - LibRule::True - } else { - LibRule::False - }; - let rule_bins = FilterRule::new(bins, all_bins); - let rule_tsts = FilterRule::new(tsts, all_tsts); - let rule_exms = FilterRule::new(exms, all_exms); - let rule_bens = FilterRule::new(bens, all_bens); - - CompileFilter::new(rule_lib, rule_bins, rule_tsts, rule_exms, rule_bens) - } - - /// Constructs a filter from underlying primitives. 
- pub fn new( - rule_lib: LibRule, - rule_bins: FilterRule, - rule_tsts: FilterRule, - rule_exms: FilterRule, - rule_bens: FilterRule, - ) -> CompileFilter { - if rule_lib == LibRule::True - || rule_bins.is_specific() - || rule_tsts.is_specific() - || rule_exms.is_specific() - || rule_bens.is_specific() - { - CompileFilter::Only { - all_targets: false, - lib: rule_lib, - bins: rule_bins, - examples: rule_exms, - benches: rule_bens, - tests: rule_tsts, - } - } else { - CompileFilter::Default { - required_features_filterable: true, - } - } - } - - /// Constructs a filter that includes all targets. - pub fn new_all_targets() -> CompileFilter { - CompileFilter::Only { - all_targets: true, - lib: LibRule::Default, - bins: FilterRule::All, - examples: FilterRule::All, - benches: FilterRule::All, - tests: FilterRule::All, - } - } - - /// Constructs a filter that includes all test targets. - /// - /// Being different from the behavior of [`CompileFilter::Default`], this - /// function only recognizes test targets, which means cargo might compile - /// all targets with `tested` flag on, whereas [`CompileFilter::Default`] - /// may include additional example targets to ensure they can be compiled. - /// - /// Note that the actual behavior is subject to `filter_default_targets` - /// and `generate_targets` though. - pub fn all_test_targets() -> Self { - Self::Only { - all_targets: false, - lib: LibRule::Default, - bins: FilterRule::none(), - examples: FilterRule::none(), - tests: FilterRule::All, - benches: FilterRule::none(), - } - } - - /// Constructs a filter that includes lib target only. - pub fn lib_only() -> Self { - Self::Only { - all_targets: false, - lib: LibRule::True, - bins: FilterRule::none(), - examples: FilterRule::none(), - tests: FilterRule::none(), - benches: FilterRule::none(), - } - } - - /// Constructs a filter that includes the given binary. No more. No less. - pub fn single_bin(bin: String) -> Self { - Self::Only { - all_targets: false, - lib: LibRule::False, - bins: FilterRule::new(vec![bin], false), - examples: FilterRule::none(), - tests: FilterRule::none(), - benches: FilterRule::none(), - } - } - - /// Indicates if Cargo needs to build any dev dependency. - pub fn need_dev_deps(&self, mode: CompileMode) -> bool { - match mode { - CompileMode::Test | CompileMode::Doctest | CompileMode::Bench => true, - CompileMode::Check { test: true } => true, - CompileMode::Build - | CompileMode::Doc { .. } - | CompileMode::Docscrape - | CompileMode::Check { test: false } => match *self { - CompileFilter::Default { .. } => false, - CompileFilter::Only { - ref examples, - ref tests, - ref benches, - .. - } => examples.is_specific() || tests.is_specific() || benches.is_specific(), - }, - CompileMode::RunCustomBuild => panic!("Invalid mode"), - } - } - - /// Selects targets for "cargo run". for logic to select targets for other - /// subcommands, see `generate_targets` and `filter_default_targets`. - pub fn target_run(&self, target: &Target) -> bool { - match *self { - CompileFilter::Default { .. } => true, - CompileFilter::Only { - ref lib, - ref bins, - ref examples, - ref tests, - ref benches, - .. - } => { - let rule = match *target.kind() { - TargetKind::Bin => bins, - TargetKind::Test => tests, - TargetKind::Bench => benches, - TargetKind::ExampleBin | TargetKind::ExampleLib(..) => examples, - TargetKind::Lib(..) 
=> { - return match *lib { - LibRule::True => true, - LibRule::Default => true, - LibRule::False => false, - }; - } - TargetKind::CustomBuild => return false, - }; - rule.matches(target) - } - } - } - - pub fn is_specific(&self) -> bool { - match *self { - CompileFilter::Default { .. } => false, - CompileFilter::Only { .. } => true, - } - } - - pub fn is_all_targets(&self) -> bool { - matches!( - *self, - CompileFilter::Only { - all_targets: true, - .. - } - ) - } - - pub(crate) fn contains_glob_patterns(&self) -> bool { - match self { - CompileFilter::Default { .. } => false, - CompileFilter::Only { - bins, - examples, - tests, - benches, - .. - } => { - bins.contains_glob_patterns() - || examples.contains_glob_patterns() - || tests.contains_glob_patterns() - || benches.contains_glob_patterns() - } - } - } -} - -/// A proposed target. -/// -/// Proposed targets are later filtered into actual `Unit`s based on whether or -/// not the target requires its features to be present. -#[derive(Debug)] -struct Proposal<'a> { - pkg: &'a Package, - target: &'a Target, - /// Indicates whether or not all required features *must* be present. If - /// false, and the features are not available, then it will be silently - /// skipped. Generally, targets specified by name (`--bin foo`) are - /// required, all others can be silently skipped if features are missing. - requires_features: bool, - mode: CompileMode, -} - -/// Generates all the base targets for the packages the user has requested to -/// compile. Dependencies for these targets are computed later in `unit_dependencies`. -fn generate_targets( - ws: &Workspace<'_>, - packages: &[&Package], - filter: &CompileFilter, - requested_kinds: &[CompileKind], - explicit_host_kind: CompileKind, - mode: CompileMode, - resolve: &Resolve, - workspace_resolve: &Option, - resolved_features: &features::ResolvedFeatures, - package_set: &PackageSet<'_>, - profiles: &Profiles, - interner: &UnitInterner, -) -> CargoResult> { - let config = ws.config(); - // Helper for creating a list of `Unit` structures - let new_unit = |units: &mut HashSet, - pkg: &Package, - target: &Target, - initial_target_mode: CompileMode| { - // Custom build units are added in `build_unit_dependencies`. - assert!(!target.is_custom_build()); - let target_mode = match initial_target_mode { - CompileMode::Test => { - if target.is_example() && !filter.is_specific() && !target.tested() { - // Examples are included as regular binaries to verify - // that they compile. - CompileMode::Build - } else { - CompileMode::Test - } - } - CompileMode::Build => match *target.kind() { - TargetKind::Test => CompileMode::Test, - TargetKind::Bench => CompileMode::Bench, - _ => CompileMode::Build, - }, - // `CompileMode::Bench` is only used to inform `filter_default_targets` - // which command is being used (`cargo bench`). Afterwards, tests - // and benches are treated identically. Switching the mode allows - // de-duplication of units that are essentially identical. For - // example, `cargo build --all-targets --release` creates the units - // (lib profile:bench, mode:test) and (lib profile:bench, mode:bench) - // and since these are the same, we want them to be de-duplicated in - // `unit_dependencies`. - CompileMode::Bench => CompileMode::Test, - _ => initial_target_mode, - }; - - let is_local = pkg.package_id().source_id().is_path(); - - // No need to worry about build-dependencies, roots are never build dependencies. 
- let features_for = FeaturesFor::from_for_host(target.proc_macro()); - let features = resolved_features.activated_features(pkg.package_id(), features_for); - - // If `--target` has not been specified, then the unit - // graph is built almost like if `--target $HOST` was - // specified. See `rebuild_unit_graph_shared` for more on - // why this is done. However, if the package has its own - // `package.target` key, then this gets used instead of - // `$HOST` - let explicit_kinds = if let Some(k) = pkg.manifest().forced_kind() { - vec![k] - } else { - requested_kinds - .iter() - .map(|kind| match kind { - CompileKind::Host => { - pkg.manifest().default_kind().unwrap_or(explicit_host_kind) - } - CompileKind::Target(t) => CompileKind::Target(*t), - }) - .collect() - }; - - for kind in explicit_kinds.iter() { - let unit_for = if initial_target_mode.is_any_test() { - // NOTE: the `UnitFor` here is subtle. If you have a profile - // with `panic` set, the `panic` flag is cleared for - // tests/benchmarks and their dependencies. If this - // was `normal`, then the lib would get compiled three - // times (once with panic, once without, and once with - // `--test`). - // - // This would cause a problem for doc tests, which would fail - // because `rustdoc` would attempt to link with both libraries - // at the same time. Also, it's probably not important (or - // even desirable?) for rustdoc to link with a lib with - // `panic` set. - // - // As a consequence, Examples and Binaries get compiled - // without `panic` set. This probably isn't a bad deal. - // - // Forcing the lib to be compiled three times during `cargo - // test` is probably also not desirable. - UnitFor::new_test(config, *kind) - } else if target.for_host() { - // Proc macro / plugin should not have `panic` set. - UnitFor::new_compiler(*kind) - } else { - UnitFor::new_normal(*kind) - }; - let profile = profiles.get_profile( - pkg.package_id(), - ws.is_member(pkg), - is_local, - unit_for, - *kind, - ); - let unit = interner.intern( - pkg, - target, - profile, - kind.for_target(target), - target_mode, - features.clone(), - /*is_std*/ false, - /*dep_hash*/ 0, - IsArtifact::No, - ); - units.insert(unit); - } - }; - - // Create a list of proposed targets. - let mut proposals: Vec> = Vec::new(); - - match *filter { - CompileFilter::Default { - required_features_filterable, - } => { - for pkg in packages { - let default = filter_default_targets(pkg.targets(), mode); - proposals.extend(default.into_iter().map(|target| Proposal { - pkg, - target, - requires_features: !required_features_filterable, - mode, - })); - if mode == CompileMode::Test { - if let Some(t) = pkg - .targets() - .iter() - .find(|t| t.is_lib() && t.doctested() && t.doctestable()) - { - proposals.push(Proposal { - pkg, - target: t, - requires_features: false, - mode: CompileMode::Doctest, - }); - } - } - } - } - CompileFilter::Only { - all_targets, - ref lib, - ref bins, - ref examples, - ref tests, - ref benches, - } => { - if *lib != LibRule::False { - let mut libs = Vec::new(); - for proposal in filter_targets(packages, Target::is_lib, false, mode) { - let Proposal { target, pkg, .. 
} = proposal; - if mode.is_doc_test() && !target.doctestable() { - let types = target.rustc_crate_types(); - let types_str: Vec<&str> = types.iter().map(|t| t.as_str()).collect(); - ws.config().shell().warn(format!( - "doc tests are not supported for crate type(s) `{}` in package `{}`", - types_str.join(", "), - pkg.name() - ))?; - } else { - libs.push(proposal) - } - } - if !all_targets && libs.is_empty() && *lib == LibRule::True { - let names = packages.iter().map(|pkg| pkg.name()).collect::>(); - if names.len() == 1 { - anyhow::bail!("no library targets found in package `{}`", names[0]); - } else { - anyhow::bail!("no library targets found in packages: {}", names.join(", ")); - } - } - proposals.extend(libs); - } - - // If `--tests` was specified, add all targets that would be - // generated by `cargo test`. - let test_filter = match tests { - FilterRule::All => Target::tested, - FilterRule::Just(_) => Target::is_test, - }; - let test_mode = match mode { - CompileMode::Build => CompileMode::Test, - CompileMode::Check { .. } => CompileMode::Check { test: true }, - _ => mode, - }; - // If `--benches` was specified, add all targets that would be - // generated by `cargo bench`. - let bench_filter = match benches { - FilterRule::All => Target::benched, - FilterRule::Just(_) => Target::is_bench, - }; - let bench_mode = match mode { - CompileMode::Build => CompileMode::Bench, - CompileMode::Check { .. } => CompileMode::Check { test: true }, - _ => mode, - }; - - proposals.extend(list_rule_targets( - packages, - bins, - "bin", - Target::is_bin, - mode, - )?); - proposals.extend(list_rule_targets( - packages, - examples, - "example", - Target::is_example, - mode, - )?); - proposals.extend(list_rule_targets( - packages, - tests, - "test", - test_filter, - test_mode, - )?); - proposals.extend(list_rule_targets( - packages, - benches, - "bench", - bench_filter, - bench_mode, - )?); - } - } - - // Only include targets that are libraries or have all required - // features available. - // - // `features_map` is a map of &Package -> enabled_features - // It is computed by the set of enabled features for the package plus - // every enabled feature of every enabled dependency. - let mut features_map = HashMap::new(); - // This needs to be a set to de-duplicate units. Due to the way the - // targets are filtered, it is possible to have duplicate proposals for - // the same thing. 
- let mut units = HashSet::new(); - for Proposal { - pkg, - target, - requires_features, - mode, - } in proposals - { - let unavailable_features = match target.required_features() { - Some(rf) => { - validate_required_features( - workspace_resolve, - target.name(), - rf, - pkg.summary(), - &mut config.shell(), - )?; - - let features = features_map.entry(pkg).or_insert_with(|| { - resolve_all_features(resolve, resolved_features, package_set, pkg.package_id()) - }); - rf.iter().filter(|f| !features.contains(*f)).collect() - } - None => Vec::new(), - }; - if target.is_lib() || unavailable_features.is_empty() { - new_unit(&mut units, pkg, target, mode); - } else if requires_features { - let required_features = target.required_features().unwrap(); - let quoted_required_features: Vec = required_features - .iter() - .map(|s| format!("`{}`", s)) - .collect(); - anyhow::bail!( - "target `{}` in package `{}` requires the features: {}\n\ - Consider enabling them by passing, e.g., `--features=\"{}\"`", - target.name(), - pkg.name(), - quoted_required_features.join(", "), - required_features.join(" ") - ); - } - // else, silently skip target. - } - let mut units: Vec<_> = units.into_iter().collect(); - unmatched_target_filters(&units, filter, &mut ws.config().shell())?; - - // Keep the roots in a consistent order, which helps with checking test output. - units.sort_unstable(); - Ok(units) -} - -/// Checks if the unit list is empty and the user has passed any combination of -/// --tests, --examples, --benches or --bins, and we didn't match on any targets. -/// We want to emit a warning to make sure the user knows that this run is a no-op, -/// and their code remains unchecked despite cargo not returning any errors -fn unmatched_target_filters( - units: &[Unit], - filter: &CompileFilter, - shell: &mut Shell, -) -> CargoResult<()> { - if let CompileFilter::Only { - all_targets, - lib: _, - ref bins, - ref examples, - ref tests, - ref benches, - } = *filter - { - if units.is_empty() { - let mut filters = String::new(); - let mut miss_count = 0; - - let mut append = |t: &FilterRule, s| { - if let FilterRule::All = *t { - miss_count += 1; - filters.push_str(s); - } - }; - - if all_targets { - filters.push_str(" `all-targets`"); - } else { - append(bins, " `bins`,"); - append(tests, " `tests`,"); - append(examples, " `examples`,"); - append(benches, " `benches`,"); - filters.pop(); - } - - return shell.warn(format!( - "Target {}{} specified, but no targets matched. This is a no-op", - if miss_count > 1 { "filters" } else { "filter" }, - filters, - )); - } - } - - Ok(()) -} - -/// Warns if a target's required-features references a feature that doesn't exist. -/// -/// This is a warning because historically this was not validated, and it -/// would cause too much breakage to make it an error. -fn validate_required_features( - resolve: &Option, - target_name: &str, - required_features: &[String], - summary: &Summary, - shell: &mut Shell, -) -> CargoResult<()> { - let resolve = match resolve { - None => return Ok(()), - Some(resolve) => resolve, - }; - - for feature in required_features { - let fv = FeatureValue::new(feature.into()); - match &fv { - FeatureValue::Feature(f) => { - if !summary.features().contains_key(f) { - shell.warn(format!( - "invalid feature `{}` in required-features of target `{}`: \ - `{}` is not present in [features] section", - fv, target_name, fv - ))?; - } - } - FeatureValue::Dep { .. 
} => { - anyhow::bail!( - "invalid feature `{}` in required-features of target `{}`: \ - `dep:` prefixed feature values are not allowed in required-features", - fv, - target_name - ); - } - FeatureValue::DepFeature { weak: true, .. } => { - anyhow::bail!( - "invalid feature `{}` in required-features of target `{}`: \ - optional dependency with `?` is not allowed in required-features", - fv, - target_name - ); - } - // Handling of dependent_crate/dependent_crate_feature syntax - FeatureValue::DepFeature { - dep_name, - dep_feature, - weak: false, - } => { - match resolve - .deps(summary.package_id()) - .find(|(_dep_id, deps)| deps.iter().any(|dep| dep.name_in_toml() == *dep_name)) - { - Some((dep_id, _deps)) => { - let dep_summary = resolve.summary(dep_id); - if !dep_summary.features().contains_key(dep_feature) - && !dep_summary - .dependencies() - .iter() - .any(|dep| dep.name_in_toml() == *dep_feature && dep.is_optional()) - { - shell.warn(format!( - "invalid feature `{}` in required-features of target `{}`: \ - feature `{}` does not exist in package `{}`", - fv, target_name, dep_feature, dep_id - ))?; - } - } - None => { - shell.warn(format!( - "invalid feature `{}` in required-features of target `{}`: \ - dependency `{}` does not exist", - fv, target_name, dep_name - ))?; - } - } - } - } - } - Ok(()) -} - -/// Gets all of the features enabled for a package, plus its dependencies' -/// features. -/// -/// Dependencies are added as `dep_name/feat_name` because `required-features` -/// wants to support that syntax. -pub fn resolve_all_features( - resolve_with_overrides: &Resolve, - resolved_features: &features::ResolvedFeatures, - package_set: &PackageSet<'_>, - package_id: PackageId, -) -> HashSet { - let mut features: HashSet = resolved_features - .activated_features(package_id, FeaturesFor::NormalOrDevOrArtifactTarget(None)) - .iter() - .map(|s| s.to_string()) - .collect(); - - // Include features enabled for use by dependencies so targets can also use them with the - // required-features field when deciding whether to be built or skipped. - for (dep_id, deps) in resolve_with_overrides.deps(package_id) { - let is_proc_macro = package_set - .get_one(dep_id) - .expect("packages downloaded") - .proc_macro(); - for dep in deps { - let features_for = FeaturesFor::from_for_host(is_proc_macro || dep.is_build()); - for feature in resolved_features - .activated_features_unverified(dep_id, features_for) - .unwrap_or_default() - { - features.insert(format!("{}/{}", dep.name_in_toml(), feature)); - } - } - } - - features -} - -/// Given a list of all targets for a package, filters out only the targets -/// that are automatically included when the user doesn't specify any targets. -fn filter_default_targets(targets: &[Target], mode: CompileMode) -> Vec<&Target> { - match mode { - CompileMode::Bench => targets.iter().filter(|t| t.benched()).collect(), - CompileMode::Test => targets - .iter() - .filter(|t| t.tested() || t.is_example()) - .collect(), - CompileMode::Build | CompileMode::Check { .. } => targets - .iter() - .filter(|t| t.is_bin() || t.is_lib()) - .collect(), - CompileMode::Doc { .. } => { - // `doc` does lib and bins (bin with same name as lib is skipped). 
-            targets
-                .iter()
-                .filter(|t| {
-                    t.documented()
-                        && (!t.is_bin()
-                            || !targets.iter().any(|l| l.is_lib() && l.name() == t.name()))
-                })
-                .collect()
-        }
-        CompileMode::Doctest | CompileMode::Docscrape | CompileMode::RunCustomBuild => {
-            panic!("Invalid mode {:?}", mode)
-        }
-    }
-}
-
-/// Returns a list of proposed targets based on command-line target selection flags.
-fn list_rule_targets<'a>(
-    packages: &[&'a Package],
-    rule: &FilterRule,
-    target_desc: &'static str,
-    is_expected_kind: fn(&Target) -> bool,
-    mode: CompileMode,
-) -> CargoResult<Vec<Proposal<'a>>> {
-    let mut proposals = Vec::new();
-    match rule {
-        FilterRule::All => {
-            proposals.extend(filter_targets(packages, is_expected_kind, false, mode))
-        }
-        FilterRule::Just(names) => {
-            for name in names {
-                proposals.extend(find_named_targets(
-                    packages,
-                    name,
-                    target_desc,
-                    is_expected_kind,
-                    mode,
-                )?);
-            }
-        }
-    }
-    Ok(proposals)
-}
-
-/// Finds the targets for a specifically named target.
-fn find_named_targets<'a>(
-    packages: &[&'a Package],
-    target_name: &str,
-    target_desc: &'static str,
-    is_expected_kind: fn(&Target) -> bool,
-    mode: CompileMode,
-) -> CargoResult<Vec<Proposal<'a>>> {
-    let is_glob = is_glob_pattern(target_name);
-    let proposals = if is_glob {
-        let pattern = build_glob(target_name)?;
-        let filter = |t: &Target| is_expected_kind(t) && pattern.matches(t.name());
-        filter_targets(packages, filter, true, mode)
-    } else {
-        let filter = |t: &Target| t.name() == target_name && is_expected_kind(t);
-        filter_targets(packages, filter, true, mode)
-    };
-
-    if proposals.is_empty() {
-        let targets = packages
-            .iter()
-            .flat_map(|pkg| {
-                pkg.targets()
-                    .iter()
-                    .filter(|target| is_expected_kind(target))
-            })
-            .collect::<Vec<_>>();
-        let suggestion = closest_msg(target_name, targets.iter(), |t| t.name());
-        if !suggestion.is_empty() {
-            anyhow::bail!(
-                "no {} target {} `{}`{}",
-                target_desc,
-                if is_glob { "matches pattern" } else { "named" },
-                target_name,
-                suggestion
-            );
-        } else {
-            let mut msg = String::new();
-            writeln!(
-                msg,
-                "no {} target {} `{}`.",
-                target_desc,
-                if is_glob { "matches pattern" } else { "named" },
-                target_name,
-            )?;
-            if !targets.is_empty() {
-                writeln!(msg, "Available {} targets:", target_desc)?;
-                for target in targets {
-                    writeln!(msg, "    {}", target.name())?;
-                }
-            }
-            anyhow::bail!(msg);
-        }
-    }
-    Ok(proposals)
-}
-
-fn filter_targets<'a>(
-    packages: &[&'a Package],
-    predicate: impl Fn(&Target) -> bool,
-    requires_features: bool,
-    mode: CompileMode,
-) -> Vec<Proposal<'a>> {
-    let mut proposals = Vec::new();
-    for pkg in packages {
-        for target in pkg.targets().iter().filter(|t| predicate(t)) {
-            proposals.push(Proposal {
-                pkg,
-                target,
-                requires_features,
-                mode,
-            });
-        }
-    }
-    proposals
-}
-
-/// This is used to rebuild the unit graph, sharing host dependencies if possible.
-///
-/// This will translate any unit's `CompileKind::Target(host)` to
-/// `CompileKind::Host` if the kind is equal to `to_host`. This also handles
-/// generating the unit `dep_hash`, and merging shared units if possible.
-///
-/// This is necessary because if normal dependencies used `CompileKind::Host`,
-/// there would be no way to distinguish those units from build-dependency
-/// units. This can cause a problem if a shared normal/build dependency needs
-/// to link to another dependency whose features differ based on whether or
-/// not it is a normal or build dependency.
If both units used -/// `CompileKind::Host`, then they would end up being identical, causing a -/// collision in the `UnitGraph`, and Cargo would end up randomly choosing one -/// value or the other. -/// -/// The solution is to keep normal and build dependencies separate when -/// building the unit graph, and then run this second pass which will try to -/// combine shared dependencies safely. By adding a hash of the dependencies -/// to the `Unit`, this allows the `CompileKind` to be changed back to `Host` -/// without fear of an unwanted collision. -fn rebuild_unit_graph_shared( - interner: &UnitInterner, - unit_graph: UnitGraph, - roots: &[Unit], - scrape_units: &[Unit], - to_host: CompileKind, -) -> (Vec, Vec, UnitGraph) { - let mut result = UnitGraph::new(); - // Map of the old unit to the new unit, used to avoid recursing into units - // that have already been computed to improve performance. - let mut memo = HashMap::new(); - let new_roots = roots - .iter() - .map(|root| { - traverse_and_share(interner, &mut memo, &mut result, &unit_graph, root, to_host) - }) - .collect(); - let new_scrape_units = scrape_units - .iter() - .map(|unit| memo.get(unit).unwrap().clone()) - .collect(); - (new_roots, new_scrape_units, result) -} - -/// Recursive function for rebuilding the graph. -/// -/// This walks `unit_graph`, starting at the given `unit`. It inserts the new -/// units into `new_graph`, and returns a new updated version of the given -/// unit (`dep_hash` is filled in, and `kind` switched if necessary). -fn traverse_and_share( - interner: &UnitInterner, - memo: &mut HashMap, - new_graph: &mut UnitGraph, - unit_graph: &UnitGraph, - unit: &Unit, - to_host: CompileKind, -) -> Unit { - if let Some(new_unit) = memo.get(unit) { - // Already computed, no need to recompute. - return new_unit.clone(); - } - let mut dep_hash = StableHasher::new(); - let new_deps: Vec<_> = unit_graph[unit] - .iter() - .map(|dep| { - let new_dep_unit = - traverse_and_share(interner, memo, new_graph, unit_graph, &dep.unit, to_host); - new_dep_unit.hash(&mut dep_hash); - UnitDep { - unit: new_dep_unit, - ..dep.clone() - } - }) - .collect(); - let new_dep_hash = dep_hash.finish(); - let new_kind = if unit.kind == to_host { - CompileKind::Host - } else { - unit.kind - }; - let new_unit = interner.intern( - &unit.pkg, - &unit.target, - unit.profile.clone(), - new_kind, - unit.mode, - unit.features.clone(), - unit.is_std, - new_dep_hash, - unit.artifact, - ); - assert!(memo.insert(unit.clone(), new_unit.clone()).is_none()); - new_graph.entry(new_unit.clone()).or_insert(new_deps); - new_unit -} - -/// Build `glob::Pattern` with informative context. -fn build_glob(pat: &str) -> CargoResult { - glob::Pattern::new(pat).with_context(|| format!("cannot build glob pattern from `{}`", pat)) -} - -/// Emits "package not found" error. -/// -/// > This function should be used only in package selection processes such like -/// `Packages::to_package_id_specs` and `Packages::get_packages`. -fn emit_package_not_found( - ws: &Workspace<'_>, - opt_names: BTreeSet<&str>, - opt_out: bool, -) -> CargoResult<()> { - if !opt_names.is_empty() { - anyhow::bail!( - "{}package(s) `{}` not found in workspace `{}`", - if opt_out { "excluded " } else { "" }, - opt_names.into_iter().collect::>().join(", "), - ws.root().display(), - ) - } - Ok(()) -} - -/// Emits "glob pattern not found" error. -/// -/// > This function should be used only in package selection processes such like -/// `Packages::to_package_id_specs` and `Packages::get_packages`. 
-fn emit_pattern_not_found(
-    ws: &Workspace<'_>,
-    opt_patterns: Vec<(glob::Pattern, bool)>,
-    opt_out: bool,
-) -> CargoResult<()> {
-    let not_matched = opt_patterns
-        .iter()
-        .filter(|(_, matched)| !*matched)
-        .map(|(pat, _)| pat.as_str())
-        .collect::<Vec<_>>();
-    if !not_matched.is_empty() {
-        anyhow::bail!(
-            "{}package pattern(s) `{}` not found in workspace `{}`",
-            if opt_out { "excluded " } else { "" },
-            not_matched.join(", "),
-            ws.root().display(),
-        )
-    }
-    Ok(())
-}
-
-/// Checks whether a package matches any of a list of glob patterns generated
-/// from `opt_patterns_and_names`.
-///
-/// > This function should be used only in package selection processes such like
-/// `Packages::to_package_id_specs` and `Packages::get_packages`.
-fn match_patterns(pkg: &Package, patterns: &mut Vec<(glob::Pattern, bool)>) -> bool {
-    patterns.iter_mut().any(|(m, matched)| {
-        let is_matched = m.matches(pkg.name().as_str());
-        *matched |= is_matched;
-        is_matched
-    })
-}
-
-/// Given a list opt-in or opt-out package selection strings, generates two
-/// collections that represent glob patterns and package names respectively.
-///
-/// > This function should be used only in package selection processes such like
-/// `Packages::to_package_id_specs` and `Packages::get_packages`.
-fn opt_patterns_and_names(
-    opt: &[String],
-) -> CargoResult<(Vec<(glob::Pattern, bool)>, BTreeSet<&str>)> {
-    let mut opt_patterns = Vec::new();
-    let mut opt_names = BTreeSet::new();
-    for x in opt.iter() {
-        if is_glob_pattern(x) {
-            opt_patterns.push((build_glob(x)?, false));
-        } else {
-            opt_names.insert(String::as_str(x));
-        }
-    }
-    Ok((opt_patterns, opt_names))
-}
-
-/// Removes duplicate CompileMode::Doc units that would cause problems with
-/// filename collisions.
-///
-/// Rustdoc only separates units by crate name in the file directory
-/// structure. If any two units with the same crate name exist, this would
-/// cause a filename collision, causing different rustdoc invocations to stomp
-/// on one another's files.
-///
-/// Unfortunately this does not remove all duplicates, as some of them are
-/// either user error, or difficult to remove. Cases that I can think of:
-///
-/// - Same target name in different packages. See the `collision_doc` test.
-/// - Different sources. See `collision_doc_sources` test.
-///
-/// Ideally this would not be necessary.
-fn remove_duplicate_doc(
-    build_config: &BuildConfig,
-    root_units: &[Unit],
-    unit_graph: &mut UnitGraph,
-) {
-    // First, create a mapping of crate_name -> Unit so we can see where the
-    // duplicates are.
-    let mut all_docs: HashMap<String, Vec<Unit>> = HashMap::new();
-    for unit in unit_graph.keys() {
-        if unit.mode.is_doc() {
-            all_docs
-                .entry(unit.target.crate_name())
-                .or_default()
-                .push(unit.clone());
-        }
-    }
-    // Keep track of units to remove so that they can be efficiently removed
-    // from the unit_deps.
-    let mut removed_units: HashSet<Unit> = HashSet::new();
-    let mut remove = |units: Vec<Unit>, reason: &str, cb: &dyn Fn(&Unit) -> bool| -> Vec<Unit> {
-        let (to_remove, remaining_units): (Vec<Unit>, Vec<Unit>) = units
-            .into_iter()
-            .partition(|unit| cb(unit) && !root_units.contains(unit));
-        for unit in to_remove {
-            log::debug!(
-                "removing duplicate doc due to {} for package {} target `{}`",
-                reason,
-                unit.pkg,
-                unit.target.name()
-            );
-            unit_graph.remove(&unit);
-            removed_units.insert(unit);
-        }
-        remaining_units
-    };
-    // Iterate over the duplicates and try to remove them from unit_graph.
-    for (_crate_name, mut units) in all_docs {
-        if units.len() == 1 {
-            continue;
-        }
-        // Prefer target over host if --target was not specified.
-        if build_config
-            .requested_kinds
-            .iter()
-            .all(CompileKind::is_host)
-        {
-            // Note these duplicates may not be real duplicates, since they
-            // might get merged in rebuild_unit_graph_shared. Either way, it
-            // shouldn't hurt to remove them early (although the report in the
-            // log might be confusing).
-            units = remove(units, "host/target merger", &|unit| unit.kind.is_host());
-            if units.len() == 1 {
-                continue;
-            }
-        }
-        // Prefer newer versions over older.
-        let mut source_map: HashMap<(InternedString, SourceId, CompileKind), Vec<Unit>> =
-            HashMap::new();
-        for unit in units {
-            let pkg_id = unit.pkg.package_id();
-            // Note, this does not detect duplicates from different sources.
-            source_map
-                .entry((pkg_id.name(), pkg_id.source_id(), unit.kind))
-                .or_default()
-                .push(unit);
-        }
-        let mut remaining_units = Vec::new();
-        for (_key, mut units) in source_map {
-            if units.len() > 1 {
-                units.sort_by(|a, b| a.pkg.version().partial_cmp(b.pkg.version()).unwrap());
-                // Remove any entries with version < newest.
-                let newest_version = units.last().unwrap().pkg.version().clone();
-                let keep_units = remove(units, "older version", &|unit| {
-                    unit.pkg.version() < &newest_version
-                });
-                remaining_units.extend(keep_units);
-            } else {
-                remaining_units.extend(units);
-            }
-        }
-        if remaining_units.len() == 1 {
-            continue;
-        }
-        // Are there other heuristics to remove duplicates that would make
-        // sense? Maybe prefer path sources over all others?
-    }
-    // Also remove units from the unit_deps so there aren't any dangling edges.
-    for unit_deps in unit_graph.values_mut() {
-        unit_deps.retain(|unit_dep| !removed_units.contains(&unit_dep.unit));
-    }
-    // Remove any orphan units that were detached from the graph.
-    let mut visited = HashSet::new();
-    fn visit(unit: &Unit, graph: &UnitGraph, visited: &mut HashSet<Unit>) {
-        if !visited.insert(unit.clone()) {
-            return;
-        }
-        for dep in &graph[unit] {
-            visit(&dep.unit, graph, visited);
-        }
-    }
-    for unit in root_units {
-        visit(unit, unit_graph, &mut visited);
-    }
-    unit_graph.retain(|unit, _| visited.contains(unit));
-}
-
-/// Override crate types for given units.
-///
-/// This is primarily used by `cargo rustc --crate-type`.
-fn override_rustc_crate_types( - units: &mut [Unit], - args: &[String], - interner: &UnitInterner, -) -> CargoResult<()> { - if units.len() != 1 { - anyhow::bail!( - "crate types to rustc can only be passed to one \ - target, consider filtering\nthe package by passing, \ - e.g., `--lib` or `--example` to specify a single target" - ); - } - - let unit = &units[0]; - let override_unit = |f: fn(Vec) -> TargetKind| { - let crate_types = args.iter().map(|s| s.into()).collect(); - let mut target = unit.target.clone(); - target.set_kind(f(crate_types)); - interner.intern( - &unit.pkg, - &target, - unit.profile.clone(), - unit.kind, - unit.mode, - unit.features.clone(), - unit.is_std, - unit.dep_hash, - unit.artifact, - ) - }; - units[0] = match unit.target.kind() { - TargetKind::Lib(_) => override_unit(TargetKind::Lib), - TargetKind::ExampleLib(_) => override_unit(TargetKind::ExampleLib), - _ => { - anyhow::bail!( - "crate types can only be specified for libraries and example libraries.\n\ - Binaries, tests, and benchmarks are always the `bin` crate type" - ); - } - }; - - Ok(()) -} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/ops/cargo_package.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/ops/cargo_package.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/ops/cargo_package.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/ops/cargo_package.rs 2023-01-10 13:41:19.000000000 +0000 @@ -829,7 +829,6 @@ target_rustdoc_args: None, target_rustc_args: rustc_args, target_rustc_crate_types: None, - local_rustdoc_args: None, rustdoc_document_private_items: false, honor_rust_version: true, }, diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/ops/cargo_remove.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/ops/cargo_remove.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/ops/cargo_remove.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/ops/cargo_remove.rs 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,65 @@ +//! Core of cargo-remove command + +use crate::core::Package; +use crate::util::toml_mut::manifest::DepTable; +use crate::util::toml_mut::manifest::LocalManifest; +use crate::CargoResult; +use crate::Config; + +/// Remove a dependency from a Cargo.toml manifest file. +#[derive(Debug)] +pub struct RemoveOptions<'a> { + /// Configuration information for Cargo operations + pub config: &'a Config, + /// Package to remove dependencies from + pub spec: &'a Package, + /// Dependencies to remove + pub dependencies: Vec, + /// Which dependency section to remove these from + pub section: DepTable, + /// Whether or not to actually write the manifest + pub dry_run: bool, +} + +/// Remove dependencies from a manifest +pub fn remove(options: &RemoveOptions<'_>) -> CargoResult<()> { + let dep_table = options + .section + .to_table() + .into_iter() + .map(String::from) + .collect::>(); + + let manifest_path = options.spec.manifest_path().to_path_buf(); + let mut manifest = LocalManifest::try_new(&manifest_path)?; + + for dep in &options.dependencies { + let section = if dep_table.len() >= 3 { + format!("{} for target `{}`", &dep_table[2], &dep_table[1]) + } else { + dep_table[0].clone() + }; + options + .config + .shell() + .status("Removing", format!("{dep} from {section}"))?; + + manifest.remove_from_table(&dep_table, dep)?; + + // Now that we have removed the crate, if that was the last reference to that + // crate, then we need to drop any explicitly activated features on + // that crate. 
+ manifest.gc_dep(dep); + } + + if options.dry_run { + options + .config + .shell() + .warn("aborting remove due to dry run")?; + } else { + manifest.write()?; + } + + Ok(()) +} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/ops/common_for_install_and_uninstall.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/ops/common_for_install_and_uninstall.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/ops/common_for_install_and_uninstall.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/ops/common_for_install_and_uninstall.rs 2023-01-10 13:41:19.000000000 +0000 @@ -7,10 +7,12 @@ use std::task::Poll; use anyhow::{bail, format_err, Context as _}; +use ops::FilterRule; use serde::{Deserialize, Serialize}; use toml_edit::easy as toml; use crate::core::compiler::Freshness; +use crate::core::Target; use crate::core::{Dependency, FeatureValue, Package, PackageId, QueryKind, Source, SourceId}; use crate::ops::{self, CompileFilter, CompileOptions}; use crate::sources::PathSource; @@ -690,20 +692,17 @@ ref examples, .. } => { - let all_bins: Vec = bins.try_collect().unwrap_or_else(|| { - pkg.targets() + let collect = |rule: &_, f: fn(&Target) -> _| match rule { + FilterRule::All => pkg + .targets() .iter() - .filter(|t| t.is_bin()) - .map(|t| t.name().to_string()) - .collect() - }); - let all_examples: Vec = examples.try_collect().unwrap_or_else(|| { - pkg.targets() - .iter() - .filter(|t| t.is_exe_example()) - .map(|t| t.name().to_string()) - .collect() - }); + .filter(|t| f(t)) + .map(|t| t.name().into()) + .collect(), + FilterRule::Just(targets) => targets.clone(), + }; + let all_bins = collect(bins, Target::is_bin); + let all_examples = collect(examples, Target::is_exe_example); all_bins .iter() diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/ops/mod.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/ops/mod.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/ops/mod.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/ops/mod.rs 2023-01-10 13:41:19.000000000 +0000 @@ -45,6 +45,7 @@ mod cargo_package; mod cargo_pkgid; mod cargo_read_manifest; +pub mod cargo_remove; mod cargo_run; mod cargo_test; mod cargo_uninstall; diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/ops/registry.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/ops/registry.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/ops/registry.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/ops/registry.rs 2023-01-10 13:41:19.000000000 +0000 @@ -18,9 +18,11 @@ use termcolor::ColorSpec; use crate::core::dependency::DepKind; +use crate::core::dependency::Dependency; use crate::core::manifest::ManifestMetadata; use crate::core::resolver::CliFeatures; use crate::core::source::Source; +use crate::core::QueryKind; use crate::core::{Package, SourceId, Workspace}; use crate::ops; use crate::ops::Packages; @@ -28,7 +30,7 @@ use crate::util::config::{self, Config, SslVersionConfig, SslVersionConfigRange}; use crate::util::errors::CargoResult; use crate::util::important_paths::find_root_manifest_for_wd; -use crate::util::IntoUrl; +use crate::util::{truncate_with_ellipsis, IntoUrl}; use crate::{drop_print, drop_println, version}; mod auth; @@ -132,17 +134,23 @@ let reg_name = publish_registry .clone() .unwrap_or_else(|| CRATES_IO_REGISTRY.to_string()); - if !allowed_registries.contains(®_name) { + if allowed_registries.is_empty() { bail!( "`{}` cannot be published.\n\ - The registry `{}` is not listed in the `publish` value in Cargo.toml.", + `package.publish` is 
set to `false` or an empty list in Cargo.toml and prevents publishing.", + pkg.name(), + ); + } else if !allowed_registries.contains(®_name) { + bail!( + "`{}` cannot be published.\n\ + The registry `{}` is not listed in the `package.publish` value in Cargo.toml.", pkg.name(), reg_name ); } } - let (mut registry, _reg_cfg, reg_id) = registry( + let (mut registry, _reg_cfg, reg_ids) = registry( opts.config, opts.token.clone(), opts.index.as_deref(), @@ -150,7 +158,7 @@ true, !opts.dry_run, )?; - verify_dependencies(pkg, ®istry, reg_id)?; + verify_dependencies(pkg, ®istry, reg_ids.original)?; // Prepare a tarball, with a non-suppressible warning if metadata // is missing since this is being put online. @@ -180,9 +188,22 @@ pkg, tarball.file(), &mut registry, - reg_id, + reg_ids.original, opts.dry_run, )?; + if !opts.dry_run { + const DEFAULT_TIMEOUT: u64 = 60; + let timeout = if opts.config.cli_unstable().publish_timeout { + let timeout: Option = opts.config.get("publish.timeout")?; + timeout.unwrap_or(DEFAULT_TIMEOUT) + } else { + DEFAULT_TIMEOUT + }; + if 0 < timeout { + let timeout = std::time::Duration::from_secs(timeout); + wait_for_publish(opts.config, reg_ids.original, pkg, timeout)?; + } + } Ok(()) } @@ -209,7 +230,7 @@ // This extra hostname check is mostly to assist with testing, // but also prevents someone using `--index` to specify // something that points to crates.io. - if registry_src.is_default_registry() || registry.host_is_crates_io() { + if registry_src.is_crates_io() || registry.host_is_crates_io() { bail!("crates cannot be published to crates.io with dependencies sourced from other\n\ registries. `{}` needs to be published to crates.io before publishing this crate.\n\ (crate `{}` is pulled from {})", @@ -374,6 +395,72 @@ Ok(()) } +fn wait_for_publish( + config: &Config, + registry_src: SourceId, + pkg: &Package, + timeout: std::time::Duration, +) -> CargoResult<()> { + let version_req = format!("={}", pkg.version()); + let mut source = SourceConfigMap::empty(config)?.load(registry_src, &HashSet::new())?; + let source_description = source.describe(); + let query = Dependency::parse(pkg.name(), Some(&version_req), registry_src)?; + + let now = std::time::Instant::now(); + let sleep_time = std::time::Duration::from_secs(1); + let mut logged = false; + loop { + { + let _lock = config.acquire_package_cache_lock()?; + // Force re-fetching the source + // + // As pulling from a git source is expensive, we track when we've done it within the + // process to only do it once, but we are one of the rare cases that needs to do it + // multiple times + config + .updated_sources() + .remove(&source.replaced_source_id()); + source.invalidate_cache(); + let summaries = loop { + // Exact to avoid returning all for path/git + match source.query_vec(&query, QueryKind::Exact) { + std::task::Poll::Ready(res) => { + break res?; + } + std::task::Poll::Pending => source.block_until_ready()?, + } + }; + if !summaries.is_empty() { + break; + } + } + + if timeout < now.elapsed() { + config.shell().warn(format!( + "timed out waiting for `{}` to be in {}", + pkg.name(), + source_description + ))?; + break; + } + + if !logged { + config.shell().status( + "Waiting", + format!( + "on `{}` to propagate to {} (ctrl-c to wait asynchronously)", + pkg.name(), + source_description + ), + )?; + logged = true; + } + std::thread::sleep(sleep_time); + } + + Ok(()) +} + /// Returns the index and token from the config file for the given registry. 
/// /// `registry` is typically the registry specified on the command-line. If @@ -391,6 +478,22 @@ }; // `registry.default` is handled in command-line parsing. let (token, process) = match registry { + Some("crates-io") | None => { + // Use crates.io default. + config.check_registry_index_not_set()?; + let token = config.get_string("registry.token")?.map(|p| p.val); + let process = if config.cli_unstable().credential_process { + let process = + config.get::>("registry.credential-process")?; + if token.is_some() && process.is_some() { + return err_both("registry.token", "registry.credential-process"); + } + process + } else { + None + }; + (token, process) + } Some(registry) => { let token_key = format!("registries.{registry}.token"); let token = config.get_string(&token_key)?.map(|p| p.val); @@ -411,22 +514,6 @@ }; (token, process) } - None => { - // Use crates.io default. - config.check_registry_index_not_set()?; - let token = config.get_string("registry.token")?.map(|p| p.val); - let process = if config.cli_unstable().credential_process { - let process = - config.get::>("registry.credential-process")?; - if token.is_some() && process.is_some() { - return err_both("registry.token", "registry.credential-process"); - } - process - } else { - None - }; - (token, process) - } }; let credential_process = @@ -444,11 +531,9 @@ /// /// * `token`: The token from the command-line. If not set, uses the token /// from the config. -/// * `index`: The index URL from the command-line. This is ignored if -/// `registry` is set. +/// * `index`: The index URL from the command-line. /// * `registry`: The registry name from the command-line. If neither -/// `registry`, or `index` are set, then uses `crates-io`, honoring -/// `[source]` replacement if defined. +/// `registry`, or `index` are set, then uses `crates-io`. /// * `force_update`: If `true`, forces the index to be updated. /// * `validate_token`: If `true`, the token must be set. fn registry( @@ -458,28 +543,12 @@ registry: Option<&str>, force_update: bool, validate_token: bool, -) -> CargoResult<(Registry, RegistryConfig, SourceId)> { - if index.is_some() && registry.is_some() { - // Otherwise we would silently ignore one or the other. - bail!("both `--index` and `--registry` should not be set at the same time"); - } - // Parse all configuration options +) -> CargoResult<(Registry, RegistryConfig, RegistrySourceIds)> { + let source_ids = get_source_id(config, index, registry)?; let reg_cfg = registry_configuration(config, registry)?; - let opt_index = registry - .map(|r| config.get_registry_index(r)) - .transpose()? - .map(|u| u.to_string()); - let sid = get_source_id(config, opt_index.as_deref().or(index), registry)?; - if !sid.is_remote_registry() { - bail!( - "{} does not support API commands.\n\ - Check for a source-replacement in .cargo/config.", - sid - ); - } let api_host = { let _lock = config.acquire_package_cache_lock()?; - let mut src = RegistrySource::remote(sid, &HashSet::new(), config)?; + let mut src = RegistrySource::remote(source_ids.replacement, &HashSet::new(), config)?; // Only update the index if the config is not available or `force` is set. if force_update { src.invalidate_cache() @@ -488,13 +557,14 @@ match src.config()? 
{ Poll::Pending => src .block_until_ready() - .with_context(|| format!("failed to update {}", sid))?, + .with_context(|| format!("failed to update {}", source_ids.replacement))?, Poll::Ready(cfg) => break cfg, } }; - cfg.and_then(|cfg| cfg.api) - .ok_or_else(|| format_err!("{} does not support API commands", sid))? + cfg.and_then(|cfg| cfg.api).ok_or_else(|| { + format_err!("{} does not support API commands", source_ids.replacement) + })? }; let token = if validate_token { if index.is_some() { @@ -503,42 +573,18 @@ } token } else { - // Check `is_default_registry` so that the crates.io index can - // change config.json's "api" value, and this won't affect most - // people. It will affect those using source replacement, but - // hopefully that's a relatively small set of users. - if token.is_none() - && reg_cfg.is_token() - && registry.is_none() - && !sid.is_default_registry() - && !crates_io::is_url_crates_io(&api_host) - { - config.shell().warn( - "using `registry.token` config value with source \ - replacement is deprecated\n\ - This may become a hard error in the future; \ - see .\n\ - Use the --token command-line flag to remove this warning.", - )?; - reg_cfg.as_token().map(|t| t.to_owned()) - } else { - let token = - auth::auth_token(config, token.as_deref(), ®_cfg, registry, &api_host)?; - Some(token) - } + let token = auth::auth_token(config, token.as_deref(), ®_cfg, registry, &api_host)?; + Some(token) } } else { None }; let handle = http_handle(config)?; - // Workaround for the sparse+https://index.crates.io replacement index. Use the non-replaced - // source_id so that the original (github) url is used when publishing a crate. - let sid = if sid.is_default_registry() { - SourceId::crates_io(config)? - } else { - sid - }; - Ok((Registry::new_handle(api_host, token, handle), reg_cfg, sid)) + Ok(( + Registry::new_handle(api_host, token, handle), + reg_cfg, + source_ids, + )) } /// Creates a new HTTP handle with appropriate global configuration for cargo. @@ -597,6 +643,9 @@ handle.useragent(&format!("cargo {}", version()))?; } + // Empty string accept encoding expands to the encodings supported by the current libcurl. + handle.accept_encoding("")?; + fn to_ssl_version(s: &str) -> CargoResult { let version = match s { "default" => SslVersion::Default, @@ -768,6 +817,10 @@ } }; + if token.is_empty() { + bail!("please provide a non-empty token"); + } + if let RegistryConfig::Token(old_token) = ®_cfg { if old_token == &token { config.shell().status("Login", "already logged in")?; @@ -943,19 +996,64 @@ /// Gets the SourceId for an index or registry setting. /// /// The `index` and `reg` values are from the command-line or config settings. -/// If both are None, returns the source for crates.io. -fn get_source_id(config: &Config, index: Option<&str>, reg: Option<&str>) -> CargoResult { - match (reg, index) { - (Some(r), _) => SourceId::alt_registry(config, r), - (_, Some(i)) => SourceId::for_registry(&i.into_url()?), - _ => { - let map = SourceConfigMap::new(config)?; - let src = map.load(SourceId::crates_io(config)?, &HashSet::new())?; - Ok(src.replaced_source_id()) +/// If both are None, and no source-replacement is configured, returns the source for crates.io. +/// If both are None, and source replacement is configured, returns an error. +/// +/// The source for crates.io may be GitHub, index.crates.io, or a test-only registry depending +/// on configuration. +/// +/// If `reg` is set, source replacement is not followed. 
+/// +/// The return value is a pair of `SourceId`s: The first may be a built-in replacement of +/// crates.io (such as index.crates.io), while the second is always the original source. +fn get_source_id( + config: &Config, + index: Option<&str>, + reg: Option<&str>, +) -> CargoResult { + let sid = match (reg, index) { + (None, None) => SourceId::crates_io(config)?, + (Some(r), None) => SourceId::alt_registry(config, r)?, + (None, Some(i)) => SourceId::for_registry(&i.into_url()?)?, + (Some(_), Some(_)) => { + bail!("both `--index` and `--registry` should not be set at the same time") } + }; + // Load source replacements that are built-in to Cargo. + let builtin_replacement_sid = SourceConfigMap::empty(config)? + .load(sid, &HashSet::new())? + .replaced_source_id(); + let replacement_sid = SourceConfigMap::new(config)? + .load(sid, &HashSet::new())? + .replaced_source_id(); + if reg.is_none() && index.is_none() && replacement_sid != builtin_replacement_sid { + // Neither --registry nor --index was passed and the user has configured source-replacement. + if let Some(replacement_name) = replacement_sid.alt_registry_key() { + bail!("crates-io is replaced with remote registry {replacement_name};\ninclude `--registry {replacement_name}` or `--registry crates-io`"); + } else { + bail!("crates-io is replaced with non-remote-registry source {replacement_sid};\ninclude `--registry crates-io` to use crates.io"); + } + } else { + Ok(RegistrySourceIds { + original: sid, + replacement: builtin_replacement_sid, + }) } } +struct RegistrySourceIds { + /// Use when looking up the auth token, or writing out `Cargo.lock` + original: SourceId, + /// Use when interacting with the source (querying / publishing , etc) + /// + /// The source for crates.io may be replaced by a built-in source for accessing crates.io with + /// the sparse protocol, or a source for the testing framework (when the replace_crates_io + /// function is used) + /// + /// User-defined source replacement is not applied. + replacement: SourceId, +} + pub fn search( query: &str, config: &Config, @@ -963,19 +1061,7 @@ limit: u32, reg: Option, ) -> CargoResult<()> { - fn truncate_with_ellipsis(s: &str, max_width: usize) -> String { - // We should truncate at grapheme-boundary and compute character-widths, - // yet the dependencies on unicode-segmentation and unicode-width are - // not worth it. 
- let mut chars = s.chars(); - let mut prefix = (&mut chars).take(max_width - 1).collect::(); - if chars.next().is_some() { - prefix.push('…'); - } - prefix - } - - let (mut registry, _, source_id) = + let (mut registry, _, source_ids) = registry(config, None, index.as_deref(), reg.as_deref(), false, false)?; let (crates, total_crates) = registry.search(query, limit).with_context(|| { format!( @@ -1032,7 +1118,7 @@ &ColorSpec::new(), ); } else if total_crates > limit && limit >= search_max_limit { - let extra = if source_id.is_default_registry() { + let extra = if source_ids.original.is_crates_io() { format!( " (go to https://crates.io/search?q={} to see more)", percent_encode(query.as_bytes(), NON_ALPHANUMERIC) diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/ops/tree/format/mod.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/ops/tree/format/mod.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/ops/tree/format/mod.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/ops/tree/format/mod.rs 2023-01-10 13:41:19.000000000 +0000 @@ -84,7 +84,7 @@ )?; let source_id = package.package_id().source_id(); - if !source_id.is_default_registry() { + if !source_id.is_crates_io() { write!(fmt, " ({})", source_id)?; } } diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/ops/tree/graph.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/ops/tree/graph.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/ops/tree/graph.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/ops/tree/graph.rs 2023-01-10 13:41:19.000000000 +0000 @@ -325,8 +325,8 @@ let node_features = resolved_features.activated_features(package_id, features_for); let node_kind = match features_for { FeaturesFor::HostDep => CompileKind::Host, - FeaturesFor::NormalOrDevOrArtifactTarget(Some(target)) => CompileKind::Target(target), - FeaturesFor::NormalOrDevOrArtifactTarget(None) => requested_kind, + FeaturesFor::ArtifactDep(target) => CompileKind::Target(target), + FeaturesFor::NormalOrDev => requested_kind, }; let node = Node::Package { package_id, diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/ops/vendor.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/ops/vendor.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/ops/vendor.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/ops/vendor.rs 2023-01-10 13:41:19.000000000 +0000 @@ -252,13 +252,13 @@ // replace original sources with vendor for source_id in sources { - let name = if source_id.is_default_registry() { + let name = if source_id.is_crates_io() { CRATES_IO_REGISTRY.to_string() } else { source_id.url().to_string() }; - let source = if source_id.is_default_registry() { + let source = if source_id.is_crates_io() { VendorSource::Registry { registry: None, replace_with: merged_source_name.to_string(), diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/sources/config.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/sources/config.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/sources/config.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/sources/config.rs 2023-01-10 13:41:19.000000000 +0000 @@ -91,7 +91,7 @@ replace_with: None, }, )?; - if config.cli_unstable().sparse_registry { + if SourceId::crates_io_is_sparse(config)? 
{ base.add( CRATES_IO_REGISTRY, SourceConfig { @@ -100,6 +100,15 @@ }, )?; } + if let Ok(url) = std::env::var("__CARGO_TEST_CRATES_IO_URL_DO_NOT_USE_THIS") { + base.add( + CRATES_IO_REGISTRY, + SourceConfig { + id: SourceId::for_alt_registry(&url.parse()?, CRATES_IO_REGISTRY)?, + replace_with: None, + }, + )?; + } Ok(base) } @@ -121,18 +130,24 @@ }; let mut cfg_loc = ""; let orig_name = name; - let new_id; - loop { + let new_id = loop { let cfg = match self.cfgs.get(name) { Some(cfg) => cfg, - None => bail!( - "could not find a configured source with the \ + None => { + // Attempt to interpret the source name as an alt registry name + if let Ok(alt_id) = SourceId::alt_registry(self.config, name) { + debug!("following pointer to registry {}", name); + break alt_id.with_precise(id.precise().map(str::to_string)); + } + bail!( + "could not find a configured source with the \ name `{}` when attempting to lookup `{}` \ (configuration in `{}`)", - name, - orig_name, - cfg_loc - ), + name, + orig_name, + cfg_loc + ); + } }; match &cfg.replace_with { Some((s, c)) => { @@ -141,8 +156,7 @@ } None if id == cfg.id => return id.load(self.config, yanked_whitelist), None => { - new_id = cfg.id.with_precise(id.precise().map(|s| s.to_string())); - break; + break cfg.id.with_precise(id.precise().map(|s| s.to_string())); } } debug!("following pointer to {}", name); @@ -155,7 +169,7 @@ cfg_loc ) } - } + }; let new_src = new_id.load( self.config, diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/sources/git/known_hosts.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/sources/git/known_hosts.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/sources/git/known_hosts.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/sources/git/known_hosts.rs 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,628 @@ +//! SSH host key validation support. +//! +//! A primary goal with this implementation is to provide user-friendly error +//! messages, guiding them to understand the issue and how to resolve it. +//! +//! Note that there are a lot of limitations here. This reads OpenSSH +//! known_hosts files from well-known locations, but it does not read OpenSSH +//! config files. The config file can change the behavior of how OpenSSH +//! handles known_hosts files. For example, some things we don't handle: +//! +//! - `GlobalKnownHostsFile` — Changes the location of the global host file. +//! - `UserKnownHostsFile` — Changes the location of the user's host file. +//! - `KnownHostsCommand` — A command to fetch known hosts. +//! - `CheckHostIP` — DNS spoofing checks. +//! - `VisualHostKey` — Shows a visual ascii-art key. +//! - `VerifyHostKeyDNS` — Uses SSHFP DNS records to fetch a host key. +//! +//! There's also a number of things that aren't supported but could be easily +//! added (it just adds a little complexity). For example, hostname patterns, +//! and revoked markers. See "FIXME" comments littered in this file. + +use crate::util::config::{Definition, Value}; +use git2::cert::{Cert, SshHostKeyType}; +use git2::CertificateCheckStatus; +use hmac::Mac; +use std::collections::HashSet; +use std::fmt::Write; +use std::path::{Path, PathBuf}; + +/// These are host keys that are hard-coded in cargo to provide convenience. +/// +/// If GitHub ever publishes new keys, the user can add them to their own +/// configuration file to use those instead. +/// +/// The GitHub keys are sourced from or +/// . 
+/// +/// These will be ignored if the user adds their own entries for `github.com`, +/// which can be useful if GitHub ever revokes their old keys. +static BUNDLED_KEYS: &[(&str, &str, &str)] = &[ + ("github.com", "ssh-ed25519", "AAAAC3NzaC1lZDI1NTE5AAAAIOMqqnkVzrm0SdG6UOoqKLsabgH5C9okWi0dh2l9GKJl"), + ("github.com", "ecdsa-sha2-nistp256", "AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBEmKSENjQEezOmxkZMy7opKgwFB9nkt5YRrYMjNuG5N87uRgg6CLrbo5wAdT/y6v0mKV0U2w0WZ2YB/++Tpockg="), + ("github.com", "ssh-rsa", "AAAAB3NzaC1yc2EAAAABIwAAAQEAq2A7hRGmdnm9tUDbO9IDSwBK6TbQa+PXYPCPy6rbTrTtw7PHkccKrpp0yVhp5HdEIcKr6pLlVDBfOLX9QUsyCOV0wzfjIJNlGEYsdlLJizHhbn2mUjvSAHQqZETYP81eFzLQNnPHt4EVVUh7VfDESU84KezmD5QlWpXLmvU31/yMf+Se8xhHTvKSCZIFImWwoG6mbUoWf9nzpIoaSjB+weqqUUmpaaasXVal72J+UX2B+2RPW3RcT0eOzQgqlJL3RKrTJvdsjE3JEAvGq3lGHSZXy28G3skua2SmVi/w4yCE6gbODqnTWlg7+wC604ydGXA8VJiS5ap43JXiUFFAaQ=="), +]; + +enum KnownHostError { + /// Some general error happened while validating the known hosts. + CheckError(anyhow::Error), + /// The host key was not found. + HostKeyNotFound { + hostname: String, + key_type: SshHostKeyType, + remote_host_key: String, + remote_fingerprint: String, + other_hosts: Vec, + }, + /// The host key was found, but does not match the remote's key. + HostKeyHasChanged { + hostname: String, + key_type: SshHostKeyType, + old_known_host: KnownHost, + remote_host_key: String, + remote_fingerprint: String, + }, +} + +impl From for KnownHostError { + fn from(err: anyhow::Error) -> KnownHostError { + KnownHostError::CheckError(err.into()) + } +} + +/// The location where a host key was located. +#[derive(Clone)] +enum KnownHostLocation { + /// Loaded from a file from disk. + File { path: PathBuf, lineno: u32 }, + /// Loaded from cargo's config system. + Config { definition: Definition }, + /// Part of the hard-coded bundled keys in Cargo. + Bundled, +} + +/// The git2 callback used to validate a certificate (only ssh known hosts are validated). +pub fn certificate_check( + cert: &Cert<'_>, + host: &str, + port: Option, + config_known_hosts: Option<&Vec>>, + diagnostic_home_config: &str, +) -> Result { + let Some(host_key) = cert.as_hostkey() else { + // Return passthrough for TLS X509 certificates to use whatever validation + // was done in git2. + return Ok(CertificateCheckStatus::CertificatePassthrough) + }; + // If a nonstandard port is in use, check for that first. + // The fallback to check without a port is handled in the HostKeyNotFound handler. + let host_maybe_port = match port { + Some(port) if port != 22 => format!("[{host}]:{port}"), + _ => host.to_string(), + }; + // The error message must be constructed as a string to pass through the libgit2 C API. + let err_msg = match check_ssh_known_hosts(host_key, &host_maybe_port, config_known_hosts) { + Ok(()) => { + return Ok(CertificateCheckStatus::CertificateOk); + } + Err(KnownHostError::CheckError(e)) => { + format!("error: failed to validate host key:\n{:#}", e) + } + Err(KnownHostError::HostKeyNotFound { + hostname, + key_type, + remote_host_key, + remote_fingerprint, + other_hosts, + }) => { + // Try checking without the port. 
+ if port.is_some() + && !matches!(port, Some(22)) + && check_ssh_known_hosts(host_key, host, config_known_hosts).is_ok() + { + return Ok(CertificateCheckStatus::CertificateOk); + } + let key_type_short_name = key_type.short_name(); + let key_type_name = key_type.name(); + let known_hosts_location = user_known_host_location_to_add(diagnostic_home_config); + let other_hosts_message = if other_hosts.is_empty() { + String::new() + } else { + let mut msg = String::from( + "Note: This host key was found, \ + but is associated with a different host:\n", + ); + for known_host in other_hosts { + let loc = match known_host.location { + KnownHostLocation::File { path, lineno } => { + format!("{} line {lineno}", path.display()) + } + KnownHostLocation::Config { definition } => { + format!("config value from {definition}") + } + KnownHostLocation::Bundled => format!("bundled with cargo"), + }; + write!(msg, " {loc}: {}\n", known_host.patterns).unwrap(); + } + msg + }; + format!("error: unknown SSH host key\n\ + The SSH host key for `{hostname}` is not known and cannot be validated.\n\ + \n\ + To resolve this issue, add the host key to {known_hosts_location}\n\ + \n\ + The key to add is:\n\ + \n\ + {hostname} {key_type_name} {remote_host_key}\n\ + \n\ + The {key_type_short_name} key fingerprint is: SHA256:{remote_fingerprint}\n\ + This fingerprint should be validated with the server administrator that it is correct.\n\ + {other_hosts_message}\n\ + See https://doc.rust-lang.org/stable/cargo/appendix/git-authentication.html#ssh-known-hosts \ + for more information.\n\ + ") + } + Err(KnownHostError::HostKeyHasChanged { + hostname, + key_type, + old_known_host, + remote_host_key, + remote_fingerprint, + }) => { + let key_type_short_name = key_type.short_name(); + let key_type_name = key_type.name(); + let known_hosts_location = user_known_host_location_to_add(diagnostic_home_config); + let old_key_resolution = match old_known_host.location { + KnownHostLocation::File { path, lineno } => { + let old_key_location = path.display(); + format!( + "removing the old {key_type_name} key for `{hostname}` \ + located at {old_key_location} line {lineno}, \ + and adding the new key to {known_hosts_location}", + ) + } + KnownHostLocation::Config { definition } => { + format!( + "removing the old {key_type_name} key for `{hostname}` \ + loaded from Cargo's config at {definition}, \ + and adding the new key to {known_hosts_location}" + ) + } + KnownHostLocation::Bundled => { + format!( + "adding the new key to {known_hosts_location}\n\ + The current host key is bundled as part of Cargo." 
+ ) + } + }; + format!("error: SSH host key has changed for `{hostname}`\n\ + *********************************\n\ + * WARNING: HOST KEY HAS CHANGED *\n\ + *********************************\n\ + This may be caused by a man-in-the-middle attack, or the \ + server may have changed its host key.\n\ + \n\ + The {key_type_short_name} fingerprint for the key from the remote host is:\n\ + SHA256:{remote_fingerprint}\n\ + \n\ + You are strongly encouraged to contact the server \ + administrator for `{hostname}` to verify that this new key is \ + correct.\n\ + \n\ + If you can verify that the server has a new key, you can \ + resolve this error by {old_key_resolution}\n\ + \n\ + The key provided by the remote host is:\n\ + \n\ + {hostname} {key_type_name} {remote_host_key}\n\ + \n\ + See https://doc.rust-lang.org/stable/cargo/appendix/git-authentication.html#ssh-known-hosts \ + for more information.\n\ + ") + } + }; + Err(git2::Error::new( + git2::ErrorCode::GenericError, + git2::ErrorClass::Callback, + err_msg, + )) +} + +/// Checks if the given host/host key pair is known. +fn check_ssh_known_hosts( + cert_host_key: &git2::cert::CertHostkey<'_>, + host: &str, + config_known_hosts: Option<&Vec>>, +) -> Result<(), KnownHostError> { + let Some(remote_host_key) = cert_host_key.hostkey() else { + return Err(anyhow::format_err!("remote host key is not available").into()); + }; + let remote_key_type = cert_host_key.hostkey_type().unwrap(); + + // Collect all the known host entries from disk. + let mut known_hosts = Vec::new(); + for path in known_host_files() { + if !path.exists() { + continue; + } + let hosts = load_hostfile(&path)?; + known_hosts.extend(hosts); + } + if let Some(config_known_hosts) = config_known_hosts { + // Format errors aren't an error in case the format needs to change in + // the future, to retain forwards compatibility. + for line_value in config_known_hosts { + let location = KnownHostLocation::Config { + definition: line_value.definition.clone(), + }; + match parse_known_hosts_line(&line_value.val, location) { + Some(known_host) => known_hosts.push(known_host), + None => log::warn!( + "failed to parse known host {} from {}", + line_value.val, + line_value.definition + ), + } + } + } + // Load the bundled keys. Don't add keys for hosts that the user has + // configured, which gives them the option to override them. This could be + // useful if the keys are ever revoked. + let configured_hosts: HashSet<_> = known_hosts + .iter() + .flat_map(|known_host| { + known_host + .patterns + .split(',') + .map(|pattern| pattern.to_lowercase()) + }) + .collect(); + for (patterns, key_type, key) in BUNDLED_KEYS { + if !configured_hosts.contains(*patterns) { + let key = base64::decode(key).unwrap(); + known_hosts.push(KnownHost { + location: KnownHostLocation::Bundled, + patterns: patterns.to_string(), + key_type: key_type.to_string(), + key, + }); + } + } + check_ssh_known_hosts_loaded(&known_hosts, host, remote_key_type, remote_host_key) +} + +/// Checks a host key against a loaded set of known hosts. +fn check_ssh_known_hosts_loaded( + known_hosts: &[KnownHost], + host: &str, + remote_key_type: SshHostKeyType, + remote_host_key: &[u8], +) -> Result<(), KnownHostError> { + // `changed_key` keeps track of any entries where the key has changed. + let mut changed_key = None; + // `other_hosts` keeps track of any entries that have an identical key, + // but a different hostname. 
+ let mut other_hosts = Vec::new(); + + for known_host in known_hosts { + // The key type from libgit2 needs to match the key type from the host file. + if known_host.key_type != remote_key_type.name() { + continue; + } + let key_matches = known_host.key == remote_host_key; + if !known_host.host_matches(host) { + if key_matches { + other_hosts.push(known_host.clone()); + } + continue; + } + if key_matches { + return Ok(()); + } + // The host and key type matched, but the key itself did not. + // This indicates the key has changed. + // This is only reported as an error if no subsequent lines have a + // correct key. + changed_key = Some(known_host.clone()); + } + // Older versions of OpenSSH (before 6.8, March 2015) showed MD5 + // fingerprints (see FingerprintHash ssh config option). Here we only + // support SHA256. + let mut remote_fingerprint = cargo_util::Sha256::new(); + remote_fingerprint.update(remote_host_key); + let remote_fingerprint = + base64::encode_config(remote_fingerprint.finish(), base64::STANDARD_NO_PAD); + let remote_host_key = base64::encode(remote_host_key); + // FIXME: Ideally the error message should include the IP address of the + // remote host (to help the user validate that they are connecting to the + // host they were expecting to). However, I don't see a way to obtain that + // information from libgit2. + match changed_key { + Some(old_known_host) => Err(KnownHostError::HostKeyHasChanged { + hostname: host.to_string(), + key_type: remote_key_type, + old_known_host, + remote_host_key, + remote_fingerprint, + }), + None => Err(KnownHostError::HostKeyNotFound { + hostname: host.to_string(), + key_type: remote_key_type, + remote_host_key, + remote_fingerprint, + other_hosts, + }), + } +} + +/// Returns a list of files to try loading OpenSSH-formatted known hosts. +fn known_host_files() -> Vec { + let mut result = Vec::new(); + if cfg!(unix) { + result.push(PathBuf::from("/etc/ssh/ssh_known_hosts")); + } else if cfg!(windows) { + // The msys/cygwin version of OpenSSH uses `/etc` from the posix root + // filesystem there (such as `C:\msys64\etc\ssh\ssh_known_hosts`). + // However, I do not know of a way to obtain that location from + // Windows-land. The ProgramData version here is what the PowerShell + // port of OpenSSH does. + if let Some(progdata) = std::env::var_os("ProgramData") { + let mut progdata = PathBuf::from(progdata); + progdata.push("ssh"); + progdata.push("ssh_known_hosts"); + result.push(progdata) + } + } + result.extend(user_known_host_location()); + result +} + +/// The location of the user's known_hosts file. +fn user_known_host_location() -> Option { + // NOTE: This is a potentially inaccurate prediction of what the user + // actually wants. The actual location depends on several factors: + // + // - Windows OpenSSH Powershell version: I believe this looks up the home + // directory via ProfileImagePath in the registry, falling back to + // `GetWindowsDirectoryW` if that fails. + // - OpenSSH Portable (under msys): This is very complicated. I got lost + // after following it through some ldap/active directory stuff. + // - OpenSSH (most unix platforms): Uses `pw->pw_dir` from `getpwuid()`. + // + // This doesn't do anything close to that. home_dir's behavior is: + // - Windows: $USERPROFILE, or SHGetFolderPathW() + // - Unix: $HOME, or getpwuid_r() + // + // Since there is a mismatch here, the location returned here might be + // different than what the user's `ssh` CLI command uses. We may want to + // consider trying to align it better. 
+ home::home_dir().map(|mut home| { + home.push(".ssh"); + home.push("known_hosts"); + home + }) +} + +/// The location to display in an error message instructing the user where to +/// add the new key. +fn user_known_host_location_to_add(diagnostic_home_config: &str) -> String { + // Note that we don't bother with the legacy known_hosts2 files. + let user = user_known_host_location(); + let openssh_loc = match &user { + Some(path) => path.to_str().expect("utf-8 home"), + None => "~/.ssh/known_hosts", + }; + format!( + "the `net.ssh.known-hosts` array in your Cargo configuration \ + (such as {diagnostic_home_config}) \ + or in your OpenSSH known_hosts file at {openssh_loc}" + ) +} + +const HASH_HOSTNAME_PREFIX: &str = "|1|"; + +/// A single known host entry. +#[derive(Clone)] +struct KnownHost { + location: KnownHostLocation, + /// The hostname. May be comma separated to match multiple hosts. + patterns: String, + key_type: String, + key: Vec, +} + +impl KnownHost { + /// Returns whether or not the given host matches this known host entry. + fn host_matches(&self, host: &str) -> bool { + let mut match_found = false; + let host = host.to_lowercase(); + if let Some(hashed) = self.patterns.strip_prefix(HASH_HOSTNAME_PREFIX) { + return hashed_hostname_matches(&host, hashed); + } + for pattern in self.patterns.split(',') { + let pattern = pattern.to_lowercase(); + // FIXME: support * and ? wildcards + if let Some(pattern) = pattern.strip_prefix('!') { + if pattern == host { + return false; + } + } else { + match_found |= pattern == host; + } + } + match_found + } +} + +fn hashed_hostname_matches(host: &str, hashed: &str) -> bool { + let Some((b64_salt, b64_host)) = hashed.split_once('|') else { return false; }; + let Ok(salt) = base64::decode(b64_salt) else { return false; }; + let Ok(hashed_host) = base64::decode(b64_host) else { return false; }; + let Ok(mut mac) = hmac::Hmac::::new_from_slice(&salt) else { return false; }; + mac.update(host.as_bytes()); + let result = mac.finalize().into_bytes(); + hashed_host == &result[..] +} + +/// Loads an OpenSSH known_hosts file. +fn load_hostfile(path: &Path) -> Result, anyhow::Error> { + let contents = cargo_util::paths::read(path)?; + Ok(load_hostfile_contents(path, &contents)) +} + +fn load_hostfile_contents(path: &Path, contents: &str) -> Vec { + let entries = contents + .lines() + .enumerate() + .filter_map(|(lineno, line)| { + let location = KnownHostLocation::File { + path: path.to_path_buf(), + lineno: lineno as u32 + 1, + }; + parse_known_hosts_line(line, location) + }) + .collect(); + entries +} + +fn parse_known_hosts_line(line: &str, location: KnownHostLocation) -> Option { + let line = line.trim(); + // FIXME: @revoked and @cert-authority is currently not supported. 
+ if line.is_empty() || line.starts_with(['#', '@']) { + return None; + } + let mut parts = line.split([' ', '\t']).filter(|s| !s.is_empty()); + let patterns = parts.next()?; + let key_type = parts.next()?; + let key = parts.next().map(base64::decode)?.ok()?; + Some(KnownHost { + location, + patterns: patterns.to_string(), + key_type: key_type.to_string(), + key, + }) +} + +#[cfg(test)] +mod tests { + use super::*; + + static COMMON_CONTENTS: &str = r#" + # Comments allowed at start of line + + example.com,rust-lang.org ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQC5MzWIpZwpkpDjyCNiTIEVFhSA9OUUQvjFo7CgZBGCAj/cqeUIgiLsgtfmtBsfWIkAECQpM7ePP7NLZFGJcHvoyg5jXJiIX5s0eKo9IlcuTLLrMkW5MkHXE7bNklVbW1WdCfF2+y7Ao25B4L8FFRokMh0yp/H6+8xZ7PdVwL3FRPEg8ftZ5R0kuups6xiMHPRX+f/07vfJzA47YDPmXfhkn+JK8kL0JYw8iy8BtNBfRQL99d9iXJzWXnNce5NHMuKD5rOonD3aQHLDlwK+KhrFRrdaxQEM8ZWxNti0ux8yT4Dl5jJY0CrIu3Xl6+qroVgTqJGNkTbhs5DGWdFh6BLPTTH15rN4buisg7uMyLyHqx06ckborqD33gWu+Jig7O+PV6KJmL5mp1O1HXvZqkpBdTiT6GiDKG3oECCIXkUk0BSU9VG9VQcrMxxvgiHlyoXUAfYQoXv/lnxkTnm+Sr36kutsVOs7n5B43ZKAeuaxyQ11huJZpxamc0RA1HM641s= eric@host + Example.net ssh-dss AAAAB3NzaC1kc3MAAACBAK2Ek3jVxisXmz5UcZ7W65BAj/nDJCCVvSe0Aytndn4PH6k7sVesut5OoY6PdksZ9tEfuFjjS9HR5SJb8j1GW0GxtaSHHbf+rNc36PeU75bffzyIWwpA8uZFONt5swUAXJXcsHOoapNbUFuhHsRhB2hXxz9QGNiiwIwRJeSHixKRAAAAFQChKfxO1z9H2/757697xP5nJ/Z5dwAAAIEAoc+HIWas+4WowtB/KtAp6XE0B9oHI+55wKtdcGwwb7zHKK9scWNXwxIcMhSvyB3Oe2I7dQQlvyIWxsdZlzOkX0wdsTHjIAnBAP68MyvMv4kq3+I5GAVcFsqoLZfZvh0dlcgUq1/YNYZwKlt89tnzk8Fp4KLWmuw8Bd8IShYVa78AAACAL3qd8kNTY7CthgsQ8iWdjbkGSF/1KCeFyt8UjurInp9wvPDjqagwakbyLOzN7y3/ItTPCaGuX+RjFP0zZTf8i9bsAVyjFJiJ7vzRXcWytuFWANrpzLTn1qzPfh63iK92Aw8AVBYvEA/4bxo+XReAvhNBB/m78G6OedTeu6ZoTsI= eric@host + [example.net]:2222 ssh-dss AAAAB3NzaC1kc3MAAACBAJJN5kLZEpOJpXWyMT4KwYvLAj+b9ErNtglxOi86C6Kw7oZeYdDMCfD3lc3PJyX64udQcWGfO4abSESMiYdY43yFAZH279QGH5Q/B5CklVvTqYpfAUR+1r9TQxy3OVQHk7FB2wOi4xNQ3myO0vaYlBOB9il+P223aERbXx4JTWdvAAAAFQCTHWTcXxLK5Z6ZVPmfdSDyHzkF2wAAAIEAhp41/mTnM0Y0EWSyCXuETMW1QSpKGF8sqoZKp6wdzyhLXu0i32gLdXj4p24em/jObYh93hr+MwgxqWq+FHgD+D80Qg5f6vj4yEl4Uu5hqtTpCBFWUQoyEckbUkPf8uZ4/XzAne+tUSjZm09xATCmK9U2IGqZE+D+90eBkf1Svc8AAACAeKhi4EtfwenFYqKz60ZoEEhIsE1yI2jH73akHnfHpcW84w+fk3YlwjcfDfyYso+D0jZBdJeK5qIdkbUWhAX8wDjJVO0WL6r/YPr4yu/CgEyW1H59tAbujGJ4NR0JDqioulzYqNHnxpiw1RJukZnPBfSFKzRElvPOCq/NkQM/Mwk= eric@host + nistp256.example.org ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBJ4iYGCcJrUIfrHfzlsv8e8kaF36qpcUpe3VNAKVCZX/BDptIdlEe8u8vKNRTPgUO9jqS0+tjTcPiQd8/8I9qng= eric@host + nistp384.example.org ecdsa-sha2-nistp384 AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBNuGT3TqMz2rcwOt2ZqkiNqq7dvWPE66W2qPCoZsh0pQhVU3BnhKIc6nEr6+Wts0Z3jdF3QWwxbbTjbVTVhdr8fMCFhDCWiQFm9xLerYPKnu9qHvx9K87/fjc5+0pu4hLA== eric@host + nistp521.example.org ecdsa-sha2-nistp521 AAAAE2VjZHNhLXNoYTItbmlzdHA1MjEAAAAIbmlzdHA1MjEAAACFBAD35HH6OsK4DN75BrKipVj/GvZaUzjPNa1F8wMjUdPB1JlVcUfgzJjWSxrhmaNN3u0soiZw8WNRFINsGPCw5E7DywF1689WcIj2Ye2rcy99je15FknScTzBBD04JgIyOI50mCUaPCBoF14vFlN6BmO00cFo+yzy5N8GuQ2sx9kr21xmFQ== eric@host + # Revoked not yet supported. 
+ @revoked * ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIKtQsi+KPYispwm2rkMidQf30fG1Niy8XNkvASfePoca eric@host + example.com ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIAWkjI6XT2SZh3xNk5NhisA3o3sGzWR+VAKMSqHtI0aY eric@host + 192.168.42.12 ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIKVYJpa0yUGaNk0NXQTPWa0tHjqRpx+7hl2diReH6DtR eric@host + |1|QxzZoTXIWLhUsuHAXjuDMIV3FjQ=|M6NCOIkjiWdCWqkh5+Q+/uFLGjs= ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIIHgN3O21U4LWtP5OzjTzPnUnSDmCNDvyvlaj6Hi65JC eric@host + # Negation isn't terribly useful without globs. + neg.example.com,!neg.example.com ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIOXfUnaAHTlo1Qi//rNk26OcmHikmkns1Z6WW/UuuS3K eric@host + "#; + + #[test] + fn known_hosts_parse() { + let kh_path = Path::new("/home/abc/.known_hosts"); + let khs = load_hostfile_contents(kh_path, COMMON_CONTENTS); + assert_eq!(khs.len(), 10); + match &khs[0].location { + KnownHostLocation::File { path, lineno } => { + assert_eq!(path, kh_path); + assert_eq!(*lineno, 4); + } + _ => panic!("unexpected"), + } + assert_eq!(khs[0].patterns, "example.com,rust-lang.org"); + assert_eq!(khs[0].key_type, "ssh-rsa"); + assert_eq!(khs[0].key.len(), 407); + assert_eq!(&khs[0].key[..30], b"\x00\x00\x00\x07ssh-rsa\x00\x00\x00\x03\x01\x00\x01\x00\x00\x01\x81\x00\xb935\x88\xa5\x9c)"); + match &khs[1].location { + KnownHostLocation::File { path, lineno } => { + assert_eq!(path, kh_path); + assert_eq!(*lineno, 5); + } + _ => panic!("unexpected"), + } + assert_eq!(khs[2].patterns, "[example.net]:2222"); + assert_eq!(khs[3].patterns, "nistp256.example.org"); + assert_eq!(khs[7].patterns, "192.168.42.12"); + } + + #[test] + fn host_matches() { + let kh_path = Path::new("/home/abc/.known_hosts"); + let khs = load_hostfile_contents(kh_path, COMMON_CONTENTS); + assert!(khs[0].host_matches("example.com")); + assert!(khs[0].host_matches("rust-lang.org")); + assert!(khs[0].host_matches("EXAMPLE.COM")); + assert!(khs[1].host_matches("example.net")); + assert!(!khs[0].host_matches("example.net")); + assert!(khs[2].host_matches("[example.net]:2222")); + assert!(!khs[2].host_matches("example.net")); + assert!(khs[8].host_matches("hashed.example.com")); + assert!(!khs[8].host_matches("example.com")); + assert!(!khs[9].host_matches("neg.example.com")); + } + + #[test] + fn check_match() { + let kh_path = Path::new("/home/abc/.known_hosts"); + let khs = load_hostfile_contents(kh_path, COMMON_CONTENTS); + + assert!(check_ssh_known_hosts_loaded( + &khs, + "example.com", + SshHostKeyType::Rsa, + &khs[0].key + ) + .is_ok()); + + match check_ssh_known_hosts_loaded(&khs, "example.com", SshHostKeyType::Dss, &khs[0].key) { + Err(KnownHostError::HostKeyNotFound { + hostname, + remote_fingerprint, + other_hosts, + .. + }) => { + assert_eq!( + remote_fingerprint, + "yn+pONDn0EcgdOCVptgB4RZd/wqmsVKrPnQMLtrvhw8" + ); + assert_eq!(hostname, "example.com"); + assert_eq!(other_hosts.len(), 0); + } + _ => panic!("unexpected"), + } + + match check_ssh_known_hosts_loaded( + &khs, + "foo.example.com", + SshHostKeyType::Rsa, + &khs[0].key, + ) { + Err(KnownHostError::HostKeyNotFound { other_hosts, .. }) => { + assert_eq!(other_hosts.len(), 1); + assert_eq!(other_hosts[0].patterns, "example.com,rust-lang.org"); + } + _ => panic!("unexpected"), + } + + let mut modified_key = khs[0].key.clone(); + modified_key[0] = 1; + match check_ssh_known_hosts_loaded(&khs, "example.com", SshHostKeyType::Rsa, &modified_key) + { + Err(KnownHostError::HostKeyHasChanged { old_known_host, .. 
}) => { + assert!(matches!( + old_known_host.location, + KnownHostLocation::File { lineno: 4, .. } + )); + } + _ => panic!("unexpected"), + } + } +} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/sources/git/mod.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/sources/git/mod.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/sources/git/mod.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/sources/git/mod.rs 2023-01-10 13:41:19.000000000 +0000 @@ -1,4 +1,5 @@ pub use self::source::GitSource; pub use self::utils::{fetch, GitCheckout, GitDatabase, GitRemote}; +mod known_hosts; mod source; mod utils; diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/sources/git/source.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/sources/git/source.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/sources/git/source.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/sources/git/source.rs 2023-01-10 13:41:19.000000000 +0000 @@ -191,7 +191,8 @@ .join("checkouts") .join(&self.ident) .join(short_id.as_str()); - db.copy_to(actual_rev, &checkout_path, self.config)?; + let parent_remote_url = self.url(); + db.copy_to(actual_rev, &checkout_path, self.config, parent_remote_url)?; let source_id = self.source_id.with_precise(Some(actual_rev.to_string())); let path_source = PathSource::new_recursive(&checkout_path, source_id, self.config); diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/sources/git/utils.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/sources/git/utils.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/sources/git/utils.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/sources/git/utils.rs 2023-01-10 13:41:19.000000000 +0000 @@ -11,6 +11,7 @@ use log::{debug, info}; use serde::ser; use serde::Serialize; +use std::borrow::Cow; use std::env; use std::fmt; use std::path::{Path, PathBuf}; @@ -151,6 +152,7 @@ rev: git2::Oid, dest: &Path, cargo_config: &Config, + parent_remote_url: &Url, ) -> CargoResult> { // If the existing checkout exists, and it is fresh, use it. // A non-fresh checkout can happen if the checkout operation was @@ -164,7 +166,7 @@ Some(co) => co, None => GitCheckout::clone_into(dest, self, rev, cargo_config)?, }; - checkout.update_submodules(cargo_config)?; + checkout.update_submodules(cargo_config, parent_remote_url)?; Ok(checkout) } @@ -322,19 +324,25 @@ Ok(()) } - fn update_submodules(&self, cargo_config: &Config) -> CargoResult<()> { - return update_submodules(&self.repo, cargo_config); + fn update_submodules(&self, cargo_config: &Config, parent_remote_url: &Url) -> CargoResult<()> { + return update_submodules(&self.repo, cargo_config, parent_remote_url); - fn update_submodules(repo: &git2::Repository, cargo_config: &Config) -> CargoResult<()> { + fn update_submodules( + repo: &git2::Repository, + cargo_config: &Config, + parent_remote_url: &Url, + ) -> CargoResult<()> { debug!("update submodules for: {:?}", repo.workdir().unwrap()); for mut child in repo.submodules()? 
{ - update_submodule(repo, &mut child, cargo_config).with_context(|| { - format!( - "failed to update submodule `{}`", - child.name().unwrap_or("") - ) - })?; + update_submodule(repo, &mut child, cargo_config, parent_remote_url).with_context( + || { + format!( + "failed to update submodule `{}`", + child.name().unwrap_or("") + ) + }, + )?; } Ok(()) } @@ -343,9 +351,11 @@ parent: &git2::Repository, child: &mut git2::Submodule<'_>, cargo_config: &Config, + parent_remote_url: &Url, ) -> CargoResult<()> { child.init(false)?; - let url = child.url().ok_or_else(|| { + + let child_url_str = child.url().ok_or_else(|| { anyhow::format_err!("non-utf8 url for submodule {:?}?", child.path()) })?; @@ -355,12 +365,38 @@ "Skipping", format!( "git submodule `{}` due to update strategy in .gitmodules", - url + child_url_str ), )?; return Ok(()); } + // Git only assumes a URL is a relative path if it starts with `./` or `../`. + // See [`git submodule add`] documentation. + // + // [`git submodule add`]: https://git-scm.com/docs/git-submodule + let url = if child_url_str.starts_with("./") || child_url_str.starts_with("../") { + let mut new_parent_remote_url = parent_remote_url.clone(); + + let mut new_path = Cow::from(parent_remote_url.path()); + if !new_path.ends_with('/') { + new_path.to_mut().push('/'); + } + new_parent_remote_url.set_path(&new_path); + + match new_parent_remote_url.join(child_url_str) { + Ok(x) => x.to_string(), + Err(err) => Err(err).with_context(|| { + format!( + "failed to parse relative child submodule url `{}` using parent base url `{}`", + child_url_str, new_parent_remote_url + ) + })?, + } + } else { + child_url_str.to_string() + }; + // A submodule which is listed in .gitmodules but not actually // checked out will not have a head id, so we should ignore it. let head = match child.head_id() { @@ -379,7 +415,7 @@ let mut repo = match head_and_repo { Ok((head, repo)) => { if child.head_id() == head { - return update_submodules(&repo, cargo_config); + return update_submodules(&repo, cargo_config, parent_remote_url); } repo } @@ -394,7 +430,7 @@ cargo_config .shell() .status("Updating", format!("git submodule `{}`", url))?; - fetch(&mut repo, url, &reference, cargo_config).with_context(|| { + fetch(&mut repo, &url, &reference, cargo_config).with_context(|| { format!( "failed to fetch submodule `{}` from {}", child.name().unwrap_or(""), @@ -404,7 +440,7 @@ let obj = repo.find_object(head, None)?; reset(&repo, &obj, cargo_config)?; - update_submodules(&repo, cargo_config) + update_submodules(&repo, cargo_config, parent_remote_url) } } } @@ -647,7 +683,6 @@ | ErrorClass::Submodule | ErrorClass::FetchHead | ErrorClass::Ssh - | ErrorClass::Callback | ErrorClass::Http => { let mut msg = "network failure seems to have happened\n".to_string(); msg.push_str( @@ -658,6 +693,13 @@ ); err = err.context(msg); } + ErrorClass::Callback => { + // This unwraps the git2 error. We're using the callback error + // specifically to convey errors from Rust land through the C + // callback interface. We don't need the `; class=Callback + // (26)` that gets tacked on to the git2 error message. 
+ err = anyhow::format_err!("{}", e.message()); + } _ => {} } } @@ -684,14 +726,27 @@ cb: &mut dyn FnMut(git2::FetchOptions<'_>) -> CargoResult<()>, ) -> CargoResult<()> { let mut progress = Progress::new("Fetch", config); + let ssh_config = config.net_config()?.ssh.as_ref(); + let config_known_hosts = ssh_config.and_then(|ssh| ssh.known_hosts.as_ref()); + let diagnostic_home_config = config.diagnostic_home_config(); network::with_retry(config, || { with_authentication(url, git_config, |f| { + let port = Url::parse(url).ok().and_then(|url| url.port()); let mut last_update = Instant::now(); let mut rcb = git2::RemoteCallbacks::new(); // We choose `N=10` here to make a `300ms * 10slots ~= 3000ms` // sliding window for tracking the data transfer rate (in bytes/s). let mut counter = MetricsCounter::<10>::new(0, last_update); rcb.credentials(f); + rcb.certificate_check(|cert, host| { + super::known_hosts::certificate_check( + cert, + host, + port, + config_known_hosts, + &diagnostic_home_config, + ) + }); rcb.transfer_progress(|stats| { let indexed_deltas = stats.indexed_deltas(); let msg = if indexed_deltas > 0 { @@ -772,7 +827,7 @@ // We reuse repositories quite a lot, so before we go through and update the // repo check to see if it's a little too old and could benefit from a gc. - // In theory this shouldn't be too too expensive compared to the network + // In theory this shouldn't be too expensive compared to the network // request we're about to issue. maybe_gc_repo(repo)?; diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/sources/registry/http_remote.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/sources/registry/http_remote.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/sources/registry/http_remote.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/sources/registry/http_remote.rs 2023-01-10 13:41:19.000000000 +0000 @@ -7,8 +7,9 @@ use crate::sources::registry::download; use crate::sources::registry::MaybeLock; use crate::sources::registry::{LoadResponse, RegistryConfig, RegistryData}; -use crate::util::errors::CargoResult; -use crate::util::{Config, Filesystem, IntoUrl, Progress, ProgressStyle}; +use crate::util::errors::{CargoResult, HttpNotSuccessful}; +use crate::util::network::Retry; +use crate::util::{internal, Config, Filesystem, Progress, ProgressStyle}; use anyhow::Context; use cargo_util::paths; use curl::easy::{HttpVersion, List}; @@ -19,7 +20,7 @@ use std::fs::{self, File}; use std::path::{Path, PathBuf}; use std::str; -use std::task::Poll; +use std::task::{ready, Poll}; use std::time::Duration; use url::Url; @@ -83,15 +84,12 @@ /// When a download is started, it is added to this map. The key is a /// "token" (see `Download::token`). It is removed once the download is /// finished. - pending: HashMap, - /// Set of paths currently being downloaded, mapped to their tokens. + pending: HashMap, EasyHandle)>, + /// Set of paths currently being downloaded. /// This should stay in sync with `pending`. - pending_ids: HashMap, - /// The final result of each download. A pair `(token, result)`. This is a - /// temporary holding area, needed because curl can report multiple - /// downloads at once, but the main loop (`wait`) is written to only - /// handle one at a time. - results: HashMap>, + pending_paths: HashSet, + /// The final result of each download. + results: HashMap>, /// The next ID to use for creating a token (see `Download::token`). next: usize, /// Progress bar. 
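The sparse-registry hunks below attach a retry handle to each in-flight index download so that spurious network failures are re-queued on the curl Multi handle instead of aborting the fetch. As a rough sketch of that pattern only — Cargo's real `util::network::Retry` decides which errors count as spurious and how many attempts are allowed; the three-attempt budget and the use of `std::io::Error` here are assumptions for illustration:

// Illustrative only: a bounded retry helper in the spirit of the `retry`
// field added to `Download` below; not Cargo's actual implementation.
fn with_retry<T>(mut op: impl FnMut() -> Result<T, std::io::Error>) -> Result<T, std::io::Error> {
    let mut remaining = 3; // assumed attempt budget for the sketch
    loop {
        match op() {
            Ok(v) => return Ok(v),
            Err(e) if remaining > 0 && e.kind() == std::io::ErrorKind::TimedOut => {
                // Treat a timeout as a spurious failure: warn and try again.
                eprintln!("warning: spurious network error ({e}), retrying");
                remaining -= 1;
            }
            Err(e) => return Err(e),
        }
    }
}

fn main() {
    let mut attempts = 0;
    let result = with_retry(|| {
        attempts += 1;
        if attempts < 3 {
            Err(std::io::Error::from(std::io::ErrorKind::TimedOut))
        } else {
            Ok("index data")
        }
    });
    assert_eq!(result.unwrap(), "index data");
}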
@@ -103,7 +101,7 @@ blocking_calls: usize, } -struct Download { +struct Download<'cfg> { /// The token for this download, used as the key of the `Downloads::pending` map /// and stored in `EasyHandle` as well. token: usize, @@ -116,6 +114,9 @@ /// ETag or Last-Modified header received from the server (if any). index_version: RefCell>, + + /// Logic used to track retrying this download if it's a spurious failure. + retry: Retry<'cfg>, } struct CompletedDownload { @@ -136,25 +137,21 @@ let url = source_id.url().as_str(); // Ensure the url ends with a slash so we can concatenate paths. if !url.ends_with('/') { - anyhow::bail!("registry url must end in a slash `/`: {url}") + anyhow::bail!("sparse registry url must end in a slash `/`: sparse+{url}") } - let url = url - .trim_start_matches("sparse+") - .into_url() - .expect("a url with the protocol stripped should still be valid"); Ok(HttpRegistry { index_path: config.registry_index_path().join(name), cache_path: config.registry_cache_path().join(name), source_id, config, - url, + url: source_id.url().to_owned(), multi: Multi::new(), multiplexing: false, downloads: Downloads { next: 0, pending: HashMap::new(), - pending_ids: HashMap::new(), + pending_paths: HashSet::new(), results: HashMap::new(), progress: RefCell::new(Some(Progress::with_style( "Fetch", @@ -213,41 +210,64 @@ fn handle_completed_downloads(&mut self) -> CargoResult<()> { assert_eq!( self.downloads.pending.len(), - self.downloads.pending_ids.len() + self.downloads.pending_paths.len() ); // Collect the results from the Multi handle. - let pending = &mut self.downloads.pending; - self.multi.messages(|msg| { - let token = msg.token().expect("failed to read token"); - let (_, handle) = &pending[&token]; - let result = match msg.result_for(handle) { - Some(result) => result, - None => return, // transfer is not yet complete. 
- }; - - let (download, mut handle) = pending.remove(&token).unwrap(); - self.downloads.pending_ids.remove(&download.path).unwrap(); - - let result = match result { - Ok(()) => { - self.downloads.downloads_finished += 1; - match handle.response_code() { - Ok(code) => Ok(CompletedDownload { - response_code: code, - data: download.data.take(), - index_version: download - .index_version - .take() - .unwrap_or_else(|| UNKNOWN.to_string()), - }), - Err(e) => Err(e), + let results = { + let mut results = Vec::new(); + let pending = &mut self.downloads.pending; + self.multi.messages(|msg| { + let token = msg.token().expect("failed to read token"); + let (_, handle) = &pending[&token]; + if let Some(result) = msg.result_for(handle) { + results.push((token, result)); + }; + }); + results + }; + for (token, result) in results { + let (mut download, handle) = self.downloads.pending.remove(&token).unwrap(); + let mut handle = self.multi.remove(handle)?; + let data = download.data.take(); + let url = self.full_url(&download.path); + let result = match download.retry.r#try(|| { + result.with_context(|| format!("failed to download from `{}`", url))?; + let code = handle.response_code()?; + // Keep this list of expected status codes in sync with the codes handled in `load` + if !matches!(code, 200 | 304 | 410 | 404 | 451) { + let url = handle.effective_url()?.unwrap_or(&url); + return Err(HttpNotSuccessful { + code, + url: url.to_owned(), + body: data, } + .into()); + } + Ok(data) + }) { + Ok(Some(data)) => Ok(CompletedDownload { + response_code: handle.response_code()?, + data, + index_version: download + .index_version + .take() + .unwrap_or_else(|| UNKNOWN.to_string()), + }), + Ok(None) => { + // retry the operation + let handle = self.multi.add(handle)?; + self.downloads.pending.insert(token, (download, handle)); + continue; } Err(e) => Err(e), }; + + assert!(self.downloads.pending_paths.remove(&download.path)); self.downloads.results.insert(download.path, result); - }); + self.downloads.downloads_finished += 1; + } + self.downloads.tick()?; Ok(()) @@ -305,7 +325,7 @@ index_version: Option<&str>, ) -> Poll> { trace!("load: {}", path.display()); - if let Some(_token) = self.downloads.pending_ids.get(path) { + if let Some(_token) = self.downloads.pending_paths.get(path) { debug!("dependency is still pending: {}", path.display()); return Poll::Pending; } @@ -339,6 +359,8 @@ debug!("downloaded the index file `{}` twice", path.display()) } + // The status handled here need to be kept in sync with the codes handled + // in `handle_completed_downloads` match result.response_code { 200 => {} 304 => { @@ -355,13 +377,7 @@ return Poll::Ready(Ok(LoadResponse::NotFound)); } code => { - return Err(anyhow::anyhow!( - "server returned unexpected HTTP status code {} for {}\nbody: {}", - code, - self.full_url(path), - str::from_utf8(&result.data).unwrap_or(""), - )) - .into(); + return Err(internal(format!("unexpected HTTP status code {code}"))).into(); } } @@ -371,22 +387,12 @@ })); } - if self.config.offline() { - return Poll::Ready(Err(anyhow::anyhow!( - "can't download index file from '{}': you are in offline mode (--offline)", - self.url - ))); - } - // Looks like we're going to have to do a network request. self.start_fetch()?; // Load the registry config. if self.registry_config.is_none() && path != Path::new("config.json") { - match self.config()? 
{ - Poll::Ready(_) => {} - Poll::Pending => return Poll::Pending, - } + ready!(self.config()?); } let mut handle = ops::http_handle(self.config)?; @@ -433,9 +439,8 @@ let token = self.downloads.next; self.downloads.next += 1; debug!("downloading {} as {}", path.display(), token); - assert_eq!( - self.downloads.pending_ids.insert(path.to_path_buf(), token), - None, + assert!( + self.downloads.pending_paths.insert(path.to_path_buf()), "path queued for download more than once" ); @@ -485,6 +490,7 @@ data: RefCell::new(Vec::new()), path: path.to_path_buf(), index_version: RefCell::new(None), + retry: Retry::new(self.config)?, }; // Finally add the request we've lined up to the pool of requests that cURL manages. @@ -515,11 +521,11 @@ } } - match self.load(Path::new(""), Path::new("config.json"), None)? { - Poll::Ready(LoadResponse::Data { + match ready!(self.load(Path::new(""), Path::new("config.json"), None)?) { + LoadResponse::Data { raw_data, index_version: _, - }) => { + } => { trace!("config loaded"); self.registry_config = Some(serde_json::from_slice(&raw_data)?); if paths::create_dir_all(&config_json_path.parent().unwrap()).is_ok() { @@ -529,13 +535,12 @@ } Poll::Ready(Ok(self.registry_config.clone())) } - Poll::Ready(LoadResponse::NotFound) => { + LoadResponse::NotFound => { Poll::Ready(Err(anyhow::anyhow!("config.json not found in registry"))) } - Poll::Ready(LoadResponse::CacheValid) => { + LoadResponse::CacheValid => { panic!("config.json is not stored in the index cache") } - Poll::Pending => Poll::Pending, } } @@ -602,7 +607,7 @@ let timeout = self .multi .get_timeout()? - .unwrap_or_else(|| Duration::new(5, 0)); + .unwrap_or_else(|| Duration::new(1, 0)); self.multi .wait(&mut [], timeout) .with_context(|| "failed to wait on curl `Multi`")?; diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/sources/registry/index.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/sources/registry/index.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/sources/registry/index.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/sources/registry/index.rs 2023-01-10 13:41:19.000000000 +0000 @@ -80,7 +80,7 @@ use std::io::ErrorKind; use std::path::Path; use std::str; -use std::task::Poll; +use std::task::{ready, Poll}; /// Crates.io treats hyphen and underscores as interchangeable, but the index and old Cargo do not. /// Therefore, the index must store uncanonicalized version of the name so old Cargo's can find it. @@ -268,10 +268,7 @@ pub fn hash(&mut self, pkg: PackageId, load: &mut dyn RegistryData) -> Poll> { let req = OptVersionReq::exact(pkg.version()); let summary = self.summaries(pkg.name(), &req, load)?; - let summary = match summary { - Poll::Ready(mut summary) => summary.next(), - Poll::Pending => return Poll::Pending, - }; + let summary = ready!(summary).next(); Poll::Ready(Ok(summary .ok_or_else(|| internal(format!("no hash listed for {}", pkg)))? .summary @@ -302,10 +299,7 @@ // has run previously this will parse a Cargo-specific cache file rather // than the registry itself. In effect this is intended to be a quite // cheap operation. - let summaries = match self.load_summaries(name, load)? 
{ - Poll::Ready(summaries) => summaries, - Poll::Pending => return Poll::Pending, - }; + let summaries = ready!(self.load_summaries(name, load)?); // Iterate over our summaries, extract all relevant ones which match our // version requirement, and then parse all corresponding rows in the @@ -422,12 +416,9 @@ f: &mut dyn FnMut(Summary), ) -> Poll> { if self.config.offline() { - match self.query_inner_with_online(dep, load, yanked_whitelist, f, false)? { - Poll::Ready(0) => {} - Poll::Ready(_) => return Poll::Ready(Ok(())), - Poll::Pending => return Poll::Pending, - } - // If offline, and there are no matches, try again with online. + // This should only return `Poll::Ready(Ok(()))` if there is at least 1 match. + // + // If there are 0 matches it should fall through and try again with online. // This is necessary for dependencies that are not used (such as // target-cfg or optional), but are not downloaded. Normally the // build should succeed if they are not downloaded and not used, @@ -435,6 +426,9 @@ // then cargo will fail to download and an error message // indicating that the required dependency is unavailable while // offline will be displayed. + if ready!(self.query_inner_with_online(dep, load, yanked_whitelist, f, false)?) > 0 { + return Poll::Ready(Ok(())); + } } self.query_inner_with_online(dep, load, yanked_whitelist, f, true) .map_ok(|_| ()) @@ -450,10 +444,7 @@ ) -> Poll> { let source_id = self.source_id; - let summaries = match self.summaries(dep.package_name(), dep.version_req(), load)? { - Poll::Ready(summaries) => summaries, - Poll::Pending => return Poll::Pending, - }; + let summaries = ready!(self.summaries(dep.package_name(), dep.version_req(), load))?; let summaries = summaries // First filter summaries for `--offline`. If we're online then @@ -582,10 +573,7 @@ Err(e) => log::debug!("cache missing for {:?} error: {}", relative, e), } - let response = match load.load(root, relative, index_version.as_deref())? 
{ - Poll::Pending => return Poll::Pending, - Poll::Ready(response) => response, - }; + let response = ready!(load.load(root, relative, index_version.as_deref())?); match response { LoadResponse::CacheValid => { @@ -593,7 +581,6 @@ return Poll::Ready(Ok(cached_summaries)); } LoadResponse::NotFound => { - debug_assert!(cached_summaries.is_none()); if let Err(e) = fs::remove_file(cache_path) { if e.kind() != ErrorKind::NotFound { log::debug!("failed to remove from cache: {}", e); diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/sources/registry/mod.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/sources/registry/mod.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/sources/registry/mod.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/sources/registry/mod.rs 2023-01-10 13:41:19.000000000 +0000 @@ -182,11 +182,13 @@ use crate::util::interning::InternedString; use crate::util::into_url::IntoUrl; use crate::util::network::PollExt; -use crate::util::{restricted_names, CargoResult, Config, Filesystem, OptVersionReq}; +use crate::util::{ + restricted_names, CargoResult, Config, Filesystem, LimitErrorReader, OptVersionReq, +}; const PACKAGE_SOURCE_LOCK: &str = ".cargo-ok"; pub const CRATES_IO_INDEX: &str = "https://github.com/rust-lang/crates.io-index"; -pub const CRATES_IO_HTTP_INDEX: &str = "sparse+https://index.crates.io/"; +pub const CRATES_IO_HTTP_INDEX: &str = "https://index.crates.io/"; pub const CRATES_IO_REGISTRY: &str = "crates-io"; pub const CRATES_IO_DOMAIN: &str = "crates.io"; const CRATE_TEMPLATE: &str = "{crate}"; @@ -194,6 +196,7 @@ const PREFIX_TEMPLATE: &str = "{prefix}"; const LOWER_PREFIX_TEMPLATE: &str = "{lowerprefix}"; const CHECKSUM_TEMPLATE: &str = "{sha256-checksum}"; +const MAX_UNPACK_SIZE: u64 = 512 * 1024 * 1024; /// A "source" for a local (see `local::LocalRegistry`) or remote (see /// `remote::RemoteRegistry`) registry. @@ -399,7 +402,7 @@ // In index, "registry" is null if it is from the same index. // In Cargo.toml, "registry" is None if it is from the default - if !id.is_default_registry() { + if !id.is_crates_io() { dep.set_registry_id(id); } @@ -546,7 +549,7 @@ config: &'cfg Config, ) -> CargoResult> { let name = short_name(source_id); - let ops = if source_id.url().scheme().starts_with("sparse+") { + let ops = if source_id.is_sparse() { Box::new(http_remote::HttpRegistry::new(source_id, config, &name)?) as Box<_> } else { Box::new(remote::RemoteRegistry::new(source_id, config, &name)) as Box<_> @@ -615,6 +618,7 @@ } } let gz = GzDecoder::new(tarball); + let gz = LimitErrorReader::new(gz, max_unpack_size()); let mut tar = Archive::new(gz); let prefix = unpack_dir.file_name().unwrap(); let parent = unpack_dir.parent().unwrap(); @@ -639,6 +643,13 @@ prefix ) } + // Prevent unpacking the lockfile from the crate itself. + if entry_path + .file_name() + .map_or(false, |p| p == PACKAGE_SOURCE_LOCK) + { + continue; + } // Unpacking failed let mut result = entry.unpack_in(parent).map_err(anyhow::Error::from); if cfg!(windows) && restricted_names::is_windows_reserved_path(&entry_path) { @@ -654,16 +665,14 @@ .with_context(|| format!("failed to unpack entry at `{}`", entry_path.display()))?; } - // The lock file is created after unpacking so we overwrite a lock file - // which may have been extracted from the package. + // Now that we've finished unpacking, create and write to the lock file to indicate that + // unpacking was successful. 
let mut ok = OpenOptions::new() - .create(true) + .create_new(true) .read(true) .write(true) .open(&path) .with_context(|| format!("failed to open `{}`", path.display()))?; - - // Write to the lock file to indicate that unpacking was successful. write!(ok, "ok")?; Ok(unpack_dir.to_path_buf()) @@ -826,6 +835,20 @@ } } +/// For integration test only. +#[inline] +fn max_unpack_size() -> u64 { + const VAR: &str = "__CARGO_TEST_MAX_UNPACK_SIZE"; + if cfg!(debug_assertions) && std::env::var(VAR).is_ok() { + std::env::var(VAR) + .unwrap() + .parse() + .expect("a max unpack size in bytes") + } else { + MAX_UNPACK_SIZE + } +} + fn make_dep_prefix(name: &str) -> String { match name.len() { 1 => String::from("1"), diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/sources/registry/remote.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/sources/registry/remote.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/sources/registry/remote.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/sources/registry/remote.rs 2023-01-10 13:41:19.000000000 +0000 @@ -15,7 +15,7 @@ use std::mem; use std::path::Path; use std::str; -use std::task::Poll; +use std::task::{ready, Poll}; /// A remote registry is a registry that lives at a remote URL (such as /// crates.io). The git index is cloned locally, and `.crate` files are @@ -32,7 +32,6 @@ head: Cell>, current_sha: Cell>, needs_update: bool, // Does this registry need to be updated? - updated: bool, // Has this registry been updated this session? } impl<'cfg> RemoteRegistry<'cfg> { @@ -49,7 +48,6 @@ head: Cell::new(None), current_sha: Cell::new(None), needs_update: false, - updated: false, } } @@ -141,6 +139,14 @@ self.current_sha.set(Some(sha)); Some(sha) } + + fn is_updated(&self) -> bool { + self.config.updated_sources().contains(&self.source_id) + } + + fn mark_updated(&self) { + self.config.updated_sources().insert(self.source_id); + } } const LAST_UPDATED_FILE: &str = ".last-updated"; @@ -214,7 +220,7 @@ match load_helper(&self, path, index_version) { Ok(result) => Poll::Ready(Ok(result)), - Err(_) if !self.updated => { + Err(_) if !self.is_updated() => { // If git returns an error and we haven't updated the repo, return // pending to allow an update to try again. self.needs_update = true; @@ -236,13 +242,12 @@ debug!("loading config"); self.prepare()?; self.config.assert_package_cache_locked(&self.index_path); - match self.load(Path::new(""), Path::new("config.json"), None)? { - Poll::Ready(LoadResponse::Data { raw_data, .. }) => { + match ready!(self.load(Path::new(""), Path::new("config.json"), None)?) { + LoadResponse::Data { raw_data, .. } => { trace!("config loaded"); Poll::Ready(Ok(Some(serde_json::from_slice(&raw_data)?))) } - Poll::Ready(_) => Poll::Ready(Ok(None)), - Poll::Pending => Poll::Pending, + _ => Poll::Ready(Ok(None)), } } @@ -251,19 +256,20 @@ return Ok(()); } - self.updated = true; self.needs_update = false; - if self.config.offline() { + // Make sure the index is only updated once per session since it is an + // expensive operation. This generally only happens when the resolver + // is run multiple times, such as during `cargo publish`. + if self.is_updated() { return Ok(()); } - if self.config.cli_unstable().no_index_update { + self.mark_updated(); + + if self.config.offline() { return Ok(()); } - // Make sure the index is only updated once per session since it is an - // expensive operation. This generally only happens when the resolver - // is run multiple times, such as during `cargo publish`. 
- if self.config.updated_sources().contains(&self.source_id) { + if self.config.cli_unstable().no_index_update { return Ok(()); } @@ -292,7 +298,6 @@ let repo = self.repo.borrow_mut().unwrap(); git::fetch(repo, url.as_str(), &self.index_git_ref, self.config) .with_context(|| format!("failed to fetch `{}`", url))?; - self.config.updated_sources().insert(self.source_id); // Create a dummy file to record the mtime for when we updated the // index. @@ -302,13 +307,12 @@ } fn invalidate_cache(&mut self) { - if !self.updated { - self.needs_update = true; - } + // To fully invalidate, undo `mark_updated`s work + self.needs_update = true; } fn is_updated(&self) -> bool { - self.updated + self.is_updated() } fn download(&mut self, pkg: PackageId, checksum: &str) -> CargoResult { diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/sources/replaced.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/sources/replaced.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/sources/replaced.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/sources/replaced.rs 2023-01-10 13:41:19.000000000 +0000 @@ -100,11 +100,17 @@ } fn describe(&self) -> String { - format!( - "{} (which is replacing {})", - self.inner.describe(), - self.to_replace - ) + if self.replace_with.is_crates_io() && self.to_replace.is_crates_io() { + // Built-in source replacement of crates.io for sparse registry or tests + // doesn't need duplicate description (crates.io replacing crates.io). + self.inner.describe() + } else { + format!( + "{} (which is replacing {})", + self.inner.describe(), + self.to_replace + ) + } } fn is_replaced(&self) -> bool { diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/util/command_prelude.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/util/command_prelude.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/util/command_prelude.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/util/command_prelude.rs 2023-01-10 13:41:19.000000000 +0000 @@ -2,7 +2,6 @@ use crate::core::resolver::CliFeatures; use crate::core::{Edition, Workspace}; use crate::ops::{CompileFilter, CompileOptions, NewOptions, Packages, VersionControl}; -use crate::sources::CRATES_IO_REGISTRY; use crate::util::important_paths::find_root_manifest_for_wd; use crate::util::interning::InternedString; use crate::util::restricted_names::is_glob_pattern; @@ -20,12 +19,12 @@ pub use crate::core::compiler::CompileMode; pub use crate::{CliError, CliResult, Config}; -pub use clap::{value_parser, AppSettings, Arg, ArgAction, ArgMatches}; +pub use clap::{value_parser, Arg, ArgAction, ArgMatches}; -pub type App = clap::Command<'static>; +pub use clap::Command; -pub trait AppExt: Sized { - fn _arg(self, arg: Arg<'static>) -> Self; +pub trait CommandExt: Sized { + fn _arg(self, arg: Arg) -> Self; /// Do not use this method, it is only for backwards compatibility. /// Use `arg_package_spec_no_all` instead. 
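The next hunk rewrites the command-line builder helpers for clap 4. A minimal sketch of the API differences it relies on, using a toy command rather than any of Cargo's real subcommands (the argument names are invented for illustration): flags now need an explicit `ArgAction::SetTrue`, `Arg` is owned instead of `Arg<'static>`, and `num_args(0..=1)` replaces the old `min_values(0)` for options that take an optional value.

use clap::{Arg, ArgAction, Command};

// Toy command for illustration only; not part of Cargo's CLI.
fn demo() -> Command {
    Command::new("demo")
        .arg(
            Arg::new("quiet")
                .long("quiet")
                .help("Suppress output")
                // clap 3 treated a value-less Arg as a flag; clap 4 wants this spelled out.
                .action(ArgAction::SetTrue),
        )
        .arg(
            Arg::new("color")
                .long("color")
                // Optional value: the clap 4 spelling of the old `min_values(0)`.
                .num_args(0..=1)
                .action(ArgAction::Set),
        )
}

fn main() {
    let matches = demo().get_matches_from(["demo", "--quiet"]);
    assert!(matches.get_flag("quiet"));
}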
@@ -255,50 +254,42 @@ } } -impl AppExt for App { - fn _arg(self, arg: Arg<'static>) -> Self { +impl CommandExt for Command { + fn _arg(self, arg: Arg) -> Self { self.arg(arg) } } -pub fn flag(name: &'static str, help: &'static str) -> Arg<'static> { +pub fn flag(name: &'static str, help: &'static str) -> Arg { Arg::new(name) .long(name) .help(help) .action(ArgAction::SetTrue) } -pub fn opt(name: &'static str, help: &'static str) -> Arg<'static> { - Arg::new(name).long(name).help(help) +pub fn opt(name: &'static str, help: &'static str) -> Arg { + Arg::new(name).long(name).help(help).action(ArgAction::Set) } -pub fn optional_opt(name: &'static str, help: &'static str) -> Arg<'static> { - opt(name, help).min_values(0) +pub fn optional_opt(name: &'static str, help: &'static str) -> Arg { + opt(name, help).num_args(0..=1) } -pub fn optional_multi_opt( - name: &'static str, - value_name: &'static str, - help: &'static str, -) -> Arg<'static> { +pub fn optional_multi_opt(name: &'static str, value_name: &'static str, help: &'static str) -> Arg { opt(name, help) .value_name(value_name) + .num_args(0..=1) .action(ArgAction::Append) - .multiple_values(true) - .min_values(0) - .number_of_values(1) } -pub fn multi_opt(name: &'static str, value_name: &'static str, help: &'static str) -> Arg<'static> { +pub fn multi_opt(name: &'static str, value_name: &'static str, help: &'static str) -> Arg { opt(name, help) .value_name(value_name) .action(ArgAction::Append) } -pub fn subcommand(name: &'static str) -> App { - App::new(name) - .dont_collapse_args_in_usage(true) - .setting(AppSettings::DeriveDisplayOrder) +pub fn subcommand(name: &'static str) -> Command { + Command::new(name) } /// Determines whether or not to gate `--profile` as unstable when resolving it. @@ -319,7 +310,7 @@ None => None, Some(arg) => Some(arg.parse::().map_err(|_| { clap::Error::raw( - clap::ErrorKind::ValueValidation, + clap::error::ErrorKind::ValueValidation, format!("Invalid value: could not parse `{}` as a number", arg), ) })?), @@ -332,7 +323,7 @@ None => None, Some(arg) => Some(arg.parse::().map_err(|_| { clap::Error::raw( - clap::ErrorKind::ValueValidation, + clap::error::ErrorKind::ValueValidation, format!("Invalid value: could not parse `{}` as a number", arg), ) })?), @@ -609,7 +600,6 @@ target_rustdoc_args: None, target_rustc_args: None, target_rustc_crate_types: None, - local_rustdoc_args: None, rustdoc_document_private_items: false, honor_rust_version: !self.flag("ignore-rust-version"), }; @@ -677,17 +667,7 @@ match self._value_of("registry") { Some(registry) => { validate_package_name(registry, "registry name", "")?; - - if registry == CRATES_IO_REGISTRY { - // If "crates.io" is specified, then we just need to return `None`, - // as that will cause cargo to use crates.io. This is required - // for the case where a default alternative registry is used - // but the user wants to switch back to crates.io for a single - // command. - Ok(None) - } else { - Ok(Some(registry.to_string())) - } + Ok(Some(registry.to_string())) } None => config.default_registry(), } @@ -792,7 +772,7 @@ } #[track_caller] -fn ignore_unknown(r: Result) -> T { +pub fn ignore_unknown(r: Result) -> T { match r { Ok(t) => t, Err(clap::parser::MatchesError::UnknownArgument { .. 
}) => Default::default(), diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/util/config/de.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/util/config/de.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/util/config/de.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/util/config/de.rs 2023-01-10 13:41:19.000000000 +0000 @@ -80,7 +80,7 @@ where V: de::Visitor<'de>, { - if self.config.has_key(&self.key, self.env_prefix_ok) { + if self.config.has_key(&self.key, self.env_prefix_ok)? { visitor.visit_some(self) } else { // Treat missing values as `None`. @@ -384,7 +384,12 @@ { match self.list_iter.next() { // TODO: add `def` to error? - Some((value, _def)) => seed.deserialize(value.into_deserializer()).map(Some), + Some((value, def)) => { + // This might be a String or a Value. + // ValueDeserializer will handle figuring out which one it is. + let maybe_value_de = ValueDeserializer::new_with_string(value, def); + seed.deserialize(maybe_value_de).map(Some) + } None => Ok(None), } } @@ -400,7 +405,17 @@ struct ValueDeserializer<'config> { hits: u32, definition: Definition, - de: Deserializer<'config>, + /// The deserializer, used to actually deserialize a Value struct. + /// This is `None` if deserializing a string. + de: Option>, + /// A string value to deserialize. + /// + /// This is used for situations where you can't address a string via a + /// TOML key, such as a string inside an array. The `ConfigSeqAccess` + /// doesn't know if the type it should deserialize to is a `String` or + /// `Value`, so `ValueDeserializer` needs to be able to handle + /// both. + str_value: Option, } impl<'config> ValueDeserializer<'config> { @@ -428,9 +443,19 @@ Ok(ValueDeserializer { hits: 0, definition, - de, + de: Some(de), + str_value: None, }) } + + fn new_with_string(s: String, definition: Definition) -> ValueDeserializer<'config> { + ValueDeserializer { + hits: 0, + definition, + de: None, + str_value: Some(s), + } + } } impl<'de, 'config> de::MapAccess<'de> for ValueDeserializer<'config> { @@ -459,9 +484,14 @@ // If this is the first time around we deserialize the `value` field // which is the actual deserializer if self.hits == 1 { - return seed - .deserialize(self.de.clone()) - .map_err(|e| e.with_key_context(&self.de.key, self.definition.clone())); + if let Some(de) = &self.de { + return seed + .deserialize(de.clone()) + .map_err(|e| e.with_key_context(&de.key, self.definition.clone())); + } else { + return seed + .deserialize(self.str_value.as_ref().unwrap().clone().into_deserializer()); + } } // ... otherwise we're deserializing the `definition` field, so we need @@ -473,8 +503,79 @@ Definition::Environment(env) => { seed.deserialize(Tuple2Deserializer(1i32, env.as_str())) } - Definition::Cli => seed.deserialize(Tuple2Deserializer(2i32, "")), + Definition::Cli(path) => { + let str = path + .as_ref() + .map(|p| p.to_string_lossy()) + .unwrap_or_default(); + seed.deserialize(Tuple2Deserializer(2i32, str)) + } + } + } +} + +// Deserializer is only implemented to handle deserializing a String inside a +// sequence (like `Vec` or `Vec>`). `Value` is +// handled by deserialize_struct, and the plain `String` is handled by all the +// other functions here. 
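The comment above explains why `ValueDeserializer` must handle both a structured `Value` and a plain string. As a hedged, standalone sketch of the underlying serde pattern (not the patch's exact type), a deserializer that only ever produces a stored string can route every entry point through `deserialize_any`:

```rust
use serde::de::{self, Visitor};
use serde::forward_to_deserialize_any;

// Minimal string-only deserializer: every method forwards to
// deserialize_any, which hands the stored string to the visitor.
struct StrOnly(String);

impl<'de> de::Deserializer<'de> for StrOnly {
    type Error = de::value::Error;

    fn deserialize_any<V>(self, visitor: V) -> Result<V::Value, Self::Error>
    where
        V: Visitor<'de>,
    {
        visitor.visit_string(self.0)
    }

    forward_to_deserialize_any! {
        bool i8 i16 i32 i64 u8 u16 u32 u64 f32 f64 char str string bytes
        byte_buf option unit unit_struct newtype_struct seq tuple tuple_struct
        map struct enum identifier ignored_any
    }
}
```

With this in place, `String::deserialize(StrOnly(s))` succeeds, while anything that needs a map or struct visitor fails, which is why the patch's real type additionally special-cases the `Value` protocol in `deserialize_struct`.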
+impl<'de, 'config> de::Deserializer<'de> for ValueDeserializer<'config> { + type Error = ConfigError; + + fn deserialize_str(self, visitor: V) -> Result + where + V: de::Visitor<'de>, + { + visitor.visit_str(&self.str_value.expect("string expected")) + } + + fn deserialize_string(self, visitor: V) -> Result + where + V: de::Visitor<'de>, + { + visitor.visit_string(self.str_value.expect("string expected")) + } + + fn deserialize_struct( + self, + name: &'static str, + fields: &'static [&'static str], + visitor: V, + ) -> Result + where + V: de::Visitor<'de>, + { + // Match on the magical struct name/field names that are passed in to + // detect when we're deserializing `Value`. + // + // See more comments in `value.rs` for the protocol used here. + if name == value::NAME && fields == value::FIELDS { + return visitor.visit_map(self); } + unimplemented!("only strings and Value can be deserialized from a sequence"); + } + + fn deserialize_any(self, visitor: V) -> Result + where + V: de::Visitor<'de>, + { + visitor.visit_string(self.str_value.expect("string expected")) + } + + fn deserialize_ignored_any(self, visitor: V) -> Result + where + V: de::Visitor<'de>, + { + visitor.visit_unit() + } + + serde::forward_to_deserialize_any! { + i8 i16 i32 i64 + u8 u16 u32 u64 + option + newtype_struct seq tuple tuple_struct map enum bool + f32 f64 char bytes + byte_buf unit unit_struct + identifier } } diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/util/config/mod.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/util/config/mod.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/util/config/mod.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/util/config/mod.rs 2023-01-10 13:41:19.000000000 +0000 @@ -121,6 +121,20 @@ }; } +/// Indicates why a config value is being loaded. +#[derive(Clone, Copy, Debug)] +enum WhyLoad { + /// Loaded due to a request from the global cli arg `--config` + /// + /// Indirect configs loaded via [`config-include`] are also seen as from cli args, + /// if the initial config is being loaded from cli. + /// + /// [`config-include`]: https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#config-include + Cli, + /// Loaded due to config file discovery. + FileDiscovery, +} + /// Configuration information for cargo. This is not specific to a build, it is information /// relating to cargo itself. #[derive(Debug)] @@ -313,6 +327,18 @@ &self.home_path } + /// Returns a path to display to the user with the location of their home + /// config file (to only be used for displaying a diagnostics suggestion, + /// such as recommending where to add a config value). + pub fn diagnostic_home_config(&self) -> String { + let home = self.home_path.as_path_unlocked(); + let path = match self.get_file_path(home, "config", false) { + Ok(Some(existing_path)) => existing_path, + _ => home.join("config.toml"), + }; + path.to_string_lossy().to_string() + } + /// Gets the Cargo Git directory (`/git`). pub fn git_path(&self) -> Filesystem { self.home_path.join("git") @@ -682,25 +708,25 @@ } } - fn has_key(&self, key: &ConfigKey, env_prefix_ok: bool) -> bool { + /// Check if the [`Config`] contains a given [`ConfigKey`]. + /// + /// See `ConfigMapAccess` for a description of `env_prefix_ok`. + fn has_key(&self, key: &ConfigKey, env_prefix_ok: bool) -> CargoResult { if self.env.contains_key(key.as_env_key()) { - return true; + return Ok(true); } - // See ConfigMapAccess for a description of this. 
if env_prefix_ok { let env_prefix = format!("{}_", key.as_env_key()); if self.env.keys().any(|k| k.starts_with(&env_prefix)) { - return true; + return Ok(true); } } - if let Ok(o_cv) = self.get_cv(key) { - if o_cv.is_some() { - return true; - } + if self.get_cv(key)?.is_some() { + return Ok(true); } self.check_environment_key_case_mismatch(key); - false + Ok(false) } fn check_environment_key_case_mismatch(&self, key: &ConfigKey) { @@ -1005,12 +1031,15 @@ self.load_values_from(&self.cwd) } + /// Like [`load_values`](Config::load_values) but without merging config values. + /// + /// This is primarily crafted for `cargo config` command. pub(crate) fn load_values_unmerged(&self) -> CargoResult> { let mut result = Vec::new(); let mut seen = HashSet::new(); let home = self.home_path.clone().into_path_unlocked(); self.walk_tree(&self.cwd, &home, |path| { - let mut cv = self._load_file(path, &mut seen, false)?; + let mut cv = self._load_file(path, &mut seen, false, WhyLoad::FileDiscovery)?; if self.cli_unstable().config_include { self.load_unmerged_include(&mut cv, &mut seen, &mut result)?; } @@ -1021,6 +1050,9 @@ Ok(result) } + /// Like [`load_includes`](Config::load_includes) but without merging config values. + /// + /// This is primarily crafted for `cargo config` command. fn load_unmerged_include( &self, cv: &mut CV, @@ -1029,15 +1061,18 @@ ) -> CargoResult<()> { let includes = self.include_paths(cv, false)?; for (path, abs_path, def) in includes { - let mut cv = self._load_file(&abs_path, seen, false).with_context(|| { - format!("failed to load config include `{}` from `{}`", path, def) - })?; + let mut cv = self + ._load_file(&abs_path, seen, false, WhyLoad::FileDiscovery) + .with_context(|| { + format!("failed to load config include `{}` from `{}`", path, def) + })?; self.load_unmerged_include(&mut cv, seen, output)?; output.push(cv); } Ok(()) } + /// Start a config file discovery from a path and merges all config values found. fn load_values_from(&self, path: &Path) -> CargoResult> { // This definition path is ignored, this is just a temporary container // representing the entire file. @@ -1045,7 +1080,7 @@ let home = self.home_path.clone().into_path_unlocked(); self.walk_tree(path, &home, |path| { - let value = self.load_file(path, true)?; + let value = self.load_file(path)?; cfg.merge(value, false).with_context(|| { format!("failed to merge configuration at `{}`", path.display()) })?; @@ -1059,15 +1094,28 @@ } } - fn load_file(&self, path: &Path, includes: bool) -> CargoResult { - self._load_file(path, &mut HashSet::new(), includes) + /// Loads a config value from a path. + /// + /// This is used during config file discovery. + fn load_file(&self, path: &Path) -> CargoResult { + self._load_file(path, &mut HashSet::new(), true, WhyLoad::FileDiscovery) } + /// Loads a config value from a path with options. + /// + /// This is actual implementation of loading a config value from a path. + /// + /// * `includes` determines whether to load configs from [`config-include`]. + /// * `seen` is used to check for cyclic includes. + /// * `why_load` tells why a config is being loaded. 
+ /// + /// [`config-include`]: https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#config-include fn _load_file( &self, path: &Path, seen: &mut HashSet, includes: bool, + why_load: WhyLoad, ) -> CargoResult { if !seen.insert(path.to_path_buf()) { bail!( @@ -1080,15 +1128,18 @@ let toml = cargo_toml::parse(&contents, path, self).with_context(|| { format!("could not parse TOML configuration in `{}`", path.display()) })?; - let value = - CV::from_toml(Definition::Path(path.to_path_buf()), toml).with_context(|| { - format!( - "failed to load TOML configuration from `{}`", - path.display() - ) - })?; + let def = match why_load { + WhyLoad::Cli => Definition::Cli(Some(path.into())), + WhyLoad::FileDiscovery => Definition::Path(path.into()), + }; + let value = CV::from_toml(def, toml).with_context(|| { + format!( + "failed to load TOML configuration from `{}`", + path.display() + ) + })?; if includes { - self.load_includes(value, seen) + self.load_includes(value, seen, why_load) } else { Ok(value) } @@ -1098,8 +1149,14 @@ /// /// Returns `value` with the given include files merged into it. /// - /// `seen` is used to check for cyclic includes. - fn load_includes(&self, mut value: CV, seen: &mut HashSet) -> CargoResult { + /// * `seen` is used to check for cyclic includes. + /// * `why_load` tells why a config is being loaded. + fn load_includes( + &self, + mut value: CV, + seen: &mut HashSet, + why_load: WhyLoad, + ) -> CargoResult { // Get the list of files to load. let includes = self.include_paths(&mut value, true)?; // Check unstable. @@ -1109,7 +1166,7 @@ // Accumulate all values here. let mut root = CV::Table(HashMap::new(), value.definition().clone()); for (path, abs_path, def) in includes { - self._load_file(&abs_path, seen, true) + self._load_file(&abs_path, seen, true, why_load) .and_then(|include| root.merge(include, true)) .with_context(|| { format!("failed to load config include `{}` from `{}`", path, def) @@ -1127,8 +1184,8 @@ ) -> CargoResult> { let abs = |path: &str, def: &Definition| -> (String, PathBuf, Definition) { let abs_path = match def { - Definition::Path(p) => p.parent().unwrap().join(&path), - Definition::Environment(_) | Definition::Cli => self.cwd().join(&path), + Definition::Path(p) | Definition::Cli(Some(p)) => p.parent().unwrap().join(&path), + Definition::Environment(_) | Definition::Cli(None) => self.cwd().join(&path), }; (path.to_string(), abs_path, def.clone()) }; @@ -1162,7 +1219,7 @@ /// Parses the CLI config args and returns them as a table. pub(crate) fn cli_args_as_table(&self) -> CargoResult { - let mut loaded_args = CV::Table(HashMap::new(), Definition::Cli); + let mut loaded_args = CV::Table(HashMap::new(), Definition::Cli(None)); let cli_args = match &self.cli_config { Some(cli_args) => cli_args, None => return Ok(loaded_args), @@ -1178,7 +1235,7 @@ anyhow::format_err!("config path {:?} is not utf-8", arg_as_path) })? .to_string(); - self._load_file(&self.cwd().join(&str_path), &mut seen, true) + self._load_file(&self.cwd().join(&str_path), &mut seen, true, WhyLoad::Cli) .with_context(|| format!("failed to load config from `{}`", str_path))? } else { // We only want to allow "dotted key" (see https://toml.io/en/v1.0.0#keys) @@ -1273,11 +1330,11 @@ ); } - CV::from_toml(Definition::Cli, toml_v) + CV::from_toml(Definition::Cli(None), toml_v) .with_context(|| format!("failed to convert --config argument `{arg}`"))? 
}; let tmp_table = self - .load_includes(tmp_table, &mut HashSet::new()) + .load_includes(tmp_table, &mut HashSet::new(), WhyLoad::Cli) .with_context(|| "failed to load --config include".to_string())?; loaded_args .merge(tmp_table, true) @@ -1431,7 +1488,7 @@ None => return Ok(()), }; - let mut value = self.load_file(&credentials, true)?; + let mut value = self.load_file(&credentials)?; // Backwards compatibility for old `.cargo/credentials` layout. { let (value_map, def) = match value { @@ -2127,9 +2184,10 @@ frequency: Option, } -#[derive(Debug, Deserialize, PartialEq)] +#[derive(Debug, Default, Deserialize, PartialEq)] #[serde(rename_all = "kebab-case")] pub enum CargoFutureIncompatFrequencyConfig { + #[default] Always, Never, } @@ -2146,12 +2204,6 @@ } } -impl Default for CargoFutureIncompatFrequencyConfig { - fn default() -> Self { - Self::Always - } -} - /// Configuration for `ssl-version` in `http` section /// There are two ways to configure: /// @@ -2184,6 +2236,13 @@ pub retry: Option, pub offline: Option, pub git_fetch_with_cli: Option, + pub ssh: Option, +} + +#[derive(Debug, Deserialize)] +#[serde(rename_all = "kebab-case")] +pub struct CargoSshConfig { + pub known_hosts: Option>>, } #[derive(Debug, Deserialize)] @@ -2270,20 +2329,15 @@ pub width: Option, } -#[derive(Debug, Deserialize)] +#[derive(Debug, Default, Deserialize)] #[serde(rename_all = "lowercase")] pub enum ProgressWhen { + #[default] Auto, Never, Always, } -impl Default for ProgressWhen { - fn default() -> ProgressWhen { - ProgressWhen::Auto - } -} - fn progress_or_string<'de, D>(deserializer: D) -> Result, D::Error> where D: serde::de::Deserializer<'de>, diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/util/config/value.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/util/config/value.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/util/config/value.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/util/config/value.rs 2023-01-10 13:41:19.000000000 +0000 @@ -59,7 +59,8 @@ /// Defined in an environment variable, includes the environment key. Environment(String), /// Passed in on the command line. - Cli, + /// A path is attached when the config value is a path to a config file. + Cli(Option), } impl Definition { @@ -69,8 +70,8 @@ /// CLI and env are the current working directory. 
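Two hunks above replace hand-written `Default` impls with the derive available for enums since Rust 1.62, where `#[default]` marks the variant returned by `Default::default()`. A minimal illustration:

```rust
// The #[default] attribute selects the default variant.
#[derive(Debug, Default, PartialEq)]
enum ProgressWhen {
    #[default]
    Auto,
    Never,
    Always,
}

fn main() {
    assert_eq!(ProgressWhen::default(), ProgressWhen::Auto);
}
```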
pub fn root<'a>(&'a self, config: &'a Config) -> &'a Path { match self { - Definition::Path(p) => p.parent().unwrap().parent().unwrap(), - Definition::Environment(_) | Definition::Cli => config.cwd(), + Definition::Path(p) | Definition::Cli(Some(p)) => p.parent().unwrap().parent().unwrap(), + Definition::Environment(_) | Definition::Cli(None) => config.cwd(), } } @@ -80,8 +81,8 @@ pub fn is_higher_priority(&self, other: &Definition) -> bool { matches!( (self, other), - (Definition::Cli, Definition::Environment(_)) - | (Definition::Cli, Definition::Path(_)) + (Definition::Cli(_), Definition::Environment(_)) + | (Definition::Cli(_), Definition::Path(_)) | (Definition::Environment(_), Definition::Path(_)) ) } @@ -100,9 +101,9 @@ impl fmt::Display for Definition { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { - Definition::Path(p) => p.display().fmt(f), + Definition::Path(p) | Definition::Cli(Some(p)) => p.display().fmt(f), Definition::Environment(key) => write!(f, "environment variable `{}`", key), - Definition::Cli => write!(f, "--config cli option"), + Definition::Cli(None) => write!(f, "--config cli option"), } } } @@ -218,8 +219,11 @@ match discr { 0 => Ok(Definition::Path(value.into())), 1 => Ok(Definition::Environment(value)), - 2 => Ok(Definition::Cli), - _ => panic!("unexpected discriminant {} value {}", discr, value), + 2 => { + let path = (value.len() > 0).then_some(value.into()); + Ok(Definition::Cli(path)) + } + _ => panic!("unexpected discriminant {discr} value {value}"), } } } diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/util/errors.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/util/errors.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/util/errors.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/util/errors.rs 2023-01-10 13:41:19.000000000 +0000 @@ -4,25 +4,32 @@ use std::fmt; use std::path::PathBuf; +use super::truncate_with_ellipsis; + pub type CargoResult = anyhow::Result; #[derive(Debug)] -pub struct HttpNot200 { +pub struct HttpNotSuccessful { pub code: u32, pub url: String, + pub body: Vec, } -impl fmt::Display for HttpNot200 { +impl fmt::Display for HttpNotSuccessful { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let body = std::str::from_utf8(&self.body) + .map(|s| truncate_with_ellipsis(s, 512)) + .unwrap_or_else(|_| format!("[{} non-utf8 bytes]", self.body.len())); + write!( f, - "failed to get 200 response from `{}`, got {}", + "failed to get successful HTTP response from `{}`, got {}\nbody:\n{body}", self.url, self.code ) } } -impl std::error::Error for HttpNot200 {} +impl std::error::Error for HttpNotSuccessful {} // ============================================================================= // Verbose error diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/util/io.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/util/io.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/util/io.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/util/io.rs 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,51 @@ +use std::io::{self, Read, Take}; + +#[derive(Debug)] +pub struct LimitErrorReader { + inner: Take, +} + +impl LimitErrorReader { + pub fn new(r: R, limit: u64) -> LimitErrorReader { + LimitErrorReader { + inner: r.take(limit), + } + } +} + +impl Read for LimitErrorReader { + fn read(&mut self, buf: &mut [u8]) -> io::Result { + match self.inner.read(buf) { + Ok(0) if self.inner.limit() == 0 => Err(io::Error::new( + io::ErrorKind::Other, + "maximum limit reached 
when reading", + )), + e => e, + } + } +} + +#[cfg(test)] +mod tests { + use super::LimitErrorReader; + + use std::io::Read; + + #[test] + fn under_the_limit() { + let buf = &[1; 7][..]; + let mut r = LimitErrorReader::new(buf, 8); + let mut out = Vec::new(); + assert!(matches!(r.read_to_end(&mut out), Ok(7))); + assert_eq!(buf, out.as_slice()); + } + + #[test] + #[should_panic = "maximum limit reached when reading"] + fn over_the_limit() { + let buf = &[1; 8][..]; + let mut r = LimitErrorReader::new(buf, 8); + let mut out = Vec::new(); + r.read_to_end(&mut out).unwrap(); + } +} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/util/mod.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/util/mod.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/util/mod.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/util/mod.rs 2023-01-10 13:41:19.000000000 +0000 @@ -14,6 +14,7 @@ pub use self::hex::{hash_u64, short_hash, to_hex}; pub use self::into_url::IntoUrl; pub use self::into_url_with_base::IntoUrlWithBase; +pub(crate) use self::io::LimitErrorReader; pub use self::lev_distance::{closest, closest_msg, lev_distance}; pub use self::lockserver::{LockServer, LockServerClient, LockServerStarted}; pub use self::progress::{Progress, ProgressStyle}; @@ -44,6 +45,7 @@ pub mod interning; pub mod into_url; mod into_url_with_base; +mod io; pub mod job; pub mod lev_distance; mod lockserver; @@ -57,6 +59,7 @@ mod semver_ext; pub mod to_semver; pub mod toml; +pub mod toml_mut; mod vcs; mod workspace; @@ -107,3 +110,15 @@ }) .collect() } + +pub fn truncate_with_ellipsis(s: &str, max_width: usize) -> String { + // We should truncate at grapheme-boundary and compute character-widths, + // yet the dependencies on unicode-segmentation and unicode-width are + // not worth it. + let mut chars = s.chars(); + let mut prefix = (&mut chars).take(max_width - 1).collect::(); + if chars.next().is_some() { + prefix.push('…'); + } + prefix +} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/util/network.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/util/network.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/util/network.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/util/network.rs 2023-01-10 13:41:19.000000000 +0000 @@ -1,6 +1,6 @@ use anyhow::Error; -use crate::util::errors::{CargoResult, HttpNot200}; +use crate::util::errors::{CargoResult, HttpNotSuccessful}; use crate::util::Config; use std::task::Poll; @@ -31,6 +31,7 @@ }) } + /// Returns `Ok(None)` for operations that should be re-tried. 
pub fn r#try(&mut self, f: impl FnOnce() -> CargoResult) -> CargoResult> { match f() { Err(ref e) if maybe_spurious(e) && self.remaining > 0 => { @@ -73,7 +74,7 @@ return true; } } - if let Some(not_200) = err.downcast_ref::() { + if let Some(not_200) = err.downcast_ref::() { if 500 <= not_200.code && not_200.code < 600 { return true; } @@ -114,14 +115,16 @@ use crate::core::Shell; //Error HTTP codes (5xx) are considered maybe_spurious and will prompt retry - let error1 = HttpNot200 { + let error1 = HttpNotSuccessful { code: 501, url: "Uri".to_string(), + body: Vec::new(), } .into(); - let error2 = HttpNot200 { + let error2 = HttpNotSuccessful { code: 502, url: "Uri".to_string(), + body: Vec::new(), } .into(); let mut results: Vec> = vec![Ok(()), Err(error1), Err(error2)]; @@ -137,14 +140,16 @@ //Error HTTP codes (5xx) are considered maybe_spurious and will prompt retry //String error messages are not considered spurious - let error1 = anyhow::Error::from(HttpNot200 { + let error1 = anyhow::Error::from(HttpNotSuccessful { code: 501, url: "Uri".to_string(), + body: Vec::new(), }); let error1 = anyhow::Error::from(error1.context("A non-spurious wrapping err")); - let error2 = anyhow::Error::from(HttpNot200 { + let error2 = anyhow::Error::from(HttpNotSuccessful { code: 502, url: "Uri".to_string(), + body: Vec::new(), }); let error2 = anyhow::Error::from(error2.context("A second chained error")); let mut results: Vec> = vec![Ok(()), Err(error1), Err(error2)]; diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/util/profile.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/util/profile.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/util/profile.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/util/profile.rs 2023-01-10 13:41:19.000000000 +0000 @@ -1,3 +1,9 @@ +//! # An internal profiler for Cargo itself +//! +//! > **Note**: This might not be the module you are looking for. +//! > For information about how Cargo handles compiler flags with profiles, +//! > please see the module [`cargo::core::profiles`](crate::core::profiles). 
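The network.rs changes above keep the retry heuristic keyed off a typed error (`HttpNotSuccessful`) found by downcasting the `anyhow` error chain. A self-contained sketch of that pattern, with an invented error type standing in for the real one:

```rust
#[derive(Debug)]
struct HttpError {
    code: u32,
}

impl std::fmt::Display for HttpError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "HTTP status {}", self.code)
    }
}

impl std::error::Error for HttpError {}

// Walk the error chain and treat 5xx responses as retryable ("spurious").
fn maybe_spurious(err: &anyhow::Error) -> bool {
    err.chain()
        .filter_map(|cause| cause.downcast_ref::<HttpError>())
        .any(|http| (500..600).contains(&http.code))
}

fn main() {
    let err = anyhow::Error::from(HttpError { code: 502 }).context("failed to fetch index");
    assert!(maybe_spurious(&err));
}
```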
+ use std::cell::RefCell; use std::env; use std::fmt; diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/util/toml/mod.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/util/toml/mod.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/util/toml/mod.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/util/toml/mod.rs 2023-01-10 13:41:19.000000000 +0000 @@ -419,8 +419,8 @@ #[serde(rename_all = "kebab-case")] pub struct TomlManifest { cargo_features: Option>, - package: Option>, - project: Option>, + package: Option>, + project: Option>, profile: Option, lib: Option, bin: Option>, @@ -1007,13 +1007,30 @@ /// Enum that allows for the parsing of `field.workspace = true` in a Cargo.toml /// /// It allows for things to be inherited from a workspace or defined as needed -#[derive(Deserialize, Serialize, Clone, Debug)] +#[derive(Serialize, Clone, Debug)] #[serde(untagged)] pub enum MaybeWorkspace { Workspace(TomlWorkspaceField), Defined(T), } +impl<'de, T: Deserialize<'de>> de::Deserialize<'de> for MaybeWorkspace { + fn deserialize(deserializer: D) -> Result, D::Error> + where + D: de::Deserializer<'de>, + { + let value = serde_value::Value::deserialize(deserializer)?; + if let Ok(workspace) = TomlWorkspaceField::deserialize(serde_value::ValueDeserializer::< + D::Error, + >::new(value.clone())) + { + return Ok(MaybeWorkspace::Workspace(workspace)); + } + T::deserialize(serde_value::ValueDeserializer::::new(value)) + .map(MaybeWorkspace::Defined) + } +} + impl MaybeWorkspace { fn resolve<'a>( self, @@ -1041,43 +1058,6 @@ } } -fn maybe_workspace_vec_string<'de, D>( - deserializer: D, -) -> Result>>, D::Error> -where - D: de::Deserializer<'de>, -{ - struct Visitor; - - impl<'de> de::Visitor<'de> for Visitor { - type Value = Option>>; - - fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { - formatter.write_str("vector of strings") - } - - fn visit_seq(self, v: V) -> Result - where - V: de::SeqAccess<'de>, - { - let seq = de::value::SeqAccessDeserializer::new(v); - let defined = Vec::::deserialize(seq).map(MaybeWorkspace::Defined)?; - Ok(Some(defined)) - } - - fn visit_map(self, map: V) -> Result - where - V: de::MapAccess<'de>, - { - let mvd = de::value::MapAccessDeserializer::new(map); - let workspace = TomlWorkspaceField::deserialize(mvd).map(MaybeWorkspace::Workspace)?; - Ok(Some(workspace)) - } - } - - deserializer.deserialize_any(Visitor) -} - #[derive(Deserialize, Serialize, Clone, Debug)] pub struct TomlWorkspaceField { workspace: bool, @@ -1091,14 +1071,12 @@ /// tables. 
#[derive(Deserialize, Serialize, Clone, Debug)] #[serde(rename_all = "kebab-case")] -pub struct TomlProject { +pub struct TomlPackage { edition: Option>, rust_version: Option>, name: InternedString, #[serde(deserialize_with = "version_trim_whitespace")] version: MaybeWorkspace, - #[serde(default)] - #[serde(deserialize_with = "maybe_workspace_vec_string")] authors: Option>>, build: Option, metabuild: Option, @@ -1107,11 +1085,7 @@ #[serde(rename = "forced-target")] forced_target: Option, links: Option, - #[serde(default)] - #[serde(deserialize_with = "maybe_workspace_vec_string")] exclude: Option>>, - #[serde(default)] - #[serde(deserialize_with = "maybe_workspace_vec_string")] include: Option>>, publish: Option>, workspace: Option, @@ -1127,11 +1101,7 @@ homepage: Option>, documentation: Option>, readme: Option>, - #[serde(default)] - #[serde(deserialize_with = "maybe_workspace_vec_string")] keywords: Option>>, - #[serde(default)] - #[serde(deserialize_with = "maybe_workspace_vec_string")] categories: Option>>, license: Option>, license_file: Option>, @@ -1259,7 +1229,7 @@ } pub fn readme(&self, package_root: &Path) -> CargoResult { - readme_for_project(self.ws_root.as_path(), self.readme.clone()).map_or( + readme_for_package(self.ws_root.as_path(), self.readme.clone()).map_or( Err(anyhow!("`workspace.package.readme` was not defined")), |readme| { let rel_path = @@ -1351,7 +1321,7 @@ } } -impl TomlProject { +impl TomlPackage { pub fn to_package_id( &self, source_id: SourceId, @@ -1551,8 +1521,7 @@ d.rev.take(); // registry specifications are elaborated to the index URL if let Some(registry) = d.registry.take() { - let src = SourceId::alt_registry(config, ®istry)?; - d.registry_index = Some(src.url().to_string()); + d.registry_index = Some(config.get_registry_index(®istry)?.to_string()); } Ok(TomlDependency::Detailed(d)) } @@ -1608,10 +1577,32 @@ let cargo_features = me.cargo_features.as_ref().unwrap_or(&empty); let features = Features::new(cargo_features, config, &mut warnings, source_id.is_path())?; - let project = me.project.clone().or_else(|| me.package.clone()); - let project = &mut project.ok_or_else(|| anyhow!("no `package` section found"))?; + let mut package = match (&me.package, &me.project) { + (Some(_), Some(project)) => { + if source_id.is_path() { + config.shell().warn(format!( + "manifest at `{}` contains both `project` and `package`, \ + this could become a hard error in the future", + package_root.display() + ))?; + } + project.clone() + } + (Some(package), None) => package.clone(), + (None, Some(project)) => { + if source_id.is_path() { + config.shell().warn(format!( + "manifest at `{}` contains `[project]` instead of `[package]`, \ + this could become a hard error in the future", + package_root.display() + ))?; + } + project.clone() + } + (None, None) => bail!("no `package` section found"), + }; - let workspace_config = match (me.workspace.as_ref(), project.workspace.as_ref()) { + let workspace_config = match (me.workspace.as_ref(), package.workspace.as_ref()) { (Some(toml_config), None) => { let mut inheritable = toml_config.package.clone().unwrap_or_default(); inheritable.update_ws_path(package_root.to_path_buf()); @@ -1639,7 +1630,7 @@ ), }; - let package_name = project.name.trim(); + let package_name = package.name.trim(); if package_name.is_empty() { bail!("package name cannot be an empty string") } @@ -1652,21 +1643,21 @@ let inherit = || inherit_cell.try_borrow_with(|| get_ws(config, &resolved_path, &workspace_config)); - let version = project + let version = 
package .version .clone() .resolve("version", || inherit()?.version())?; - project.version = MaybeWorkspace::Defined(version.clone()); + package.version = MaybeWorkspace::Defined(version.clone()); - let pkgid = project.to_package_id(source_id, version)?; + let pkgid = package.to_package_id(source_id, version)?; - let edition = if let Some(edition) = project.edition.clone() { + let edition = if let Some(edition) = package.edition.clone() { let edition: Edition = edition .resolve("edition", || inherit()?.edition())? .parse() .with_context(|| "failed to parse the `edition` key")?; - project.edition = Some(MaybeWorkspace::Defined(edition.to_string())); + package.edition = Some(MaybeWorkspace::Defined(edition.to_string())); edition } else { Edition::Edition2015 @@ -1685,7 +1676,7 @@ ))); } - let rust_version = if let Some(rust_version) = &project.rust_version { + let rust_version = if let Some(rust_version) = &package.rust_version { let rust_version = rust_version .clone() .resolve("rust_version", || inherit()?.rust_version())?; @@ -1712,12 +1703,12 @@ None }; - if project.metabuild.is_some() { + if package.metabuild.is_some() { features.require(Feature::metabuild())?; } let resolve_behavior = match ( - project.resolver.as_ref(), + package.resolver.as_ref(), me.workspace.as_ref().and_then(|ws| ws.resolver.as_ref()), ) { (None, None) => None, @@ -1736,8 +1727,8 @@ package_name, package_root, edition, - &project.build, - &project.metabuild, + &package.build, + &package.metabuild, &mut warnings, &mut errors, )?; @@ -1754,7 +1745,7 @@ )); } - if let Some(links) = &project.links { + if let Some(links) = &package.links { if !targets.iter().any(|t| t.is_custom_build()) { bail!( "package `{}` specifies that it links to `{}` but does not \ @@ -1925,13 +1916,13 @@ } } - let exclude = project + let exclude = package .exclude .clone() .map(|mw| mw.resolve("exclude", || inherit()?.exclude())) .transpose()? .unwrap_or_default(); - let include = project + let include = package .include .clone() .map(|mw| mw.resolve("include", || inherit()?.include())) @@ -1944,61 +1935,61 @@ pkgid, deps, me.features.as_ref().unwrap_or(&empty_features), - project.links.as_deref(), + package.links.as_deref(), )?; let metadata = ManifestMetadata { - description: project + description: package .description .clone() .map(|mw| mw.resolve("description", || inherit()?.description())) .transpose()?, - homepage: project + homepage: package .homepage .clone() .map(|mw| mw.resolve("homepage", || inherit()?.homepage())) .transpose()?, - documentation: project + documentation: package .documentation .clone() .map(|mw| mw.resolve("documentation", || inherit()?.documentation())) .transpose()?, - readme: readme_for_project( + readme: readme_for_package( package_root, - project + package .readme .clone() .map(|mw| mw.resolve("readme", || inherit()?.readme(package_root))) .transpose()?, ), - authors: project + authors: package .authors .clone() .map(|mw| mw.resolve("authors", || inherit()?.authors())) .transpose()? 
.unwrap_or_default(), - license: project + license: package .license .clone() .map(|mw| mw.resolve("license", || inherit()?.license())) .transpose()?, - license_file: project + license_file: package .license_file .clone() .map(|mw| mw.resolve("license", || inherit()?.license_file(package_root))) .transpose()?, - repository: project + repository: package .repository .clone() .map(|mw| mw.resolve("repository", || inherit()?.repository())) .transpose()?, - keywords: project + keywords: package .keywords .clone() .map(|mw| mw.resolve("keywords", || inherit()?.keywords())) .transpose()? .unwrap_or_default(), - categories: project + categories: package .categories .clone() .map(|mw| mw.resolve("categories", || inherit()?.categories())) @@ -2010,54 +2001,54 @@ .map(|mw| mw.resolve("badges", || inherit()?.badges())) .transpose()? .unwrap_or_default(), - links: project.links.clone(), + links: package.links.clone(), }; - project.description = metadata + package.description = metadata .description .clone() .map(|description| MaybeWorkspace::Defined(description)); - project.homepage = metadata + package.homepage = metadata .homepage .clone() .map(|homepage| MaybeWorkspace::Defined(homepage)); - project.documentation = metadata + package.documentation = metadata .documentation .clone() .map(|documentation| MaybeWorkspace::Defined(documentation)); - project.readme = metadata + package.readme = metadata .readme .clone() .map(|readme| MaybeWorkspace::Defined(StringOrBool::String(readme))); - project.authors = project + package.authors = package .authors .as_ref() .map(|_| MaybeWorkspace::Defined(metadata.authors.clone())); - project.license = metadata + package.license = metadata .license .clone() .map(|license| MaybeWorkspace::Defined(license)); - project.license_file = metadata + package.license_file = metadata .license_file .clone() .map(|license_file| MaybeWorkspace::Defined(license_file)); - project.repository = metadata + package.repository = metadata .repository .clone() .map(|repository| MaybeWorkspace::Defined(repository)); - project.keywords = project + package.keywords = package .keywords .as_ref() .map(|_| MaybeWorkspace::Defined(metadata.keywords.clone())); - project.categories = project + package.categories = package .categories .as_ref() .map(|_| MaybeWorkspace::Defined(metadata.categories.clone())); - project.rust_version = rust_version.clone().map(|rv| MaybeWorkspace::Defined(rv)); - project.exclude = project + package.rust_version = rust_version.clone().map(|rv| MaybeWorkspace::Defined(rv)); + package.exclude = package .exclude .as_ref() .map(|_| MaybeWorkspace::Defined(exclude.clone())); - project.include = project + package.include = package .include .as_ref() .map(|_| MaybeWorkspace::Defined(include.clone())); @@ -2067,12 +2058,12 @@ profiles.validate(&features, &mut warnings)?; } - let publish = project + let publish = package .publish .clone() .map(|publish| publish.resolve("publish", || inherit()?.publish()).unwrap()); - project.publish = publish.clone().map(|p| MaybeWorkspace::Defined(p)); + package.publish = publish.clone().map(|p| MaybeWorkspace::Defined(p)); let publish = match publish { Some(VecStringOrBool::VecString(ref vecstring)) => Some(vecstring.clone()), @@ -2088,7 +2079,7 @@ ) } - if let Some(run) = &project.default_run { + if let Some(run) = &package.default_run { if !targets .iter() .filter(|t| t.is_bin()) @@ -2100,22 +2091,22 @@ } } - let default_kind = project + let default_kind = package .default_target .as_ref() .map(|t| CompileTarget::new(&*t)) .transpose()? 
.map(CompileKind::Target); - let forced_kind = project + let forced_kind = package .forced_target .as_ref() .map(|t| CompileTarget::new(&*t)) .transpose()? .map(CompileKind::Target); - let custom_metadata = project.metadata.clone(); + let custom_metadata = package.metadata.clone(); let resolved_toml = TomlManifest { cargo_features: me.cargo_features.clone(), - package: Some(project.clone()), + package: Some(package.clone()), project: None, profile: me.profile.clone(), lib: me.lib.clone(), @@ -2145,7 +2136,7 @@ targets, exclude, include, - project.links.clone(), + package.links.clone(), metadata, custom_metadata, profiles, @@ -2156,13 +2147,13 @@ features, edition, rust_version, - project.im_a_teapot, - project.default_run.clone(), + package.im_a_teapot, + package.default_run.clone(), Rc::new(resolved_toml), - project.metabuild.clone().map(|sov| sov.0), + package.metabuild.clone().map(|sov| sov.0), resolve_behavior, ); - if project.license_file.is_some() && project.license.is_some() { + if package.license_file.is_some() && package.license.is_some() { manifest.warnings_mut().add_warning( "only one of `license` or `license-file` is necessary\n\ `license` should be used if the package license can be expressed \ @@ -2423,8 +2414,8 @@ } } -/// Returns the name of the README file for a `TomlProject`. -pub fn readme_for_project(package_root: &Path, readme: Option) -> Option { +/// Returns the name of the README file for a [`TomlPackage`]. +pub fn readme_for_package(package_root: &Path, readme: Option) -> Option { match &readme { None => default_readme_from_package_root(package_root), Some(value) => match value { diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/util/toml_mut/dependency.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/util/toml_mut/dependency.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/util/toml_mut/dependency.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/util/toml_mut/dependency.rs 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,1132 @@ +//! Information about dependencies in a manifest. + +use std::fmt::{Display, Formatter}; +use std::path::{Path, PathBuf}; + +use indexmap::IndexSet; +use toml_edit::KeyMut; + +use super::manifest::str_or_1_len_table; +use crate::core::GitReference; +use crate::core::SourceId; +use crate::core::Summary; +use crate::CargoResult; +use crate::Config; + +/// A dependency handled by Cargo. +/// +/// `None` means the field will be blank in TOML. +#[derive(Debug, PartialEq, Eq, Clone)] +#[non_exhaustive] +pub struct Dependency { + /// The name of the dependency (as it is set in its `Cargo.toml` and known + /// to crates.io). + pub name: String, + /// Whether the dependency is opted-in with a feature flag. + pub optional: Option, + + /// List of features to add (or None to keep features unchanged). + pub features: Option>, + /// Whether default features are enabled. + pub default_features: Option, + /// List of features inherited from a workspace dependency. + pub inherited_features: Option>, + + /// Where the dependency comes from. + pub source: Option, + /// Non-default registry. + pub registry: Option, + + /// If the dependency is renamed, this is the new name for the dependency + /// as a string. None if it is not renamed. + pub rename: Option, +} + +impl Dependency { + /// Create a new dependency with a name. 
+ pub fn new(name: &str) -> Self { + Self { + name: name.into(), + optional: None, + features: None, + default_features: None, + inherited_features: None, + source: None, + registry: None, + rename: None, + } + } + + /// Set dependency to a given version. + pub fn set_source(mut self, source: impl Into) -> Self { + self.source = Some(source.into()); + self + } + + /// Remove the existing version requirement. + pub fn clear_version(mut self) -> Self { + match &mut self.source { + Some(Source::Registry(_)) => { + self.source = None; + } + Some(Source::Path(path)) => { + path.version = None; + } + Some(Source::Git(git)) => { + git.version = None; + } + Some(Source::Workspace(_workspace)) => {} + None => {} + } + self + } + + /// Set whether the dependency is optional. + #[allow(dead_code)] + pub fn set_optional(mut self, opt: bool) -> Self { + self.optional = Some(opt); + self + } + + /// Set features as an array of string (does some basic parsing). + #[allow(dead_code)] + pub fn set_features(mut self, features: IndexSet) -> Self { + self.features = Some(features); + self + } + + /// Set features as an array of string (does some basic parsing). + pub fn extend_features(mut self, features: impl IntoIterator) -> Self { + self.features + .get_or_insert_with(Default::default) + .extend(features); + self + } + + /// Set the value of default-features for the dependency. + #[allow(dead_code)] + pub fn set_default_features(mut self, default_features: bool) -> Self { + self.default_features = Some(default_features); + self + } + + /// Set the alias for the dependency. + pub fn set_rename(mut self, rename: &str) -> Self { + self.rename = Some(rename.into()); + self + } + + /// Set the value of registry for the dependency. + pub fn set_registry(mut self, registry: impl Into) -> Self { + self.registry = Some(registry.into()); + self + } + + /// Set features as an array of string (does some basic parsing). + pub fn set_inherited_features(mut self, features: IndexSet) -> Self { + self.inherited_features = Some(features); + self + } + + /// Get the dependency source. + pub fn source(&self) -> Option<&Source> { + self.source.as_ref() + } + + /// Get version of dependency. + pub fn version(&self) -> Option<&str> { + match self.source()? { + Source::Registry(src) => Some(src.version.as_str()), + Source::Path(src) => src.version.as_deref(), + Source::Git(src) => src.version.as_deref(), + Source::Workspace(_) => None, + } + } + + /// Get registry of the dependency. + pub fn registry(&self) -> Option<&str> { + self.registry.as_deref() + } + + /// Get the alias for the dependency (if any). + pub fn rename(&self) -> Option<&str> { + self.rename.as_deref() + } + + /// Whether default features are activated. + pub fn default_features(&self) -> Option { + self.default_features + } + + /// Get whether the dep is optional. + pub fn optional(&self) -> Option { + self.optional + } + + /// Get the SourceID for this dependency. 
+ pub fn source_id(&self, config: &Config) -> CargoResult> { + match &self.source.as_ref() { + Some(Source::Registry(_)) | None => { + if let Some(r) = self.registry() { + let source_id = SourceId::alt_registry(config, r)?; + Ok(MaybeWorkspace::Other(source_id)) + } else { + let source_id = SourceId::crates_io(config)?; + Ok(MaybeWorkspace::Other(source_id)) + } + } + Some(Source::Path(source)) => Ok(MaybeWorkspace::Other(source.source_id()?)), + Some(Source::Git(source)) => Ok(MaybeWorkspace::Other(source.source_id()?)), + Some(Source::Workspace(workspace)) => Ok(MaybeWorkspace::Workspace(workspace.clone())), + } + } + + /// Query to find this dependency. + pub fn query( + &self, + config: &Config, + ) -> CargoResult> { + let source_id = self.source_id(config)?; + match source_id { + MaybeWorkspace::Workspace(workspace) => Ok(MaybeWorkspace::Workspace(workspace)), + MaybeWorkspace::Other(source_id) => Ok(MaybeWorkspace::Other( + crate::core::dependency::Dependency::parse( + self.name.as_str(), + self.version(), + source_id, + )?, + )), + } + } +} + +/// Either a workspace or another type. +pub enum MaybeWorkspace { + Workspace(WorkspaceSource), + Other(T), +} + +impl Dependency { + /// Create a dependency from a TOML table entry. + pub fn from_toml(crate_root: &Path, key: &str, item: &toml_edit::Item) -> CargoResult { + if let Some(version) = item.as_str() { + let dep = Self::new(key).set_source(RegistrySource::new(version)); + Ok(dep) + } else if let Some(table) = item.as_table_like() { + let (name, rename) = if let Some(value) = table.get("package") { + ( + value + .as_str() + .ok_or_else(|| invalid_type(key, "package", value.type_name(), "string"))? + .to_owned(), + Some(key.to_owned()), + ) + } else { + (key.to_owned(), None) + }; + + let source: Source = + if let Some(git) = table.get("git") { + let mut src = GitSource::new( + git.as_str() + .ok_or_else(|| invalid_type(key, "git", git.type_name(), "string"))?, + ); + if let Some(value) = table.get("branch") { + src = src.set_branch(value.as_str().ok_or_else(|| { + invalid_type(key, "branch", value.type_name(), "string") + })?); + } + if let Some(value) = table.get("tag") { + src = src.set_tag(value.as_str().ok_or_else(|| { + invalid_type(key, "tag", value.type_name(), "string") + })?); + } + if let Some(value) = table.get("rev") { + src = src.set_rev(value.as_str().ok_or_else(|| { + invalid_type(key, "rev", value.type_name(), "string") + })?); + } + if let Some(value) = table.get("version") { + src = src.set_version(value.as_str().ok_or_else(|| { + invalid_type(key, "version", value.type_name(), "string") + })?); + } + src.into() + } else if let Some(path) = table.get("path") { + let path = crate_root + .join(path.as_str().ok_or_else(|| { + invalid_type(key, "path", path.type_name(), "string") + })?); + let mut src = PathSource::new(path); + if let Some(value) = table.get("version") { + src = src.set_version(value.as_str().ok_or_else(|| { + invalid_type(key, "version", value.type_name(), "string") + })?); + } + src.into() + } else if let Some(version) = table.get("version") { + let src = RegistrySource::new(version.as_str().ok_or_else(|| { + invalid_type(key, "version", version.type_name(), "string") + })?); + src.into() + } else if let Some(workspace) = table.get("workspace") { + let workspace_bool = workspace.as_bool().ok_or_else(|| { + invalid_type(key, "workspace", workspace.type_name(), "bool") + })?; + if !workspace_bool { + anyhow::bail!("`{key}.workspace = false` is unsupported") + } + let src = WorkspaceSource::new(); + 
src.into() + } else { + anyhow::bail!("Unrecognized dependency source for `{key}`"); + }; + let registry = if let Some(value) = table.get("registry") { + Some( + value + .as_str() + .ok_or_else(|| invalid_type(key, "registry", value.type_name(), "string"))? + .to_owned(), + ) + } else { + None + }; + + let default_features = table.get("default-features").and_then(|v| v.as_bool()); + if table.contains_key("default_features") { + anyhow::bail!("Use of `default_features` in `{key}` is unsupported, please switch to `default-features`"); + } + + let features = if let Some(value) = table.get("features") { + Some( + value + .as_array() + .ok_or_else(|| invalid_type(key, "features", value.type_name(), "array"))? + .iter() + .map(|v| { + v.as_str().map(|s| s.to_owned()).ok_or_else(|| { + invalid_type(key, "features", v.type_name(), "string") + }) + }) + .collect::>>()?, + ) + } else { + None + }; + + let optional = table.get("optional").and_then(|v| v.as_bool()); + + let dep = Self { + name, + rename, + source: Some(source), + registry, + default_features, + features, + optional, + inherited_features: None, + }; + Ok(dep) + } else { + anyhow::bail!("Unrecognized` dependency entry format for `{key}"); + } + } + + /// Get the dependency name as defined in the manifest, + /// that is, either the alias (rename field if Some), + /// or the official package name (name field). + pub fn toml_key(&self) -> &str { + self.rename().unwrap_or(&self.name) + } + + /// Convert dependency to TOML. + /// + /// Returns a tuple with the dependency's name and either the version as a + /// `String` or the path/git repository as an `InlineTable`. + /// (If the dependency is set as `optional` or `default-features` is set to + /// `false`, an `InlineTable` is returned in any case.) + /// + /// # Panic + /// + /// Panics if the path is relative + pub fn to_toml(&self, crate_root: &Path) -> toml_edit::Item { + assert!( + crate_root.is_absolute(), + "Absolute path needed, got: {}", + crate_root.display() + ); + let table: toml_edit::Item = match ( + self.optional.unwrap_or(false), + self.features.as_ref(), + self.default_features.unwrap_or(true), + self.source.as_ref(), + self.registry.as_ref(), + self.rename.as_ref(), + ) { + // Extra short when version flag only + ( + false, + None, + true, + Some(Source::Registry(RegistrySource { version: v })), + None, + None, + ) => toml_edit::value(v), + (false, None, true, Some(Source::Workspace(WorkspaceSource {})), None, None) => { + let mut table = toml_edit::InlineTable::default(); + table.set_dotted(true); + table.insert("workspace", true.into()); + toml_edit::value(toml_edit::Value::InlineTable(table)) + } + // Other cases are represented as an inline table + (_, _, _, _, _, _) => { + let mut table = toml_edit::InlineTable::default(); + + match &self.source { + Some(Source::Registry(src)) => { + table.insert("version", src.version.as_str().into()); + } + Some(Source::Path(src)) => { + let relpath = path_field(crate_root, &src.path); + if let Some(r) = src.version.as_deref() { + table.insert("version", r.into()); + } + table.insert("path", relpath.into()); + } + Some(Source::Git(src)) => { + table.insert("git", src.git.as_str().into()); + if let Some(branch) = src.branch.as_deref() { + table.insert("branch", branch.into()); + } + if let Some(tag) = src.tag.as_deref() { + table.insert("tag", tag.into()); + } + if let Some(rev) = src.rev.as_deref() { + table.insert("rev", rev.into()); + } + if let Some(r) = src.version.as_deref() { + table.insert("version", r.into()); + } + } + 
Some(Source::Workspace(_)) => { + table.insert("workspace", true.into()); + } + None => {} + } + if table.contains_key("version") { + if let Some(r) = self.registry.as_deref() { + table.insert("registry", r.into()); + } + } + + if self.rename.is_some() { + table.insert("package", self.name.as_str().into()); + } + if let Some(v) = self.default_features { + table.insert("default-features", v.into()); + } + if let Some(features) = self.features.as_ref() { + let features: toml_edit::Value = features.iter().cloned().collect(); + table.insert("features", features); + } + if let Some(v) = self.optional { + table.insert("optional", v.into()); + } + + toml_edit::value(toml_edit::Value::InlineTable(table)) + } + }; + + table + } + + /// Modify existing entry to match this dependency. + pub fn update_toml<'k>( + &self, + crate_root: &Path, + key: &mut KeyMut<'k>, + item: &mut toml_edit::Item, + ) { + if str_or_1_len_table(item) { + // Nothing to preserve + *item = self.to_toml(crate_root); + key.fmt(); + } else if let Some(table) = item.as_table_like_mut() { + match &self.source { + Some(Source::Registry(src)) => { + table.insert("version", toml_edit::value(src.version.as_str())); + + for key in ["path", "git", "branch", "tag", "rev", "workspace"] { + table.remove(key); + } + } + Some(Source::Path(src)) => { + let relpath = path_field(crate_root, &src.path); + table.insert("path", toml_edit::value(relpath)); + if let Some(r) = src.version.as_deref() { + table.insert("version", toml_edit::value(r)); + } else { + table.remove("version"); + } + + for key in ["git", "branch", "tag", "rev", "workspace"] { + table.remove(key); + } + } + Some(Source::Git(src)) => { + table.insert("git", toml_edit::value(src.git.as_str())); + if let Some(branch) = src.branch.as_deref() { + table.insert("branch", toml_edit::value(branch)); + } else { + table.remove("branch"); + } + if let Some(tag) = src.tag.as_deref() { + table.insert("tag", toml_edit::value(tag)); + } else { + table.remove("tag"); + } + if let Some(rev) = src.rev.as_deref() { + table.insert("rev", toml_edit::value(rev)); + } else { + table.remove("rev"); + } + if let Some(r) = src.version.as_deref() { + table.insert("version", toml_edit::value(r)); + } else { + table.remove("version"); + } + + for key in ["path", "workspace"] { + table.remove(key); + } + } + Some(Source::Workspace(_)) => { + table.insert("workspace", toml_edit::value(true)); + table.set_dotted(true); + key.fmt(); + for key in [ + "version", + "registry", + "registry-index", + "path", + "git", + "branch", + "tag", + "rev", + "package", + "default-features", + ] { + table.remove(key); + } + } + None => {} + } + if table.contains_key("version") { + if let Some(r) = self.registry.as_deref() { + table.insert("registry", toml_edit::value(r)); + } else { + table.remove("registry"); + } + } else { + table.remove("registry"); + } + + if self.rename.is_some() { + table.insert("package", toml_edit::value(self.name.as_str())); + } + match self.default_features { + Some(v) => { + table.insert("default-features", toml_edit::value(v)); + } + None => { + table.remove("default-features"); + } + } + if let Some(new_features) = self.features.as_ref() { + let mut features = table + .get("features") + .and_then(|i| i.as_value()) + .and_then(|v| v.as_array()) + .and_then(|a| { + a.iter() + .map(|v| v.as_str()) + .collect::>>() + }) + .unwrap_or_default(); + features.extend(new_features.iter().map(|s| s.as_str())); + let features = toml_edit::value(features.into_iter().collect::()); + table.set_dotted(false); + 
table.insert("features", features); + } else { + table.remove("features"); + } + match self.optional { + Some(v) => { + table.set_dotted(false); + table.insert("optional", toml_edit::value(v)); + } + None => { + table.remove("optional"); + } + } + + table.fmt(); + } else { + unreachable!("Invalid dependency type: {}", item.type_name()); + } + } +} + +fn invalid_type(dep: &str, key: &str, actual: &str, expected: &str) -> anyhow::Error { + anyhow::format_err!("Found {actual} for {key} when {expected} was expected for {dep}") +} + +impl std::fmt::Display for Dependency { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + if let Some(source) = self.source() { + write!(f, "{}@{}", self.name, source) + } else { + self.toml_key().fmt(f) + } + } +} + +impl<'s> From<&'s Summary> for Dependency { + fn from(other: &'s Summary) -> Self { + let source: Source = if let Some(path) = other.source_id().local_path() { + PathSource::new(path) + .set_version(other.version().to_string()) + .into() + } else if let Some(git_ref) = other.source_id().git_reference() { + let mut src = GitSource::new(other.source_id().url().to_string()) + .set_version(other.version().to_string()); + match git_ref { + GitReference::Branch(branch) => src = src.set_branch(branch), + GitReference::Tag(tag) => src = src.set_tag(tag), + GitReference::Rev(rev) => src = src.set_rev(rev), + GitReference::DefaultBranch => {} + } + src.into() + } else { + RegistrySource::new(other.version().to_string()).into() + }; + Dependency::new(other.name().as_str()).set_source(source) + } +} + +impl From for Dependency { + fn from(other: Summary) -> Self { + (&other).into() + } +} + +fn path_field(crate_root: &Path, abs_path: &Path) -> String { + let relpath = pathdiff::diff_paths(abs_path, crate_root).expect("both paths are absolute"); + let relpath = relpath.to_str().unwrap().replace('\\', "/"); + relpath +} + +/// Primary location of a dependency. +#[derive(Debug, Hash, PartialEq, Eq, Clone)] +pub enum Source { + /// Dependency from a registry. + Registry(RegistrySource), + /// Dependency from a local path. + Path(PathSource), + /// Dependency from a git repo. + Git(GitSource), + /// Dependency from a workspace. + Workspace(WorkspaceSource), +} + +impl Source { + /// Access the registry source, if present. + pub fn as_registry(&self) -> Option<&RegistrySource> { + match self { + Self::Registry(src) => Some(src), + _ => None, + } + } + + /// Access the path source, if present. + #[allow(dead_code)] + pub fn as_path(&self) -> Option<&PathSource> { + match self { + Self::Path(src) => Some(src), + _ => None, + } + } + + /// Access the git source, if present. + #[allow(dead_code)] + pub fn as_git(&self) -> Option<&GitSource> { + match self { + Self::Git(src) => Some(src), + _ => None, + } + } + + /// Access the workspace source, if present. 
+ #[allow(dead_code)] + pub fn as_workspace(&self) -> Option<&WorkspaceSource> { + match self { + Self::Workspace(src) => Some(src), + _ => None, + } + } +} + +impl std::fmt::Display for Source { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Self::Registry(src) => src.fmt(f), + Self::Path(src) => src.fmt(f), + Self::Git(src) => src.fmt(f), + Self::Workspace(src) => src.fmt(f), + } + } +} + +impl<'s> From<&'s Source> for Source { + fn from(inner: &'s Source) -> Self { + inner.clone() + } +} + +impl From for Source { + fn from(inner: RegistrySource) -> Self { + Self::Registry(inner) + } +} + +impl From for Source { + fn from(inner: PathSource) -> Self { + Self::Path(inner) + } +} + +impl From for Source { + fn from(inner: GitSource) -> Self { + Self::Git(inner) + } +} + +impl From for Source { + fn from(inner: WorkspaceSource) -> Self { + Self::Workspace(inner) + } +} + +/// Dependency from a registry. +#[derive(Debug, Hash, PartialEq, Eq, Clone)] +#[non_exhaustive] +pub struct RegistrySource { + /// Version requirement. + pub version: String, +} + +impl RegistrySource { + /// Specify dependency by version requirement. + pub fn new(version: impl AsRef) -> Self { + // versions might have semver metadata appended which we do not want to + // store in the cargo toml files. This would cause a warning upon compilation + // ("version requirement […] includes semver metadata which will be ignored") + let version = version.as_ref().split('+').next().unwrap(); + Self { + version: version.to_owned(), + } + } +} + +impl std::fmt::Display for RegistrySource { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + self.version.fmt(f) + } +} + +/// Dependency from a local path. +#[derive(Debug, Hash, PartialEq, Eq, Clone)] +#[non_exhaustive] +pub struct PathSource { + /// Local, absolute path. + pub path: PathBuf, + /// Version requirement for when published. + pub version: Option, +} + +impl PathSource { + /// Specify dependency from a path. + pub fn new(path: impl Into) -> Self { + Self { + path: path.into(), + version: None, + } + } + + /// Set an optional version requirement. + pub fn set_version(mut self, version: impl AsRef) -> Self { + // versions might have semver metadata appended which we do not want to + // store in the cargo toml files. This would cause a warning upon compilation + // ("version requirement […] includes semver metadata which will be ignored") + let version = version.as_ref().split('+').next().unwrap(); + self.version = Some(version.to_owned()); + self + } + + /// Get the SourceID for this dependency. + pub fn source_id(&self) -> CargoResult { + SourceId::for_path(&self.path) + } +} + +impl std::fmt::Display for PathSource { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + self.path.display().fmt(f) + } +} + +/// Dependency from a git repo. +#[derive(Debug, Hash, PartialEq, Eq, Clone)] +#[non_exhaustive] +pub struct GitSource { + /// Repository URL. + pub git: String, + /// Select specific branch. + pub branch: Option, + /// Select specific tag. + pub tag: Option, + /// Select specific rev. + pub rev: Option, + /// Version requirement for when published. + pub version: Option, +} + +impl GitSource { + /// Specify dependency from a git repo. + pub fn new(git: impl Into) -> Self { + Self { + git: git.into(), + branch: None, + tag: None, + rev: None, + version: None, + } + } + + /// Specify an optional branch. 
+ pub fn set_branch(mut self, branch: impl Into) -> Self { + self.branch = Some(branch.into()); + self.tag = None; + self.rev = None; + self + } + + /// Specify an optional tag. + pub fn set_tag(mut self, tag: impl Into) -> Self { + self.branch = None; + self.tag = Some(tag.into()); + self.rev = None; + self + } + + /// Specify an optional rev. + pub fn set_rev(mut self, rev: impl Into) -> Self { + self.branch = None; + self.tag = None; + self.rev = Some(rev.into()); + self + } + + /// Get the SourceID for this dependency. + pub fn source_id(&self) -> CargoResult { + let git_url = self.git.parse::()?; + let git_ref = self.git_ref(); + SourceId::for_git(&git_url, git_ref) + } + + fn git_ref(&self) -> GitReference { + match ( + self.branch.as_deref(), + self.tag.as_deref(), + self.rev.as_deref(), + ) { + (Some(branch), _, _) => GitReference::Branch(branch.to_owned()), + (_, Some(tag), _) => GitReference::Tag(tag.to_owned()), + (_, _, Some(rev)) => GitReference::Rev(rev.to_owned()), + _ => GitReference::DefaultBranch, + } + } + + /// Set an optional version requirement. + pub fn set_version(mut self, version: impl AsRef) -> Self { + // versions might have semver metadata appended which we do not want to + // store in the cargo toml files. This would cause a warning upon compilation + // ("version requirement […] includes semver metadata which will be ignored") + let version = version.as_ref().split('+').next().unwrap(); + self.version = Some(version.to_owned()); + self + } +} + +impl std::fmt::Display for GitSource { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + let git_ref = self.git_ref(); + if let Some(pretty_ref) = git_ref.pretty_ref() { + write!(f, "{}?{}", self.git, pretty_ref) + } else { + write!(f, "{}", self.git) + } + } +} + +/// Dependency from a workspace. 
+#[derive(Debug, Hash, PartialEq, Eq, Clone)] +#[non_exhaustive] +pub struct WorkspaceSource; + +impl WorkspaceSource { + pub fn new() -> Self { + Self + } +} + +impl Display for WorkspaceSource { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + "workspace".fmt(f) + } +} + +#[cfg(test)] +mod tests { + use std::path::Path; + + use crate::util::toml_mut::manifest::LocalManifest; + use cargo_util::paths; + + use super::*; + + #[test] + fn to_toml_simple_dep() { + let crate_root = + paths::normalize_path(&std::env::current_dir().unwrap().join(Path::new("/"))); + let dep = Dependency::new("dep").set_source(RegistrySource::new("1.0")); + let key = dep.toml_key(); + let item = dep.to_toml(&crate_root); + + assert_eq!(key, "dep".to_owned()); + + verify_roundtrip(&crate_root, key, &item); + } + + #[test] + fn to_toml_simple_dep_with_version() { + let crate_root = + paths::normalize_path(&std::env::current_dir().unwrap().join(Path::new("/"))); + let dep = Dependency::new("dep").set_source(RegistrySource::new("1.0")); + let key = dep.toml_key(); + let item = dep.to_toml(&crate_root); + + assert_eq!(key, "dep".to_owned()); + assert_eq!(item.as_str(), Some("1.0")); + + verify_roundtrip(&crate_root, key, &item); + } + + #[test] + fn to_toml_optional_dep() { + let crate_root = + paths::normalize_path(&std::env::current_dir().unwrap().join(Path::new("/"))); + let dep = Dependency::new("dep") + .set_source(RegistrySource::new("1.0")) + .set_optional(true); + let key = dep.toml_key(); + let item = dep.to_toml(&crate_root); + + assert_eq!(key, "dep".to_owned()); + assert!(item.is_inline_table()); + + let dep = item.as_inline_table().unwrap(); + assert_eq!(dep.get("optional").unwrap().as_bool(), Some(true)); + + verify_roundtrip(&crate_root, key, &item); + } + + #[test] + fn to_toml_dep_without_default_features() { + let crate_root = + paths::normalize_path(&std::env::current_dir().unwrap().join(Path::new("/"))); + let dep = Dependency::new("dep") + .set_source(RegistrySource::new("1.0")) + .set_default_features(false); + let key = dep.toml_key(); + let item = dep.to_toml(&crate_root); + + assert_eq!(key, "dep".to_owned()); + assert!(item.is_inline_table()); + + let dep = item.as_inline_table().unwrap(); + assert_eq!(dep.get("default-features").unwrap().as_bool(), Some(false)); + + verify_roundtrip(&crate_root, key, &item); + } + + #[test] + fn to_toml_dep_with_path_source() { + let root = paths::normalize_path(&std::env::current_dir().unwrap().join(Path::new("/"))); + let crate_root = root.join("foo"); + let dep = Dependency::new("dep").set_source(PathSource::new(root.join("bar"))); + let key = dep.toml_key(); + let item = dep.to_toml(&crate_root); + + assert_eq!(key, "dep".to_owned()); + assert!(item.is_inline_table()); + + let dep = item.as_inline_table().unwrap(); + assert_eq!(dep.get("path").unwrap().as_str(), Some("../bar")); + + verify_roundtrip(&crate_root, key, &item); + } + + #[test] + fn to_toml_dep_with_git_source() { + let crate_root = + paths::normalize_path(&std::env::current_dir().unwrap().join(Path::new("/"))); + let dep = Dependency::new("dep").set_source(GitSource::new("https://foor/bar.git")); + let key = dep.toml_key(); + let item = dep.to_toml(&crate_root); + + assert_eq!(key, "dep".to_owned()); + assert!(item.is_inline_table()); + + let dep = item.as_inline_table().unwrap(); + assert_eq!( + dep.get("git").unwrap().as_str(), + Some("https://foor/bar.git") + ); + + verify_roundtrip(&crate_root, key, &item); + } + + #[test] + fn to_toml_renamed_dep() { + let crate_root = + 
paths::normalize_path(&std::env::current_dir().unwrap().join(Path::new("/"))); + let dep = Dependency::new("dep") + .set_source(RegistrySource::new("1.0")) + .set_rename("d"); + let key = dep.toml_key(); + let item = dep.to_toml(&crate_root); + + assert_eq!(key, "d".to_owned()); + assert!(item.is_inline_table()); + + let dep = item.as_inline_table().unwrap(); + assert_eq!(dep.get("package").unwrap().as_str(), Some("dep")); + + verify_roundtrip(&crate_root, key, &item); + } + + #[test] + fn to_toml_dep_from_alt_registry() { + let crate_root = + paths::normalize_path(&std::env::current_dir().unwrap().join(Path::new("/"))); + let dep = Dependency::new("dep") + .set_source(RegistrySource::new("1.0")) + .set_registry("alternative"); + let key = dep.toml_key(); + let item = dep.to_toml(&crate_root); + + assert_eq!(key, "dep".to_owned()); + assert!(item.is_inline_table()); + + let dep = item.as_inline_table().unwrap(); + assert_eq!(dep.get("registry").unwrap().as_str(), Some("alternative")); + + verify_roundtrip(&crate_root, key, &item); + } + + #[test] + fn to_toml_complex_dep() { + let crate_root = + paths::normalize_path(&std::env::current_dir().unwrap().join(Path::new("/"))); + let dep = Dependency::new("dep") + .set_source(RegistrySource::new("1.0")) + .set_default_features(false) + .set_rename("d"); + let key = dep.toml_key(); + let item = dep.to_toml(&crate_root); + + assert_eq!(key, "d".to_owned()); + assert!(item.is_inline_table()); + + let dep = item.as_inline_table().unwrap(); + assert_eq!(dep.get("package").unwrap().as_str(), Some("dep")); + assert_eq!(dep.get("version").unwrap().as_str(), Some("1.0")); + assert_eq!(dep.get("default-features").unwrap().as_bool(), Some(false)); + + verify_roundtrip(&crate_root, key, &item); + } + + #[test] + fn paths_with_forward_slashes_are_left_as_is() { + let crate_root = + paths::normalize_path(&std::env::current_dir().unwrap().join(Path::new("/"))); + let path = crate_root.join("sibling/crate"); + let relpath = "sibling/crate"; + let dep = Dependency::new("dep").set_source(PathSource::new(path)); + let key = dep.toml_key(); + let item = dep.to_toml(&crate_root); + + let table = item.as_inline_table().unwrap(); + let got = table.get("path").unwrap().as_str().unwrap(); + assert_eq!(got, relpath); + + verify_roundtrip(&crate_root, key, &item); + } + + #[test] + fn overwrite_with_workspace_source_fmt_key() { + let crate_root = + paths::normalize_path(&std::env::current_dir().unwrap().join(Path::new("./"))); + let toml = "dep = \"1.0\"\n"; + let manifest = toml.parse().unwrap(); + let mut local = LocalManifest { + path: crate_root.clone(), + manifest, + }; + assert_eq!(local.manifest.to_string(), toml); + for (key, item) in local.data.clone().iter() { + let dep = Dependency::from_toml(&crate_root, key, item).unwrap(); + let dep = dep.set_source(WorkspaceSource::new()); + local.insert_into_table(&vec![], &dep).unwrap(); + assert_eq!(local.data.to_string(), "dep.workspace = true\n"); + } + } + + #[test] + #[cfg(windows)] + fn normalise_windows_style_paths() { + let crate_root = + paths::normalize_path(&std::env::current_dir().unwrap().join(Path::new("/"))); + let original = crate_root.join(r"sibling\crate"); + let should_be = "sibling/crate"; + let dep = Dependency::new("dep").set_source(PathSource::new(original)); + let key = dep.toml_key(); + let item = dep.to_toml(&crate_root); + + let table = item.as_inline_table().unwrap(); + let got = table.get("path").unwrap().as_str().unwrap(); + assert_eq!(got, should_be); + + verify_roundtrip(&crate_root, key, 
&item); + } + + #[track_caller] + fn verify_roundtrip(crate_root: &Path, key: &str, item: &toml_edit::Item) { + let roundtrip = Dependency::from_toml(crate_root, key, item).unwrap(); + let round_key = roundtrip.toml_key(); + let round_item = roundtrip.to_toml(crate_root); + assert_eq!(key, round_key); + assert_eq!(item.to_string(), round_item.to_string()); + } +} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/util/toml_mut/manifest.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/util/toml_mut/manifest.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/util/toml_mut/manifest.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/util/toml_mut/manifest.rs 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,548 @@ +//! Parsing and editing of manifest files. + +use std::ops::{Deref, DerefMut}; +use std::path::{Path, PathBuf}; +use std::str; + +use anyhow::Context as _; + +use super::dependency::Dependency; +use crate::core::dependency::DepKind; +use crate::core::FeatureValue; +use crate::util::interning::InternedString; +use crate::CargoResult; + +/// Dependency table to add deps to. +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct DepTable { + kind: DepKind, + target: Option, +} + +impl DepTable { + const KINDS: &'static [Self] = &[ + Self::new().set_kind(DepKind::Normal), + Self::new().set_kind(DepKind::Development), + Self::new().set_kind(DepKind::Build), + ]; + + /// Reference to a Dependency Table. + pub const fn new() -> Self { + Self { + kind: DepKind::Normal, + target: None, + } + } + + /// Choose the type of dependency. + pub const fn set_kind(mut self, kind: DepKind) -> Self { + self.kind = kind; + self + } + + /// Choose the platform for the dependency. + pub fn set_target(mut self, target: impl Into) -> Self { + self.target = Some(target.into()); + self + } + + /// Type of dependency. + pub fn kind(&self) -> DepKind { + self.kind + } + + /// Platform for the dependency. + pub fn target(&self) -> Option<&str> { + self.target.as_deref() + } + + /// Keys to the table. + pub fn to_table(&self) -> Vec<&str> { + if let Some(target) = &self.target { + vec!["target", target, self.kind_table()] + } else { + vec![self.kind_table()] + } + } + + fn kind_table(&self) -> &str { + match self.kind { + DepKind::Normal => "dependencies", + DepKind::Development => "dev-dependencies", + DepKind::Build => "build-dependencies", + } + } +} + +impl Default for DepTable { + fn default() -> Self { + Self::new() + } +} + +impl From for DepTable { + fn from(other: DepKind) -> Self { + Self::new().set_kind(other) + } +} + +/// An editable Cargo manifest. +#[derive(Debug, Clone)] +pub struct Manifest { + /// Manifest contents as TOML data. + pub data: toml_edit::Document, +} + +impl Manifest { + /// Get the manifest's package name. + pub fn package_name(&self) -> CargoResult<&str> { + self.data + .as_table() + .get("package") + .and_then(|m| m.get("name")) + .and_then(|m| m.as_str()) + .ok_or_else(parse_manifest_err) + } + + /// Get the specified table from the manifest. + pub fn get_table<'a>(&'a self, table_path: &[String]) -> CargoResult<&'a toml_edit::Item> { + /// Descend into a manifest until the required table is found. 
+ fn descend<'a>( + input: &'a toml_edit::Item, + path: &[String], + ) -> CargoResult<&'a toml_edit::Item> { + if let Some(segment) = path.get(0) { + let value = input + .get(&segment) + .ok_or_else(|| non_existent_table_err(segment))?; + + if value.is_table_like() { + descend(value, &path[1..]) + } else { + Err(non_existent_table_err(segment)) + } + } else { + Ok(input) + } + } + + descend(self.data.as_item(), table_path) + } + + /// Get the specified table from the manifest. + pub fn get_table_mut<'a>( + &'a mut self, + table_path: &[String], + ) -> CargoResult<&'a mut toml_edit::Item> { + /// Descend into a manifest until the required table is found. + fn descend<'a>( + input: &'a mut toml_edit::Item, + path: &[String], + ) -> CargoResult<&'a mut toml_edit::Item> { + if let Some(segment) = path.get(0) { + let mut default_table = toml_edit::Table::new(); + default_table.set_implicit(true); + let value = input[&segment].or_insert(toml_edit::Item::Table(default_table)); + + if value.is_table_like() { + descend(value, &path[1..]) + } else { + Err(non_existent_table_err(segment)) + } + } else { + Ok(input) + } + } + + descend(self.data.as_item_mut(), table_path) + } + + /// Get all sections in the manifest that exist and might contain + /// dependencies. The returned items are always `Table` or + /// `InlineTable`. + pub fn get_sections(&self) -> Vec<(DepTable, toml_edit::Item)> { + let mut sections = Vec::new(); + + for table in DepTable::KINDS { + let dependency_type = table.kind_table(); + // Dependencies can be in the three standard sections... + if self + .data + .get(dependency_type) + .map(|t| t.is_table_like()) + .unwrap_or(false) + { + sections.push((table.clone(), self.data[dependency_type].clone())) + } + + // ... and in `target..(build-/dev-)dependencies`. + let target_sections = self + .data + .as_table() + .get("target") + .and_then(toml_edit::Item::as_table_like) + .into_iter() + .flat_map(toml_edit::TableLike::iter) + .filter_map(|(target_name, target_table)| { + let dependency_table = target_table.get(dependency_type)?; + dependency_table.as_table_like().map(|_| { + ( + table.clone().set_target(target_name), + dependency_table.clone(), + ) + }) + }); + + sections.extend(target_sections); + } + + sections + } + + pub fn get_legacy_sections(&self) -> Vec { + let mut result = Vec::new(); + + for dependency_type in ["dev_dependencies", "build_dependencies"] { + if self.data.contains_key(dependency_type) { + result.push(dependency_type.to_owned()); + } + + // ... and in `target..(build-/dev-)dependencies`. + result.extend( + self.data + .as_table() + .get("target") + .and_then(toml_edit::Item::as_table_like) + .into_iter() + .flat_map(toml_edit::TableLike::iter) + .filter_map(|(target_name, target_table)| { + if target_table.as_table_like()?.contains_key(dependency_type) { + Some(format!("target.{target_name}.{dependency_type}")) + } else { + None + } + }), + ); + } + result + } +} + +impl str::FromStr for Manifest { + type Err = anyhow::Error; + + /// Read manifest data from string + fn from_str(input: &str) -> ::std::result::Result { + let d: toml_edit::Document = input.parse().context("Manifest not valid TOML")?; + + Ok(Manifest { data: d }) + } +} + +impl std::fmt::Display for Manifest { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + self.data.fmt(f) + } +} + +/// An editable Cargo manifest that is available locally. +#[derive(Debug)] +pub struct LocalManifest { + /// Path to the manifest. + pub path: PathBuf, + /// Manifest contents. 
+ pub manifest: Manifest, +} + +impl Deref for LocalManifest { + type Target = Manifest; + + fn deref(&self) -> &Manifest { + &self.manifest + } +} + +impl DerefMut for LocalManifest { + fn deref_mut(&mut self) -> &mut Manifest { + &mut self.manifest + } +} + +impl LocalManifest { + /// Construct the `LocalManifest` corresponding to the `Path` provided.. + pub fn try_new(path: &Path) -> CargoResult { + if !path.is_absolute() { + anyhow::bail!("can only edit absolute paths, got {}", path.display()); + } + let data = cargo_util::paths::read(&path)?; + let manifest = data.parse().context("Unable to parse Cargo.toml")?; + Ok(LocalManifest { + manifest, + path: path.to_owned(), + }) + } + + /// Write changes back to the file. + pub fn write(&self) -> CargoResult<()> { + if !self.manifest.data.contains_key("package") + && !self.manifest.data.contains_key("project") + { + if self.manifest.data.contains_key("workspace") { + anyhow::bail!( + "found virtual manifest at {}, but this command requires running against an \ + actual package in this workspace.", + self.path.display() + ); + } else { + anyhow::bail!( + "missing expected `package` or `project` fields in {}", + self.path.display() + ); + } + } + + let s = self.manifest.data.to_string(); + let new_contents_bytes = s.as_bytes(); + + cargo_util::paths::write(&self.path, new_contents_bytes) + } + + /// Lookup a dependency. + pub fn get_dependency_versions<'s>( + &'s self, + dep_key: &'s str, + ) -> impl Iterator)> + 's { + let crate_root = self.path.parent().expect("manifest path is absolute"); + self.get_sections() + .into_iter() + .filter_map(move |(table_path, table)| { + let table = table.into_table().ok()?; + Some( + table + .into_iter() + .filter_map(|(key, item)| { + if key.as_str() == dep_key { + Some((table_path.clone(), key, item)) + } else { + None + } + }) + .collect::>(), + ) + }) + .flatten() + .map(move |(table_path, dep_key, dep_item)| { + let dep = Dependency::from_toml(crate_root, &dep_key, &dep_item); + (table_path, dep) + }) + } + + /// Add entry to a Cargo.toml. + pub fn insert_into_table( + &mut self, + table_path: &[String], + dep: &Dependency, + ) -> CargoResult<()> { + let crate_root = self + .path + .parent() + .expect("manifest path is absolute") + .to_owned(); + let dep_key = dep.toml_key(); + + let table = self.get_table_mut(table_path)?; + if let Some((mut dep_key, dep_item)) = table + .as_table_like_mut() + .unwrap() + .get_key_value_mut(dep_key) + { + dep.update_toml(&crate_root, &mut dep_key, dep_item); + } else { + let new_dependency = dep.to_toml(&crate_root); + table[dep_key] = new_dependency; + } + if let Some(t) = table.as_inline_table_mut() { + t.fmt() + } + + Ok(()) + } + + /// Remove entry from a Cargo.toml. + pub fn remove_from_table(&mut self, table_path: &[String], name: &str) -> CargoResult<()> { + let parent_table = self.get_table_mut(table_path)?; + + let dep = parent_table + .get_mut(name) + .filter(|t| !t.is_none()) + .ok_or_else(|| non_existent_dependency_err(name, table_path.join(".")))?; + + // remove the dependency + *dep = toml_edit::Item::None; + + // remove table if empty + if parent_table.as_table_like().unwrap().is_empty() { + *parent_table = toml_edit::Item::None; + } + + Ok(()) + } + + /// Remove references to `dep_key` if its no longer present. 
+ pub fn gc_dep(&mut self, dep_key: &str) { + let explicit_dep_activation = self.is_explicit_dep_activation(dep_key); + let status = self.dep_status(dep_key); + + if let Some(toml_edit::Item::Table(feature_table)) = + self.data.as_table_mut().get_mut("features") + { + for (_feature, mut feature_values) in feature_table.iter_mut() { + if let toml_edit::Item::Value(toml_edit::Value::Array(feature_values)) = + &mut feature_values + { + fix_feature_activations( + feature_values, + dep_key, + status, + explicit_dep_activation, + ); + } + } + } + } + + fn is_explicit_dep_activation(&self, dep_key: &str) -> bool { + if let Some(toml_edit::Item::Table(feature_table)) = self.data.as_table().get("features") { + for values in feature_table + .iter() + .map(|(_, a)| a) + .filter_map(|i| i.as_value()) + .filter_map(|v| v.as_array()) + { + for value in values.iter().filter_map(|v| v.as_str()) { + let value = FeatureValue::new(InternedString::new(value)); + if let FeatureValue::Dep { dep_name } = &value { + if dep_name.as_str() == dep_key { + return true; + } + } + } + } + } + + false + } + + fn dep_status(&self, dep_key: &str) -> DependencyStatus { + let mut status = DependencyStatus::None; + for (_, tbl) in self.get_sections() { + if let toml_edit::Item::Table(tbl) = tbl { + if let Some(dep_item) = tbl.get(dep_key) { + let optional = dep_item + .get("optional") + .and_then(|i| i.as_value()) + .and_then(|i| i.as_bool()) + .unwrap_or(false); + if optional { + return DependencyStatus::Optional; + } else { + status = DependencyStatus::Required; + } + } + } + } + status + } +} + +impl std::fmt::Display for LocalManifest { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + self.manifest.fmt(f) + } +} + +#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord)] +enum DependencyStatus { + None, + Optional, + Required, +} + +fn fix_feature_activations( + feature_values: &mut toml_edit::Array, + dep_key: &str, + status: DependencyStatus, + explicit_dep_activation: bool, +) { + let remove_list: Vec = feature_values + .iter() + .enumerate() + .filter_map(|(idx, value)| value.as_str().map(|s| (idx, s))) + .filter_map(|(idx, value)| { + let parsed_value = FeatureValue::new(InternedString::new(value)); + match status { + DependencyStatus::None => match (parsed_value, explicit_dep_activation) { + (FeatureValue::Feature(dep_name), false) + | (FeatureValue::Dep { dep_name }, _) + | (FeatureValue::DepFeature { dep_name, .. }, _) => dep_name == dep_key, + _ => false, + }, + DependencyStatus::Optional => false, + DependencyStatus::Required => match (parsed_value, explicit_dep_activation) { + (FeatureValue::Feature(dep_name), false) + | (FeatureValue::Dep { dep_name }, _) => dep_name == dep_key, + (FeatureValue::Feature(_), true) | (FeatureValue::DepFeature { .. }, _) => { + false + } + }, + } + .then(|| idx) + }) + .collect(); + + // Remove found idx in revers order so we don't invalidate the idx. 
+ for idx in remove_list.iter().rev() { + feature_values.remove(*idx); + } + + if status == DependencyStatus::Required { + for value in feature_values.iter_mut() { + let parsed_value = if let Some(value) = value.as_str() { + FeatureValue::new(InternedString::new(value)) + } else { + continue; + }; + if let FeatureValue::DepFeature { + dep_name, + dep_feature, + weak, + } = parsed_value + { + if dep_name == dep_key && weak { + *value = format!("{dep_name}/{dep_feature}").into(); + } + } + } + } + + feature_values.fmt(); +} + +pub fn str_or_1_len_table(item: &toml_edit::Item) -> bool { + item.is_str() || item.as_table_like().map(|t| t.len() == 1).unwrap_or(false) +} + +fn parse_manifest_err() -> anyhow::Error { + anyhow::format_err!("unable to parse external Cargo.toml") +} + +fn non_existent_table_err(table: impl std::fmt::Display) -> anyhow::Error { + anyhow::format_err!("the table `{table}` could not be found.") +} + +fn non_existent_dependency_err( + name: impl std::fmt::Display, + table: impl std::fmt::Display, +) -> anyhow::Error { + anyhow::format_err!("the dependency `{name}` could not be found in `{table}`.") +} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/util/toml_mut/mod.rs cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/util/toml_mut/mod.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/src/cargo/util/toml_mut/mod.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/cargo/util/toml_mut/mod.rs 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,13 @@ +//! Utilities for in-place editing of Cargo.toml manifests. +//! +//! These utilities operate only on the level of a TOML document, and generally +//! do not perform any processing of information beyond what is required for +//! editing. For more comprehensive usage of manifests, see +//! [`Manifest`](crate::core::manifest::Manifest). +//! +//! In most cases, the entrypoint for editing is +//! [`LocalManifest`](crate::util::toml_mut::manifest::LocalManifest), +//! which contains editing functionality for a given manifest's dependencies. + +pub mod dependency; +pub mod manifest; diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/doc/contrib/src/process/index.md cargo-0.67.1+ds0ubuntu0.libgit2/src/doc/contrib/src/process/index.md --- cargo-0.66.0+ds0ubuntu0.libgit2/src/doc/contrib/src/process/index.md 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/doc/contrib/src/process/index.md 2023-01-10 13:41:19.000000000 +0000 @@ -95,18 +95,19 @@ The Cargo project uses several bots: * [GitHub Actions] are used to automatically run all tests for each PR. -* [rust-highfive] automatically assigns reviewers for PRs. +* [triagebot] automatically assigns reviewers for PRs, see [Assignment] for + how to configure. * [bors] is used to merge PRs. See [The merging process]. * [triagebot] is used for assigning issues to non-members, see [Issue assignment](#issue-assignment). * [rfcbot] is used for making asynchronous decisions by team members. 
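The new `toml_mut` module above (dependency.rs, manifest.rs, mod.rs) is the in-place manifest editor behind `cargo add` and the new `cargo remove`. Below is a minimal sketch of how its pieces might be combined, assuming the paths given in the `mod.rs` docs are reachable from the `cargo` crate and that `CargoResult` is the usual anyhow-based alias; the method names come from the code above, while the manifest path, dependency name, and version are hypothetical.

```rust
// A minimal sketch, not a definitive API: add `serde = "1.0"` to a manifest
// using the types introduced in the patch above.
use std::path::Path;

use cargo::util::toml_mut::dependency::{Dependency, RegistrySource};
use cargo::util::toml_mut::manifest::{DepTable, LocalManifest};

fn add_registry_dep(manifest_path: &Path) -> anyhow::Result<()> {
    // `LocalManifest::try_new` insists on an absolute path to Cargo.toml.
    let mut manifest = LocalManifest::try_new(manifest_path)?;

    // Describe `serde = "1.0"` coming from a registry.
    let dep = Dependency::new("serde").set_source(RegistrySource::new("1.0"));

    // A normal dependency lives under `[dependencies]`; `DepTable::to_table`
    // yields that table path as segments.
    let table_path: Vec<String> = DepTable::new()
        .to_table()
        .into_iter()
        .map(String::from)
        .collect();

    // Insert or update the entry, then write the edited TOML back to disk.
    manifest.insert_into_table(&table_path, &dep)?;
    manifest.write()
}
```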
-[rust-highfive]: https://github.com/rust-highfive [bors]: https://buildbot2.rust-lang.org/homu/ [The merging process]: working-on-cargo.md#the-merging-process [GitHub Actions]: https://github.com/features/actions [triagebot]: https://github.com/rust-lang/triagebot/wiki [rfcbot]: https://github.com/rust-lang/rfcbot-rs +[Assignment]: https://github.com/rust-lang/triagebot/wiki/Assignment ## Issue assignment diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/doc/contrib/src/process/working-on-cargo.md cargo-0.67.1+ds0ubuntu0.libgit2/src/doc/contrib/src/process/working-on-cargo.md --- cargo-0.66.0+ds0ubuntu0.libgit2/src/doc/contrib/src/process/working-on-cargo.md 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/doc/contrib/src/process/working-on-cargo.md 2023-01-10 13:41:19.000000000 +0000 @@ -110,7 +110,7 @@ #1234 to the PR. When the PR is merged, GitHub will automatically close the issue. -The [rust-highfive] bot will automatically assign a reviewer for the PR. It +[`@rustbot`] will automatically assign a reviewer for the PR. It may take at least a few days for someone to respond. If you don't get a response in over a week, feel free to ping the assigned reviewer. @@ -162,7 +162,6 @@ [how-to-clone]: https://docs.github.com/en/github/creating-cloning-and-archiving-repositories/cloning-a-repository [Testing chapter]: ../tests/index.md [GitHub's keywords]: https://docs.github.com/en/github/managing-your-work-on-github/linking-a-pull-request-to-an-issue -[rust-highfive]: https://github.com/rust-highfive [bors]: https://buildbot2.rust-lang.org/homu/ [`@bors`]: https://github.com/bors [homu-cargo]: https://buildbot2.rust-lang.org/homu/queue/cargo diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/doc/man/cargo-add.md cargo-0.67.1+ds0ubuntu0.libgit2/src/doc/man/cargo-add.md --- cargo-0.66.0+ds0ubuntu0.libgit2/src/doc/man/cargo-add.md 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/doc/man/cargo-add.md 2023-01-10 13:41:19.000000000 +0000 @@ -91,6 +91,10 @@ {{#options}} +{{#option "`--dry-run`" }} +Don't actually write the manifest +{{/option}} + {{#option "`--rename` _name_" }} [Rename](../reference/specifying-dependencies.html#renaming-dependencies-in-cargotoml) the dependency. {{/option}} @@ -132,6 +136,12 @@ {{#options}} {{> options-manifest-path }} + +{{#option "`-p` _spec_" "`--package` _spec_" }} +Add dependencies to only the specified package. +{{/option}} + +{{> options-locked }} {{/options}} {{> section-options-common }} @@ -159,4 +169,4 @@ cargo add serde serde_json -F serde/derive ## SEE ALSO -{{man "cargo" 1}} +{{man "cargo" 1}}, {{man "cargo-remove" 1}} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/doc/man/cargo-locate-project.md cargo-0.67.1+ds0ubuntu0.libgit2/src/doc/man/cargo-locate-project.md --- cargo-0.66.0+ds0ubuntu0.libgit2/src/doc/man/cargo-locate-project.md 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/doc/man/cargo-locate-project.md 2023-01-10 13:41:19.000000000 +0000 @@ -10,8 +10,14 @@ ## DESCRIPTION -This command will print a JSON object to stdout with the full path to the -`Cargo.toml` manifest. +This command will print a JSON object to stdout with the full path to the manifest. The +manifest is found by searching upward for a file named `Cargo.toml` starting from the current +working directory. + +If the project happens to be a part of a workspace, the manifest of the project, rather than +the workspace root, is output. This can be overriden by the `--workspace` flag. 
The root +workspace is found by traversing further upward or by using the field `package.workspace` after +locating the manifest of a workspace member. ## OPTIONS diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/doc/man/cargo-remove.md cargo-0.67.1+ds0ubuntu0.libgit2/src/doc/man/cargo-remove.md --- cargo-0.66.0+ds0ubuntu0.libgit2/src/doc/man/cargo-remove.md 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/doc/man/cargo-remove.md 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,92 @@ +# cargo-remove(1) +{{*set actionverb="Remove"}} +{{*set nouns="removes"}} + +## NAME + +cargo-remove - Remove dependencies from a Cargo.toml manifest file + +## SYNOPSIS + +`cargo remove` [_options_] _dependency_... + +## DESCRIPTION + +Remove one or more dependencies from a `Cargo.toml` manifest. + +## OPTIONS + +### Section options + +{{#options}} + +{{#option "`--dev`" }} +Remove as a [development dependency](../reference/specifying-dependencies.html#development-dependencies). +{{/option}} + +{{#option "`--build`" }} +Remove as a [build dependency](../reference/specifying-dependencies.html#build-dependencies). +{{/option}} + +{{#option "`--target` _target_" }} +Remove as a dependency to the [given target platform](../reference/specifying-dependencies.html#platform-specific-dependencies). +{{/option}} + +{{/options}} + +### Miscellaneous Options + +{{#options}} + +{{#option "`--dry-run`" }} +Don't actually write to the manifest. +{{/option}} + +{{/options}} + +### Display Options + +{{#options}} +{{> options-display }} +{{/options}} + +### Manifest Options + +{{#options}} +{{> options-manifest-path }} + +{{> options-locked }} +{{/options}} + +### Package Selection + +{{#options}} + +{{#option "`-p` _spec_..." "`--package` _spec_..." }} +Package to remove from. +{{/option}} + +{{/options}} + +{{> section-options-common }} + +{{> section-environment }} + +{{> section-exit-status }} + +## EXAMPLES + +1. Remove `regex` as a dependency + + cargo remove regex + +2. Remove `trybuild` as a dev-dependency + + cargo remove --dev trybuild + +3. Remove `nom` from the `x86_64-pc-windows-gnu` dependencies table + + cargo remove --target x86_64-pc-windows-gnu nom + +## SEE ALSO +{{man "cargo" 1}}, {{man "cargo-add" 1}} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/doc/man/cargo-tree.md cargo-0.67.1+ds0ubuntu0.libgit2/src/doc/man/cargo-tree.md --- cargo-0.66.0+ds0ubuntu0.libgit2/src/doc/man/cargo-tree.md 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/doc/man/cargo-tree.md 2023-01-10 13:41:19.000000000 +0000 @@ -53,6 +53,23 @@ can be helpful to use `-i` flag to show how the features flow into a package. See the examples below for more detail. +### Feature Unification + +This command shows a graph much closer to a feature-unified graph Cargo will +build, rather than what you list in `Cargo.toml`. For instance, if you specify +the same dependency in both `[dependencies]` and `[dev-dependencies]` but with +different features on. This command may merge all features and show a `(*)` on +one of the dependency to indicate the duplicate. + +As a result, for a mostly equivalent overview of what `cargo build` does, +`cargo tree -e normal,build` is pretty close; for a mostly equivalent overview +of what `cargo test` does, `cargo tree` is pretty close. However, it doesn't +guarantee the exact equivalence to what Cargo is going to build, since a +compilation is complex and depends on lots of different factors. 
+ +To learm more about feature unification, check out this +[dedicated section](../reference/features.html#feature-unification). + ## OPTIONS ### Tree Options diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/doc/man/generated_txt/cargo-add.txt cargo-0.67.1+ds0ubuntu0.libgit2/src/doc/man/generated_txt/cargo-add.txt --- cargo-0.66.0+ds0ubuntu0.libgit2/src/doc/man/generated_txt/cargo-add.txt 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/doc/man/generated_txt/cargo-add.txt 2023-01-10 13:41:19.000000000 +0000 @@ -79,6 +79,9 @@ Dependency options + --dry-run + Don't actually write the manifest + --rename name Rename @@ -137,6 +140,35 @@ Path to the Cargo.toml file. By default, Cargo searches for the Cargo.toml file in the current directory or any parent directory. + -p spec, --package spec + Add dependencies to only the specified package. + + --frozen, --locked + Either of these flags requires that the Cargo.lock file is + up-to-date. If the lock file is missing, or it needs to be updated, + Cargo will exit with an error. The --frozen flag also prevents Cargo + from attempting to access the network to determine if it is + out-of-date. + + These may be used in environments where you want to assert that the + Cargo.lock file is up-to-date (such as a CI build) or want to avoid + network access. + + --offline + Prevents Cargo from accessing the network for any reason. Without + this flag, Cargo will stop with an error if it needs to access the + network and the network is not available. With this flag, Cargo will + attempt to proceed without the network if possible. + + Beware that this may result in different dependency resolution than + online mode. Cargo will restrict itself to crates that are + downloaded locally, even if there might be a newer version as + indicated in the local copy of the index. See the cargo-fetch(1) + command to download dependencies before going offline. + + May also be specified with the net.offline config value + . + Common Options +toolchain If Cargo has been installed with rustup, and the first argument to @@ -188,5 +220,5 @@ cargo add serde serde_json -F serde/derive SEE ALSO - cargo(1) + cargo(1), cargo-remove(1) diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/doc/man/generated_txt/cargo-locate-project.txt cargo-0.67.1+ds0ubuntu0.libgit2/src/doc/man/generated_txt/cargo-locate-project.txt --- cargo-0.66.0+ds0ubuntu0.libgit2/src/doc/man/generated_txt/cargo-locate-project.txt 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/doc/man/generated_txt/cargo-locate-project.txt 2023-01-10 13:41:19.000000000 +0000 @@ -9,7 +9,14 @@ DESCRIPTION This command will print a JSON object to stdout with the full path to - the Cargo.toml manifest. + the manifest. The manifest is found by searching upward for a file named + Cargo.toml starting from the current working directory. + + If the project happens to be a part of a workspace, the manifest of the + project, rather than the workspace root, is output. This can be + overriden by the --workspace flag. The root workspace is found by + traversing further upward or by using the field package.workspace after + locating the manifest of a workspace member. 
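As a quick illustration of the lookup described above, with hypothetical paths, running the command inside a workspace member prints that member's manifest as a single JSON object, and `--workspace` switches to the root manifest:

```console
$ cargo locate-project
{"root":"/home/user/ws/member/Cargo.toml"}
$ cargo locate-project --workspace
{"root":"/home/user/ws/Cargo.toml"}
```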
OPTIONS --workspace diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/doc/man/generated_txt/cargo-remove.txt cargo-0.67.1+ds0ubuntu0.libgit2/src/doc/man/generated_txt/cargo-remove.txt --- cargo-0.66.0+ds0ubuntu0.libgit2/src/doc/man/generated_txt/cargo-remove.txt 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/doc/man/generated_txt/cargo-remove.txt 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,138 @@ +CARGO-REMOVE(1) + +NAME + cargo-remove - Remove dependencies from a Cargo.toml manifest file + +SYNOPSIS + cargo remove [options] dependency... + +DESCRIPTION + Remove one or more dependencies from a Cargo.toml manifest. + +OPTIONS + Section options + --dev + Remove as a development dependency + . + + --build + Remove as a build dependency + . + + --target target + Remove as a dependency to the given target platform + . + + Miscellaneous Options + --dry-run + Don't actually write to the manifest. + + Display Options + -v, --verbose + Use verbose output. May be specified twice for "very verbose" output + which includes extra output such as dependency warnings and build + script output. May also be specified with the term.verbose config + value . + + -q, --quiet + Do not print cargo log messages. May also be specified with the + term.quiet config value + . + + --color when + Control when colored output is used. Valid values: + + o auto (default): Automatically detect if color support is + available on the terminal. + + o always: Always display colors. + + o never: Never display colors. + + May also be specified with the term.color config value + . + + Manifest Options + --manifest-path path + Path to the Cargo.toml file. By default, Cargo searches for the + Cargo.toml file in the current directory or any parent directory. + + --frozen, --locked + Either of these flags requires that the Cargo.lock file is + up-to-date. If the lock file is missing, or it needs to be updated, + Cargo will exit with an error. The --frozen flag also prevents Cargo + from attempting to access the network to determine if it is + out-of-date. + + These may be used in environments where you want to assert that the + Cargo.lock file is up-to-date (such as a CI build) or want to avoid + network access. + + --offline + Prevents Cargo from accessing the network for any reason. Without + this flag, Cargo will stop with an error if it needs to access the + network and the network is not available. With this flag, Cargo will + attempt to proceed without the network if possible. + + Beware that this may result in different dependency resolution than + online mode. Cargo will restrict itself to crates that are + downloaded locally, even if there might be a newer version as + indicated in the local copy of the index. See the cargo-fetch(1) + command to download dependencies before going offline. + + May also be specified with the net.offline config value + . + + Package Selection + -p spec..., --package spec... + Package to remove from. + + Common Options + +toolchain + If Cargo has been installed with rustup, and the first argument to + cargo begins with +, it will be interpreted as a rustup toolchain + name (such as +stable or +nightly). See the rustup documentation + for more + information about how toolchain overrides work. + + --config KEY=VALUE or PATH + Overrides a Cargo configuration value. The argument should be in + TOML syntax of KEY=VALUE, or provided as a path to an extra + configuration file. This flag may be specified multiple times. 
See + the command-line overrides section + + for more information. + + -h, --help + Prints help information. + + -Z flag + Unstable (nightly-only) flags to Cargo. Run cargo -Z help for + details. + +ENVIRONMENT + See the reference + + for details on environment variables that Cargo reads. + +EXIT STATUS + o 0: Cargo succeeded. + + o 101: Cargo failed to complete. + +EXAMPLES + 1. Remove regex as a dependency + + cargo remove regex + + 2. Remove trybuild as a dev-dependency + + cargo remove --dev trybuild + + 3. Remove nom from the x86_64-pc-windows-gnu dependencies table + + cargo remove --target x86_64-pc-windows-gnu nom + +SEE ALSO + cargo(1), cargo-add(1) + diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/doc/man/generated_txt/cargo-tree.txt cargo-0.67.1+ds0ubuntu0.libgit2/src/doc/man/generated_txt/cargo-tree.txt --- cargo-0.66.0+ds0ubuntu0.libgit2/src/doc/man/generated_txt/cargo-tree.txt 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/doc/man/generated_txt/cargo-tree.txt 2023-01-10 13:41:19.000000000 +0000 @@ -44,6 +44,25 @@ it can be helpful to use -i flag to show how the features flow into a package. See the examples below for more detail. + Feature Unification + This command shows a graph much closer to a feature-unified graph Cargo + will build, rather than what you list in Cargo.toml. For instance, if + you specify the same dependency in both [dependencies] and + [dev-dependencies] but with different features on. This command may + merge all features and show a (*) on one of the dependency to indicate + the duplicate. + + As a result, for a mostly equivalent overview of what cargo build does, + cargo tree -e normal,build is pretty close; for a mostly equivalent + overview of what cargo test does, cargo tree is pretty close. However, + it doesn't guarantee the exact equivalence to what Cargo is going to + build, since a compilation is complex and depends on lots of different + factors. + + To learm more about feature unification, check out this dedicated + section + . + OPTIONS Tree Options -i spec, --invert spec diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/doc/src/appendix/git-authentication.md cargo-0.67.1+ds0ubuntu0.libgit2/src/doc/src/appendix/git-authentication.md --- cargo-0.66.0+ds0ubuntu0.libgit2/src/doc/src/appendix/git-authentication.md 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/doc/src/appendix/git-authentication.md 2023-01-10 13:41:19.000000000 +0000 @@ -58,9 +58,34 @@ > used by Cargo's built-in SSH library. More advanced requirements should use > [`net.git-fetch-with-cli`]. +### SSH Known Hosts + +When connecting to an SSH host, Cargo must verify the identity of the host +using "known hosts", which are a list of host keys. Cargo can look for these +known hosts in OpenSSH-style `known_hosts` files located in their standard +locations (`.ssh/known_hosts` in your home directory, or +`/etc/ssh/ssh_known_hosts` on Unix-like platforms or +`%PROGRAMDATA%\ssh\ssh_known_hosts` on Windows). More information about these +files can be found in the [sshd man page]. Alternatively, keys may be +configured in a Cargo configuration file with [`net.ssh.known-hosts`]. + +When connecting to an SSH host before the known hosts has been configured, +Cargo will display an error message instructing you how to add the host key. +This also includes a "fingerprint", which is a smaller hash of the host key, +which should be easier to visually verify. 
The server administrator can get +the fingerprint by running `ssh-keygen` against the public key (for example, +`ssh-keygen -l -f /etc/ssh/ssh_host_ecdsa_key.pub`). Well-known sites may +publish their fingerprints on the web; for example GitHub posts theirs at +. + +Cargo comes with the host keys for [github.com](https://github.com) built-in. +If those ever change, you can add the new keys to the config or known_hosts file. + [`credential.helper`]: https://git-scm.com/book/en/v2/Git-Tools-Credential-Storage [`net.git-fetch-with-cli`]: ../reference/config.md#netgit-fetch-with-cli +[`net.ssh.known-hosts`]: ../reference/config.md#netsshknown-hosts [GCM]: https://github.com/microsoft/Git-Credential-Manager-Core/ [PuTTY]: https://www.chiark.greenend.org.uk/~sgtatham/putty/ [Microsoft installation documentation]: https://docs.microsoft.com/en-us/windows-server/administration/openssh/openssh_install_firstuse [key management]: https://docs.microsoft.com/en-us/windows-server/administration/openssh/openssh_keymanagement +[sshd man page]: https://man.openbsd.org/sshd#SSH_KNOWN_HOSTS_FILE_FORMAT diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/doc/src/commands/cargo-add.md cargo-0.67.1+ds0ubuntu0.libgit2/src/doc/src/commands/cargo-add.md --- cargo-0.66.0+ds0ubuntu0.libgit2/src/doc/src/commands/cargo-add.md 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/doc/src/commands/cargo-add.md 2023-01-10 13:41:19.000000000 +0000 @@ -97,6 +97,10 @@
+
--dry-run
+
Don't actually write the manifest
+ +
--rename name
Rename the dependency.
@@ -168,6 +172,36 @@ Cargo.toml file in the current directory or any parent directory. + +
-p spec
+
--package spec
+
Add dependencies to only the specified package.
+ + +
--frozen
+
--locked
+
Either of these flags requires that the Cargo.lock file is +up-to-date. If the lock file is missing, or it needs to be updated, Cargo will +exit with an error. The --frozen flag also prevents Cargo from +attempting to access the network to determine if it is out-of-date.

+

These may be used in environments where you want to assert that the +Cargo.lock file is up-to-date (such as a CI build) or want to avoid network +access.

+ + +
--offline
+
Prevents Cargo from accessing the network for any reason. Without this +flag, Cargo will stop with an error if it needs to access the network and +the network is not available. With this flag, Cargo will attempt to +proceed without the network if possible.

+

Beware that this may result in different dependency resolution than online +mode. Cargo will restrict itself to crates that are downloaded locally, even +if there might be a newer version as indicated in the local copy of the index. +See the cargo-fetch(1) command to download dependencies before going +offline.

+

May also be specified with the net.offline config value.

+ +
### Common Options @@ -231,4 +265,4 @@ cargo add serde serde_json -F serde/derive ## SEE ALSO -[cargo(1)](cargo.html) +[cargo(1)](cargo.html), [cargo-remove(1)](cargo-remove.html) diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/doc/src/commands/cargo-locate-project.md cargo-0.67.1+ds0ubuntu0.libgit2/src/doc/src/commands/cargo-locate-project.md --- cargo-0.66.0+ds0ubuntu0.libgit2/src/doc/src/commands/cargo-locate-project.md 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/doc/src/commands/cargo-locate-project.md 2023-01-10 13:41:19.000000000 +0000 @@ -10,8 +10,14 @@ ## DESCRIPTION -This command will print a JSON object to stdout with the full path to the -`Cargo.toml` manifest. +This command will print a JSON object to stdout with the full path to the manifest. The +manifest is found by searching upward for a file named `Cargo.toml` starting from the current +working directory. + +If the project happens to be a part of a workspace, the manifest of the project, rather than +the workspace root, is output. This can be overriden by the `--workspace` flag. The root +workspace is found by traversing further upward or by using the field `package.workspace` after +locating the manifest of a workspace member. ## OPTIONS diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/doc/src/commands/cargo-remove.md cargo-0.67.1+ds0ubuntu0.libgit2/src/doc/src/commands/cargo-remove.md --- cargo-0.66.0+ds0ubuntu0.libgit2/src/doc/src/commands/cargo-remove.md 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/doc/src/commands/cargo-remove.md 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,182 @@ +# cargo-remove(1) + + + +## NAME + +cargo-remove - Remove dependencies from a Cargo.toml manifest file + +## SYNOPSIS + +`cargo remove` [_options_] _dependency_... + +## DESCRIPTION + +Remove one or more dependencies from a `Cargo.toml` manifest. + +## OPTIONS + +### Section options + +
+ +
--dev
+
Remove as a development dependency.
+ + +
--build
+
Remove as a build dependency.
+ + +
--target target
+
Remove as a dependency to the given target platform.
+ + +
+ +### Miscellaneous Options + +
+ +
--dry-run
+
Don't actually write to the manifest.
+ + +
+ +### Display Options + +
+
-v
+
--verbose
+
Use verbose output. May be specified twice for "very verbose" output which +includes extra output such as dependency warnings and build script output. +May also be specified with the term.verbose +config value.
+ + +
-q
+
--quiet
+
Do not print cargo log messages. +May also be specified with the term.quiet +config value.
+ + +
--color when
+
Control when colored output is used. Valid values:

+
    +
  • auto (default): Automatically detect if color support is available on the +terminal.
  • +
  • always: Always display colors.
  • +
  • never: Never display colors.
  • +
+

May also be specified with the term.color +config value.

+ + +
+ +### Manifest Options + +
+
--manifest-path path
+
Path to the Cargo.toml file. By default, Cargo searches for the +Cargo.toml file in the current directory or any parent directory.
+ + + +
--frozen
+
--locked
+
Either of these flags requires that the Cargo.lock file is +up-to-date. If the lock file is missing, or it needs to be updated, Cargo will +exit with an error. The --frozen flag also prevents Cargo from +attempting to access the network to determine if it is out-of-date.

+

These may be used in environments where you want to assert that the +Cargo.lock file is up-to-date (such as a CI build) or want to avoid network +access.

+ + +
--offline
+
Prevents Cargo from accessing the network for any reason. Without this +flag, Cargo will stop with an error if it needs to access the network and +the network is not available. With this flag, Cargo will attempt to +proceed without the network if possible.

+

Beware that this may result in different dependency resolution than online +mode. Cargo will restrict itself to crates that are downloaded locally, even +if there might be a newer version as indicated in the local copy of the index. +See the cargo-fetch(1) command to download dependencies before going +offline.

+

May also be specified with the net.offline config value.

+ + +
+ +### Package Selection + +
+ +
-p spec...
+
--package spec...
+
Package to remove from.
+ + +
+ +### Common Options + +
+ +
+toolchain
+
If Cargo has been installed with rustup, and the first argument to cargo +begins with +, it will be interpreted as a rustup toolchain name (such +as +stable or +nightly). +See the rustup documentation +for more information about how toolchain overrides work.
+ + +
--config KEY=VALUE or PATH
+
Overrides a Cargo configuration value. The argument should be in TOML syntax of KEY=VALUE, +or provided as a path to an extra configuration file. This flag may be specified multiple times. +See the command-line overrides section for more information.
+ + +
-h
+
--help
+
Prints help information.
+ + +
-Z flag
+
Unstable (nightly-only) flags to Cargo. Run cargo -Z help for details.
+ + +
+ + +## ENVIRONMENT + +See [the reference](../reference/environment-variables.html) for +details on environment variables that Cargo reads. + + +## EXIT STATUS + +* `0`: Cargo succeeded. +* `101`: Cargo failed to complete. + + +## EXAMPLES + +1. Remove `regex` as a dependency + + cargo remove regex + +2. Remove `trybuild` as a dev-dependency + + cargo remove --dev trybuild + +3. Remove `nom` from the `x86_64-pc-windows-gnu` dependencies table + + cargo remove --target x86_64-pc-windows-gnu nom + +## SEE ALSO +[cargo(1)](cargo.html), [cargo-add(1)](cargo-add.html) diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/doc/src/commands/cargo-tree.md cargo-0.67.1+ds0ubuntu0.libgit2/src/doc/src/commands/cargo-tree.md --- cargo-0.66.0+ds0ubuntu0.libgit2/src/doc/src/commands/cargo-tree.md 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/doc/src/commands/cargo-tree.md 2023-01-10 13:41:19.000000000 +0000 @@ -53,6 +53,23 @@ can be helpful to use `-i` flag to show how the features flow into a package. See the examples below for more detail. +### Feature Unification + +This command shows a graph much closer to a feature-unified graph Cargo will +build, rather than what you list in `Cargo.toml`. For instance, if you specify +the same dependency in both `[dependencies]` and `[dev-dependencies]` but with +different features on. This command may merge all features and show a `(*)` on +one of the dependency to indicate the duplicate. + +As a result, for a mostly equivalent overview of what `cargo build` does, +`cargo tree -e normal,build` is pretty close; for a mostly equivalent overview +of what `cargo test` does, `cargo tree` is pretty close. However, it doesn't +guarantee the exact equivalence to what Cargo is going to build, since a +compilation is complex and depends on lots of different factors. + +To learm more about feature unification, check out this +[dedicated section](../reference/features.html#feature-unification). + ## OPTIONS ### Tree Options diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/doc/src/commands/manifest-commands.md cargo-0.67.1+ds0ubuntu0.libgit2/src/doc/src/commands/manifest-commands.md --- cargo-0.66.0+ds0ubuntu0.libgit2/src/doc/src/commands/manifest-commands.md 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/doc/src/commands/manifest-commands.md 2023-01-10 13:41:19.000000000 +0000 @@ -4,6 +4,7 @@ * [cargo locate-project](cargo-locate-project.md) * [cargo metadata](cargo-metadata.md) * [cargo pkgid](cargo-pkgid.md) +* [cargo remove](cargo-remove.md) * [cargo tree](cargo-tree.md) * [cargo update](cargo-update.md) * [cargo vendor](cargo-vendor.md) diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/doc/src/faq.md cargo-0.67.1+ds0ubuntu0.libgit2/src/doc/src/faq.md --- cargo-0.66.0+ds0ubuntu0.libgit2/src/doc/src/faq.md 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/doc/src/faq.md 2023-01-10 13:41:19.000000000 +0000 @@ -97,7 +97,7 @@ Yes! All commits to Cargo are required to pass the local test suite on Windows. -If, however, you find a Windows issue, we consider it a bug, so [please file an +If you encounter an issue while running on Windows, we consider it a bug, so [please file an issue][3]. 
[3]: https://github.com/rust-lang/cargo/issues diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/doc/src/guide/tests.md cargo-0.67.1+ds0ubuntu0.libgit2/src/doc/src/guide/tests.md --- cargo-0.66.0+ds0ubuntu0.libgit2/src/doc/src/guide/tests.md 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/doc/src/guide/tests.md 2023-01-10 13:41:19.000000000 +0000 @@ -32,7 +32,7 @@ This will run any test with `foo` in its name. `cargo test` runs additional checks as well. It will compile any examples -you’ve included to ensure they are still compiles. It also run documentation +you’ve included to ensure they still compile. It also runs documentation tests to ensure your code samples from documentation comments compiles. Please see the [testing guide][testing] in the Rust documentation for a general view of writing and organizing tests. See [Cargo Targets: Tests] to learn more diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/doc/src/reference/build-script-examples.md cargo-0.67.1+ds0ubuntu0.libgit2/src/doc/src/reference/build-script-examples.md --- cargo-0.66.0+ds0ubuntu0.libgit2/src/doc/src/reference/build-script-examples.md 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/doc/src/reference/build-script-examples.md 2023-01-10 13:41:19.000000000 +0000 @@ -49,6 +49,7 @@ [package] name = "hello-from-generated-code" version = "0.1.0" +edition = "2021" ``` Let’s see what’s inside the build script: diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/doc/src/reference/build-scripts.md cargo-0.67.1+ds0ubuntu0.libgit2/src/doc/src/reference/build-scripts.md --- cargo-0.66.0+ds0ubuntu0.libgit2/src/doc/src/reference/build-scripts.md 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/doc/src/reference/build-scripts.md 2023-01-10 13:41:19.000000000 +0000 @@ -103,17 +103,17 @@ re-run the script. * [`cargo:rerun-if-env-changed=VAR`](#rerun-if-env-changed) — Tells Cargo when to re-run the script. -* [`cargo:rustc-link-arg=FLAG`](#rustc-link-arg) – Passes custom flags to a +* [`cargo:rustc-link-arg=FLAG`](#rustc-link-arg) — Passes custom flags to a linker for benchmarks, binaries, `cdylib` crates, examples, and tests. -* [`cargo:rustc-link-arg-bin=BIN=FLAG`](#rustc-link-arg-bin) – Passes custom +* [`cargo:rustc-link-arg-bin=BIN=FLAG`](#rustc-link-arg-bin) — Passes custom flags to a linker for the binary `BIN`. -* [`cargo:rustc-link-arg-bins=FLAG`](#rustc-link-arg-bins) – Passes custom +* [`cargo:rustc-link-arg-bins=FLAG`](#rustc-link-arg-bins) — Passes custom flags to a linker for binaries. -* [`cargo:rustc-link-arg-tests=FLAG`](#rustc-link-arg-tests) – Passes custom +* [`cargo:rustc-link-arg-tests=FLAG`](#rustc-link-arg-tests) — Passes custom flags to a linker for tests. -* [`cargo:rustc-link-arg-examples=FLAG`](#rustc-link-arg-examples) – Passes custom +* [`cargo:rustc-link-arg-examples=FLAG`](#rustc-link-arg-examples) — Passes custom flags to a linker for examples. -* [`cargo:rustc-link-arg-benches=FLAG`](#rustc-link-arg-benches) – Passes custom +* [`cargo:rustc-link-arg-benches=FLAG`](#rustc-link-arg-benches) — Passes custom flags to a linker for benchmarks. * [`cargo:rustc-link-lib=LIB`](#rustc-link-lib) — Adds a library to link. @@ -443,8 +443,7 @@ prevent running the build script in question altogether and instead supply the metadata ahead of time. -To override a build script, place the following configuration in any acceptable -Cargo [configuration location](config.md). 
+To override a build script, place the following configuration in any acceptable [`config.toml`](config.md) file. ```toml [target.x86_64-unknown-linux-gnu.foo] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/doc/src/reference/config.md cargo-0.67.1+ds0ubuntu0.libgit2/src/doc/src/reference/config.md --- cargo-0.66.0+ds0ubuntu0.libgit2/src/doc/src/reference/config.md 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/doc/src/reference/config.md 2023-01-10 13:41:19.000000000 +0000 @@ -114,6 +114,9 @@ git-fetch-with-cli = true # use the `git` executable for git operations offline = true # do not access the network +[net.ssh] +known-hosts = ["..."] # known SSH host keys + [patch.] # Same keys as for [patch] in Cargo.toml @@ -190,8 +193,9 @@ Environment variables will take precedence over TOML configuration files. Currently only integer, boolean, string and some array values are supported to -be defined by environment variables. Descriptions below indicate which keys -support environment variables. +be defined by environment variables. [Descriptions below](#configuration-keys) +indicate which keys support environment variables and otherwise they are not +supported due to [technicial issues](https://github.com/rust-lang/cargo/issues/5416). In addition to the system above, Cargo recognizes a few other specific [environment variables][env]. @@ -748,6 +752,41 @@ Can be overridden with the `--offline` command-line option. +##### `net.ssh` + +The `[net.ssh]` table contains settings for SSH connections. + +##### `net.ssh.known-hosts` +* Type: array of strings +* Default: see description +* Environment: not supported + +The `known-hosts` array contains a list of SSH host keys that should be +accepted as valid when connecting to an SSH server (such as for SSH git +dependencies). Each entry should be a string in a format similar to OpenSSH +`known_hosts` files. Each string should start with one or more hostnames +separated by commas, a space, the key type name, a space, and the +base64-encoded key. For example: + +```toml +[net.ssh] +known-hosts = [ + "example.com ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIFO4Q5T0UV0SQevair9PFwoxY9dl4pQl3u5phoqJH3cF" +] +``` + +Cargo will attempt to load known hosts keys from common locations supported in +OpenSSH, and will join those with any listed in a Cargo configuration file. +If any matching entry has the correct key, the connection will be allowed. + +Cargo comes with the host keys for [github.com][github-keys] built-in. If +those ever change, you can add the new keys to the config or known_hosts file. + +See [Git Authentication](../appendix/git-authentication.md#ssh-known-hosts) +for more details. + +[github-keys]: https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/githubs-ssh-key-fingerprints + #### `[patch]` Just as you can override dependencies using [`[patch]` in @@ -932,7 +971,7 @@ * Default: none * Environment: not supported -If set, replace this source with the given named source. +If set, replace this source with the given named source or named registry. 
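As an aside on the configuration/environment-variable precedence discussed in the `config.md` hunk above, a hedged illustration (not part of the patch); the value shown is arbitrary.

```toml
# .cargo/config.toml (illustrative)
[net]
offline = true   # setting CARGO_NET_OFFLINE=true in the environment would
                 # take precedence over this file-based value
```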
##### `source..directory` * Type: string (path) diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/doc/src/reference/environment-variables.md cargo-0.67.1+ds0ubuntu0.libgit2/src/doc/src/reference/environment-variables.md --- cargo-0.66.0+ds0ubuntu0.libgit2/src/doc/src/reference/environment-variables.md 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/doc/src/reference/environment-variables.md 2023-01-10 13:41:19.000000000 +0000 @@ -75,9 +75,9 @@ #### Configuration environment variables -Cargo reads environment variables for configuration values. See the -[configuration chapter][config-env] for more details. In summary, the -supported environment variables are: +Cargo reads environment variables for some configuration values. +See the [configuration chapter][config-env] for more details. +In summary, the supported environment variables are: * `CARGO_ALIAS_` — Command aliases, see [`alias`]. * `CARGO_BUILD_JOBS` — Number of parallel jobs, see [`build.jobs`]. diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/doc/src/reference/manifest.md cargo-0.67.1+ds0ubuntu0.libgit2/src/doc/src/reference/manifest.md --- cargo-0.66.0+ds0ubuntu0.libgit2/src/doc/src/reference/manifest.md 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/doc/src/reference/manifest.md 2023-01-10 13:41:19.000000000 +0000 @@ -1,7 +1,10 @@ ## The Manifest Format The `Cargo.toml` file for each package is called its *manifest*. It is written -in the [TOML] format. Every manifest file consists of the following sections: +in the [TOML] format. It contains metadata that is needed to compile the package. Checkout +the `cargo locate-project` section for more detail on how cargo finds the manifest file. + +Every manifest file consists of the following sections: * [`cargo-features`](unstable.md) — Unstable, nightly-only features. * [`[package]`](#the-package-section) — Defines a package. @@ -112,12 +115,18 @@ #### The `authors` field -The optional `authors` field lists people or organizations that are considered +The optional `authors` field lists in an array the people or organizations that are considered the "authors" of the package. The exact meaning is open to interpretation — it may list the original or primary authors, current maintainers, or owners of the package. An optional email address may be included within angled brackets at the end of each author entry. +```toml +[package] +# ... +authors = ["Graydon Hoare", "Fnu Lnu "] +``` + This field is only surfaced in package metadata and in the `CARGO_PKG_AUTHORS` environment variable within `build.rs`. It is not displayed in the [crates.io] user interface. @@ -516,7 +525,7 @@ to the data in `workspace.metadata` if data is missing from `package.metadata`, if that makes sense for the tool in question. 
-[workspace-metadata]: workspaces.md#the-workspacemetadata-table +[workspace-metadata]: workspaces.md#the-metadata-table #### The `default-run` field @@ -603,7 +612,7 @@ "#the-required-features-field-optional": "cargo-targets.html#the-required-features-field", "#building-dynamic-or-static-libraries": "cargo-targets.html#the-crate-type-field", "#the-workspace-section": "workspaces.html#the-workspace-section", - "#virtual-manifest": "workspaces.html", + "#virtual-workspace": "workspaces.html", "#package-selection": "workspaces.html#package-selection", "#the-features-section": "features.html#the-features-section", "#rules": "features.html", diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/doc/src/reference/overriding-dependencies.md cargo-0.67.1+ds0ubuntu0.libgit2/src/doc/src/reference/overriding-dependencies.md --- cargo-0.66.0+ds0ubuntu0.libgit2/src/doc/src/reference/overriding-dependencies.md 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/doc/src/reference/overriding-dependencies.md 2023-01-10 13:41:19.000000000 +0000 @@ -291,6 +291,10 @@ patched with a crate version that already exists in the source, then the source's original crate is replaced. +Cargo only looks at the patch settings in the `Cargo.toml` manifest at the +root of the workspace. Patch settings defined in dependencies will be +ignored. + ### The `[replace]` section > **Note**: `[replace]` is deprecated. You should use the @@ -313,6 +317,10 @@ is overridden the copy it's overridden with must have both the same name and version, but it can come from a different source (e.g., git or a local path). +Cargo only looks at the replace settings in the `Cargo.toml` manifest at the +root of the workspace. Replace settings defined in dependencies will be +ignored. + ### `paths` overrides Sometimes you're only temporarily working on a crate and you don't want to have diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/doc/src/reference/publishing.md cargo-0.67.1+ds0ubuntu0.libgit2/src/doc/src/reference/publishing.md --- cargo-0.66.0+ds0ubuntu0.libgit2/src/doc/src/reference/publishing.md 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/doc/src/reference/publishing.md 2023-01-10 13:41:19.000000000 +0000 @@ -32,8 +32,8 @@ ### Before publishing a new crate -Keep in mind that crate names on [crates.io] are allocated on a first-come-first- -serve basis. Once a crate name is taken, it cannot be used for another crate. +Keep in mind that crate names on [crates.io] are allocated on a first-come-first-serve +basis. Once a crate name is taken, it cannot be used for another crate. Check out the [metadata you can specify](manifest.md) in `Cargo.toml` to ensure your crate can be discovered more easily! 
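To illustrate the note added above that `[patch]` (and `[replace]`) settings are only honored in the workspace-root `Cargo.toml`, a minimal sketch (not part of the patch); the crate name and path are hypothetical.

```toml
# Workspace root Cargo.toml (illustrative). Only the root manifest's [patch]
# table is consulted; patch settings defined in dependencies are ignored.
[patch.crates-io]
uuid = { path = "./vendored/uuid" }
```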
Before publishing, make sure diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/doc/src/reference/registries.md cargo-0.67.1+ds0ubuntu0.libgit2/src/doc/src/reference/registries.md --- cargo-0.66.0+ds0ubuntu0.libgit2/src/doc/src/reference/registries.md 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/doc/src/reference/registries.md 2023-01-10 13:41:19.000000000 +0000 @@ -29,6 +29,7 @@ [package] name = "my-project" version = "0.1.0" +edition = "2021" [dependencies] other-crate = { version = "1.0", registry = "my-registry" } diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/doc/src/reference/resolver.md cargo-0.67.1+ds0ubuntu0.libgit2/src/doc/src/reference/resolver.md --- cargo-0.66.0+ds0ubuntu0.libgit2/src/doc/src/reference/resolver.md 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/doc/src/reference/resolver.md 2023-01-10 13:41:19.000000000 +0000 @@ -428,7 +428,7 @@ resolver = "2" ``` -[virtual workspace]: workspaces.md#virtual-manifest +[virtual workspace]: workspaces.md#virtual-workspace [features-2]: features.md#feature-resolver-version-2 ## Recommendations diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/doc/src/reference/semver.md cargo-0.67.1+ds0ubuntu0.libgit2/src/doc/src/reference/semver.md --- cargo-0.66.0+ds0ubuntu0.libgit2/src/doc/src/reference/semver.md 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/doc/src/reference/semver.md 2023-01-10 13:41:19.000000000 +0000 @@ -391,7 +391,7 @@ fn main() { use updated_crate::E; let x = E::Variant1; - match x { // Error: `Variant2` not covered + match x { // Error: `E::Variant2` not covered E::Variant1 => {} } } diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/doc/src/reference/source-replacement.md cargo-0.67.1+ds0ubuntu0.libgit2/src/doc/src/reference/source-replacement.md --- cargo-0.66.0+ds0ubuntu0.libgit2/src/doc/src/reference/source-replacement.md 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/doc/src/reference/source-replacement.md 2023-01-10 13:41:19.000000000 +0000 @@ -26,6 +26,11 @@ dependencies], and private registry support is described in [the Registries chapter][registries]. +When using source replacement, running commands like `cargo publish` that need to +contact the registry require passing the `--registry` option. This helps avoid +any ambiguity about which registry to contact, and will use the authentication +token for the specified registry. + [overriding dependencies]: overriding-dependencies.md [registries]: registries.md @@ -50,6 +55,9 @@ # The crates.io default source for crates is available under the name # "crates-io", and here we use the `replace-with` key to indicate that it's # replaced with our source above. +# +# The `replace-with` key can also reference an alternative registry name +# defined in the `[registries]` table. [source.crates-io] replace-with = "my-vendor-source" diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/doc/src/reference/specifying-dependencies.md cargo-0.67.1+ds0ubuntu0.libgit2/src/doc/src/reference/specifying-dependencies.md --- cargo-0.66.0+ds0ubuntu0.libgit2/src/doc/src/reference/specifying-dependencies.md 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/doc/src/reference/specifying-dependencies.md 2023-01-10 13:41:19.000000000 +0000 @@ -472,7 +472,7 @@ `[workspace.dependencies]` definition of dependencies. 
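A hedged sketch (not part of the patch) of the `replace-with` behavior documented in the `source-replacement.md` hunk above, where the key references a named registry from the `[registries]` table; the registry name and URL are hypothetical.

```toml
# .cargo/config.toml (illustrative)
[registries.my-registry]
index = "https://my-intranet.example/git/index"

[source.crates-io]
replace-with = "my-registry"
```

With such a replacement in place, commands that contact the registry (for example `cargo publish`) would need an explicit `--registry`, as the hunk above notes.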
```toml -[project] +[package] name = "bar" version = "0.2.0" @@ -489,7 +489,7 @@ [crates.io]: https://crates.io/ [dev-dependencies]: #development-dependencies -[workspace.dependencies]: workspaces.md#the-workspacedependencies-table +[workspace.dependencies]: workspaces.md#the-dependencies-table [optional]: features.md#optional-dependencies [features]: features.md diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/doc/src/reference/unstable.md cargo-0.67.1+ds0ubuntu0.libgit2/src/doc/src/reference/unstable.md --- cargo-0.66.0+ds0ubuntu0.libgit2/src/doc/src/reference/unstable.md 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/doc/src/reference/unstable.md 2023-01-10 13:41:19.000000000 +0000 @@ -99,6 +99,7 @@ * [credential-process](#credential-process) — Adds support for fetching registry tokens from an external authentication program. * [`cargo logout`](#cargo-logout) — Adds the `logout` command to remove the currently saved registry token. * [sparse-registry](#sparse-registry) — Adds support for fetching from static-file HTTP registries (`sparse+`) + * [publish-timeout](#publish-timeout) — Controls the timeout between uploading the crate and being available in the index ### allow-features @@ -841,6 +842,23 @@ The format of the sparse index is identical to a checkout of a git-based index. +### publish-timeout +* Tracking Issue: [11222](https://github.com/rust-lang/cargo/issues/11222) + +The `publish.timeout` key in a config file can be used to control how long +`cargo publish` waits between posting a package to the registry and it being +available in the local index. + +A timeout of `0` prevents any checks from occurring. + +It requires the `-Zpublish-timeout` command-line options to be set. + +```toml +# config.toml +[publish] +timeout = 300 # in seconds +``` + ### credential-process * Tracking Issue: [#8933](https://github.com/rust-lang/cargo/issues/8933) * RFC: [#2730](https://github.com/rust-lang/rfcs/pull/2730) @@ -1083,7 +1101,7 @@ ```toml cargo-features = ["different-binary-name"] -[project] +[package] name = "foo" version = "0.0.1" @@ -1358,7 +1376,7 @@ ### Workspace Inheritance Workspace Inheritance has been stabilized in the 1.64 release. -See [workspace.package](workspaces.md#the-workspacepackage-table), -[workspace.dependencies](workspaces.md#the-workspacedependencies-table), +See [workspace.package](workspaces.md#the-package-table), +[workspace.dependencies](workspaces.md#the-dependencies-table), and [inheriting-a-dependency-from-a-workspace](specifying-dependencies.md#inheriting-a-dependency-from-a-workspace) -for more information. \ No newline at end of file +for more information. diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/doc/src/reference/workspaces.md cargo-0.67.1+ds0ubuntu0.libgit2/src/doc/src/reference/workspaces.md --- cargo-0.66.0+ds0ubuntu0.libgit2/src/doc/src/reference/workspaces.md 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/doc/src/reference/workspaces.md 2023-01-10 13:41:19.000000000 +0000 @@ -1,41 +1,86 @@ ## Workspaces -A *workspace* is a collection of one or more packages that share common -dependency resolution (with a shared `Cargo.lock`), output directory, and -various settings such as profiles. Packages that are part of a workspaces are -called *workspace members*. There are two flavours of workspaces: as root -package or as virtual manifest. - -### Root package - -A workspace can be created by adding a [`[workspace]` -section](#the-workspace-section) to `Cargo.toml`. 
This can be added to a -`Cargo.toml` that already defines a `[package]`, in which case the package is -the *root package* of the workspace. The *workspace root* is the directory -where the workspace's `Cargo.toml` is located. - -### Virtual manifest - -Alternatively, a `Cargo.toml` file can be created with a `[workspace]` section -but without a [`[package]` section][package]. This is called a *virtual -manifest*. This is typically useful when there isn't a "primary" package, or -you want to keep all the packages organized in separate directories. - -### Key features +A *workspace* is a collection of one or more packages, called *workspace +members*, that are managed together. The key points of workspaces are: -* All packages share a common `Cargo.lock` file which resides in the +* Common commands can run across all workspace members, like `cargo check --workspace`. +* All packages share a common [`Cargo.lock`] file which resides in the *workspace root*. * All packages share a common [output directory], which defaults to a directory named `target` in the *workspace root*. +* Sharing package metadata, like with [`workspace.package`](#the-package-table). * The [`[patch]`][patch], [`[replace]`][replace] and [`[profile.*]`][profiles] sections in `Cargo.toml` are only recognized in the *root* manifest, and ignored in member crates' manifests. +In the `Cargo.toml`, the `[workspace]` table supports the following sections: + +* [`[workspace]`](#the-workspace-section) — Defines a workspace. + * [`resolver`](resolver.md#resolver-versions) — Sets the dependency resolver to use. + * [`members`](#the-members-and-exclude-fields) — Packages to include in the workspace. + * [`exclude`](#the-members-and-exclude-fields) — Packages to exclude from the workspace. + * [`default-members`](#the-default-members-field) — Packages to operate on when a specific package wasn't selected. + * [`package`](#the-package-table) — Keys for inheriting in packages. + * [`dependencies`](#the-dependencies-table) — Keys for inheriting in package dependencies. + * [`metadata`](#the-metadata-table) — Extra settings for external tools. +* [`[patch]`](overriding-dependencies.md#the-patch-section) — Override dependencies. +* [`[replace]`](overriding-dependencies.md#the-replace-section) — Override dependencies (deprecated). +* [`[profile]`](profiles.md) — Compiler settings and optimizations. + ### The `[workspace]` section -The `[workspace]` table in `Cargo.toml` defines which packages are members of +To create a workspace, you add the `[workspace]` table to a `Cargo.toml`: +```toml +[workspace] +# ... +``` + +At minimum, a workspace has to have a member, either with a root package or as +a virtual manifest. + +#### Root package + +If the [`[workspace]` section](#the-workspace-section) is added to a +`Cargo.toml` that already defines a `[package]`, the package is +the *root package* of the workspace. The *workspace root* is the directory +where the workspace's `Cargo.toml` is located. + +```toml +[workspace] + +[package] +name = "hello_world" # the name of the package +version = "0.1.0" # the current version, obeying semver +authors = ["Alice ", "Bob "] +``` + + +#### Virtual workspace + +Alternatively, a `Cargo.toml` file can be created with a `[workspace]` section +but without a [`[package]` section][package]. This is called a *virtual +manifest*. This is typically useful when there isn't a "primary" package, or +you want to keep all the packages organized in separate directories. 
+ +```toml +# [PROJECT_DIR]/Cargo.toml +[workspace] +members = ["hello_world"] +``` + +```toml +# [PROJECT_DIR]/hello_world/Cargo.toml +[package] +name = "hello_world" # the name of the package +version = "0.1.0" # the current version, obeying semver +authors = ["Alice ", "Bob "] +``` + +### The `members` and `exclude` fields + +The `members` and `exclude` fields define which packages are members of the workspace: ```toml @@ -56,11 +101,6 @@ in the workspace at all, or using a glob pattern and you want to remove a directory. -An empty `[workspace]` table can be used with a `[package]` to conveniently -create a workspace with the package and all of its path dependencies. - -### Workspace selection - When inside a subdirectory within the workspace, Cargo will automatically search the parent directories for a `Cargo.toml` file with a `[workspace]` definition to determine which workspace to use. The [`package.workspace`] @@ -68,14 +108,17 @@ override this automatic search. The manual setting can be useful if the member is not inside a subdirectory of the workspace root. -### Package selection +#### Package selection In a workspace, package-related cargo commands like [`cargo build`] can use the `-p` / `--package` or `--workspace` command-line flags to determine which packages to operate on. If neither of those flags are specified, Cargo will use the package in the current working directory. If the current directory is -a virtual workspace, it will apply to all members (as if `--workspace` were -specified on the command-line). +a [virtual workspace](#virtual-workspace), it will apply to all members (as if +`--workspace` were specified on the command-line). See also +[`default-members`](#the-default-members-field). + +### The `default-members` field The optional `default-members` key can be specified to set the members to operate on when in the workspace root and the package selection flags are not @@ -89,30 +132,7 @@ When specified, `default-members` must expand to a subset of `members`. -### The `workspace.metadata` table - -The `workspace.metadata` table is ignored by Cargo and will not be warned -about. This section can be used for tools that would like to store workspace -configuration in `Cargo.toml`. For example: - -```toml -[workspace] -members = ["member1", "member2"] - -[workspace.metadata.webcontents] -root = "path/to/webproject" -tool = ["npm", "run", "build"] -# ... -``` - -There is a similar set of tables at the package level at -[`package.metadata`][package-metadata]. While cargo does not specify a -format for the content of either of these tables, it is suggested that -external tools may wish to use them in a consistent fashion, such as referring -to the data in `workspace.metadata` if data is missing from `package.metadata`, -if that makes sense for the tool in question. - -### The `workspace.package` table +### The `package` table The `workspace.package` table is where you define keys that can be inherited by members of a workspace. These keys can be inherited by @@ -157,7 +177,7 @@ documentation.workspace = true ``` -### The `workspace.dependencies` table +### The `dependencies` table The `workspace.dependencies` table is where you define dependencies to be inherited by members of a workspace. @@ -182,7 +202,7 @@ ```toml # [PROJECT_DIR]/bar/Cargo.toml -[project] +[package] name = "bar" version = "0.2.0" @@ -196,7 +216,31 @@ rand.workspace = true ``` +### The `metadata` table + +The `workspace.metadata` table is ignored by Cargo and will not be warned +about. 
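A small illustrative sketch (not part of the patch) combining the `members`, `exclude`, and `default-members` fields described in the `workspaces.md` hunk above; the directory names are hypothetical.

```toml
# [PROJECT_DIR]/Cargo.toml (illustrative)
[workspace]
members = ["crates/*"]
exclude = ["crates/experimental"]
default-members = ["crates/cli"]   # must expand to a subset of `members`
```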
This section can be used for tools that would like to store workspace +configuration in `Cargo.toml`. For example: + +```toml +[workspace] +members = ["member1", "member2"] + +[workspace.metadata.webcontents] +root = "path/to/webproject" +tool = ["npm", "run", "build"] +# ... +``` + +There is a similar set of tables at the package level at +[`package.metadata`][package-metadata]. While cargo does not specify a +format for the content of either of these tables, it is suggested that +external tools may wish to use them in a consistent fashion, such as referring +to the data in `workspace.metadata` if data is missing from `package.metadata`, +if that makes sense for the tool in question. + [package]: manifest.md#the-package-section +[`Cargo.lock`]: ../guide/cargo-toml-vs-cargo-lock.md [package-metadata]: manifest.md#the-metadata-table [output directory]: ../guide/build-cache.md [patch]: overriding-dependencies.md#the-patch-section diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/doc/src/SUMMARY.md cargo-0.67.1+ds0ubuntu0.libgit2/src/doc/src/SUMMARY.md --- cargo-0.66.0+ds0ubuntu0.libgit2/src/doc/src/SUMMARY.md 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/doc/src/SUMMARY.md 2023-01-10 13:41:19.000000000 +0000 @@ -66,6 +66,7 @@ * [cargo locate-project](commands/cargo-locate-project.md) * [cargo metadata](commands/cargo-metadata.md) * [cargo pkgid](commands/cargo-pkgid.md) + * [cargo remove](commands/cargo-remove.md) * [cargo tree](commands/cargo-tree.md) * [cargo update](commands/cargo-update.md) * [cargo vendor](commands/cargo-vendor.md) diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/etc/_cargo cargo-0.67.1+ds0ubuntu0.libgit2/src/etc/_cargo --- cargo-0.66.0+ds0ubuntu0.libgit2/src/etc/_cargo 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/etc/_cargo 2023-01-10 13:41:19.000000000 +0000 @@ -241,6 +241,17 @@ _arguments -s -S $common $manifest ;; + remove | rm) + _arguments -s -A "^--" $common $manifest \ + "--dev[remove as a dev dependency]" \ + "--build[remove as a build dependency]" \ + "--target=[remove as a dependency from the given target platform]" \ + "--dry-run[don't actually write the manifest]" \ + '(-p --package)'{-p+,--package=}'[package to remove from]:package:_cargo_package_names' \ + '1: :_guard "^-*" "crate name"' \ + '*:args:_default' + ;; + run | r) _arguments -s -S $common $parallel $features $msgfmt $triple $target $manifest \ '--example=[name of the bin target]:name:_cargo_example_names' \ diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/etc/cargo.bashcomp.sh cargo-0.67.1+ds0ubuntu0.libgit2/src/etc/cargo.bashcomp.sh --- cargo-0.66.0+ds0ubuntu0.libgit2/src/etc/cargo.bashcomp.sh 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/etc/cargo.bashcomp.sh 2023-01-10 13:41:19.000000000 +0000 @@ -73,6 +73,8 @@ local opt__pkgid="$opt_common $opt_mani $opt_lock $opt_pkg" local opt__publish="$opt_common $opt_mani $opt_feat $opt_lock $opt_parallel --allow-dirty --dry-run --token --no-verify --index --registry --target --target-dir" local opt__read_manifest="$opt_help $opt_quiet $opt_verbose $opt_mani $opt_color $opt_lock --no-deps" + local opt__remove="$opt_common $opt_pkg $opt_lock $opt_mani --dry-run --dev --build --target" + local opt__rm="$opt__remove" local opt__report="$opt_help $opt_verbose $opt_color future-incompat future-incompatibilities" local opt__report__future_incompat="$opt_help $opt_verbose $opt_color $opt_pkg --id" local opt__run="$opt_common $opt_pkg $opt_feat $opt_mani $opt_lock $opt_parallel 
--message-format --target --bin --example --release --target-dir --profile" diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/etc/man/cargo-add.1 cargo-0.67.1+ds0ubuntu0.libgit2/src/etc/man/cargo-add.1 --- cargo-0.66.0+ds0ubuntu0.libgit2/src/etc/man/cargo-add.1 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/etc/man/cargo-add.1 2023-01-10 13:41:19.000000000 +0000 @@ -100,6 +100,11 @@ .SS "Dependency options" .sp +\fB\-\-dry\-run\fR +.RS 4 +Don't actually write the manifest +.RE +.sp \fB\-\-rename\fR \fIname\fR .RS 4 \fIRename\fR the dependency. @@ -179,6 +184,41 @@ Path to the \fBCargo.toml\fR file. By default, Cargo searches for the \fBCargo.toml\fR file in the current directory or any parent directory. .RE +.sp +\fB\-p\fR \fIspec\fR, +\fB\-\-package\fR \fIspec\fR +.RS 4 +Add dependencies to only the specified package. +.RE +.sp +\fB\-\-frozen\fR, +\fB\-\-locked\fR +.RS 4 +Either of these flags requires that the \fBCargo.lock\fR file is +up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will +exit with an error. The \fB\-\-frozen\fR flag also prevents Cargo from +attempting to access the network to determine if it is out\-of\-date. +.sp +These may be used in environments where you want to assert that the +\fBCargo.lock\fR file is up\-to\-date (such as a CI build) or want to avoid network +access. +.RE +.sp +\fB\-\-offline\fR +.RS 4 +Prevents Cargo from accessing the network for any reason. Without this +flag, Cargo will stop with an error if it needs to access the network and +the network is not available. With this flag, Cargo will attempt to +proceed without the network if possible. +.sp +Beware that this may result in different dependency resolution than online +mode. Cargo will restrict itself to crates that are downloaded locally, even +if there might be a newer version as indicated in the local copy of the index. +See the \fBcargo\-fetch\fR(1) command to download dependencies before going +offline. +.sp +May also be specified with the \fBnet.offline\fR \fIconfig value\fR \&. +.RE .SS "Common Options" .sp \fB+\fR\fItoolchain\fR @@ -261,4 +301,4 @@ .RE .RE .SH "SEE ALSO" -\fBcargo\fR(1) +\fBcargo\fR(1), \fBcargo\-remove\fR(1) diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/etc/man/cargo-locate-project.1 cargo-0.67.1+ds0ubuntu0.libgit2/src/etc/man/cargo-locate-project.1 --- cargo-0.66.0+ds0ubuntu0.libgit2/src/etc/man/cargo-locate-project.1 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/etc/man/cargo-locate-project.1 2023-01-10 13:41:19.000000000 +0000 @@ -8,8 +8,14 @@ .SH "SYNOPSIS" \fBcargo locate\-project\fR [\fIoptions\fR] .SH "DESCRIPTION" -This command will print a JSON object to stdout with the full path to the -\fBCargo.toml\fR manifest. +This command will print a JSON object to stdout with the full path to the manifest. The +manifest is found by searching upward for a file named \fBCargo.toml\fR starting from the current +working directory. +.sp +If the project happens to be a part of a workspace, the manifest of the project, rather than +the workspace root, is output. This can be overriden by the \fB\-\-workspace\fR flag. The root +workspace is found by traversing further upward or by using the field \fBpackage.workspace\fR after +locating the manifest of a workspace member. 
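For the `package.workspace` lookup behavior described at the end of the `cargo-locate-project.1` hunk above, a hedged manifest sketch (not part of the patch); the package name and relative path are hypothetical.

```toml
# Member crate's Cargo.toml (illustrative)
[package]
name = "member"
version = "0.1.0"
edition = "2021"
workspace = "../.."   # explicitly points Cargo at the workspace root
```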
.SH "OPTIONS" .sp \fB\-\-workspace\fR diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/etc/man/cargo-remove.1 cargo-0.67.1+ds0ubuntu0.libgit2/src/etc/man/cargo-remove.1 --- cargo-0.66.0+ds0ubuntu0.libgit2/src/etc/man/cargo-remove.1 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/etc/man/cargo-remove.1 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,189 @@ +'\" t +.TH "CARGO\-REMOVE" "1" +.nh +.ad l +.ss \n[.ss] 0 +.SH "NAME" +cargo\-remove \- Remove dependencies from a Cargo.toml manifest file +.SH "SYNOPSIS" +\fBcargo remove\fR [\fIoptions\fR] \fIdependency\fR\&... +.SH "DESCRIPTION" +Remove one or more dependencies from a \fBCargo.toml\fR manifest. +.SH "OPTIONS" +.SS "Section options" +.sp +\fB\-\-dev\fR +.RS 4 +Remove as a \fIdevelopment dependency\fR \&. +.RE +.sp +\fB\-\-build\fR +.RS 4 +Remove as a \fIbuild dependency\fR \&. +.RE +.sp +\fB\-\-target\fR \fItarget\fR +.RS 4 +Remove as a dependency to the \fIgiven target platform\fR \&. +.RE +.SS "Miscellaneous Options" +.sp +\fB\-\-dry\-run\fR +.RS 4 +Don't actually write to the manifest. +.RE +.SS "Display Options" +.sp +\fB\-v\fR, +\fB\-\-verbose\fR +.RS 4 +Use verbose output. May be specified twice for "very verbose" output which +includes extra output such as dependency warnings and build script output. +May also be specified with the \fBterm.verbose\fR +\fIconfig value\fR \&. +.RE +.sp +\fB\-q\fR, +\fB\-\-quiet\fR +.RS 4 +Do not print cargo log messages. +May also be specified with the \fBterm.quiet\fR +\fIconfig value\fR \&. +.RE +.sp +\fB\-\-color\fR \fIwhen\fR +.RS 4 +Control when colored output is used. Valid values: +.sp +.RS 4 +\h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the +terminal. +.RE +.sp +.RS 4 +\h'-04'\(bu\h'+02'\fBalways\fR: Always display colors. +.RE +.sp +.RS 4 +\h'-04'\(bu\h'+02'\fBnever\fR: Never display colors. +.RE +.sp +May also be specified with the \fBterm.color\fR +\fIconfig value\fR \&. +.RE +.SS "Manifest Options" +.sp +\fB\-\-manifest\-path\fR \fIpath\fR +.RS 4 +Path to the \fBCargo.toml\fR file. By default, Cargo searches for the +\fBCargo.toml\fR file in the current directory or any parent directory. +.RE +.sp +\fB\-\-frozen\fR, +\fB\-\-locked\fR +.RS 4 +Either of these flags requires that the \fBCargo.lock\fR file is +up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will +exit with an error. The \fB\-\-frozen\fR flag also prevents Cargo from +attempting to access the network to determine if it is out\-of\-date. +.sp +These may be used in environments where you want to assert that the +\fBCargo.lock\fR file is up\-to\-date (such as a CI build) or want to avoid network +access. +.RE +.sp +\fB\-\-offline\fR +.RS 4 +Prevents Cargo from accessing the network for any reason. Without this +flag, Cargo will stop with an error if it needs to access the network and +the network is not available. With this flag, Cargo will attempt to +proceed without the network if possible. +.sp +Beware that this may result in different dependency resolution than online +mode. Cargo will restrict itself to crates that are downloaded locally, even +if there might be a newer version as indicated in the local copy of the index. +See the \fBcargo\-fetch\fR(1) command to download dependencies before going +offline. +.sp +May also be specified with the \fBnet.offline\fR \fIconfig value\fR \&. +.RE +.SS "Package Selection" +.sp +\fB\-p\fR \fIspec\fR\&..., +\fB\-\-package\fR \fIspec\fR\&... +.RS 4 +Package to remove from. 
+.RE +.SS "Common Options" +.sp +\fB+\fR\fItoolchain\fR +.RS 4 +If Cargo has been installed with rustup, and the first argument to \fBcargo\fR +begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such +as \fB+stable\fR or \fB+nightly\fR). +See the \fIrustup documentation\fR +for more information about how toolchain overrides work. +.RE +.sp +\fB\-\-config\fR \fIKEY=VALUE\fR or \fIPATH\fR +.RS 4 +Overrides a Cargo configuration value. The argument should be in TOML syntax of \fBKEY=VALUE\fR, +or provided as a path to an extra configuration file. This flag may be specified multiple times. +See the \fIcommand\-line overrides section\fR for more information. +.RE +.sp +\fB\-h\fR, +\fB\-\-help\fR +.RS 4 +Prints help information. +.RE +.sp +\fB\-Z\fR \fIflag\fR +.RS 4 +Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details. +.RE +.SH "ENVIRONMENT" +See \fIthe reference\fR for +details on environment variables that Cargo reads. +.SH "EXIT STATUS" +.sp +.RS 4 +\h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded. +.RE +.sp +.RS 4 +\h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete. +.RE +.SH "EXAMPLES" +.sp +.RS 4 +\h'-04' 1.\h'+01'Remove \fBregex\fR as a dependency +.sp +.RS 4 +.nf +cargo remove regex +.fi +.RE +.RE +.sp +.RS 4 +\h'-04' 2.\h'+01'Remove \fBtrybuild\fR as a dev\-dependency +.sp +.RS 4 +.nf +cargo remove \-\-dev trybuild +.fi +.RE +.RE +.sp +.RS 4 +\h'-04' 3.\h'+01'Remove \fBnom\fR from the \fBx86_64\-pc\-windows\-gnu\fR dependencies table +.sp +.RS 4 +.nf +cargo remove \-\-target x86_64\-pc\-windows\-gnu nom +.fi +.RE +.RE +.SH "SEE ALSO" +\fBcargo\fR(1), \fBcargo\-add\fR(1) diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/src/etc/man/cargo-tree.1 cargo-0.67.1+ds0ubuntu0.libgit2/src/etc/man/cargo-tree.1 --- cargo-0.66.0+ds0ubuntu0.libgit2/src/etc/man/cargo-tree.1 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/src/etc/man/cargo-tree.1 2023-01-10 13:41:19.000000000 +0000 @@ -52,6 +52,21 @@ turn depends on \fBcfg\-if\fR with "default" features. When using \fB\-e features\fR it can be helpful to use \fB\-i\fR flag to show how the features flow into a package. See the examples below for more detail. +.SS "Feature Unification" +This command shows a graph much closer to a feature\-unified graph Cargo will +build, rather than what you list in \fBCargo.toml\fR\&. For instance, if you specify +the same dependency in both \fB[dependencies]\fR and \fB[dev\-dependencies]\fR but with +different features on. This command may merge all features and show a \fB(*)\fR on +one of the dependency to indicate the duplicate. +.sp +As a result, for a mostly equivalent overview of what \fBcargo build\fR does, +\fBcargo tree \-e normal,build\fR is pretty close; for a mostly equivalent overview +of what \fBcargo test\fR does, \fBcargo tree\fR is pretty close. However, it doesn't +guarantee the exact equivalence to what Cargo is going to build, since a +compilation is complex and depends on lots of different factors. +.sp +To learm more about feature unification, check out this +\fIdedicated section\fR \&. .SH "OPTIONS" .SS "Tree Options" .sp diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/alt_registry.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/alt_registry.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/alt_registry.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/alt_registry.rs 2023-01-10 13:41:19.000000000 +0000 @@ -1,9 +1,9 @@ //! Tests for alternative registries. 
-use cargo::util::IntoUrl; +use cargo_test_support::compare::assert_match_exact; use cargo_test_support::publish::validate_alt_upload; use cargo_test_support::registry::{self, Package, RegistryBuilder}; -use cargo_test_support::{basic_manifest, git, paths, project}; +use cargo_test_support::{basic_manifest, paths, project}; use std::fs; #[cargo_test] @@ -13,7 +13,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -62,7 +62,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -104,7 +104,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -146,7 +146,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -190,7 +190,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -224,7 +224,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -248,14 +248,13 @@ #[cargo_test] fn cannot_publish_to_crates_io_with_registry_dependency() { - registry::alt_init(); - let fakeio_path = paths::root().join("fake.io"); - let fakeio_url = fakeio_path.into_url().unwrap(); + let crates_io = registry::init(); + let _alternative = RegistryBuilder::new().alternative().build(); let p = project() .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -265,41 +264,22 @@ "#, ) .file("src/main.rs", "fn main() {}") - .file( - ".cargo/config", - &format!( - r#" - [registries.fakeio] - index = "{}" - "#, - fakeio_url - ), - ) .build(); Package::new("bar", "0.0.1").alternative(true).publish(); - // Since this can't really call plain `publish` without fetching the real - // crates.io index, create a fake one that points to the real crates.io. 
- git::repo(&fakeio_path) - .file( - "config.json", - r#" - {"dl": "https://crates.io/api/v1/crates", "api": "https://crates.io"} - "#, - ) - .build(); - - // Login so that we have the token available - p.cargo("login --registry fakeio TOKEN").run(); - - p.cargo("publish --registry fakeio") + p.cargo("publish") + .replace_crates_io(crates_io.index_url()) .with_status(101) .with_stderr_contains("[ERROR] crates cannot be published to crates.io[..]") .run(); - p.cargo("publish --token sekrit --index") - .arg(fakeio_url.to_string()) + p.cargo("publish") + .replace_crates_io(crates_io.index_url()) + .arg("--token") + .arg(crates_io.token()) + .arg("--index") + .arg(crates_io.index_url().as_str()) .with_status(101) .with_stderr_contains("[ERROR] crates cannot be published to crates.io[..]") .run(); @@ -312,7 +292,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -376,7 +356,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -479,7 +459,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = ["me"] @@ -616,7 +596,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -1042,7 +1022,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -1215,7 +1195,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -1262,7 +1242,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -1309,3 +1289,99 @@ .run(); } } + +#[cargo_test] +fn sparse_lockfile() { + let _registry = registry::RegistryBuilder::new() + .http_index() + .alternative() + .build(); + Package::new("foo", "0.1.0").alternative(true).publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "a" + version = "0.5.0" + authors = [] + + [dependencies] + foo = { registry = 'alternative', version = '0.1.0'} + "#, + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("-Zsparse-registry generate-lockfile") + .masquerade_as_nightly_cargo(&["sparse-registry"]) + .run(); + assert_match_exact( + &p.read_lockfile(), + r#"# This file is automatically @generated by Cargo. +# It is not intended for manual editing. 
+version = 3 + +[[package]] +name = "a" +version = "0.5.0" +dependencies = [ + "foo", +] + +[[package]] +name = "foo" +version = "0.1.0" +source = "sparse+http://[..]/" +checksum = "f6a200a9339fef960979d94d5c99cbbfd899b6f5a396a55d9775089119050203""#, + ); +} + +#[cargo_test] +fn publish_with_transitive_dep() { + let _alt1 = RegistryBuilder::new() + .http_api() + .http_index() + .alternative_named("Alt-1") + .build(); + let _alt2 = RegistryBuilder::new() + .http_api() + .http_index() + .alternative_named("Alt-2") + .build(); + + let p1 = project() + .file( + "Cargo.toml", + r#" + [package] + name = "a" + version = "0.5.0" + "#, + ) + .file("src/lib.rs", "") + .build(); + p1.cargo("publish -Zsparse-registry --registry Alt-1") + .masquerade_as_nightly_cargo(&["sparse-registry"]) + .run(); + + let p2 = project() + .file( + "Cargo.toml", + r#" + [package] + name = "b" + version = "0.6.0" + publish = ["Alt-2"] + + [dependencies] + a = { version = "0.5.0", registry = "Alt-1" } + "#, + ) + .file("src/lib.rs", "") + .build(); + p2.cargo("publish -Zsparse-registry") + .masquerade_as_nightly_cargo(&["sparse-registry"]) + .run(); +} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/artifact_dep.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/artifact_dep.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/artifact_dep.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/artifact_dep.rs 2023-01-10 13:41:19.000000000 +0000 @@ -219,7 +219,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -332,7 +332,7 @@ .file( "Cargo.toml", &r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -435,7 +435,7 @@ .file( "Cargo.toml", &r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -1872,7 +1872,9 @@ #[cargo_test] fn publish_artifact_dep() { - registry::init(); + // HACK below allows us to use a local registry + let registry = registry::init(); + Package::new("bar", "1.0.0").publish(); Package::new("baz", "1.0.0").publish(); @@ -1901,13 +1903,24 @@ .file("src/lib.rs", "") .build(); - p.cargo("publish -Z bindeps --no-verify --token sekrit") + // HACK: Inject `foo` directly into the index so `publish` won't block for it to be in + // the index. + // + // This is to ensure we can verify the Summary we post to the registry as doing so precludes + // the registry from processing the publish. + Package::new("foo", "0.1.0") + .file("src/lib.rs", "") + .publish(); + + p.cargo("publish -Z bindeps --no-verify") + .replace_crates_io(registry.index_url()) .masquerade_as_nightly_cargo(&["bindeps"]) .with_stderr( "\ [UPDATING] [..] [PACKAGING] foo v0.1.0 [..] [UPLOADING] foo v0.1.0 [..] +[UPDATING] [..] 
", ) .run(); @@ -1924,7 +1937,6 @@ "kind": "normal", "name": "bar", "optional": false, - "registry": "https://github.com/rust-lang/crates.io-index", "target": null, "version_req": "^1.0" }, @@ -1934,7 +1946,6 @@ "kind": "build", "name": "baz", "optional": false, - "registry": "https://github.com/rust-lang/crates.io-index", "target": null, "version_req": "^1.0" } @@ -2181,7 +2192,7 @@ .file( "Cargo.toml", &r#" - [project] + [package] name = "foo" version = "0.0.1" resolver = "2" diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/bad_config.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/bad_config.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/bad_config.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/bad_config.rs 2023-01-10 13:41:19.000000000 +0000 @@ -1,6 +1,6 @@ //! Tests for some invalid .cargo/config files. -use cargo_test_support::registry::Package; +use cargo_test_support::registry::{self, Package}; use cargo_test_support::{basic_manifest, project, rustc_host}; #[cargo_test] @@ -19,8 +19,8 @@ .with_status(101) .with_stderr( "\ -[ERROR] invalid configuration for key `target.nonexistent-target` -expected a table, but found a string for `[..]` in [..]config +[ERROR] expected table for configuration key `target.nonexistent-target`, \ +but found string in [..]config ", ) .run(); @@ -62,6 +62,7 @@ #[cargo_test] fn bad3() { + let registry = registry::init(); let p = project() .file("src/lib.rs", "") .file( @@ -75,6 +76,7 @@ Package::new("foo", "1.0.0").publish(); p.cargo("publish -v") + .replace_crates_io(registry.index_url()) .with_status(101) .with_stderr( "\ @@ -113,6 +115,7 @@ #[cargo_test] fn bad6() { + let registry = registry::init(); let p = project() .file("src/lib.rs", "") .file( @@ -126,6 +129,7 @@ Package::new("foo", "1.0.0").publish(); p.cargo("publish -v") + .replace_crates_io(registry.index_url()) .with_status(101) .with_stderr( "\ @@ -209,7 +213,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -263,7 +267,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -654,7 +658,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" @@ -667,7 +671,7 @@ p.cargo("build") .with_stderr( "\ -warning: unused manifest key: project.bulid +warning: unused manifest key: package.bulid [COMPILING] foo [..] [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
", @@ -679,7 +683,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/bench.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/bench.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/bench.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/bench.rs 2023-01-10 13:41:19.000000000 +0000 @@ -329,7 +329,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -452,7 +452,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -550,7 +550,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -695,7 +695,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -1058,7 +1058,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" authors = [] version = "0.1.0" @@ -1216,7 +1216,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" authors = [] version = "0.1.0" @@ -1236,7 +1236,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "bar" authors = [] version = "0.1.0" @@ -1266,7 +1266,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "baz" authors = [] version = "0.1.0" @@ -1305,7 +1305,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.1.0" @@ -1358,7 +1358,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.1.0" @@ -1403,7 +1403,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.1.0" @@ -1547,7 +1547,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.1.0" diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/binary_name.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/binary_name.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/binary_name.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/binary_name.rs 2023-01-10 13:41:19.000000000 +0000 @@ -9,7 +9,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" @@ -44,7 +44,7 @@ r#" cargo-features = ["different-binary-name"] - [project] + [package] name = "foo" version = "0.0.1" @@ -126,7 +126,7 @@ r#" cargo-features = ["different-binary-name"] - [project] + [package] name = "foo" version = "0.0.1" @@ -212,7 +212,7 @@ r#" cargo-features = ["different-binary-name"] - [project] + [package] name = "foo" version = "0.0.1" @@ -264,7 +264,7 @@ r#" cargo-features = ["different-binary-name"] - [project] + [package] name = "foo" version = "0.0.1" diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/build_plan.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/build_plan.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/build_plan.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/build_plan.rs 2023-01-10 13:41:19.000000000 +0000 @@ -126,7 +126,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" @@ -204,7 +204,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" authors = [] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/build.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/build.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/build.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/build.rs 2023-01-10 13:41:19.000000000 +0000 @@ -182,7 +182,7 @@ 
.file( "Cargo.toml", " - [project] + [package] foo = bar ", ) @@ -657,7 +657,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" @@ -700,7 +700,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" @@ -717,7 +717,7 @@ .file( "bar/Cargo.toml", r#" - [project] + [package] name = "bar" version = "0.5.0" @@ -763,7 +763,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" @@ -780,7 +780,7 @@ .file( "bar/Cargo.toml", r#" - [project] + [package] name = "bar" version = "0.5.0" @@ -826,7 +826,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" @@ -840,7 +840,7 @@ .file( "bar/Cargo.toml", r#" - [project] + [package] name = "bar" version = "0.5.0" @@ -890,7 +890,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" @@ -909,7 +909,7 @@ .file( "bar/Cargo.toml", r#" - [project] + [package] name = "bar" version = "0.5.0" @@ -1101,7 +1101,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" @@ -1147,7 +1147,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" @@ -1319,7 +1319,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.1-alpha.1" description = "This is foo" @@ -1477,7 +1477,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.1-alpha.1" authors = ["wycats@example.com", "neikos@example.com"] @@ -1526,7 +1526,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = ["escape='\"@example.com"] @@ -1606,7 +1606,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" @@ -1639,7 +1639,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" @@ -1666,7 +1666,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" @@ -1771,7 +1771,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] @@ -1798,7 +1798,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] @@ -2966,7 +2966,7 @@ "Cargo.toml", &format!( r#" - [project] + [package] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] @@ -3013,7 +3013,7 @@ "Cargo.toml", &format!( r#" - [project] + [package] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] @@ -3054,7 +3054,7 @@ "Cargo.toml", &format!( r#" - [project] + [package] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] @@ -3093,7 +3093,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" @@ -3123,7 +3123,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" @@ -3757,7 +3757,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" @@ -4087,7 +4087,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.1.0" @@ -4119,7 +4119,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.1.0" @@ -4152,7 +4152,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.1.0" @@ -4184,7 +4184,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.1.0" @@ -4217,7 +4217,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.1.0" @@ -4259,7 +4259,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.1.0" @@ -4506,7 +4506,7 @@ 
.file( "a/Cargo.toml", r#" - [project] + [package] name = "a" version = "0.1.0" @@ -4717,7 +4717,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" authors = [] version = "0.1.0" @@ -4755,7 +4755,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo-bar" authors = [] version = "0.1.0" @@ -4843,7 +4843,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.1.0" authors = [] @@ -5110,7 +5110,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "testless" version = "0.1.0" @@ -5125,7 +5125,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.1.0" @@ -6247,3 +6247,32 @@ foo.cargo("test").run(); } + +#[cargo_test] +fn renamed_uplifted_artifact_remains_unmodified_after_rebuild() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + authors = [] + version = "0.0.0" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + p.cargo("build").run(); + + let bin = p.bin("foo"); + let renamed_bin = p.bin("foo-renamed"); + + fs::rename(&bin, &renamed_bin).unwrap(); + + p.change_file("src/main.rs", "fn main() { eprintln!(\"hello, world\"); }"); + p.cargo("build").run(); + + let not_the_same = !same_file::is_same_file(bin, renamed_bin).unwrap(); + assert!(not_the_same, "renamed uplifted artifact must be unmodified"); +} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/build_script_env.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/build_script_env.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/build_script_env.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/build_script_env.rs 2023-01-10 13:41:19.000000000 +0000 @@ -173,3 +173,69 @@ .with_status(101) .run(); } + +#[cargo_test] +#[cfg(target_arch = "x86_64")] +fn build_script_sees_cfg_target_feature() { + let build_rs = r#" + fn main() { + let cfg = std::env::var("CARGO_CFG_TARGET_FEATURE").unwrap(); + eprintln!("CARGO_CFG_TARGET_FEATURE={cfg}"); + } + "#; + + let configs = [ + r#" + [build] + rustflags = ["-Ctarget-feature=+sse4.1,+sse4.2"] + "#, + r#" + [target.'cfg(target_arch = "x86_64")'] + rustflags = ["-Ctarget-feature=+sse4.1,+sse4.2"] + "#, + ]; + + for config in configs { + let p = project() + .file(".cargo/config.toml", config) + .file("src/lib.rs", r#""#) + .file("build.rs", build_rs) + .build(); + + p.cargo("build -vv") + .with_stderr_contains("[foo 0.0.1] CARGO_CFG_TARGET_FEATURE=[..]sse4.2[..]") + .with_stderr_contains("[..]-Ctarget-feature=[..]+sse4.2[..]") + .run(); + } +} + +/// In this test, the cfg is self-contradictory. There's no *right* answer as to +/// what the value of `RUSTFLAGS` should be in this case. We chose to give a +/// warning. 
However, no matter what we do, it's important that build scripts +/// and rustc see a consistent picture +#[cargo_test] +fn cfg_paradox() { + let build_rs = r#" + fn main() { + let cfg = std::env::var("CARGO_CFG_BERTRAND").is_ok(); + eprintln!("cfg!(bertrand)={cfg}"); + } + "#; + + let config = r#" + [target.'cfg(not(bertrand))'] + rustflags = ["--cfg=bertrand"] + "#; + + let p = project() + .file(".cargo/config.toml", config) + .file("src/lib.rs", r#""#) + .file("build.rs", build_rs) + .build(); + + p.cargo("build -vv") + .with_stderr_contains("[WARNING] non-trivial mutual dependency between target-specific configuration and RUSTFLAGS") + .with_stderr_contains("[foo 0.0.1] cfg!(bertrand)=true") + .with_stderr_contains("[..]--cfg=bertrand[..]") + .run(); +} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/build_script.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/build_script.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/build_script.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/build_script.rs 2023-01-10 13:41:19.000000000 +0000 @@ -18,7 +18,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" @@ -50,7 +50,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" @@ -67,7 +67,7 @@ .file( "bar/Cargo.toml", r#" - [project] + [package] name = "bar" version = "0.5.0" @@ -664,7 +664,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" @@ -694,7 +694,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "bar" version = "0.5.0" @@ -708,7 +708,7 @@ .file( "foo/Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" @@ -753,7 +753,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "bar" version = "0.5.0" @@ -767,7 +767,7 @@ .file( "foo/Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" @@ -812,7 +812,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" authors = [] @@ -843,7 +843,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" authors = [] @@ -859,7 +859,7 @@ .file( "a-sys/Cargo.toml", r#" - [project] + [package] name = "a-sys" version = "0.5.0" authors = [] @@ -947,7 +947,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" authors = [] @@ -963,7 +963,7 @@ .file( "a/Cargo.toml", r#" - [project] + [package] name = "a" version = "0.5.0" authors = [] @@ -978,7 +978,7 @@ .file( "a/a-sys/Cargo.toml", r#" - [project] + [package] name = "a-sys" version = "0.5.0" authors = [] @@ -1013,7 +1013,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" authors = [] @@ -1051,7 +1051,7 @@ .file( "a/Cargo.toml", r#" - [project] + [package] name = "a" version = "0.5.0" authors = [] @@ -1086,7 +1086,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" authors = [] @@ -1118,7 +1118,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" authors = [] @@ -1142,7 +1142,7 @@ .file( "a/Cargo.toml", r#" - [project] + [package] name = "a" version = "0.5.0" authors = [] @@ -1175,7 +1175,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" authors = [] @@ -1211,7 +1211,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "a" version = "0.5.0" authors = [] @@ -1239,7 +1239,7 @@ "Cargo.toml", &format!( r#" - [project] + [package] name = "foo" version = "0.5.0" authors = [] @@ -1279,7 
+1279,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" authors = [] @@ -1344,7 +1344,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" authors = [] @@ -1356,7 +1356,7 @@ .file( "a/Cargo.toml", r#" - [project] + [package] name = "a" version = "0.5.0" authors = [] @@ -1375,7 +1375,7 @@ .file( "b/Cargo.toml", r#" - [project] + [package] name = "b" version = "0.5.0" authors = [] @@ -1415,7 +1415,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" authors = [] @@ -1427,7 +1427,7 @@ .file( "a/Cargo.toml", r#" - [project] + [package] name = "a" version = "0.5.0" authors = [] @@ -1450,7 +1450,7 @@ .file( "b/Cargo.toml", r#" - [project] + [package] name = "b" version = "0.5.0" authors = [] @@ -1489,7 +1489,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" authors = [] @@ -1533,7 +1533,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" authors = [] @@ -1579,7 +1579,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" authors = [] @@ -1601,7 +1601,7 @@ .file( "a/Cargo.toml", r#" - [project] + [package] name = "a" version = "0.5.0" authors = [] @@ -1657,7 +1657,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" authors = [] @@ -1737,7 +1737,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" authors = [] @@ -1775,7 +1775,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" authors = [] @@ -1817,7 +1817,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" authors = [] @@ -1874,7 +1874,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" authors = [] @@ -1899,7 +1899,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.0" authors = [] @@ -1927,7 +1927,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" authors = [] @@ -1977,7 +1977,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" authors = [] @@ -2029,7 +2029,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" authors = [] @@ -2072,7 +2072,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" authors = [] @@ -2085,7 +2085,7 @@ .file( "a/Cargo.toml", r#" - [project] + [package] name = "a" version = "0.5.0" authors = [] @@ -2283,7 +2283,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.1.0" authors = [] @@ -2347,7 +2347,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" authors = [] @@ -2714,7 +2714,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" authors = [] @@ -2728,7 +2728,7 @@ .file( "b/Cargo.toml", r#" - [project] + [package] name = "b" version = "0.5.0" authors = [] @@ -2740,7 +2740,7 @@ .file( "a/Cargo.toml", r#" - [project] + [package] name = "a" version = "0.5.0" authors = [] @@ -2795,7 +2795,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" authors = [] @@ -2810,7 +2810,7 @@ .file( "a/Cargo.toml", r#" - [project] + [package] name = "a" version = "0.5.0" authors = [] @@ -2823,7 +2823,7 @@ .file( "b/Cargo.toml", r#" - [project] + [package] name = "b" version = "0.5.0" authors = [] @@ -2835,7 +2835,7 @@ .file( "c/Cargo.toml", r#" - [project] + [package] name = "c" version = "0.5.0" authors = [] 
@@ -2879,7 +2879,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" authors = [] @@ -2940,7 +2940,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" authors = [] @@ -3002,7 +3002,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" authors = [] @@ -3054,7 +3054,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" authors = [] @@ -3110,7 +3110,7 @@ .file( "awoo/Cargo.toml", r#" - [project] + [package] name = "awoo" version = "0.5.0" build = "build.rs" @@ -3129,7 +3129,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "meow" version = "0.5.0" [dependencies] @@ -3189,7 +3189,7 @@ .file( "awoo/Cargo.toml", r#" - [project] + [package] name = "awoo" version = "0.5.0" build = "build.rs" @@ -3208,7 +3208,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "meow" version = "0.5.0" [dependencies] @@ -3250,7 +3250,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" authors = [] @@ -3361,7 +3361,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" authors = [] @@ -3373,7 +3373,7 @@ .file( "a/Cargo.toml", r#" - [project] + [package] name = "a" version = "0.5.0" authors = [] @@ -3405,7 +3405,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" authors = [] @@ -3427,7 +3427,7 @@ .file( "a/Cargo.toml", r#" - [project] + [package] name = "a" version = "0.5.0" authors = [] @@ -3457,7 +3457,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" authors = [] @@ -3494,7 +3494,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" authors = [] @@ -3514,7 +3514,7 @@ .file( "a/Cargo.toml", r#" - [project] + [package] name = "a" version = "0.5.0" authors = [] @@ -3534,7 +3534,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" authors = [] @@ -3554,7 +3554,7 @@ .file( "a/Cargo.toml", r#" - [project] + [package] name = "a" version = "0.5.0" authors = [] @@ -3572,7 +3572,7 @@ .file( "b/Cargo.toml", r#" - [project] + [package] name = "b" version = "0.5.0" authors = [] @@ -3596,7 +3596,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" authors = [] @@ -3636,7 +3636,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" authors = [] @@ -3678,7 +3678,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "bar" version = "0.1.0" authors = [] @@ -3692,7 +3692,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" authors = [] @@ -3737,7 +3737,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "bar" version = "0.1.0" authors = [] @@ -3751,7 +3751,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" authors = [] @@ -3789,7 +3789,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" authors = [] @@ -3833,7 +3833,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" authors = [] @@ -4055,7 +4055,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "dep1" version = "0.1.0" authors = [] @@ -4076,7 +4076,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "dep2" version = "0.1.0" authors = [] @@ -4097,7 +4097,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "dep3" version = "0.1.0" authors = [] @@ -4118,7 +4118,7 @@ .file( "Cargo.toml", r#" - [project] + [package] 
name = "dep4" version = "0.1.0" authors = [] @@ -4140,7 +4140,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.1.0" authors = [] @@ -4172,7 +4172,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" authors = [] @@ -4191,7 +4191,7 @@ .file( "a/Cargo.toml", r#" - [project] + [package] name = "a" version = "0.5.0" authors = [] @@ -4204,7 +4204,7 @@ .file( "b/Cargo.toml", r#" - [project] + [package] name = "b" version = "0.5.0" authors = [] @@ -4258,7 +4258,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" authors = [] @@ -4399,7 +4399,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" authors = [] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_add/dev_build_conflict/stderr.log cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_add/dev_build_conflict/stderr.log --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_add/dev_build_conflict/stderr.log 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_add/dev_build_conflict/stderr.log 2023-01-10 13:41:19.000000000 +0000 @@ -1,8 +1,7 @@ error: The argument '--dev' cannot be used with '--build' -USAGE: - cargo add [OPTIONS] [@] ... - cargo add [OPTIONS] --path ... - cargo add [OPTIONS] --git ... +Usage: cargo add [OPTIONS] [@] ... + cargo add [OPTIONS] --path ... + cargo add [OPTIONS] --git ... -For more information try --help +For more information try '--help' diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_add/features_unknown/stderr.log cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_add/features_unknown/stderr.log --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_add/features_unknown/stderr.log 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_add/features_unknown/stderr.log 2023-01-10 13:41:19.000000000 +0000 @@ -1,9 +1,5 @@ Updating `dummy-registry` index Adding your-face v99999.0.0 to dependencies. 
- Features: - + noze - - ears - - eyes - - mouth - - nose -error: unrecognized features: ["noze"] +error: unrecognized feature for crate your-face: noze +disabled features: + ears, eyes, mouth, nose diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_add/features_unknown_no_features/in/Cargo.toml cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_add/features_unknown_no_features/in/Cargo.toml --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_add/features_unknown_no_features/in/Cargo.toml 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_add/features_unknown_no_features/in/Cargo.toml 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,5 @@ +[workspace] + +[package] +name = "cargo-list-test-fixture" +version = "0.0.0" diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_add/features_unknown_no_features/mod.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_add/features_unknown_no_features/mod.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_add/features_unknown_no_features/mod.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_add/features_unknown_no_features/mod.rs 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,25 @@ +use cargo_test_support::compare::assert_ui; +use cargo_test_support::prelude::*; +use cargo_test_support::Project; + +use crate::cargo_add::init_registry; +use cargo_test_support::curr_dir; + +#[cargo_test] +fn features_unknown_no_features() { + init_registry(); + let project = Project::from_template(curr_dir!().join("in")); + let project_root = project.root(); + let cwd = &project_root; + + snapbox::cmd::Command::cargo_ui() + .arg("add") + .arg_line("my-package --features noze") + .current_dir(cwd) + .assert() + .code(101) + .stdout_matches_path(curr_dir!().join("stdout.log")) + .stderr_matches_path(curr_dir!().join("stderr.log")); + + assert_ui().subset_matches(curr_dir!().join("out"), &project_root); +} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_add/features_unknown_no_features/out/Cargo.toml cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_add/features_unknown_no_features/out/Cargo.toml --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_add/features_unknown_no_features/out/Cargo.toml 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_add/features_unknown_no_features/out/Cargo.toml 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,5 @@ +[workspace] + +[package] +name = "cargo-list-test-fixture" +version = "0.0.0" diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_add/features_unknown_no_features/stderr.log cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_add/features_unknown_no_features/stderr.log --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_add/features_unknown_no_features/stderr.log 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_add/features_unknown_no_features/stderr.log 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,4 @@ + Updating `dummy-registry` index + Adding my-package v99999.0.0 to dependencies. 
+error: unrecognized feature for crate my-package: noze +no features available for crate my-package diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_add/invalid_arg/stderr.log cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_add/invalid_arg/stderr.log --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_add/invalid_arg/stderr.log 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_add/invalid_arg/stderr.log 2023-01-10 13:41:19.000000000 +0000 @@ -1,10 +1,9 @@ error: Found argument '--flag' which wasn't expected, or isn't valid in this context - If you tried to supply `--flag` as a value rather than a flag, use `-- --flag` + If you tried to supply '--flag' as a value rather than a flag, use '-- --flag' -USAGE: - cargo add [OPTIONS] [@] ... - cargo add [OPTIONS] --path ... - cargo add [OPTIONS] --git ... +Usage: cargo add [OPTIONS] [@] ... + cargo add [OPTIONS] --path ... + cargo add [OPTIONS] --git ... -For more information try --help +For more information try '--help' diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_add/invalid_target_empty/stderr.log cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_add/invalid_target_empty/stderr.log --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_add/invalid_target_empty/stderr.log 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_add/invalid_target_empty/stderr.log 2023-01-10 13:41:19.000000000 +0000 @@ -1,3 +1,3 @@ error: The argument '--target ' requires a value but none was supplied -For more information try --help +For more information try '--help' diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_add/mod.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_add/mod.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_add/mod.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_add/mod.rs 2023-01-10 13:41:19.000000000 +0000 @@ -20,6 +20,7 @@ mod features_preserve; mod features_spaced_values; mod features_unknown; +mod features_unknown_no_features; mod git; mod git_branch; mod git_conflicts_namever; diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_add/no_args/stderr.log cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_add/no_args/stderr.log --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_add/no_args/stderr.log 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_add/no_args/stderr.log 2023-01-10 13:41:19.000000000 +0000 @@ -1,9 +1,8 @@ error: The following required arguments were not provided: - |--git > + |--git > -USAGE: - cargo add [OPTIONS] [@] ... - cargo add [OPTIONS] --path ... - cargo add [OPTIONS] --git ... +Usage: cargo add [OPTIONS] [@] ... + cargo add [OPTIONS] --path ... + cargo add [OPTIONS] --git ... 
-For more information try --help +For more information try '--help' diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_add/unknown_inherited_feature/in/dependency/Cargo.toml cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_add/unknown_inherited_feature/in/dependency/Cargo.toml --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_add/unknown_inherited_feature/in/dependency/Cargo.toml 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_add/unknown_inherited_feature/in/dependency/Cargo.toml 2023-01-10 13:41:19.000000000 +0000 @@ -6,9 +6,15 @@ default-base = [] default-test-base = [] default-merge-base = [] -default = ["default-base", "default-test-base", "default-merge-base"] +long-feature-name-because-of-formatting-reasons = [] +default = [ + "default-base", + "default-test-base", + "default-merge-base", + "long-feature-name-because-of-formatting-reasons", +] test-base = [] test = ["test-base", "default-test-base"] merge-base = [] merge = ["merge-base", "default-merge-base"] -unrelated = [] \ No newline at end of file +unrelated = [] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_add/unknown_inherited_feature/out/dependency/Cargo.toml cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_add/unknown_inherited_feature/out/dependency/Cargo.toml --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_add/unknown_inherited_feature/out/dependency/Cargo.toml 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_add/unknown_inherited_feature/out/dependency/Cargo.toml 2023-01-10 13:41:19.000000000 +0000 @@ -6,9 +6,15 @@ default-base = [] default-test-base = [] default-merge-base = [] -default = ["default-base", "default-test-base", "default-merge-base"] +long-feature-name-because-of-formatting-reasons = [] +default = [ + "default-base", + "default-test-base", + "default-merge-base", + "long-feature-name-because-of-formatting-reasons", +] test-base = [] test = ["test-base", "default-test-base"] merge-base = [] merge = ["merge-base", "default-merge-base"] -unrelated = [] \ No newline at end of file +unrelated = [] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_add/unknown_inherited_feature/stderr.log cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_add/unknown_inherited_feature/stderr.log --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_add/unknown_inherited_feature/stderr.log 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_add/unknown_inherited_feature/stderr.log 2023-01-10 13:41:19.000000000 +0000 @@ -1,12 +1,7 @@ Adding foo (workspace) to dependencies. 
- Features as of v0.0.0: - + default-base - + default-merge-base - + default-test-base - + not_recognized - + test - + test-base - - merge - - merge-base - - unrelated -error: unrecognized features: ["not_recognized"] +error: unrecognized feature for crate foo: not_recognized +disabled features: + merge, merge-base, unrelated +enabled features: + default-base, default-merge-base, default-test-base + long-feature-name-because-of-formatting-reasons, test, test-base diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_alias_config.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_alias_config.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_alias_config.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_alias_config.rs 2023-01-10 13:41:19.000000000 +0000 @@ -21,7 +21,7 @@ p.cargo("b-cargo-test -v") .with_status(101) - .with_stderr_contains( + .with_stderr( "\ [ERROR] invalid configuration for key `alias.b-cargo-test` expected a list, but found a integer for [..]", @@ -30,6 +30,80 @@ } #[cargo_test] +fn alias_malformed_config_string() { + let p = project() + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/main.rs", "fn main() {}") + .file( + ".cargo/config", + r#" + [alias] + b-cargo-test = ` + "#, + ) + .build(); + + p.cargo("b-cargo-test -v") + .with_status(101) + .with_stderr( + "\ +[ERROR] could not load Cargo configuration + +Caused by: + could not parse TOML configuration in `[..]/config` + +Caused by: + [..] + +Caused by: + TOML parse error at line [..] + | + 3 | b-cargo-test = ` + | ^ + Unexpected ``` + Expected quoted string +", + ) + .run(); +} + +#[cargo_test] +fn alias_malformed_config_list() { + let p = project() + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/main.rs", "fn main() {}") + .file( + ".cargo/config", + r#" + [alias] + b-cargo-test = [1, 2] + "#, + ) + .build(); + + p.cargo("b-cargo-test -v") + .with_status(101) + .with_stderr( + "\ +[ERROR] could not load Cargo configuration + +Caused by: + failed to load TOML configuration from `[..]/config` + +Caused by: + [..] `alias` + +Caused by: + [..] `b-cargo-test` + +Caused by: + expected string but found integer in list +", + ) + .run(); +} + +#[cargo_test] fn alias_config() { let p = project() .file("Cargo.toml", &basic_bin_manifest("foo")) @@ -77,6 +151,30 @@ } #[cargo_test] +fn builtin_alias_shadowing_external_subcommand() { + let p = project() + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/main.rs", "fn main() {}") + .executable("cargo-t", "") + .build(); + + let mut paths: Vec<_> = env::split_paths(&env::var_os("PATH").unwrap_or_default()).collect(); + paths.push(p.root()); + let path = env::join_paths(paths).unwrap(); + + p.cargo("t") + .env("PATH", &path) + .with_stderr( + "\ +[COMPILING] foo v0.5.0 [..] +[FINISHED] test [unoptimized + debuginfo] target(s) in [..] +[RUNNING] unittests src/main.rs [..] +", + ) + .run(); +} + +#[cargo_test] fn alias_shadowing_external_subcommand() { let echo = echo_subcommand(); let p = project() @@ -324,11 +422,12 @@ .build(); p.cargo("-- check --invalid_argument -some-other-argument") + .with_status(101) .with_stderr( "\ -[WARNING] trailing arguments after built-in command `check` are ignored: `--invalid_argument -some-other-argument` -[CHECKING] foo v0.5.0 ([..]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+[ERROR] trailing arguments after built-in command `check` are unsupported: `--invalid_argument -some-other-argument` + +To pass the arguments to the subcommand, remove `--` ", ) .run(); diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_command.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_command.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_command.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_command.rs 2023-01-10 13:41:19.000000000 +0000 @@ -156,7 +156,7 @@ .with_status(101) .with_stderr_contains( "\ -error: no such subcommand: `C` +error: no such command: `C` Did you mean `c`? ", @@ -170,7 +170,7 @@ .with_status(101) .with_stderr_contains( "\ -error: no such subcommand: `B` +error: no such command: `B` Did you mean `b`? ", @@ -184,7 +184,7 @@ .with_status(101) .with_stderr_contains( "\ -error: no such subcommand: `biuld` +error: no such command: `biuld` Did you mean `build`? ", @@ -235,7 +235,7 @@ .with_status(101) .with_stderr_contains( "\ -error: no such subcommand: `myalais` +error: no such command: `myalais` Did you mean `myalias`? ", @@ -247,7 +247,7 @@ .with_status(101) .with_stderr_contains( "\ -error: no such subcommand: `myalais` +error: no such command: `myalais` ", ) .with_stderr_does_not_contain( @@ -266,7 +266,7 @@ .with_status(101) .with_stderr( "\ -[ERROR] no such subcommand: `there-is-no-way-that-there-is-a-command-close-to-this` +[ERROR] no such command: `there-is-no-way-that-there-is-a-command-close-to-this` View all installed commands with `cargo --list`", ) @@ -279,7 +279,7 @@ .with_status(101) .with_stderr( "\ -[ERROR] no such subcommand: `invalid-command` +[ERROR] no such command: `invalid-command` View all installed commands with `cargo --list`", ) @@ -473,7 +473,7 @@ .with_status(101) .with_stderr( "\ -error: no such subcommand: `+nightly` +error: no such command: `+nightly` Cargo does not handle `+toolchain` directives. Did you mean to invoke `cargo` through `rustup` instead?", @@ -487,7 +487,7 @@ .with_status(101) .with_stderr( "\ -error: no such subcommand: `bluid` +error: no such command: `bluid` Did you mean `build`? 
diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_features.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_features.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_features.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_features.rs 2023-01-10 13:41:19.000000000 +0000 @@ -610,7 +610,7 @@ #[cargo_test] fn publish_allowed() { - registry::init(); + let registry = registry::init(); let p = project() .file( @@ -626,7 +626,8 @@ ) .file("src/lib.rs", "") .build(); - p.cargo("publish --token sekrit") + p.cargo("publish") + .replace_crates_io(registry.index_url()) .masquerade_as_nightly_cargo(&["test-dummy-unstable"]) .run(); } diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/avoid_empty_tables/in/Cargo.toml cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/avoid_empty_tables/in/Cargo.toml --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/avoid_empty_tables/in/Cargo.toml 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/avoid_empty_tables/in/Cargo.toml 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,24 @@ +[package] +name = "cargo-remove-test-fixture" +version = "0.1.0" + +[[bin]] +name = "main" +path = "src/main.rs" + +[build-dependencies] +semver = "0.1.0" + +[dependencies] +docopt = "0.6" +rustc-serialize = "0.4" +semver = "0.1" +toml = "0.1" +clippy = "0.4" + +[dev-dependencies] +regex = "0.1.1" +serde = "1.0.90" + +[features] +std = ["serde/std", "semver/std"] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/avoid_empty_tables/in/src/lib.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/avoid_empty_tables/in/src/lib.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/avoid_empty_tables/in/src/lib.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/avoid_empty_tables/in/src/lib.rs 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1 @@ + diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/avoid_empty_tables/mod.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/avoid_empty_tables/mod.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/avoid_empty_tables/mod.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/avoid_empty_tables/mod.rs 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,25 @@ +use cargo_test_support::compare::assert_ui; +use cargo_test_support::curr_dir; +use cargo_test_support::CargoCommand; +use cargo_test_support::Project; + +use crate::cargo_remove::init_registry; + +#[cargo_test] +fn case() { + init_registry(); + let project = Project::from_template(curr_dir!().join("in")); + let project_root = project.root(); + let cwd = &project_root; + + snapbox::cmd::Command::cargo_ui() + .arg("remove") + .args(["clippy"]) + .current_dir(cwd) + .assert() + .success() + .stdout_matches_path(curr_dir!().join("stdout.log")) + .stderr_matches_path(curr_dir!().join("stderr.log")); + + assert_ui().subset_matches(curr_dir!().join("out"), &project_root); +} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/avoid_empty_tables/out/Cargo.toml cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/avoid_empty_tables/out/Cargo.toml --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/avoid_empty_tables/out/Cargo.toml 1970-01-01 00:00:00.000000000 +0000 +++ 
cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/avoid_empty_tables/out/Cargo.toml 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,23 @@ +[package] +name = "cargo-remove-test-fixture" +version = "0.1.0" + +[[bin]] +name = "main" +path = "src/main.rs" + +[build-dependencies] +semver = "0.1.0" + +[dependencies] +docopt = "0.6" +rustc-serialize = "0.4" +semver = "0.1" +toml = "0.1" + +[dev-dependencies] +regex = "0.1.1" +serde = "1.0.90" + +[features] +std = ["serde/std", "semver/std"] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/avoid_empty_tables/stderr.log cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/avoid_empty_tables/stderr.log --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/avoid_empty_tables/stderr.log 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/avoid_empty_tables/stderr.log 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,2 @@ + Removing clippy from dependencies + Updating `dummy-registry` index diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/build/in/Cargo.toml cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/build/in/Cargo.toml --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/build/in/Cargo.toml 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/build/in/Cargo.toml 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,24 @@ +[package] +name = "cargo-remove-test-fixture" +version = "0.1.0" + +[[bin]] +name = "main" +path = "src/main.rs" + +[build-dependencies] +semver = "0.1.0" + +[dependencies] +docopt = "0.6" +rustc-serialize = "0.4" +semver = "0.1" +toml = "0.1" +clippy = "0.4" + +[dev-dependencies] +regex = "0.1.1" +serde = "1.0.90" + +[features] +std = ["serde/std", "semver/std"] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/build/in/src/lib.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/build/in/src/lib.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/build/in/src/lib.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/build/in/src/lib.rs 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1 @@ + diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/build/mod.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/build/mod.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/build/mod.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/build/mod.rs 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,25 @@ +use cargo_test_support::compare::assert_ui; +use cargo_test_support::curr_dir; +use cargo_test_support::CargoCommand; +use cargo_test_support::Project; + +use crate::cargo_remove::init_registry; + +#[cargo_test] +fn case() { + init_registry(); + let project = Project::from_template(curr_dir!().join("in")); + let project_root = project.root(); + let cwd = &project_root; + + snapbox::cmd::Command::cargo_ui() + .arg("remove") + .args(["--build", "semver"]) + .current_dir(cwd) + .assert() + .success() + .stdout_matches_path(curr_dir!().join("stdout.log")) + .stderr_matches_path(curr_dir!().join("stderr.log")); + + assert_ui().subset_matches(curr_dir!().join("out"), &project_root); +} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/build/out/Cargo.toml cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/build/out/Cargo.toml --- 
cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/build/out/Cargo.toml 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/build/out/Cargo.toml 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,21 @@ +[package] +name = "cargo-remove-test-fixture" +version = "0.1.0" + +[[bin]] +name = "main" +path = "src/main.rs" + +[dependencies] +docopt = "0.6" +rustc-serialize = "0.4" +semver = "0.1" +toml = "0.1" +clippy = "0.4" + +[dev-dependencies] +regex = "0.1.1" +serde = "1.0.90" + +[features] +std = ["serde/std", "semver/std"] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/build/stderr.log cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/build/stderr.log --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/build/stderr.log 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/build/stderr.log 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,2 @@ + Removing semver from build-dependencies + Updating `dummy-registry` index diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/dev/in/Cargo.toml cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/dev/in/Cargo.toml --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/dev/in/Cargo.toml 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/dev/in/Cargo.toml 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,24 @@ +[package] +name = "cargo-remove-test-fixture" +version = "0.1.0" + +[[bin]] +name = "main" +path = "src/main.rs" + +[build-dependencies] +semver = "0.1.0" + +[dependencies] +docopt = "0.6" +rustc-serialize = "0.4" +semver = "0.1" +toml = "0.1" +clippy = "0.4" + +[dev-dependencies] +regex = "0.1.1" +serde = "1.0.90" + +[features] +std = ["serde/std", "semver/std"] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/dev/in/src/lib.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/dev/in/src/lib.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/dev/in/src/lib.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/dev/in/src/lib.rs 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1 @@ + diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/dev/mod.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/dev/mod.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/dev/mod.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/dev/mod.rs 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,25 @@ +use cargo_test_support::compare::assert_ui; +use cargo_test_support::curr_dir; +use cargo_test_support::CargoCommand; +use cargo_test_support::Project; + +use crate::cargo_remove::init_registry; + +#[cargo_test] +fn case() { + init_registry(); + let project = Project::from_template(curr_dir!().join("in")); + let project_root = project.root(); + let cwd = &project_root; + + snapbox::cmd::Command::cargo_ui() + .arg("remove") + .args(["--dev", "regex"]) + .current_dir(cwd) + .assert() + .success() + .stdout_matches_path(curr_dir!().join("stdout.log")) + .stderr_matches_path(curr_dir!().join("stderr.log")); + + assert_ui().subset_matches(curr_dir!().join("out"), &project_root); +} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/dev/out/Cargo.toml cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/dev/out/Cargo.toml --- 
cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/dev/out/Cargo.toml 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/dev/out/Cargo.toml 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,23 @@ +[package] +name = "cargo-remove-test-fixture" +version = "0.1.0" + +[[bin]] +name = "main" +path = "src/main.rs" + +[build-dependencies] +semver = "0.1.0" + +[dependencies] +docopt = "0.6" +rustc-serialize = "0.4" +semver = "0.1" +toml = "0.1" +clippy = "0.4" + +[dev-dependencies] +serde = "1.0.90" + +[features] +std = ["serde/std", "semver/std"] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/dev/stderr.log cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/dev/stderr.log --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/dev/stderr.log 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/dev/stderr.log 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,2 @@ + Removing regex from dev-dependencies + Updating `dummy-registry` index diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/dry_run/in/Cargo.toml cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/dry_run/in/Cargo.toml --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/dry_run/in/Cargo.toml 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/dry_run/in/Cargo.toml 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,24 @@ +[package] +name = "cargo-remove-test-fixture" +version = "0.1.0" + +[[bin]] +name = "main" +path = "src/main.rs" + +[build-dependencies] +semver = "0.1.0" + +[dependencies] +docopt = "0.6" +rustc-serialize = "0.4" +semver = "0.1" +toml = "0.1" +clippy = "0.4" + +[dev-dependencies] +regex = "0.1.1" +serde = "1.0.90" + +[features] +std = ["serde/std", "semver/std"] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/dry_run/in/src/lib.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/dry_run/in/src/lib.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/dry_run/in/src/lib.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/dry_run/in/src/lib.rs 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1 @@ + diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/dry_run/mod.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/dry_run/mod.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/dry_run/mod.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/dry_run/mod.rs 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,25 @@ +use cargo_test_support::compare::assert_ui; +use cargo_test_support::curr_dir; +use cargo_test_support::CargoCommand; +use cargo_test_support::Project; + +use crate::cargo_remove::init_registry; + +#[cargo_test] +fn case() { + init_registry(); + let project = Project::from_template(curr_dir!().join("in")); + let project_root = project.root(); + let cwd = &project_root; + + snapbox::cmd::Command::cargo_ui() + .arg("remove") + .args(["semver", "--dry-run"]) + .current_dir(cwd) + .assert() + .success() + .stdout_matches_path(curr_dir!().join("stdout.log")) + .stderr_matches_path(curr_dir!().join("stderr.log")); + + assert_ui().subset_matches(curr_dir!().join("out"), &project_root); +} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/dry_run/out/Cargo.toml 
cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/dry_run/out/Cargo.toml --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/dry_run/out/Cargo.toml 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/dry_run/out/Cargo.toml 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,24 @@ +[package] +name = "cargo-remove-test-fixture" +version = "0.1.0" + +[[bin]] +name = "main" +path = "src/main.rs" + +[build-dependencies] +semver = "0.1.0" + +[dependencies] +docopt = "0.6" +rustc-serialize = "0.4" +semver = "0.1" +toml = "0.1" +clippy = "0.4" + +[dev-dependencies] +regex = "0.1.1" +serde = "1.0.90" + +[features] +std = ["serde/std", "semver/std"] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/dry_run/out/src/lib.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/dry_run/out/src/lib.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/dry_run/out/src/lib.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/dry_run/out/src/lib.rs 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1 @@ + diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/dry_run/stderr.log cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/dry_run/stderr.log --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/dry_run/stderr.log 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/dry_run/stderr.log 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,2 @@ + Removing semver from dependencies +warning: aborting remove due to dry run diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_arg/in/Cargo.toml cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_arg/in/Cargo.toml --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_arg/in/Cargo.toml 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_arg/in/Cargo.toml 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,24 @@ +[package] +name = "cargo-remove-test-fixture" +version = "0.1.0" + +[[bin]] +name = "main" +path = "src/main.rs" + +[build-dependencies] +semver = "0.1.0" + +[dependencies] +docopt = "0.6" +rustc-serialize = "0.4" +semver = "0.1" +toml = "0.1" +clippy = "0.4" + +[dev-dependencies] +regex = "0.1.1" +serde = "1.0.90" + +[features] +std = ["serde/std", "semver/std"] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_arg/in/src/lib.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_arg/in/src/lib.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_arg/in/src/lib.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_arg/in/src/lib.rs 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1 @@ + diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_arg/mod.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_arg/mod.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_arg/mod.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_arg/mod.rs 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,25 @@ +use cargo_test_support::compare::assert_ui; +use cargo_test_support::curr_dir; +use cargo_test_support::CargoCommand; +use cargo_test_support::Project; + +use 
crate::cargo_remove::init_registry; + +#[cargo_test] +fn case() { + init_registry(); + let project = Project::from_template(curr_dir!().join("in")); + let project_root = project.root(); + let cwd = &project_root; + + snapbox::cmd::Command::cargo_ui() + .arg("remove") + .args(["foo", "--flag"]) + .current_dir(cwd) + .assert() + .code(1) + .stdout_matches_path(curr_dir!().join("stdout.log")) + .stderr_matches_path(curr_dir!().join("stderr.log")); + + assert_ui().subset_matches(curr_dir!().join("out"), &project_root); +} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_arg/out/Cargo.toml cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_arg/out/Cargo.toml --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_arg/out/Cargo.toml 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_arg/out/Cargo.toml 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,24 @@ +[package] +name = "cargo-remove-test-fixture" +version = "0.1.0" + +[[bin]] +name = "main" +path = "src/main.rs" + +[build-dependencies] +semver = "0.1.0" + +[dependencies] +docopt = "0.6" +rustc-serialize = "0.4" +semver = "0.1" +toml = "0.1" +clippy = "0.4" + +[dev-dependencies] +regex = "0.1.1" +serde = "1.0.90" + +[features] +std = ["serde/std", "semver/std"] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_arg/stderr.log cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_arg/stderr.log --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_arg/stderr.log 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_arg/stderr.log 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,7 @@ +error: Found argument '--flag' which wasn't expected, or isn't valid in this context + + If you tried to supply '--flag' as a value rather than a flag, use '-- --flag' + +Usage: cargo[EXE] remove ... 
+ +For more information try '--help' diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_dep/in/Cargo.toml cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_dep/in/Cargo.toml --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_dep/in/Cargo.toml 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_dep/in/Cargo.toml 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,24 @@ +[package] +name = "cargo-remove-test-fixture" +version = "0.1.0" + +[[bin]] +name = "main" +path = "src/main.rs" + +[build-dependencies] +semver = "0.1.0" + +[dependencies] +docopt = "0.6" +rustc-serialize = "0.4" +semver = "0.1" +toml = "0.1" +clippy = "0.4" + +[dev-dependencies] +regex = "0.1.1" +serde = "1.0.90" + +[features] +std = ["serde/std", "semver/std"] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_dep/in/src/lib.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_dep/in/src/lib.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_dep/in/src/lib.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_dep/in/src/lib.rs 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1 @@ + diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_dep/mod.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_dep/mod.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_dep/mod.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_dep/mod.rs 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,25 @@ +use cargo_test_support::compare::assert_ui; +use cargo_test_support::curr_dir; +use cargo_test_support::CargoCommand; +use cargo_test_support::Project; + +use crate::cargo_remove::init_registry; + +#[cargo_test] +fn case() { + init_registry(); + let project = Project::from_template(curr_dir!().join("in")); + let project_root = project.root(); + let cwd = &project_root; + + snapbox::cmd::Command::cargo_ui() + .arg("remove") + .args(["invalid_dependency_name"]) + .current_dir(cwd) + .assert() + .code(101) + .stdout_matches_path(curr_dir!().join("stdout.log")) + .stderr_matches_path(curr_dir!().join("stderr.log")); + + assert_ui().subset_matches(curr_dir!().join("out"), &project_root); +} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_dep/out/Cargo.toml cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_dep/out/Cargo.toml --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_dep/out/Cargo.toml 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_dep/out/Cargo.toml 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,24 @@ +[package] +name = "cargo-remove-test-fixture" +version = "0.1.0" + +[[bin]] +name = "main" +path = "src/main.rs" + +[build-dependencies] +semver = "0.1.0" + +[dependencies] +docopt = "0.6" +rustc-serialize = "0.4" +semver = "0.1" +toml = "0.1" +clippy = "0.4" + +[dev-dependencies] +regex = "0.1.1" +serde = "1.0.90" + +[features] +std = ["serde/std", "semver/std"] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_dep/stderr.log cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_dep/stderr.log --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_dep/stderr.log 1970-01-01 
00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_dep/stderr.log 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,2 @@ + Removing invalid_dependency_name from dependencies +error: the dependency `invalid_dependency_name` could not be found in `dependencies`. diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package/in/Cargo.toml cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package/in/Cargo.toml --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package/in/Cargo.toml 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package/in/Cargo.toml 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,5 @@ +[workspace] +members = [ + "dep-a", + "dep-b" +] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package/in/dep-a/Cargo.toml cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package/in/dep-a/Cargo.toml --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package/in/dep-a/Cargo.toml 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package/in/dep-a/Cargo.toml 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,23 @@ +[package] +name = "dep-a" +version = "0.1.0" + +[[bin]] +name = "main" +path = "src/main.rs" + +[build-dependencies] +semver = "0.1.0" + +[dependencies] +docopt = "0.6" +rustc-serialize = "0.4" +semver = "0.1" +toml = "0.1" + +[dev-dependencies] +regex = "0.1.1" +serde = "1.0.90" + +[features] +std = ["serde/std", "semver/std"] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package/in/dep-a/src/lib.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package/in/dep-a/src/lib.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package/in/dep-a/src/lib.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package/in/dep-a/src/lib.rs 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1 @@ + diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package/in/dep-b/Cargo.toml cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package/in/dep-b/Cargo.toml --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package/in/dep-b/Cargo.toml 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package/in/dep-b/Cargo.toml 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,23 @@ +[package] +name = "dep-b" +version = "0.1.0" + +[[bin]] +name = "main" +path = "src/main.rs" + +[build-dependencies] +semver = "0.1.0" + +[dependencies] +docopt = "0.6" +rustc-serialize = "0.4" +semver = "0.1" +toml = "0.1" + +[dev-dependencies] +regex = "0.1.1" +serde = "1.0.90" + +[features] +std = ["serde/std", "semver/std"] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package/in/dep-b/src/lib.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package/in/dep-b/src/lib.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package/in/dep-b/src/lib.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package/in/dep-b/src/lib.rs 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1 @@ + diff -Nru 
cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package/mod.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package/mod.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package/mod.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package/mod.rs 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,25 @@ +use cargo_test_support::compare::assert_ui; +use cargo_test_support::curr_dir; +use cargo_test_support::CargoCommand; +use cargo_test_support::Project; + +use crate::cargo_remove::init_registry; + +#[cargo_test] +fn case() { + init_registry(); + let project = Project::from_template(curr_dir!().join("in")); + let project_root = project.root(); + let cwd = &project_root; + + snapbox::cmd::Command::cargo_ui() + .arg("remove") + .args(["docopt", "--package", "dep-c"]) + .current_dir(cwd) + .assert() + .code(101) + .stdout_matches_path(curr_dir!().join("stdout.log")) + .stderr_matches_path(curr_dir!().join("stderr.log")); + + assert_ui().subset_matches(curr_dir!().join("out"), &project_root); +} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package/out/Cargo.toml cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package/out/Cargo.toml --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package/out/Cargo.toml 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package/out/Cargo.toml 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,5 @@ +[workspace] +members = [ + "dep-a", + "dep-b" +] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package/out/dep-a/Cargo.toml cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package/out/dep-a/Cargo.toml --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package/out/dep-a/Cargo.toml 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package/out/dep-a/Cargo.toml 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,23 @@ +[package] +name = "dep-a" +version = "0.1.0" + +[[bin]] +name = "main" +path = "src/main.rs" + +[build-dependencies] +semver = "0.1.0" + +[dependencies] +docopt = "0.6" +rustc-serialize = "0.4" +semver = "0.1" +toml = "0.1" + +[dev-dependencies] +regex = "0.1.1" +serde = "1.0.90" + +[features] +std = ["serde/std", "semver/std"] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package/out/dep-a/src/lib.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package/out/dep-a/src/lib.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package/out/dep-a/src/lib.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package/out/dep-a/src/lib.rs 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1 @@ + diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package/out/dep-b/Cargo.toml cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package/out/dep-b/Cargo.toml --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package/out/dep-b/Cargo.toml 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package/out/dep-b/Cargo.toml 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,23 @@ +[package] +name = "dep-b" +version = "0.1.0" + +[[bin]] +name = 
"main" +path = "src/main.rs" + +[build-dependencies] +semver = "0.1.0" + +[dependencies] +docopt = "0.6" +rustc-serialize = "0.4" +semver = "0.1" +toml = "0.1" + +[dev-dependencies] +regex = "0.1.1" +serde = "1.0.90" + +[features] +std = ["serde/std", "semver/std"] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package/out/dep-b/src/lib.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package/out/dep-b/src/lib.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package/out/dep-b/src/lib.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package/out/dep-b/src/lib.rs 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1 @@ + diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package/stderr.log cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package/stderr.log --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package/stderr.log 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package/stderr.log 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1 @@ +error: package(s) `dep-c` not found in workspace `[ROOT]/case` diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package_multiple/in/Cargo.toml cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package_multiple/in/Cargo.toml --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package_multiple/in/Cargo.toml 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package_multiple/in/Cargo.toml 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,5 @@ +[workspace] +members = [ + "dep-a", + "dep-b" +] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package_multiple/in/dep-a/Cargo.toml cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package_multiple/in/dep-a/Cargo.toml --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package_multiple/in/dep-a/Cargo.toml 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package_multiple/in/dep-a/Cargo.toml 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,23 @@ +[package] +name = "dep-a" +version = "0.1.0" + +[[bin]] +name = "main" +path = "src/main.rs" + +[build-dependencies] +semver = "0.1.0" + +[dependencies] +docopt = "0.6" +rustc-serialize = "0.4" +semver = "0.1" +toml = "0.1" + +[dev-dependencies] +regex = "0.1.1" +serde = "1.0.90" + +[features] +std = ["serde/std", "semver/std"] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package_multiple/in/dep-a/src/lib.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package_multiple/in/dep-a/src/lib.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package_multiple/in/dep-a/src/lib.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package_multiple/in/dep-a/src/lib.rs 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1 @@ + diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package_multiple/in/dep-b/Cargo.toml cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package_multiple/in/dep-b/Cargo.toml --- 
cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package_multiple/in/dep-b/Cargo.toml 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package_multiple/in/dep-b/Cargo.toml 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,23 @@ +[package] +name = "dep-b" +version = "0.1.0" + +[[bin]] +name = "main" +path = "src/main.rs" + +[build-dependencies] +semver = "0.1.0" + +[dependencies] +docopt = "0.6" +rustc-serialize = "0.4" +semver = "0.1" +toml = "0.1" + +[dev-dependencies] +regex = "0.1.1" +serde = "1.0.90" + +[features] +std = ["serde/std", "semver/std"] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package_multiple/in/dep-b/src/lib.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package_multiple/in/dep-b/src/lib.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package_multiple/in/dep-b/src/lib.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package_multiple/in/dep-b/src/lib.rs 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1 @@ + diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package_multiple/mod.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package_multiple/mod.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package_multiple/mod.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package_multiple/mod.rs 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,25 @@ +use cargo_test_support::compare::assert_ui; +use cargo_test_support::curr_dir; +use cargo_test_support::CargoCommand; +use cargo_test_support::Project; + +use crate::cargo_remove::init_registry; + +#[cargo_test] +fn case() { + init_registry(); + let project = Project::from_template(curr_dir!().join("in")); + let project_root = project.root(); + let cwd = &project_root; + + snapbox::cmd::Command::cargo_ui() + .arg("remove") + .args(["docopt"]) + .current_dir(cwd) + .assert() + .code(101) + .stdout_matches_path(curr_dir!().join("stdout.log")) + .stderr_matches_path(curr_dir!().join("stderr.log")); + + assert_ui().subset_matches(curr_dir!().join("out"), &project_root); +} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package_multiple/out/Cargo.toml cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package_multiple/out/Cargo.toml --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package_multiple/out/Cargo.toml 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package_multiple/out/Cargo.toml 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,5 @@ +[workspace] +members = [ + "dep-a", + "dep-b" +] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package_multiple/out/dep-a/Cargo.toml cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package_multiple/out/dep-a/Cargo.toml --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package_multiple/out/dep-a/Cargo.toml 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package_multiple/out/dep-a/Cargo.toml 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,23 @@ +[package] +name = "dep-a" +version = "0.1.0" + +[[bin]] +name = "main" +path = "src/main.rs" + +[build-dependencies] +semver = "0.1.0" 
+ +[dependencies] +docopt = "0.6" +rustc-serialize = "0.4" +semver = "0.1" +toml = "0.1" + +[dev-dependencies] +regex = "0.1.1" +serde = "1.0.90" + +[features] +std = ["serde/std", "semver/std"] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package_multiple/out/dep-a/src/lib.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package_multiple/out/dep-a/src/lib.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package_multiple/out/dep-a/src/lib.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package_multiple/out/dep-a/src/lib.rs 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1 @@ + diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package_multiple/out/dep-b/Cargo.toml cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package_multiple/out/dep-b/Cargo.toml --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package_multiple/out/dep-b/Cargo.toml 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package_multiple/out/dep-b/Cargo.toml 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,23 @@ +[package] +name = "dep-b" +version = "0.1.0" + +[[bin]] +name = "main" +path = "src/main.rs" + +[build-dependencies] +semver = "0.1.0" + +[dependencies] +docopt = "0.6" +rustc-serialize = "0.4" +semver = "0.1" +toml = "0.1" + +[dev-dependencies] +regex = "0.1.1" +serde = "1.0.90" + +[features] +std = ["serde/std", "semver/std"] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package_multiple/out/dep-b/src/lib.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package_multiple/out/dep-b/src/lib.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package_multiple/out/dep-b/src/lib.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package_multiple/out/dep-b/src/lib.rs 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1 @@ + diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package_multiple/stderr.log cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package_multiple/stderr.log --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package_multiple/stderr.log 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_package_multiple/stderr.log 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1 @@ +error: 2 packages selected. 
Please specify one with `-p ` diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_section/in/Cargo.toml cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_section/in/Cargo.toml --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_section/in/Cargo.toml 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_section/in/Cargo.toml 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,24 @@ +[package] +name = "cargo-remove-test-fixture" +version = "0.1.0" + +[[bin]] +name = "main" +path = "src/main.rs" + +[build-dependencies] +semver = "0.1.0" + +[dependencies] +docopt = "0.6" +rustc-serialize = "0.4" +semver = "0.1" +toml = "0.1" +clippy = "0.4" + +[dev-dependencies] +regex = "0.1.1" +serde = "1.0.90" + +[features] +std = ["serde/std", "semver/std"] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_section/in/src/lib.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_section/in/src/lib.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_section/in/src/lib.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_section/in/src/lib.rs 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1 @@ + diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_section/mod.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_section/mod.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_section/mod.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_section/mod.rs 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,25 @@ +use cargo_test_support::compare::assert_ui; +use cargo_test_support::curr_dir; +use cargo_test_support::CargoCommand; +use cargo_test_support::Project; + +use crate::cargo_remove::init_registry; + +#[cargo_test] +fn case() { + init_registry(); + let project = Project::from_template(curr_dir!().join("in")); + let project_root = project.root(); + let cwd = &project_root; + + snapbox::cmd::Command::cargo_ui() + .arg("remove") + .args(["--build", "docopt"]) + .current_dir(cwd) + .assert() + .code(101) + .stdout_matches_path(curr_dir!().join("stdout.log")) + .stderr_matches_path(curr_dir!().join("stderr.log")); + + assert_ui().subset_matches(curr_dir!().join("out"), &project_root); +} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_section/out/Cargo.toml cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_section/out/Cargo.toml --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_section/out/Cargo.toml 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_section/out/Cargo.toml 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,24 @@ +[package] +name = "cargo-remove-test-fixture" +version = "0.1.0" + +[[bin]] +name = "main" +path = "src/main.rs" + +[build-dependencies] +semver = "0.1.0" + +[dependencies] +docopt = "0.6" +rustc-serialize = "0.4" +semver = "0.1" +toml = "0.1" +clippy = "0.4" + +[dev-dependencies] +regex = "0.1.1" +serde = "1.0.90" + +[features] +std = ["serde/std", "semver/std"] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_section/stderr.log cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_section/stderr.log --- 
cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_section/stderr.log 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_section/stderr.log 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,2 @@ + Removing docopt from build-dependencies +error: the dependency `docopt` could not be found in `build-dependencies`. diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_section_dep/in/Cargo.toml cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_section_dep/in/Cargo.toml --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_section_dep/in/Cargo.toml 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_section_dep/in/Cargo.toml 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,24 @@ +[package] +name = "cargo-remove-test-fixture" +version = "0.1.0" + +[[bin]] +name = "main" +path = "src/main.rs" + +[build-dependencies] +semver = "0.1.0" + +[dependencies] +docopt = "0.6" +rustc-serialize = "0.4" +semver = "0.1" +toml = "0.1" +clippy = "0.4" + +[dev-dependencies] +regex = "0.1.1" +serde = "1.0.90" + +[features] +std = ["serde/std", "semver/std"] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_section_dep/in/src/lib.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_section_dep/in/src/lib.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_section_dep/in/src/lib.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_section_dep/in/src/lib.rs 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1 @@ + diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_section_dep/mod.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_section_dep/mod.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_section_dep/mod.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_section_dep/mod.rs 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,25 @@ +use cargo_test_support::compare::assert_ui; +use cargo_test_support::curr_dir; +use cargo_test_support::CargoCommand; +use cargo_test_support::Project; + +use crate::cargo_remove::init_registry; + +#[cargo_test] +fn case() { + init_registry(); + let project = Project::from_template(curr_dir!().join("in")); + let project_root = project.root(); + let cwd = &project_root; + + snapbox::cmd::Command::cargo_ui() + .arg("remove") + .args(["--dev", "semver", "regex"]) + .current_dir(cwd) + .assert() + .code(101) + .stdout_matches_path(curr_dir!().join("stdout.log")) + .stderr_matches_path(curr_dir!().join("stderr.log")); + + assert_ui().subset_matches(curr_dir!().join("out"), &project_root); +} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_section_dep/out/Cargo.toml cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_section_dep/out/Cargo.toml --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_section_dep/out/Cargo.toml 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_section_dep/out/Cargo.toml 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,24 @@ +[package] +name = "cargo-remove-test-fixture" +version = "0.1.0" + +[[bin]] +name = "main" +path = "src/main.rs" + +[build-dependencies] +semver = "0.1.0" + +[dependencies] +docopt 
= "0.6" +rustc-serialize = "0.4" +semver = "0.1" +toml = "0.1" +clippy = "0.4" + +[dev-dependencies] +regex = "0.1.1" +serde = "1.0.90" + +[features] +std = ["serde/std", "semver/std"] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_section_dep/stderr.log cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_section_dep/stderr.log --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_section_dep/stderr.log 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_section_dep/stderr.log 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,2 @@ + Removing semver from dev-dependencies +error: the dependency `semver` could not be found in `dev-dependencies`. diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_target/in/Cargo.toml cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_target/in/Cargo.toml --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_target/in/Cargo.toml 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_target/in/Cargo.toml 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,33 @@ +[package] +name = "cargo-remove-target-test-fixture" +version = "0.1.0" + +[[bin]] +name = "main" +path = "src/main.rs" + +[target.x86_64-unknown-freebsd.build-dependencies] +semver = "0.1.0" + +[target.x86_64-unknown-linux-gnu.build-dependencies] +semver = "0.1.0" + +[dependencies] +docopt = "0.6" +rustc-serialize = "0.4" +semver = "0.1" +toml = "0.1" +clippy = "0.4" + +[target.x86_64-unknown-linux-gnu.dependencies] +dbus = "0.6.2" + +[dev-dependencies] +regex = "0.1.1" +serde = "1.0.90" + +[target.x86_64-unknown-linux-gnu.dev-dependencies] +ncurses = "20.0" + +[features] +std = ["serde/std", "semver/std"] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_target/mod.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_target/mod.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_target/mod.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_target/mod.rs 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,25 @@ +use cargo_test_support::compare::assert_ui; +use cargo_test_support::curr_dir; +use cargo_test_support::CargoCommand; +use cargo_test_support::Project; + +use crate::cargo_remove::init_registry; + +#[cargo_test] +fn case() { + init_registry(); + let project = Project::from_template(curr_dir!().join("in")); + let project_root = project.root(); + let cwd = &project_root; + + snapbox::cmd::Command::cargo_ui() + .arg("remove") + .args(["--target", "powerpc-unknown-linux-gnu", "dbus"]) + .current_dir(cwd) + .assert() + .code(101) + .stdout_matches_path(curr_dir!().join("stdout.log")) + .stderr_matches_path(curr_dir!().join("stderr.log")); + + assert_ui().subset_matches(curr_dir!().join("out"), &project_root); +} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_target/out/Cargo.toml cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_target/out/Cargo.toml --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_target/out/Cargo.toml 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_target/out/Cargo.toml 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,33 @@ +[package] +name = "cargo-remove-target-test-fixture" +version = 
"0.1.0" + +[[bin]] +name = "main" +path = "src/main.rs" + +[target.x86_64-unknown-freebsd.build-dependencies] +semver = "0.1.0" + +[target.x86_64-unknown-linux-gnu.build-dependencies] +semver = "0.1.0" + +[dependencies] +docopt = "0.6" +rustc-serialize = "0.4" +semver = "0.1" +toml = "0.1" +clippy = "0.4" + +[target.x86_64-unknown-linux-gnu.dependencies] +dbus = "0.6.2" + +[dev-dependencies] +regex = "0.1.1" +serde = "1.0.90" + +[target.x86_64-unknown-linux-gnu.dev-dependencies] +ncurses = "20.0" + +[features] +std = ["serde/std", "semver/std"] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_target/stderr.log cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_target/stderr.log --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_target/stderr.log 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_target/stderr.log 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,2 @@ + Removing dbus from dependencies for target `powerpc-unknown-linux-gnu` +error: the dependency `dbus` could not be found in `target.powerpc-unknown-linux-gnu.dependencies`. diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_target_dep/in/Cargo.toml cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_target_dep/in/Cargo.toml --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_target_dep/in/Cargo.toml 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_target_dep/in/Cargo.toml 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,33 @@ +[package] +name = "cargo-remove-target-test-fixture" +version = "0.1.0" + +[[bin]] +name = "main" +path = "src/main.rs" + +[target.x86_64-unknown-freebsd.build-dependencies] +semver = "0.1.0" + +[target.x86_64-unknown-linux-gnu.build-dependencies] +semver = "0.1.0" + +[dependencies] +docopt = "0.6" +rustc-serialize = "0.4" +semver = "0.1" +toml = "0.1" +clippy = "0.4" + +[target.x86_64-unknown-linux-gnu.dependencies] +dbus = "0.6.2" + +[dev-dependencies] +regex = "0.1.1" +serde = "1.0.90" + +[target.x86_64-unknown-linux-gnu.dev-dependencies] +ncurses = "20.0" + +[features] +std = ["serde/std", "semver/std"] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_target_dep/mod.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_target_dep/mod.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_target_dep/mod.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_target_dep/mod.rs 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,25 @@ +use cargo_test_support::compare::assert_ui; +use cargo_test_support::curr_dir; +use cargo_test_support::CargoCommand; +use cargo_test_support::Project; + +use crate::cargo_remove::init_registry; + +#[cargo_test] +fn case() { + init_registry(); + let project = Project::from_template(curr_dir!().join("in")); + let project_root = project.root(); + let cwd = &project_root; + + snapbox::cmd::Command::cargo_ui() + .arg("remove") + .args(["--target", "x86_64-unknown-linux-gnu", "toml"]) + .current_dir(cwd) + .assert() + .code(101) + .stdout_matches_path(curr_dir!().join("stdout.log")) + .stderr_matches_path(curr_dir!().join("stderr.log")); + + assert_ui().subset_matches(curr_dir!().join("out"), &project_root); +} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_target_dep/out/Cargo.toml 
cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_target_dep/out/Cargo.toml --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_target_dep/out/Cargo.toml 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_target_dep/out/Cargo.toml 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,33 @@ +[package] +name = "cargo-remove-target-test-fixture" +version = "0.1.0" + +[[bin]] +name = "main" +path = "src/main.rs" + +[target.x86_64-unknown-freebsd.build-dependencies] +semver = "0.1.0" + +[target.x86_64-unknown-linux-gnu.build-dependencies] +semver = "0.1.0" + +[dependencies] +docopt = "0.6" +rustc-serialize = "0.4" +semver = "0.1" +toml = "0.1" +clippy = "0.4" + +[target.x86_64-unknown-linux-gnu.dependencies] +dbus = "0.6.2" + +[dev-dependencies] +regex = "0.1.1" +serde = "1.0.90" + +[target.x86_64-unknown-linux-gnu.dev-dependencies] +ncurses = "20.0" + +[features] +std = ["serde/std", "semver/std"] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_target_dep/stderr.log cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_target_dep/stderr.log --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_target_dep/stderr.log 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/invalid_target_dep/stderr.log 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,2 @@ + Removing toml from dependencies for target `x86_64-unknown-linux-gnu` +error: the dependency `toml` could not be found in `target.x86_64-unknown-linux-gnu.dependencies`. diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/mod.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/mod.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/mod.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/mod.rs 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,82 @@ +mod avoid_empty_tables; +mod build; +mod dev; +mod dry_run; +mod invalid_arg; +mod invalid_dep; +mod invalid_package; +mod invalid_package_multiple; +mod invalid_section; +mod invalid_section_dep; +mod invalid_target; +mod invalid_target_dep; +mod multiple_deps; +mod multiple_dev; +mod no_arg; +mod offline; +mod optional_dep_feature; +mod optional_feature; +mod package; +mod remove_basic; +mod target; +mod target_build; +mod target_dev; +mod update_lock_file; + +fn init_registry() { + cargo_test_support::registry::init(); + add_registry_packages(false); +} + +fn add_registry_packages(alt: bool) { + for name in [ + "clippy", + "dbus", + "docopt", + "ncurses", + "pad", + "regex", + "rustc-serialize", + "toml", + ] { + cargo_test_support::registry::Package::new(name, "0.1.1+my-package") + .alternative(alt) + .publish(); + cargo_test_support::registry::Package::new(name, "0.2.0+my-package") + .alternative(alt) + .publish(); + cargo_test_support::registry::Package::new(name, "0.2.3+my-package") + .alternative(alt) + .publish(); + cargo_test_support::registry::Package::new(name, "0.4.1+my-package") + .alternative(alt) + .publish(); + cargo_test_support::registry::Package::new(name, "0.6.2+my-package") + .alternative(alt) + .publish(); + cargo_test_support::registry::Package::new(name, "0.9.9+my-package") + .alternative(alt) + .publish(); + cargo_test_support::registry::Package::new(name, "1.0.90+my-package") + .alternative(alt) + .publish(); + cargo_test_support::registry::Package::new(name, "20.0.0+my-package") + 
.alternative(alt) + .publish(); + } + + for name in ["semver", "serde"] { + cargo_test_support::registry::Package::new(name, "0.1.1") + .alternative(alt) + .feature("std", &[]) + .publish(); + cargo_test_support::registry::Package::new(name, "0.9.0") + .alternative(alt) + .feature("std", &[]) + .publish(); + cargo_test_support::registry::Package::new(name, "1.0.90") + .alternative(alt) + .feature("std", &[]) + .publish(); + } +} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/multiple_deps/in/Cargo.toml cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/multiple_deps/in/Cargo.toml --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/multiple_deps/in/Cargo.toml 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/multiple_deps/in/Cargo.toml 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,24 @@ +[package] +name = "cargo-remove-test-fixture" +version = "0.1.0" + +[[bin]] +name = "main" +path = "src/main.rs" + +[build-dependencies] +semver = "0.1.0" + +[dependencies] +docopt = "0.6" +rustc-serialize = "0.4" +semver = "0.1" +toml = "0.1" +clippy = "0.4" + +[dev-dependencies] +regex = "0.1.1" +serde = "1.0.90" + +[features] +std = ["serde/std", "semver/std"] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/multiple_deps/in/src/lib.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/multiple_deps/in/src/lib.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/multiple_deps/in/src/lib.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/multiple_deps/in/src/lib.rs 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1 @@ + diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/multiple_deps/mod.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/multiple_deps/mod.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/multiple_deps/mod.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/multiple_deps/mod.rs 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,25 @@ +use cargo_test_support::compare::assert_ui; +use cargo_test_support::curr_dir; +use cargo_test_support::CargoCommand; +use cargo_test_support::Project; + +use crate::cargo_remove::init_registry; + +#[cargo_test] +fn case() { + init_registry(); + let project = Project::from_template(curr_dir!().join("in")); + let project_root = project.root(); + let cwd = &project_root; + + snapbox::cmd::Command::cargo_ui() + .arg("remove") + .args(["docopt", "semver"]) + .current_dir(cwd) + .assert() + .success() + .stdout_matches_path(curr_dir!().join("stdout.log")) + .stderr_matches_path(curr_dir!().join("stderr.log")); + + assert_ui().subset_matches(curr_dir!().join("out"), &project_root); +} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/multiple_deps/out/Cargo.toml cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/multiple_deps/out/Cargo.toml --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/multiple_deps/out/Cargo.toml 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/multiple_deps/out/Cargo.toml 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,22 @@ +[package] +name = "cargo-remove-test-fixture" +version = "0.1.0" + +[[bin]] +name = "main" +path = "src/main.rs" + +[build-dependencies] +semver = "0.1.0" + +[dependencies] +rustc-serialize = "0.4" +toml = "0.1" +clippy = "0.4" + 
+[dev-dependencies] +regex = "0.1.1" +serde = "1.0.90" + +[features] +std = ["serde/std", "semver/std"] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/multiple_deps/stderr.log cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/multiple_deps/stderr.log --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/multiple_deps/stderr.log 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/multiple_deps/stderr.log 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,3 @@ + Removing docopt from dependencies + Removing semver from dependencies + Updating `dummy-registry` index diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/multiple_dev/in/Cargo.toml cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/multiple_dev/in/Cargo.toml --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/multiple_dev/in/Cargo.toml 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/multiple_dev/in/Cargo.toml 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,24 @@ +[package] +name = "cargo-remove-test-fixture" +version = "0.1.0" + +[[bin]] +name = "main" +path = "src/main.rs" + +[build-dependencies] +semver = "0.1.0" + +[dependencies] +docopt = "0.6" +rustc-serialize = "0.4" +semver = "0.1" +toml = "0.1" +clippy = "0.4" + +[dev-dependencies] +regex = "0.1.1" +serde = "1.0.90" + +[features] +std = ["serde/std", "semver/std"] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/multiple_dev/in/src/lib.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/multiple_dev/in/src/lib.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/multiple_dev/in/src/lib.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/multiple_dev/in/src/lib.rs 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1 @@ + diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/multiple_dev/mod.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/multiple_dev/mod.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/multiple_dev/mod.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/multiple_dev/mod.rs 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,25 @@ +use cargo_test_support::compare::assert_ui; +use cargo_test_support::curr_dir; +use cargo_test_support::CargoCommand; +use cargo_test_support::Project; + +use crate::cargo_remove::init_registry; + +#[cargo_test] +fn case() { + init_registry(); + let project = Project::from_template(curr_dir!().join("in")); + let project_root = project.root(); + let cwd = &project_root; + + snapbox::cmd::Command::cargo_ui() + .arg("remove") + .args(["--dev", "regex", "serde"]) + .current_dir(cwd) + .assert() + .success() + .stdout_matches_path(curr_dir!().join("stdout.log")) + .stderr_matches_path(curr_dir!().join("stderr.log")); + + assert_ui().subset_matches(curr_dir!().join("out"), &project_root); +} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/multiple_dev/out/Cargo.toml cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/multiple_dev/out/Cargo.toml --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/multiple_dev/out/Cargo.toml 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/multiple_dev/out/Cargo.toml 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,20 @@ +[package] +name = 
"cargo-remove-test-fixture" +version = "0.1.0" + +[[bin]] +name = "main" +path = "src/main.rs" + +[build-dependencies] +semver = "0.1.0" + +[dependencies] +docopt = "0.6" +rustc-serialize = "0.4" +semver = "0.1" +toml = "0.1" +clippy = "0.4" + +[features] +std = ["semver/std"] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/multiple_dev/stderr.log cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/multiple_dev/stderr.log --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/multiple_dev/stderr.log 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/multiple_dev/stderr.log 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,3 @@ + Removing regex from dev-dependencies + Removing serde from dev-dependencies + Updating `dummy-registry` index diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/no_arg/in/Cargo.toml cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/no_arg/in/Cargo.toml --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/no_arg/in/Cargo.toml 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/no_arg/in/Cargo.toml 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,24 @@ +[package] +name = "cargo-remove-test-fixture" +version = "0.1.0" + +[[bin]] +name = "main" +path = "src/main.rs" + +[build-dependencies] +semver = "0.1.0" + +[dependencies] +docopt = "0.6" +rustc-serialize = "0.4" +semver = "0.1" +toml = "0.1" +clippy = "0.4" + +[dev-dependencies] +regex = "0.1.1" +serde = "1.0.90" + +[features] +std = ["serde/std", "semver/std"] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/no_arg/in/src/lib.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/no_arg/in/src/lib.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/no_arg/in/src/lib.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/no_arg/in/src/lib.rs 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1 @@ + diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/no_arg/mod.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/no_arg/mod.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/no_arg/mod.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/no_arg/mod.rs 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,24 @@ +use cargo_test_support::compare::assert_ui; +use cargo_test_support::curr_dir; +use cargo_test_support::CargoCommand; +use cargo_test_support::Project; + +use crate::cargo_remove::init_registry; + +#[cargo_test] +fn case() { + init_registry(); + let project = Project::from_template(curr_dir!().join("in")); + let project_root = project.root(); + let cwd = &project_root; + + snapbox::cmd::Command::cargo_ui() + .arg("remove") + .current_dir(cwd) + .assert() + .code(1) + .stdout_matches_path(curr_dir!().join("stdout.log")) + .stderr_matches_path(curr_dir!().join("stderr.log")); + + assert_ui().subset_matches(curr_dir!().join("out"), &project_root); +} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/no_arg/out/Cargo.toml cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/no_arg/out/Cargo.toml --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/no_arg/out/Cargo.toml 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/no_arg/out/Cargo.toml 2023-01-10 13:41:19.000000000 
+0000 @@ -0,0 +1,24 @@ +[package] +name = "cargo-remove-test-fixture" +version = "0.1.0" + +[[bin]] +name = "main" +path = "src/main.rs" + +[build-dependencies] +semver = "0.1.0" + +[dependencies] +docopt = "0.6" +rustc-serialize = "0.4" +semver = "0.1" +toml = "0.1" +clippy = "0.4" + +[dev-dependencies] +regex = "0.1.1" +serde = "1.0.90" + +[features] +std = ["serde/std", "semver/std"] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/no_arg/stderr.log cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/no_arg/stderr.log --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/no_arg/stderr.log 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/no_arg/stderr.log 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,6 @@ +error: The following required arguments were not provided: + ... + +Usage: cargo[EXE] remove ... + +For more information try '--help' diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/offline/in/Cargo.toml cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/offline/in/Cargo.toml --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/offline/in/Cargo.toml 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/offline/in/Cargo.toml 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,24 @@ +[package] +name = "cargo-remove-test-fixture" +version = "0.1.0" + +[[bin]] +name = "main" +path = "src/main.rs" + +[build-dependencies] +semver = "0.1.0" + +[dependencies] +docopt = "0.6" +rustc-serialize = "0.4" +semver = "0.1" +toml = "0.1" +clippy = "0.4" + +[dev-dependencies] +regex = "0.1.1" +serde = "1.0.90" + +[features] +std = ["serde/std", "semver/std"] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/offline/in/src/lib.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/offline/in/src/lib.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/offline/in/src/lib.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/offline/in/src/lib.rs 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1 @@ + diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/offline/mod.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/offline/mod.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/offline/mod.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/offline/mod.rs 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,32 @@ +use cargo_test_support::compare::assert_ui; +use cargo_test_support::curr_dir; +use cargo_test_support::CargoCommand; +use cargo_test_support::Project; + +use crate::cargo_remove::init_registry; + +#[cargo_test] +fn case() { + init_registry(); + let project = Project::from_template(curr_dir!().join("in")); + let project_root = project.root(); + let cwd = &project_root; + + // run the metadata command to populate the cache + snapbox::cmd::Command::cargo_ui() + .arg("metadata") + .current_dir(cwd) + .assert() + .success(); + + snapbox::cmd::Command::cargo_ui() + .arg("remove") + .args(["docopt", "--offline"]) + .current_dir(cwd) + .assert() + .success() + .stdout_matches_path(curr_dir!().join("stdout.log")) + .stderr_matches_path(curr_dir!().join("stderr.log")); + + assert_ui().subset_matches(curr_dir!().join("out"), &project_root); +} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/offline/out/Cargo.toml 
cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/offline/out/Cargo.toml --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/offline/out/Cargo.toml 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/offline/out/Cargo.toml 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,23 @@ +[package] +name = "cargo-remove-test-fixture" +version = "0.1.0" + +[[bin]] +name = "main" +path = "src/main.rs" + +[build-dependencies] +semver = "0.1.0" + +[dependencies] +rustc-serialize = "0.4" +semver = "0.1" +toml = "0.1" +clippy = "0.4" + +[dev-dependencies] +regex = "0.1.1" +serde = "1.0.90" + +[features] +std = ["serde/std", "semver/std"] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/offline/stderr.log cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/offline/stderr.log --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/offline/stderr.log 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/offline/stderr.log 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1 @@ + Removing docopt from dependencies diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/optional_dep_feature/in/Cargo.toml cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/optional_dep_feature/in/Cargo.toml --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/optional_dep_feature/in/Cargo.toml 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/optional_dep_feature/in/Cargo.toml 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,24 @@ +[package] +name = "cargo-remove-test-fixture" +version = "0.1.0" + +[[bin]] +name = "main" +path = "src/main.rs" + +[build-dependencies] +semver = "0.1.0" + +[dependencies] +docopt = "0.6" +rustc-serialize = "0.4" +semver = "0.1" +toml = "0.1" +clippy = "0.4" + +[dev-dependencies] +regex = "0.1.1" +serde = "1.0.90" + +[features] +std = ["serde/std", "semver/std"] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/optional_dep_feature/in/src/lib.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/optional_dep_feature/in/src/lib.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/optional_dep_feature/in/src/lib.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/optional_dep_feature/in/src/lib.rs 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1 @@ + diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/optional_dep_feature/mod.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/optional_dep_feature/mod.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/optional_dep_feature/mod.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/optional_dep_feature/mod.rs 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,25 @@ +use cargo_test_support::compare::assert_ui; +use cargo_test_support::curr_dir; +use cargo_test_support::CargoCommand; +use cargo_test_support::Project; + +use crate::cargo_remove::init_registry; + +#[cargo_test] +fn case() { + init_registry(); + let project = Project::from_template(curr_dir!().join("in")); + let project_root = project.root(); + let cwd = &project_root; + + snapbox::cmd::Command::cargo_ui() + .arg("remove") + .args(["--dev", "serde"]) + .current_dir(cwd) + .assert() + .success() + .stdout_matches_path(curr_dir!().join("stdout.log")) + 
.stderr_matches_path(curr_dir!().join("stderr.log")); + + assert_ui().subset_matches(curr_dir!().join("out"), &project_root); +} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/optional_dep_feature/out/Cargo.toml cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/optional_dep_feature/out/Cargo.toml --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/optional_dep_feature/out/Cargo.toml 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/optional_dep_feature/out/Cargo.toml 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,23 @@ +[package] +name = "cargo-remove-test-fixture" +version = "0.1.0" + +[[bin]] +name = "main" +path = "src/main.rs" + +[build-dependencies] +semver = "0.1.0" + +[dependencies] +docopt = "0.6" +rustc-serialize = "0.4" +semver = "0.1" +toml = "0.1" +clippy = "0.4" + +[dev-dependencies] +regex = "0.1.1" + +[features] +std = ["semver/std"] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/optional_dep_feature/stderr.log cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/optional_dep_feature/stderr.log --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/optional_dep_feature/stderr.log 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/optional_dep_feature/stderr.log 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,2 @@ + Removing serde from dev-dependencies + Updating `dummy-registry` index diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/optional_feature/in/Cargo.toml cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/optional_feature/in/Cargo.toml --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/optional_feature/in/Cargo.toml 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/optional_feature/in/Cargo.toml 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,24 @@ +[package] +name = "cargo-remove-test-fixture" +version = "0.1.0" + +[[bin]] +name = "main" +path = "src/main.rs" + +[build-dependencies] +semver = "0.1.0" + +[dependencies] +docopt = "0.6" +rustc-serialize = "0.4" +semver = "0.1" +toml = "0.1" +clippy = "0.4" + +[dev-dependencies] +regex = "0.1.1" +serde = "1.0.90" + +[features] +std = ["serde/std", "semver/std"] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/optional_feature/in/src/lib.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/optional_feature/in/src/lib.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/optional_feature/in/src/lib.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/optional_feature/in/src/lib.rs 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1 @@ + diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/optional_feature/mod.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/optional_feature/mod.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/optional_feature/mod.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/optional_feature/mod.rs 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,25 @@ +use cargo_test_support::compare::assert_ui; +use cargo_test_support::curr_dir; +use cargo_test_support::CargoCommand; +use cargo_test_support::Project; + +use crate::cargo_remove::init_registry; + +#[cargo_test] +fn case() { + init_registry(); + let project = 
Project::from_template(curr_dir!().join("in")); + let project_root = project.root(); + let cwd = &project_root; + + snapbox::cmd::Command::cargo_ui() + .arg("remove") + .args(["semver"]) + .current_dir(cwd) + .assert() + .success() + .stdout_matches_path(curr_dir!().join("stdout.log")) + .stderr_matches_path(curr_dir!().join("stderr.log")); + + assert_ui().subset_matches(curr_dir!().join("out"), &project_root); +} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/optional_feature/out/Cargo.toml cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/optional_feature/out/Cargo.toml --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/optional_feature/out/Cargo.toml 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/optional_feature/out/Cargo.toml 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,23 @@ +[package] +name = "cargo-remove-test-fixture" +version = "0.1.0" + +[[bin]] +name = "main" +path = "src/main.rs" + +[build-dependencies] +semver = "0.1.0" + +[dependencies] +docopt = "0.6" +rustc-serialize = "0.4" +toml = "0.1" +clippy = "0.4" + +[dev-dependencies] +regex = "0.1.1" +serde = "1.0.90" + +[features] +std = ["serde/std", "semver/std"] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/optional_feature/stderr.log cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/optional_feature/stderr.log --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/optional_feature/stderr.log 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/optional_feature/stderr.log 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,2 @@ + Removing semver from dependencies + Updating `dummy-registry` index diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/package/in/Cargo.toml cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/package/in/Cargo.toml --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/package/in/Cargo.toml 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/package/in/Cargo.toml 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,5 @@ +[workspace] +members = [ + "dep-a", + "dep-b" +] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/package/in/dep-a/Cargo.toml cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/package/in/dep-a/Cargo.toml --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/package/in/dep-a/Cargo.toml 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/package/in/dep-a/Cargo.toml 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,23 @@ +[package] +name = "dep-a" +version = "0.1.0" + +[[bin]] +name = "main" +path = "src/main.rs" + +[build-dependencies] +semver = "0.1.0" + +[dependencies] +docopt = "0.6" +rustc-serialize = "0.4" +semver = "0.1" +toml = "0.1" + +[dev-dependencies] +regex = "0.1.1" +serde = "1.0.90" + +[features] +std = ["serde/std", "semver/std"] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/package/in/dep-a/src/lib.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/package/in/dep-a/src/lib.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/package/in/dep-a/src/lib.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/package/in/dep-a/src/lib.rs 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1 @@ + diff -Nru 
cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/package/in/dep-b/Cargo.toml cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/package/in/dep-b/Cargo.toml --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/package/in/dep-b/Cargo.toml 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/package/in/dep-b/Cargo.toml 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,23 @@ +[package] +name = "dep-b" +version = "0.1.0" + +[[bin]] +name = "main" +path = "src/main.rs" + +[build-dependencies] +semver = "0.1.0" + +[dependencies] +docopt = "0.6" +rustc-serialize = "0.4" +semver = "0.1" +toml = "0.1" + +[dev-dependencies] +regex = "0.1.1" +serde = "1.0.90" + +[features] +std = ["serde/std", "semver/std"] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/package/in/dep-b/src/lib.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/package/in/dep-b/src/lib.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/package/in/dep-b/src/lib.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/package/in/dep-b/src/lib.rs 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1 @@ + diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/package/mod.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/package/mod.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/package/mod.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/package/mod.rs 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,25 @@ +use cargo_test_support::compare::assert_ui; +use cargo_test_support::curr_dir; +use cargo_test_support::CargoCommand; +use cargo_test_support::Project; + +use crate::cargo_remove::init_registry; + +#[cargo_test] +fn case() { + init_registry(); + let project = Project::from_template(curr_dir!().join("in")); + let project_root = project.root(); + let cwd = &project_root; + + snapbox::cmd::Command::cargo_ui() + .arg("remove") + .args(["docopt", "--package", "dep-a"]) + .current_dir(cwd) + .assert() + .success() + .stdout_matches_path(curr_dir!().join("stdout.log")) + .stderr_matches_path(curr_dir!().join("stderr.log")); + + assert_ui().subset_matches(curr_dir!().join("out"), &project_root); +} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/package/out/Cargo.toml cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/package/out/Cargo.toml --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/package/out/Cargo.toml 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/package/out/Cargo.toml 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,5 @@ +[workspace] +members = [ + "dep-a", + "dep-b" +] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/package/out/dep-a/Cargo.toml cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/package/out/dep-a/Cargo.toml --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/package/out/dep-a/Cargo.toml 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/package/out/dep-a/Cargo.toml 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,22 @@ +[package] +name = "dep-a" +version = "0.1.0" + +[[bin]] +name = "main" +path = "src/main.rs" + +[build-dependencies] +semver = "0.1.0" + +[dependencies] +rustc-serialize = "0.4" +semver = "0.1" +toml = "0.1" + +[dev-dependencies] +regex 
= "0.1.1" +serde = "1.0.90" + +[features] +std = ["serde/std", "semver/std"] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/package/out/dep-a/src/lib.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/package/out/dep-a/src/lib.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/package/out/dep-a/src/lib.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/package/out/dep-a/src/lib.rs 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1 @@ + diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/package/out/dep-b/Cargo.toml cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/package/out/dep-b/Cargo.toml --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/package/out/dep-b/Cargo.toml 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/package/out/dep-b/Cargo.toml 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,23 @@ +[package] +name = "dep-b" +version = "0.1.0" + +[[bin]] +name = "main" +path = "src/main.rs" + +[build-dependencies] +semver = "0.1.0" + +[dependencies] +docopt = "0.6" +rustc-serialize = "0.4" +semver = "0.1" +toml = "0.1" + +[dev-dependencies] +regex = "0.1.1" +serde = "1.0.90" + +[features] +std = ["serde/std", "semver/std"] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/package/out/dep-b/src/lib.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/package/out/dep-b/src/lib.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/package/out/dep-b/src/lib.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/package/out/dep-b/src/lib.rs 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1 @@ + diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/package/stderr.log cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/package/stderr.log --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/package/stderr.log 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/package/stderr.log 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,2 @@ + Removing docopt from dependencies + Updating `dummy-registry` index diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/remove_basic/in/Cargo.toml cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/remove_basic/in/Cargo.toml --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/remove_basic/in/Cargo.toml 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/remove_basic/in/Cargo.toml 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,24 @@ +[package] +name = "cargo-remove-test-fixture" +version = "0.1.0" + +[[bin]] +name = "main" +path = "src/main.rs" + +[build-dependencies] +semver = "0.1.0" + +[dependencies] +docopt = "0.6" +rustc-serialize = "0.4" +semver = "0.1" +toml = "0.1" +clippy = "0.4" + +[dev-dependencies] +regex = "0.1.1" +serde = "1.0.90" + +[features] +std = ["serde/std", "semver/std"] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/remove_basic/in/src/lib.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/remove_basic/in/src/lib.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/remove_basic/in/src/lib.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/remove_basic/in/src/lib.rs 2023-01-10 
13:41:19.000000000 +0000 @@ -0,0 +1 @@ + diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/remove_basic/mod.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/remove_basic/mod.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/remove_basic/mod.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/remove_basic/mod.rs 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,25 @@ +use cargo_test_support::compare::assert_ui; +use cargo_test_support::curr_dir; +use cargo_test_support::CargoCommand; +use cargo_test_support::Project; + +use crate::cargo_remove::init_registry; + +#[cargo_test] +fn case() { + init_registry(); + let project = Project::from_template(curr_dir!().join("in")); + let project_root = project.root(); + let cwd = &project_root; + + snapbox::cmd::Command::cargo_ui() + .arg("remove") + .args(["docopt"]) + .current_dir(cwd) + .assert() + .success() + .stdout_matches_path(curr_dir!().join("stdout.log")) + .stderr_matches_path(curr_dir!().join("stderr.log")); + + assert_ui().subset_matches(curr_dir!().join("out"), &project_root); +} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/remove_basic/out/Cargo.toml cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/remove_basic/out/Cargo.toml --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/remove_basic/out/Cargo.toml 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/remove_basic/out/Cargo.toml 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,23 @@ +[package] +name = "cargo-remove-test-fixture" +version = "0.1.0" + +[[bin]] +name = "main" +path = "src/main.rs" + +[build-dependencies] +semver = "0.1.0" + +[dependencies] +rustc-serialize = "0.4" +semver = "0.1" +toml = "0.1" +clippy = "0.4" + +[dev-dependencies] +regex = "0.1.1" +serde = "1.0.90" + +[features] +std = ["serde/std", "semver/std"] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/remove_basic/stderr.log cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/remove_basic/stderr.log --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/remove_basic/stderr.log 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/remove_basic/stderr.log 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,2 @@ + Removing docopt from dependencies + Updating `dummy-registry` index diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/remove-basic.in/Cargo.toml cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/remove-basic.in/Cargo.toml --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/remove-basic.in/Cargo.toml 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/remove-basic.in/Cargo.toml 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,24 @@ +[package] +name = "cargo-remove-test-fixture" +version = "0.1.0" + +[[bin]] +name = "main" +path = "src/main.rs" + +[build-dependencies] +semver = "0.1.0" + +[dependencies] +docopt = "0.6" +rustc-serialize = "0.4" +semver = "0.1" +toml = "0.1" +clippy = "0.4" + +[dev-dependencies] +regex = "0.1.1" +serde = "1.0.90" + +[features] +std = ["serde/std", "semver/std"] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/remove-basic.in/src/lib.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/remove-basic.in/src/lib.rs --- 
cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/remove-basic.in/src/lib.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/remove-basic.in/src/lib.rs 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1 @@ + diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/remove-package.in/Cargo.toml cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/remove-package.in/Cargo.toml --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/remove-package.in/Cargo.toml 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/remove-package.in/Cargo.toml 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,5 @@ +[workspace] +members = [ + "dep-a", + "dep-b" +] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/remove-package.in/dep-a/Cargo.toml cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/remove-package.in/dep-a/Cargo.toml --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/remove-package.in/dep-a/Cargo.toml 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/remove-package.in/dep-a/Cargo.toml 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,23 @@ +[package] +name = "dep-a" +version = "0.1.0" + +[[bin]] +name = "main" +path = "src/main.rs" + +[build-dependencies] +semver = "0.1.0" + +[dependencies] +docopt = "0.6" +rustc-serialize = "0.4" +semver = "0.1" +toml = "0.1" + +[dev-dependencies] +regex = "0.1.1" +serde = "1.0.90" + +[features] +std = ["serde/std", "semver/std"] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/remove-package.in/dep-a/src/lib.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/remove-package.in/dep-a/src/lib.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/remove-package.in/dep-a/src/lib.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/remove-package.in/dep-a/src/lib.rs 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1 @@ + diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/remove-package.in/dep-b/Cargo.toml cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/remove-package.in/dep-b/Cargo.toml --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/remove-package.in/dep-b/Cargo.toml 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/remove-package.in/dep-b/Cargo.toml 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,23 @@ +[package] +name = "dep-b" +version = "0.1.0" + +[[bin]] +name = "main" +path = "src/main.rs" + +[build-dependencies] +semver = "0.1.0" + +[dependencies] +docopt = "0.6" +rustc-serialize = "0.4" +semver = "0.1" +toml = "0.1" + +[dev-dependencies] +regex = "0.1.1" +serde = "1.0.90" + +[features] +std = ["serde/std", "semver/std"] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/remove-package.in/dep-b/src/lib.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/remove-package.in/dep-b/src/lib.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/remove-package.in/dep-b/src/lib.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/remove-package.in/dep-b/src/lib.rs 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1 @@ + diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/remove-target.in/Cargo.toml 
cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/remove-target.in/Cargo.toml --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/remove-target.in/Cargo.toml 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/remove-target.in/Cargo.toml 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,33 @@ +[package] +name = "cargo-remove-target-test-fixture" +version = "0.1.0" + +[[bin]] +name = "main" +path = "src/main.rs" + +[target.x86_64-unknown-freebsd.build-dependencies] +semver = "0.1.0" + +[target.x86_64-unknown-linux-gnu.build-dependencies] +semver = "0.1.0" + +[dependencies] +docopt = "0.6" +rustc-serialize = "0.4" +semver = "0.1" +toml = "0.1" +clippy = "0.4" + +[target.x86_64-unknown-linux-gnu.dependencies] +dbus = "0.6.2" + +[dev-dependencies] +regex = "0.1.1" +serde = "1.0.90" + +[target.x86_64-unknown-linux-gnu.dev-dependencies] +ncurses = "20.0" + +[features] +std = ["serde/std", "semver/std"] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/target/in/Cargo.toml cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/target/in/Cargo.toml --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/target/in/Cargo.toml 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/target/in/Cargo.toml 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,33 @@ +[package] +name = "cargo-remove-target-test-fixture" +version = "0.1.0" + +[[bin]] +name = "main" +path = "src/main.rs" + +[target.x86_64-unknown-freebsd.build-dependencies] +semver = "0.1.0" + +[target.x86_64-unknown-linux-gnu.build-dependencies] +semver = "0.1.0" + +[dependencies] +docopt = "0.6" +rustc-serialize = "0.4" +semver = "0.1" +toml = "0.1" +clippy = "0.4" + +[target.x86_64-unknown-linux-gnu.dependencies] +dbus = "0.6.2" + +[dev-dependencies] +regex = "0.1.1" +serde = "1.0.90" + +[target.x86_64-unknown-linux-gnu.dev-dependencies] +ncurses = "20.0" + +[features] +std = ["serde/std", "semver/std"] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/target/mod.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/target/mod.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/target/mod.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/target/mod.rs 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,25 @@ +use cargo_test_support::compare::assert_ui; +use cargo_test_support::curr_dir; +use cargo_test_support::CargoCommand; +use cargo_test_support::Project; + +use crate::cargo_remove::init_registry; + +#[cargo_test] +fn case() { + init_registry(); + let project = Project::from_template(curr_dir!().join("in")); + let project_root = project.root(); + let cwd = &project_root; + + snapbox::cmd::Command::cargo_ui() + .arg("remove") + .args(["--target", "x86_64-unknown-linux-gnu", "dbus"]) + .current_dir(cwd) + .assert() + .success() + .stdout_matches_path(curr_dir!().join("stdout.log")) + .stderr_matches_path(curr_dir!().join("stderr.log")); + + assert_ui().subset_matches(curr_dir!().join("out"), &project_root); +} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/target/out/Cargo.toml cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/target/out/Cargo.toml --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/target/out/Cargo.toml 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/target/out/Cargo.toml 2023-01-10 
13:41:19.000000000 +0000 @@ -0,0 +1,30 @@ +[package] +name = "cargo-remove-target-test-fixture" +version = "0.1.0" + +[[bin]] +name = "main" +path = "src/main.rs" + +[target.x86_64-unknown-freebsd.build-dependencies] +semver = "0.1.0" + +[target.x86_64-unknown-linux-gnu.build-dependencies] +semver = "0.1.0" + +[dependencies] +docopt = "0.6" +rustc-serialize = "0.4" +semver = "0.1" +toml = "0.1" +clippy = "0.4" + +[dev-dependencies] +regex = "0.1.1" +serde = "1.0.90" + +[target.x86_64-unknown-linux-gnu.dev-dependencies] +ncurses = "20.0" + +[features] +std = ["serde/std", "semver/std"] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/target/stderr.log cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/target/stderr.log --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/target/stderr.log 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/target/stderr.log 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,2 @@ + Removing dbus from dependencies for target `x86_64-unknown-linux-gnu` + Updating `dummy-registry` index diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/target_build/in/Cargo.toml cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/target_build/in/Cargo.toml --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/target_build/in/Cargo.toml 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/target_build/in/Cargo.toml 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,33 @@ +[package] +name = "cargo-remove-target-test-fixture" +version = "0.1.0" + +[[bin]] +name = "main" +path = "src/main.rs" + +[target.x86_64-unknown-freebsd.build-dependencies] +semver = "0.1.0" + +[target.x86_64-unknown-linux-gnu.build-dependencies] +semver = "0.1.0" + +[dependencies] +docopt = "0.6" +rustc-serialize = "0.4" +semver = "0.1" +toml = "0.1" +clippy = "0.4" + +[target.x86_64-unknown-linux-gnu.dependencies] +dbus = "0.6.2" + +[dev-dependencies] +regex = "0.1.1" +serde = "1.0.90" + +[target.x86_64-unknown-linux-gnu.dev-dependencies] +ncurses = "20.0" + +[features] +std = ["serde/std", "semver/std"] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/target_build/mod.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/target_build/mod.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/target_build/mod.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/target_build/mod.rs 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,25 @@ +use cargo_test_support::compare::assert_ui; +use cargo_test_support::curr_dir; +use cargo_test_support::CargoCommand; +use cargo_test_support::Project; + +use crate::cargo_remove::init_registry; + +#[cargo_test] +fn case() { + init_registry(); + let project = Project::from_template(curr_dir!().join("in")); + let project_root = project.root(); + let cwd = &project_root; + + snapbox::cmd::Command::cargo_ui() + .arg("remove") + .args(["--build", "--target", "x86_64-unknown-linux-gnu", "semver"]) + .current_dir(cwd) + .assert() + .success() + .stdout_matches_path(curr_dir!().join("stdout.log")) + .stderr_matches_path(curr_dir!().join("stderr.log")); + + assert_ui().subset_matches(curr_dir!().join("out"), &project_root); +} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/target_build/out/Cargo.toml cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/target_build/out/Cargo.toml --- 
cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/target_build/out/Cargo.toml 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/target_build/out/Cargo.toml 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,30 @@ +[package] +name = "cargo-remove-target-test-fixture" +version = "0.1.0" + +[[bin]] +name = "main" +path = "src/main.rs" + +[target.x86_64-unknown-freebsd.build-dependencies] +semver = "0.1.0" + +[dependencies] +docopt = "0.6" +rustc-serialize = "0.4" +semver = "0.1" +toml = "0.1" +clippy = "0.4" + +[target.x86_64-unknown-linux-gnu.dependencies] +dbus = "0.6.2" + +[dev-dependencies] +regex = "0.1.1" +serde = "1.0.90" + +[target.x86_64-unknown-linux-gnu.dev-dependencies] +ncurses = "20.0" + +[features] +std = ["serde/std", "semver/std"] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/target_build/stderr.log cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/target_build/stderr.log --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/target_build/stderr.log 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/target_build/stderr.log 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,2 @@ + Removing semver from build-dependencies for target `x86_64-unknown-linux-gnu` + Updating `dummy-registry` index diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/target_dev/in/Cargo.toml cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/target_dev/in/Cargo.toml --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/target_dev/in/Cargo.toml 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/target_dev/in/Cargo.toml 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,33 @@ +[package] +name = "cargo-remove-target-test-fixture" +version = "0.1.0" + +[[bin]] +name = "main" +path = "src/main.rs" + +[target.x86_64-unknown-freebsd.build-dependencies] +semver = "0.1.0" + +[target.x86_64-unknown-linux-gnu.build-dependencies] +semver = "0.1.0" + +[dependencies] +docopt = "0.6" +rustc-serialize = "0.4" +semver = "0.1" +toml = "0.1" +clippy = "0.4" + +[target.x86_64-unknown-linux-gnu.dependencies] +dbus = "0.6.2" + +[dev-dependencies] +regex = "0.1.1" +serde = "1.0.90" + +[target.x86_64-unknown-linux-gnu.dev-dependencies] +ncurses = "20.0" + +[features] +std = ["serde/std", "semver/std"] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/target_dev/mod.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/target_dev/mod.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/target_dev/mod.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/target_dev/mod.rs 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,25 @@ +use cargo_test_support::compare::assert_ui; +use cargo_test_support::curr_dir; +use cargo_test_support::CargoCommand; +use cargo_test_support::Project; + +use crate::cargo_remove::init_registry; + +#[cargo_test] +fn case() { + init_registry(); + let project = Project::from_template(curr_dir!().join("in")); + let project_root = project.root(); + let cwd = &project_root; + + snapbox::cmd::Command::cargo_ui() + .arg("remove") + .args(["--dev", "--target", "x86_64-unknown-linux-gnu", "ncurses"]) + .current_dir(cwd) + .assert() + .success() + .stdout_matches_path(curr_dir!().join("stdout.log")) + .stderr_matches_path(curr_dir!().join("stderr.log")); + + 
assert_ui().subset_matches(curr_dir!().join("out"), &project_root); +} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/target_dev/out/Cargo.toml cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/target_dev/out/Cargo.toml --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/target_dev/out/Cargo.toml 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/target_dev/out/Cargo.toml 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,30 @@ +[package] +name = "cargo-remove-target-test-fixture" +version = "0.1.0" + +[[bin]] +name = "main" +path = "src/main.rs" + +[target.x86_64-unknown-freebsd.build-dependencies] +semver = "0.1.0" + +[target.x86_64-unknown-linux-gnu.build-dependencies] +semver = "0.1.0" + +[dependencies] +docopt = "0.6" +rustc-serialize = "0.4" +semver = "0.1" +toml = "0.1" +clippy = "0.4" + +[target.x86_64-unknown-linux-gnu.dependencies] +dbus = "0.6.2" + +[dev-dependencies] +regex = "0.1.1" +serde = "1.0.90" + +[features] +std = ["serde/std", "semver/std"] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/target_dev/stderr.log cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/target_dev/stderr.log --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/target_dev/stderr.log 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/target_dev/stderr.log 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,2 @@ + Removing ncurses from dev-dependencies for target `x86_64-unknown-linux-gnu` + Updating `dummy-registry` index diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/update_lock_file/in/Cargo.lock cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/update_lock_file/in/Cargo.lock --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/update_lock_file/in/Cargo.lock 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/update_lock_file/in/Cargo.lock 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,58 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. 
+version = 3 + +[[package]] +name = "cargo-remove-test-fixture" +version = "0.1.0" +dependencies = [ + "clippy", + "docopt", + "regex", + "rustc-serialize", + "semver", + "serde", + "toml", +] + +[[package]] +name = "clippy" +version = "0.4.1+my-package" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "47ced0eda54e9ddc6063f0e1d0164493cd16c84c6b6a0329a536967c44e205f7" + +[[package]] +name = "docopt" +version = "0.6.2+my-package" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b600540c4fafb27bf6e6961f0f1e6f547c9d6126ce581ab3a92f878c8e2c9a2c" + +[[package]] +name = "regex" +version = "0.1.1+my-package" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "84949cb53285a6c481d0133065a7b669871acfd9e20f273f4ce1283c309775d5" + +[[package]] +name = "rustc-serialize" +version = "0.4.1+my-package" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "31162e7d23a085553c42dee375787b451a481275473f7779c4a63bcc267a24fd" + +[[package]] +name = "semver" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3031434e07edc922bf1b8262f075fac1522694f17b1ee7ad314c4cabd5d2723f" + +[[package]] +name = "serde" +version = "1.0.90" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75d9264696ebbf5315a6b068e9910c4df9274365afac2d88abf66525df660218" + +[[package]] +name = "toml" +version = "0.1.1+my-package" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a0f6c7804525ce0a968ef270e55a516cf4bdcf1fea0b09d130e0aa34a66745b3" diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/update_lock_file/in/Cargo.toml cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/update_lock_file/in/Cargo.toml --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/update_lock_file/in/Cargo.toml 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/update_lock_file/in/Cargo.toml 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,24 @@ +[package] +name = "cargo-remove-test-fixture" +version = "0.1.0" + +[[bin]] +name = "main" +path = "src/main.rs" + +[build-dependencies] +semver = "0.1.0" + +[dependencies] +docopt = "0.6" +rustc-serialize = "0.4" +semver = "0.1" +toml = "0.1" +clippy = "0.4" + +[dev-dependencies] +regex = "0.1.1" +serde = "1.0.90" + +[features] +std = ["serde/std", "semver/std"] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/update_lock_file/in/src/main.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/update_lock_file/in/src/main.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/update_lock_file/in/src/main.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/update_lock_file/in/src/main.rs 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1 @@ +fn main() {} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/update_lock_file/mod.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/update_lock_file/mod.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/update_lock_file/mod.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/update_lock_file/mod.rs 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,25 @@ +use cargo_test_support::compare::assert_ui; +use cargo_test_support::curr_dir; +use cargo_test_support::CargoCommand; +use 
cargo_test_support::Project; + +use crate::cargo_remove::init_registry; + +#[cargo_test] +fn case() { + init_registry(); + let project = Project::from_template(curr_dir!().join("in")); + let project_root = project.root(); + let cwd = &project_root; + + snapbox::cmd::Command::cargo_ui() + .arg("remove") + .args(["rustc-serialize"]) + .current_dir(cwd) + .assert() + .success() + .stdout_matches_path(curr_dir!().join("stdout.log")) + .stderr_matches_path(curr_dir!().join("stderr.log")); + + assert_ui().subset_matches(curr_dir!().join("out"), &project_root); +} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/update_lock_file/out/Cargo.lock cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/update_lock_file/out/Cargo.lock --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/update_lock_file/out/Cargo.lock 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/update_lock_file/out/Cargo.lock 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,51 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. +version = 3 + +[[package]] +name = "cargo-remove-test-fixture" +version = "0.1.0" +dependencies = [ + "clippy", + "docopt", + "regex", + "semver", + "serde", + "toml", +] + +[[package]] +name = "clippy" +version = "0.4.1+my-package" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "47ced0eda54e9ddc6063f0e1d0164493cd16c84c6b6a0329a536967c44e205f7" + +[[package]] +name = "docopt" +version = "0.6.2+my-package" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b600540c4fafb27bf6e6961f0f1e6f547c9d6126ce581ab3a92f878c8e2c9a2c" + +[[package]] +name = "regex" +version = "0.1.1+my-package" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "84949cb53285a6c481d0133065a7b669871acfd9e20f273f4ce1283c309775d5" + +[[package]] +name = "semver" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3031434e07edc922bf1b8262f075fac1522694f17b1ee7ad314c4cabd5d2723f" + +[[package]] +name = "serde" +version = "1.0.90" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75d9264696ebbf5315a6b068e9910c4df9274365afac2d88abf66525df660218" + +[[package]] +name = "toml" +version = "0.1.1+my-package" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a0f6c7804525ce0a968ef270e55a516cf4bdcf1fea0b09d130e0aa34a66745b3" diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/update_lock_file/out/Cargo.toml cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/update_lock_file/out/Cargo.toml --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/update_lock_file/out/Cargo.toml 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/update_lock_file/out/Cargo.toml 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,23 @@ +[package] +name = "cargo-remove-test-fixture" +version = "0.1.0" + +[[bin]] +name = "main" +path = "src/main.rs" + +[build-dependencies] +semver = "0.1.0" + +[dependencies] +docopt = "0.6" +semver = "0.1" +toml = "0.1" +clippy = "0.4" + +[dev-dependencies] +regex = "0.1.1" +serde = "1.0.90" + +[features] +std = ["serde/std", "semver/std"] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/update_lock_file/out/src/main.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/update_lock_file/out/src/main.rs --- 
cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/update_lock_file/out/src/main.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/update_lock_file/out/src/main.rs 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1 @@ +fn main() {} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/update_lock_file/stderr.log cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/update_lock_file/stderr.log --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/update_lock_file/stderr.log 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cargo_remove/update_lock_file/stderr.log 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,2 @@ + Removing rustc-serialize from dependencies + Updating `dummy-registry` index diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/check_cfg.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/check_cfg.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/check_cfg.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/check_cfg.rs 2023-01-10 13:41:19.000000000 +0000 @@ -35,7 +35,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.1.0" @@ -59,7 +59,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.1.0" @@ -89,7 +89,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.1.0" @@ -120,7 +120,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.1.0" @@ -175,7 +175,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.1.0" @@ -201,7 +201,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.1.0" @@ -251,7 +251,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.1.0" @@ -275,7 +275,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.1.0" @@ -355,7 +355,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.1.0" @@ -443,7 +443,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" authors = [] @@ -532,7 +532,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.1.0" @@ -565,7 +565,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.1.0" "#, diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/check.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/check.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/check.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/check.rs 2023-01-10 13:41:19.000000000 +0000 @@ -6,7 +6,7 @@ use cargo_test_support::paths::CargoPathExt; use cargo_test_support::registry::Package; use cargo_test_support::tools; -use cargo_test_support::{basic_manifest, project}; +use cargo_test_support::{basic_manifest, git, project}; #[cargo_test] fn check_success() { @@ -941,7 +941,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.1.0" authors = [] @@ -997,7 +997,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.1.0" authors = [] @@ -1024,3 +1024,155 @@ .with_stdout_does_not_contain("WRAPPER CALLED: rustc --crate-name baz [..]") .run(); } + +#[cargo_test] +fn warn_manifest_package_and_project() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + + [project] + name = "foo" + version = "0.0.1" + "#, + ) + .file("src/main.rs", "fn 
main() {}") + .build(); + + p.cargo("check") + .with_stderr( + "\ +[WARNING] manifest at `[CWD]` contains both `project` and `package`, this could become a hard error in the future +[CHECKING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); +} + +#[cargo_test] +fn git_manifest_package_and_project() { + let p = project(); + let git_project = git::new("bar", |p| { + p.file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + + [project] + name = "bar" + version = "0.0.1" + "#, + ) + .file("src/lib.rs", "") + }); + + let p = p + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "foo" + version = "0.0.1" + + [dependencies.bar] + version = "0.0.1" + git = '{}' + + "#, + git_project.url() + ), + ) + .file("src/main.rs", "fn main() {}") + .build(); + + p.cargo("check") + .with_stderr( + "\ +[UPDATING] git repository `[..]` +[CHECKING] bar v0.0.1 ([..]) +[CHECKING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); +} + +#[cargo_test] +fn warn_manifest_with_project() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + p.cargo("check") + .with_stderr( + "\ +[WARNING] manifest at `[CWD]` contains `[project]` instead of `[package]`, this could become a hard error in the future +[CHECKING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); +} + +#[cargo_test] +fn git_manifest_with_project() { + let p = project(); + let git_project = git::new("bar", |p| { + p.file( + "Cargo.toml", + r#" + [project] + name = "bar" + version = "0.0.1" + "#, + ) + .file("src/lib.rs", "") + }); + + let p = p + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "foo" + version = "0.0.1" + + [dependencies.bar] + version = "0.0.1" + git = '{}' + + "#, + git_project.url() + ), + ) + .file("src/main.rs", "fn main() {}") + .build(); + + p.cargo("check") + .with_stderr( + "\ +[UPDATING] git repository `[..]` +[CHECKING] bar v0.0.1 ([..]) +[CHECKING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); +} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/config_cli.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/config_cli.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/config_cli.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/config_cli.rs 2023-01-10 13:41:19.000000000 +0000 @@ -1,6 +1,8 @@ //! Tests for the --config CLI option. 
-use super::config::{assert_error, assert_match, read_output, write_config, ConfigBuilder}; +use super::config::{ + assert_error, assert_match, read_output, write_config, write_config_at, ConfigBuilder, +}; use cargo::util::config::Definition; use cargo_test_support::paths; use std::{collections::HashMap, fs}; @@ -54,6 +56,72 @@ } #[cargo_test] +fn merge_primitives_for_multiple_cli_occurences() { + let config_path0 = ".cargo/file0.toml"; + write_config_at(config_path0, "k = 'file0'"); + let config_path1 = ".cargo/file1.toml"; + write_config_at(config_path1, "k = 'file1'"); + + // k=env0 + let config = ConfigBuilder::new().env("CARGO_K", "env0").build(); + assert_eq!(config.get::<String>("k").unwrap(), "env0"); + + // k=env0 + // --config k='cli0' + // --config k='cli1' + let config = ConfigBuilder::new() + .env("CARGO_K", "env0") + .config_arg("k='cli0'") + .config_arg("k='cli1'") + .build(); + assert_eq!(config.get::<String>("k").unwrap(), "cli1"); + + // Env has a lower priority when comparing with file from CLI arg. + // + // k=env0 + // --config k='cli0' + // --config k='cli1' + // --config .cargo/file0.toml + let config = ConfigBuilder::new() + .env("CARGO_K", "env0") + .config_arg("k='cli0'") + .config_arg("k='cli1'") + .config_arg(config_path0) + .build(); + assert_eq!(config.get::<String>("k").unwrap(), "file0"); + + // k=env0 + // --config k='cli0' + // --config k='cli1' + // --config .cargo/file0.toml + // --config k='cli2' + let config = ConfigBuilder::new() + .env("CARGO_K", "env0") + .config_arg("k='cli0'") + .config_arg("k='cli1'") + .config_arg(config_path0) + .config_arg("k='cli2'") + .build(); + assert_eq!(config.get::<String>("k").unwrap(), "cli2"); + + // k=env0 + // --config k='cli0' + // --config k='cli1' + // --config .cargo/file0.toml + // --config k='cli2' + // --config .cargo/file1.toml + let config = ConfigBuilder::new() + .env("CARGO_K", "env0") + .config_arg("k='cli0'") + .config_arg("k='cli1'") + .config_arg(config_path0) + .config_arg("k='cli2'") + .config_arg(config_path1) + .build(); + assert_eq!(config.get::<String>("k").unwrap(), "file1"); +} + +#[cargo_test] fn merges_array() { // Array entries are appended.
write_config( diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/config_include.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/config_include.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/config_include.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/config_include.rs 2023-01-10 13:41:19.000000000 +0000 @@ -254,3 +254,31 @@ expected array, but found string", ); } + +#[cargo_test] +fn cli_include_take_priority_over_env() { + write_config_at(".cargo/include.toml", "k='include'"); + + // k=env + let config = ConfigBuilder::new().env("CARGO_K", "env").build(); + assert_eq!(config.get::<String>("k").unwrap(), "env"); + + // k=env + // --config 'include=".cargo/include.toml"' + let config = ConfigBuilder::new() + .env("CARGO_K", "env") + .unstable_flag("config-include") + .config_arg("include='.cargo/include.toml'") + .build(); + assert_eq!(config.get::<String>("k").unwrap(), "include"); + + // k=env + // --config '.cargo/foo.toml' + write_config_at(".cargo/foo.toml", "include='include.toml'"); + let config = ConfigBuilder::new() + .env("CARGO_K", "env") + .unstable_flag("config-include") + .config_arg(".cargo/foo.toml") + .build(); + assert_eq!(config.get::<String>("k").unwrap(), "include"); +} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/config.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/config.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/config.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/config.rs 2023-01-10 13:41:19.000000000 +0000 @@ -1,7 +1,7 @@ //! Tests for config settings. use cargo::core::{PackageIdSpec, Shell}; -use cargo::util::config::{self, Config, SslVersionConfig, StringList}; +use cargo::util::config::{self, Config, Definition, SslVersionConfig, StringList}; use cargo::util::interning::InternedString; use cargo::util::toml::{self, VecStringOrBool as VSOB}; use cargo::CargoResult; @@ -1074,10 +1074,6 @@ ", ); - assert!(config - .get::<Option<SslVersionConfig>>("http.ssl-version") - .unwrap() - .is_none()); } #[cargo_test] @@ -1512,3 +1508,59 @@ let roundtrip_toml = toml_edit::easy::to_string(&roundtrip).unwrap(); compare::assert_match_exact(&profile_toml, &roundtrip_toml); } + +#[cargo_test] +fn value_in_array() { + // Value in an array should work + let root_path = paths::root().join(".cargo/config.toml"); + write_config_at( + &root_path, + "\ +[net.ssh] +known-hosts = [ + \"example.com ...\", + \"example.net ...\", +] +", + ); + + let foo_path = paths::root().join("foo/.cargo/config.toml"); + write_config_at( + &foo_path, + "\ +[net.ssh] +known-hosts = [ + \"example.org ...\", +] +", + ); + + let config = ConfigBuilder::new() + .cwd("foo") + // environment variables don't actually work for known-hosts due to + // space splitting, but this is included here just to validate that + // they work (particularly if other Vec config vars are added + // in the future).
+ .env("CARGO_NET_SSH_KNOWN_HOSTS", "env-example") + .build(); + let net_config = config.net_config().unwrap(); + let kh = net_config + .ssh + .as_ref() + .unwrap() + .known_hosts + .as_ref() + .unwrap(); + assert_eq!(kh.len(), 4); + assert_eq!(kh[0].val, "example.org ..."); + assert_eq!(kh[0].definition, Definition::Path(foo_path.clone())); + assert_eq!(kh[1].val, "example.com ..."); + assert_eq!(kh[1].definition, Definition::Path(root_path.clone())); + assert_eq!(kh[2].val, "example.net ..."); + assert_eq!(kh[2].definition, Definition::Path(root_path.clone())); + assert_eq!(kh[3].val, "env-example"); + assert_eq!( + kh[3].definition, + Definition::Environment("CARGO_NET_SSH_KNOWN_HOSTS".to_string()) + ); +} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/corrupt_git.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/corrupt_git.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/corrupt_git.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/corrupt_git.rs 2023-01-10 13:41:19.000000000 +0000 @@ -20,7 +20,7 @@ "Cargo.toml", &format!( r#" - [project] + [package] name = "foo" version = "0.5.0" authors = [] @@ -78,7 +78,7 @@ "Cargo.toml", &format!( r#" - [project] + [package] name = "foo" version = "0.5.0" authors = [] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/credential_process.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/credential_process.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/credential_process.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/credential_process.rs 2023-01-10 13:41:19.000000000 +0000 @@ -1,6 +1,6 @@ //! Tests for credential-process. -use cargo_test_support::registry::TestRegistry; +use cargo_test_support::registry::{Package, TestRegistry}; use cargo_test_support::{basic_manifest, cargo_process, paths, project, registry, Project}; use std::fs; @@ -15,7 +15,7 @@ .no_configure_token() .build(); - let _cratesio = registry::RegistryBuilder::new() + let cratesio = registry::RegistryBuilder::new() .no_configure_token() .build(); @@ -32,6 +32,7 @@ .build(); p.cargo("publish --no-verify") + .replace_crates_io(cratesio.index_url()) .masquerade_as_nightly_cargo(&["credential-process"]) .with_status(101) .with_stderr( @@ -93,6 +94,16 @@ .file("src/lib.rs", "") .build(); + // HACK: Inject `foo` directly into the index so `publish` won't block for it to be in + // the index. + // + // This is to ensure we can verify the Summary we post to the registry as doing so precludes + // the registry from processing the publish. + Package::new("foo", "0.1.0") + .file("src/lib.rs", "") + .alternative(true) + .publish(); + p.cargo("publish --no-verify --registry alternative -Z credential-process") .masquerade_as_nightly_cargo(&["credential-process"]) .with_status(101) @@ -124,6 +135,7 @@ [UPDATING] [..] [PACKAGING] foo v0.1.0 [..] [UPLOADING] foo v0.1.0 [..] +[UPDATING] [..] ", ) .run(); @@ -138,7 +150,7 @@ /// * Create a simple `foo` project to run the test against. /// * Configure the credential-process config. /// -/// Returns returns the simple `foo` project to test against and the API server handle. +/// Returns the simple `foo` project to test against and the API server handle. fn get_token_test() -> (Project, TestRegistry) { // API server that checks that the token is included correctly. let server = registry::RegistryBuilder::new() @@ -147,7 +159,6 @@ .alternative() .http_api() .build(); - // The credential process to use. 
let cred_proj = project() .at("cred_proj") @@ -198,6 +209,7 @@ [UPDATING] [..] [PACKAGING] foo v0.1.0 [..] [UPLOADING] foo v0.1.0 [..] +[UPDATING] [..] ", ) .run(); @@ -206,7 +218,7 @@ #[cargo_test] fn basic_unsupported() { // Non-action commands don't support login/logout. - let _server = registry::RegistryBuilder::new() + let registry = registry::RegistryBuilder::new() .no_configure_token() .build(); cargo_util::paths::append( @@ -219,6 +231,7 @@ .unwrap(); cargo_process("login -Z credential-process abcdefg") + .replace_crates_io(registry.index_url()) .masquerade_as_nightly_cargo(&["credential-process"]) .with_status(101) .with_stderr( @@ -232,6 +245,7 @@ .run(); cargo_process("logout -Z credential-process") + .replace_crates_io(registry.index_url()) .masquerade_as_nightly_cargo(&["credential-process", "cargo-logout"]) .with_status(101) .with_stderr( @@ -288,6 +302,7 @@ cargo_process("login -Z credential-process abcdefg") .masquerade_as_nightly_cargo(&["credential-process"]) + .replace_crates_io(server.index_url()) .with_stderr( "\ [UPDATING] [..] @@ -303,7 +318,7 @@ #[cargo_test] fn logout() { - let _server = registry::RegistryBuilder::new() + let server = registry::RegistryBuilder::new() .no_configure_token() .build(); // The credential process to use. @@ -342,6 +357,7 @@ cargo_process("logout -Z credential-process") .masquerade_as_nightly_cargo(&["credential-process", "cargo-logout"]) + .replace_crates_io(server.index_url()) .with_stderr( "\ [UPDATING] [..] @@ -389,7 +405,7 @@ #[cargo_test] fn libexec_path() { // cargo: prefixed names use the sysroot - let _server = registry::RegistryBuilder::new() + let server = registry::RegistryBuilder::new() .no_configure_token() .build(); cargo_util::paths::append( @@ -403,6 +419,7 @@ cargo_process("login -Z credential-process abcdefg") .masquerade_as_nightly_cargo(&["credential-process"]) + .replace_crates_io(server.index_url()) .with_status(101) .with_stderr( // FIXME: Update "Caused by" error message once rust/pull/87704 is merged. diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cross_compile.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cross_compile.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cross_compile.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cross_compile.rs 2023-01-10 13:41:19.000000000 +0000 @@ -507,7 +507,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" authors = [] version = "0.0.0" diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cross_publish.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cross_publish.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/cross_publish.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/cross_publish.rs 2023-01-10 13:41:19.000000000 +0000 @@ -66,7 +66,8 @@ return; } - registry::init(); + // `publish` generally requires a remote registry + let registry = registry::RegistryBuilder::new().http_api().build(); let p = project() .file( @@ -97,17 +98,19 @@ let target = cross_compile::alternate(); - p.cargo("publish --token sekrit") + p.cargo("publish") + .replace_crates_io(registry.index_url()) .arg("--target") .arg(&target) .with_stderr( "\ -[UPDATING] `dummy-registry` index +[UPDATING] crates.io index [PACKAGING] foo v0.0.0 ([CWD]) [VERIFYING] foo v0.0.0 ([CWD]) [COMPILING] foo v0.0.0 ([CWD]/target/package/foo-0.0.0) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
[UPLOADING] foo v0.0.0 ([CWD]) +[UPDATING] crates.io index ", ) .run(); diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/doc.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/doc.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/doc.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/doc.rs 2023-01-10 13:41:19.000000000 +0000 @@ -1110,7 +1110,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.1.0" @@ -1246,7 +1246,7 @@ .file( "bar/Cargo.toml", r#" - [project] + [package] name = "bar" version = "0.1.0" @@ -2057,7 +2057,7 @@ .file( "crate-a/Cargo.toml", r#" - [project] + [package] name = "crate-a" version = "0.1.0" "#, @@ -2073,7 +2073,7 @@ .file( "crate-b/Cargo.toml", r#" - [project] + [package] name = "crate-b" version = "0.1.0" "#, diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/features_namespaced.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/features_namespaced.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/features_namespaced.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/features_namespaced.rs 2023-01-10 13:41:19.000000000 +0000 @@ -1,7 +1,7 @@ //! Tests for namespaced features. use super::features2::switch_to_resolver_2; -use cargo_test_support::registry::{Dependency, Package}; +use cargo_test_support::registry::{self, Dependency, Package}; use cargo_test_support::{project, publish}; #[cargo_test] @@ -858,6 +858,9 @@ #[cargo_test] fn publish_no_implicit() { + // HACK below allows us to use a local registry + let registry = registry::init(); + // Does not include implicit features or dep: syntax on publish. Package::new("opt-dep1", "1.0.0").publish(); Package::new("opt-dep2", "1.0.0").publish(); @@ -884,12 +887,23 @@ .file("src/lib.rs", "") .build(); - p.cargo("publish --no-verify --token sekrit") + // HACK: Inject `foo` directly into the index so `publish` won't block for it to be in + // the index. + // + // This is to ensure we can verify the Summary we post to the registry as doing so precludes + // the registry from processing the publish. + Package::new("foo", "0.1.0") + .file("src/lib.rs", "") + .publish(); + + p.cargo("publish --no-verify") + .replace_crates_io(registry.index_url()) .with_stderr( "\ [UPDATING] [..] [PACKAGING] foo v0.1.0 [..] [UPLOADING] foo v0.1.0 [..] +[UPDATING] [..] ", ) .run(); @@ -907,7 +921,6 @@ "kind": "normal", "name": "opt-dep1", "optional": true, - "registry": "https://github.com/rust-lang/crates.io-index", "target": null, "version_req": "^1.0" }, @@ -917,7 +930,6 @@ "kind": "normal", "name": "opt-dep2", "optional": true, - "registry": "https://github.com/rust-lang/crates.io-index", "target": null, "version_req": "^1.0" } @@ -971,6 +983,9 @@ #[cargo_test] fn publish() { + // HACK below allows us to use a local registry + let registry = registry::init(); + // Publish behavior with explicit dep: syntax. Package::new("bar", "1.0.0").publish(); let p = project() @@ -996,7 +1011,17 @@ .file("src/lib.rs", "") .build(); - p.cargo("publish --token sekrit") + // HACK: Inject `foo` directly into the index so `publish` won't block for it to be in + // the index. + // + // This is to ensure we can verify the Summary we post to the registry as doing so precludes + // the registry from processing the publish. + Package::new("foo", "0.1.0") + .file("src/lib.rs", "") + .publish(); + + p.cargo("publish") + .replace_crates_io(registry.index_url()) .with_stderr( "\ [UPDATING] [..] 
@@ -1005,6 +1030,7 @@ [COMPILING] foo v0.1.0 [..] [FINISHED] [..] [UPLOADING] foo v0.1.0 [..] +[UPDATING] [..] ", ) .run(); @@ -1022,7 +1048,6 @@ "kind": "normal", "name": "bar", "optional": true, - "registry": "https://github.com/rust-lang/crates.io-index", "target": null, "version_req": "^1.0" } @@ -1073,3 +1098,129 @@ )], ); } + +#[cargo_test] +fn namespaced_feature_together() { + // Check for an error when `dep:` is used with `/` + Package::new("bar", "1.0.0") + .feature("bar-feat", &[]) + .publish(); + + // Non-optional shouldn't have extra err. + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + + [dependencies] + bar = "1.0" + + [features] + f1 = ["dep:bar/bar-feat"] + "#, + ) + .file("src/lib.rs", "") + .build(); + p.cargo("check") + .with_status(101) + .with_stderr( + "\ +error: failed to parse manifest at `[ROOT]/foo/Cargo.toml` + +Caused by: + feature `f1` includes `dep:bar/bar-feat` with both `dep:` and `/` + To fix this, remove the `dep:` prefix. +", + ) + .run(); + + // Weak dependency shouldn't have extra err. + p.change_file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + + [dependencies] + bar = {version = "1.0", optional = true } + + [features] + f1 = ["dep:bar?/bar-feat"] + "#, + ); + p.cargo("check") + .with_status(101) + .with_stderr( + "\ +error: failed to parse manifest at `[ROOT]/foo/Cargo.toml` + +Caused by: + feature `f1` includes `dep:bar?/bar-feat` with both `dep:` and `/` + To fix this, remove the `dep:` prefix. +", + ) + .run(); + + // If dep: is already specified, shouldn't have extra err. + p.change_file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + + [dependencies] + bar = {version = "1.0", optional = true } + + [features] + f1 = ["dep:bar", "dep:bar/bar-feat"] + "#, + ); + p.cargo("check") + .with_status(101) + .with_stderr( + "\ +error: failed to parse manifest at `[ROOT]/foo/Cargo.toml` + +Caused by: + feature `f1` includes `dep:bar/bar-feat` with both `dep:` and `/` + To fix this, remove the `dep:` prefix. +", + ) + .run(); + + // Only when the other 3 cases aren't true should it give some extra help. + p.change_file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + + [dependencies] + bar = {version = "1.0", optional = true } + + [features] + f1 = ["dep:bar/bar-feat"] + "#, + ); + p.cargo("check") + .with_status(101) + .with_stderr( + "\ +error: failed to parse manifest at `[ROOT]/foo/Cargo.toml` + +Caused by: + feature `f1` includes `dep:bar/bar-feat` with both `dep:` and `/` + To fix this, remove the `dep:` prefix. 
+ If the intent is to avoid creating an implicit feature `bar` for an optional \ + dependency, then consider replacing this with two values: + \"dep:bar\", \"bar/bar-feat\" +", + ) + .run(); +} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/features.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/features.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/features.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/features.rs 2023-01-10 13:41:19.000000000 +0000 @@ -10,7 +10,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -42,7 +42,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -89,7 +89,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -124,7 +124,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -168,7 +168,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -200,7 +200,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -231,7 +231,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -263,7 +263,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -298,7 +298,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -326,7 +326,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -374,7 +374,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -445,7 +445,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -498,7 +498,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -554,7 +554,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -578,7 +578,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -600,7 +600,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -658,7 +658,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -707,7 +707,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -855,7 +855,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -895,7 +895,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -1144,7 +1144,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -1228,7 +1228,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -1273,7 +1273,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -1321,7 +1321,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -1390,7 +1390,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -1419,7 +1419,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -1432,7 +1432,7 @@ .file( "bar/Cargo.toml", r#" - [project] + [package] name = 
"bar" version = "0.0.1" authors = [] @@ -1514,7 +1514,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -1548,7 +1548,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -1568,7 +1568,7 @@ .file( "bar/Cargo.toml", r#" - [project] + [package] name = "bar" version = "0.0.1" authors = [] @@ -1589,7 +1589,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -1617,7 +1617,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -1646,7 +1646,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -1892,7 +1892,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.1.0" [features] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/freshness.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/freshness.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/freshness.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/freshness.rs 2023-01-10 13:41:19.000000000 +0000 @@ -1966,7 +1966,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" authors = [] @@ -1979,7 +1979,7 @@ .file( "a/Cargo.toml", r#" - [project] + [package] name = "a" version = "0.5.0" authors = [] @@ -1992,7 +1992,7 @@ .file( "a/b/Cargo.toml", r#" - [project] + [package] name = "b" version = "0.5.0" authors = [] @@ -2023,7 +2023,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" authors = [] @@ -2035,7 +2035,7 @@ .file( "a/Cargo.toml", r#" - [project] + [package] name = "a" version = "0.5.0" authors = [] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/git_auth.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/git_auth.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/git_auth.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/git_auth.rs 2023-01-10 13:41:19.000000000 +0000 @@ -110,7 +110,7 @@ "Cargo.toml", &format!( r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -183,7 +183,7 @@ "Cargo.toml", &format!( r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -244,7 +244,7 @@ "Cargo.toml", &format!( r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -351,7 +351,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/git.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/git.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/git.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/git.rs 2023-01-10 13:41:19.000000000 +0000 @@ -34,7 +34,7 @@ "Cargo.toml", &format!( r#" - [project] + [package] name = "foo" version = "0.5.0" @@ -102,7 +102,7 @@ "Cargo.toml", &format!( r#" - [project] + [package] name = "foo" version = "0.5.0" @@ -178,7 +178,7 @@ "Cargo.toml", &format!( r#" - [project] + [package] name = "foo" version = "0.5.0" @@ -251,7 +251,7 @@ "Cargo.toml", &format!( r#" - [project] + [package] name = "foo" version = "0.0.0" @@ -291,7 +291,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "dep1" version = "0.5.0" @@ -333,7 +333,7 @@ "Cargo.toml", &format!( r#" - [project] + [package] name = "foo" version = "0.5.0" @@ -381,7 +381,7 @@ .file( "vendor/dep3/Cargo.toml", r#" - [project] + [package] 
name = "dep3" version = "0.5.0" [dependencies] @@ -396,7 +396,7 @@ "Cargo.toml", &format!( r#" - [project] + [package] name = "foo" version = "0.5.0" @@ -456,7 +456,7 @@ "Cargo.toml", &format!( r#" - [project] + [package] name = "foo" version = "0.5.0" @@ -507,7 +507,7 @@ "Cargo.toml", &format!( r#" - [project] + [package] name = "foo" version = "0.5.0" @@ -566,7 +566,7 @@ "Cargo.toml", &format!( r#" - [project] + [package] name = "foo" version = "0.0.0" authors = [] @@ -642,7 +642,7 @@ "Cargo.toml", &format!( r#" - [project] + [package] name = "foo" version = "0.5.0" @@ -903,7 +903,7 @@ "Cargo.toml", &format!( r#" - [project] + [package] name = "foo" version = "0.5.0" @@ -936,6 +936,73 @@ } #[cargo_test] +fn dep_with_relative_submodule() { + let foo = project(); + let base = git::new("base", |project| { + project + .file( + "Cargo.toml", + r#" + [package] + name = "base" + version = "0.5.0" + + [dependencies] + deployment.path = "deployment" + "#, + ) + .file( + "src/lib.rs", + r#" + pub fn dep() { + deployment::deployment_func(); + } + "#, + ) + }); + let _deployment = git::new("deployment", |project| { + project + .file("src/lib.rs", "pub fn deployment_func() {}") + .file("Cargo.toml", &basic_lib_manifest("deployment")) + }); + + let base_repo = git2::Repository::open(&base.root()).unwrap(); + git::add_submodule(&base_repo, "../deployment", Path::new("deployment")); + git::commit(&base_repo); + + let project = foo + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "foo" + version = "0.5.0" + + [dependencies.base] + git = '{}' + "#, + base.url() + ), + ) + .file("src/lib.rs", "pub fn foo() { }") + .build(); + + project + .cargo("build") + .with_stderr( + "\ +[UPDATING] git repository [..] +[UPDATING] git submodule `file://[..]/deployment` +[COMPILING] deployment [..] +[COMPILING] base [..] +[COMPILING] foo [..] 
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n", + ) + .run(); +} + +#[cargo_test] fn dep_with_bad_submodule() { let project = project(); let git_project = git::new("dep1", |project| { @@ -969,7 +1036,7 @@ "Cargo.toml", &format!( r#" - [project] + [package] name = "foo" version = "0.5.0" @@ -1043,7 +1110,7 @@ "Cargo.toml", &format!( r#" - [project] + [package] name = "foo" version = "0.0.0" authors = [] @@ -1078,7 +1145,7 @@ "aaa/Cargo.toml", &format!( r#" - [project] + [package] name = "bar" version = "0.5.0" publish = true @@ -1090,7 +1157,7 @@ "bbb/Cargo.toml", &format!( r#" - [project] + [package] name = "bar" version = "0.5.0" publish = true @@ -1105,7 +1172,7 @@ "Cargo.toml", &format!( r#" - [project] + [package] name = "foo" version = "0.5.0" @@ -1151,7 +1218,7 @@ "Cargo.toml", &format!( r#" - [project] + [package] name = "foo" version = "0.5.0" @@ -1223,7 +1290,7 @@ "Cargo.toml", &format!( r#" - [project] + [package] name = "foo" version = "0.0.0" authors = [] @@ -1318,7 +1385,7 @@ "Cargo.toml", &format!( r#" - [project] + [package] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] @@ -1424,7 +1491,7 @@ "Cargo.toml", &format!( r#" - [project] + [package] name = "foo" version = "0.5.0" @@ -1542,7 +1609,7 @@ "Cargo.toml", &format!( r#" - [project] + [package] name = "foo" version = "0.5.0" authors = [] @@ -1585,7 +1652,7 @@ "Cargo.toml", &format!( r#" - [project] + [package] name = "p1" version = "0.5.0" authors = [] @@ -1625,7 +1692,7 @@ "Cargo.toml", &format!( r#" - [project] + [package] name = "p2" version = "0.5.0" authors = [] @@ -1661,7 +1728,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" @@ -1681,7 +1748,7 @@ .file( "bar/Cargo.toml", r#" - [project] + [package] name = "bar" version = "0.5.0" @@ -1737,7 +1804,7 @@ "Cargo.toml", &format!( r#" - [project] + [package] name = "foo" version = "0.5.0" authors = [] @@ -1772,7 +1839,7 @@ "Cargo.toml", &format!( r#" - [project] + [package] name = "foo" version = "0.5.0" authors = [] @@ -1834,7 +1901,7 @@ "Cargo.toml", &format!( r#" - [project] + [package] name = "foo" version = "0.5.0" authors = [] @@ -1881,7 +1948,7 @@ "Cargo.toml", &format!( r#" - [project] + [package] name = "foo" version = "0.5.0" authors = [] @@ -1954,7 +2021,7 @@ "Cargo.toml", &format!( r#" - [project] + [package] name = "foo" version = "0.5.0" authors = [] @@ -1988,7 +2055,7 @@ "Cargo.toml", &format!( r#" - [project] + [package] name = "foo" version = "0.5.0" authors = [] @@ -2038,7 +2105,7 @@ "Cargo.toml", &format!( r#" - [project] + [package] name = "foo" version = "0.5.0" authors = [] @@ -2088,7 +2155,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" authors = [] @@ -2101,7 +2168,7 @@ "b/Cargo.toml", &format!( r#" - [project] + [package] name = "b" version = "0.5.0" authors = [] @@ -2130,7 +2197,7 @@ "b/Cargo.toml", &format!( r#" - [project] + [package] name = "b" version = "0.5.0" authors = [] @@ -2161,7 +2228,7 @@ p.file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" authors = [] @@ -2199,7 +2266,7 @@ "Cargo.toml", &format!( r#" - [project] + [package] name = "a" version = "0.5.0" authors = [] @@ -2480,7 +2547,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "dep1" version = "0.5.0" @@ -2508,7 +2575,7 @@ "Cargo.toml", &format!( r#" - [project] + [package] name = "foo" version = "0.5.0" @@ -2554,7 +2621,7 @@ | 8 | categories = [\"algorithms\"] | ^ - Duplicate key `categories` in table `project` + Duplicate key 
`categories` in table `package` ", path2url(&git_root), @@ -2610,7 +2677,7 @@ "Cargo.toml", &format!( r#" - [project] + [package] name = "foo" version = "0.5.0" authors = [] @@ -2656,7 +2723,7 @@ "Cargo.toml", &format!( r#" - [project] + [package] name = "foo" version = "0.5.0" authors = [] @@ -2708,7 +2775,7 @@ "Cargo.toml", &format!( r#" - [project] + [package] name = "fo" version = "0.5.0" authors = [] @@ -2756,7 +2823,7 @@ "Cargo.toml", &format!( r#" - [project] + [package] name = "foo" version = "0.0.1" edition = "2018" @@ -2985,7 +3052,7 @@ "Cargo.toml", &format!( r#" - [project] + [package] name = "foo" version = "0.5.0" [dependencies] @@ -3025,7 +3092,7 @@ "Cargo.toml", &format!( r#" - [project] + [package] name = "foo" version = "0.5.0" @@ -3082,7 +3149,7 @@ "Cargo.toml", &format!( r#" - [project] + [package] name = "foo" version = "0.5.0" @@ -3136,7 +3203,7 @@ "Cargo.toml", &format!( r#" - [project] + [package] name = "foo" version = "0.5.0" [dependencies] @@ -3151,7 +3218,7 @@ "a/Cargo.toml", &format!( r#" - [project] + [package] name = "a" version = "0.5.0" [dependencies] @@ -3407,7 +3474,7 @@ let manifest = |extra| { format!( r#" - [project] + [package] name = "foo" version = "0.0.1" edition = "2018" diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/inheritable_workspace_fields.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/inheritable_workspace_fields.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/inheritable_workspace_fields.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/inheritable_workspace_fields.rs 2023-01-10 13:41:19.000000000 +0000 @@ -107,7 +107,7 @@ #[cargo_test] fn inherit_own_workspace_fields() { - registry::init(); + let registry = registry::init(); let p = project().build(); @@ -160,7 +160,9 @@ .file("bar.txt", "") // should be included when packaging .build(); - p.cargo("publish --token sekrit").run(); + p.cargo("publish") + .replace_crates_io(registry.index_url()) + .run(); publish::validate_upload_with_contents( r#" { @@ -231,11 +233,12 @@ #[cargo_test] fn inherit_own_dependencies() { + let registry = registry::init(); let p = project() .file( "Cargo.toml", r#" - [project] + [package] name = "bar" version = "0.2.0" authors = [] @@ -284,7 +287,9 @@ assert!(lockfile.contains("dep")); assert!(lockfile.contains("dep-dev")); assert!(lockfile.contains("dep-build")); - p.cargo("publish --token sekrit").run(); + p.cargo("publish") + .replace_crates_io(registry.index_url()) + .run(); publish::validate_upload_with_contents( r#" { @@ -298,7 +303,6 @@ "kind": "normal", "name": "dep", "optional": false, - "registry": "https://github.com/rust-lang/crates.io-index", "target": null, "version_req": "^0.1" }, @@ -308,7 +312,6 @@ "kind": "dev", "name": "dep-dev", "optional": false, - "registry": "https://github.com/rust-lang/crates.io-index", "target": null, "version_req": "^0.5.2" }, @@ -318,7 +321,6 @@ "kind": "build", "name": "dep-build", "optional": false, - "registry": "https://github.com/rust-lang/crates.io-index", "target": null, "version_req": "^0.8" } @@ -366,11 +368,12 @@ #[cargo_test] fn inherit_own_detailed_dependencies() { + let registry = registry::init(); let p = project() .file( "Cargo.toml", r#" - [project] + [package] name = "bar" version = "0.2.0" authors = [] @@ -408,7 +411,9 @@ p.cargo("check").run(); let lockfile = p.read_lockfile(); assert!(lockfile.contains("dep")); - p.cargo("publish --token sekrit").run(); + p.cargo("publish") + .replace_crates_io(registry.index_url()) + .run(); 
publish::validate_upload_with_contents( r#" { @@ -422,7 +427,6 @@ "kind": "normal", "name": "dep", "optional": false, - "registry": "https://github.com/rust-lang/crates.io-index", "target": null, "version_req": "^0.1.2" } @@ -519,7 +523,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "bar" version = "0.2.0" authors = [] @@ -560,7 +564,7 @@ #[cargo_test] fn inherit_workspace_fields() { - registry::init(); + let registry = registry::init(); let p = project().build(); @@ -624,7 +628,10 @@ .file("bar/bar.txt", "") // should be included when packaging .build(); - p.cargo("publish --token sekrit").cwd("bar").run(); + p.cargo("publish") + .replace_crates_io(registry.index_url()) + .cwd("bar") + .run(); publish::validate_upload_with_contents( r#" { @@ -701,6 +708,7 @@ #[cargo_test] fn inherit_dependencies() { + let registry = registry::init(); let p = project() .file( "Cargo.toml", @@ -716,7 +724,7 @@ .file( "bar/Cargo.toml", r#" - [project] + [package] workspace = ".." name = "bar" version = "0.2.0" @@ -755,7 +763,10 @@ assert!(lockfile.contains("dep")); assert!(lockfile.contains("dep-dev")); assert!(lockfile.contains("dep-build")); - p.cargo("publish --token sekrit").cwd("bar").run(); + p.cargo("publish") + .replace_crates_io(registry.index_url()) + .cwd("bar") + .run(); publish::validate_upload_with_contents( r#" { @@ -769,7 +780,6 @@ "kind": "normal", "name": "dep", "optional": false, - "registry": "https://github.com/rust-lang/crates.io-index", "target": null, "version_req": "^0.1" }, @@ -779,7 +789,6 @@ "kind": "dev", "name": "dep-dev", "optional": false, - "registry": "https://github.com/rust-lang/crates.io-index", "target": null, "version_req": "^0.5.2" }, @@ -789,7 +798,6 @@ "kind": "build", "name": "dep-build", "optional": false, - "registry": "https://github.com/rust-lang/crates.io-index", "target": null, "version_req": "^0.8" } @@ -850,7 +858,7 @@ .file( "bar/Cargo.toml", r#" - [project] + [package] workspace = ".." name = "bar" version = "0.2.0" @@ -900,7 +908,7 @@ .file( "bar/Cargo.toml", r#" - [project] + [package] workspace = ".." name = "bar" version = "0.2.0" @@ -938,7 +946,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "bar" version = "0.2.0" authors = [] @@ -1011,7 +1019,7 @@ .file( "bar/Cargo.toml", r#" - [project] + [package] workspace = ".." name = "bar" version = "0.2.0" @@ -1053,7 +1061,7 @@ .file( "bar/Cargo.toml", r#" - [project] + [package] workspace = ".." 
name = "bar" version = "0.2.0" diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/init/unknown_flags/stderr.log cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/init/unknown_flags/stderr.log --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/init/unknown_flags/stderr.log 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/init/unknown_flags/stderr.log 2023-01-10 13:41:19.000000000 +0000 @@ -1,8 +1,7 @@ error: Found argument '--flag' which wasn't expected, or isn't valid in this context - If you tried to supply `--flag` as a value rather than a flag, use `-- --flag` + If you tried to supply '--flag' as a value rather than a flag, use '-- --flag' -USAGE: - cargo[EXE] init +Usage: cargo[EXE] init -For more information try --help +For more information try '--help' diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/install.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/install.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/install.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/install.rs 2023-01-10 13:41:19.000000000 +0000 @@ -1448,8 +1448,7 @@ .with_status(1) .with_stderr_contains( "\ -error: The argument '--version ' was provided more than once, \ -but cannot be used multiple times +error: The argument '--version ' was provided more than once, but cannot be used multiple times ", ) .run(); @@ -1652,7 +1651,7 @@ .arg("") .with_status(1) .with_stderr_contains( - "[ERROR] The argument '...' requires a value but none was supplied", + "[ERROR] The argument '[crate]...' requires a value but none was supplied", ) .run(); } diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/local_registry.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/local_registry.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/local_registry.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/local_registry.rs 2023-01-10 13:41:19.000000000 +0000 @@ -33,7 +33,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -73,7 +73,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -109,7 +109,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -152,7 +152,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -204,7 +204,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -258,7 +258,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -308,7 +308,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "baz" version = "0.1.0" authors = [] @@ -326,7 +326,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -370,7 +370,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -429,7 +429,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -498,7 +498,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/lockfile_compat.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/lockfile_compat.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/lockfile_compat.rs 2022-10-20 06:00:42.000000000 +0000 +++ 
cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/lockfile_compat.rs 2023-01-10 13:41:19.000000000 +0000 @@ -52,7 +52,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -98,7 +98,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -125,7 +125,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -191,7 +191,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -254,7 +254,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -320,7 +320,7 @@ "Cargo.toml", &format!( r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -493,7 +493,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -545,7 +545,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -604,7 +604,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -619,7 +619,7 @@ .file( "a/Cargo.toml", r#" - [project] + [package] name = "a" version = "0.2.0" "#, @@ -670,7 +670,7 @@ "Cargo.toml", &format!( r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -697,7 +697,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -749,7 +749,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -773,7 +773,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.1.0" "#, @@ -817,7 +817,7 @@ "Cargo.toml", &format!( r#" - [project] + [package] name = "foo" version = "0.1.0" diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/login.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/login.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/login.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/login.rs 2023-01-10 13:41:19.000000000 +0000 @@ -90,3 +90,36 @@ assert!(check_token(TOKEN, Some(reg))); assert!(check_token(TOKEN2, Some(reg2))); } + +#[cargo_test] +fn empty_login_token() { + let registry = RegistryBuilder::new() + .no_configure_registry() + .no_configure_token() + .build(); + setup_new_credentials(); + + cargo_process("login") + .replace_crates_io(registry.index_url()) + .with_stdout("please paste the API Token found on [..]/me below") + .with_stdin("\t\n") + .with_stderr( + "\ +[UPDATING] crates.io index +[ERROR] please provide a non-empty token +", + ) + .with_status(101) + .run(); + + cargo_process("login") + .replace_crates_io(registry.index_url()) + .arg("") + .with_stderr( + "\ +[ERROR] please provide a non-empty token +", + ) + .with_status(101) + .run(); +} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/logout.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/logout.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/logout.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/logout.rs 2023-01-10 13:41:19.000000000 +0000 @@ -1,6 +1,7 @@ //! Tests for the `cargo logout` command. 
use cargo_test_support::install::cargo_home; +use cargo_test_support::registry::TestRegistry; use cargo_test_support::{cargo_process, registry}; use std::fs; use toml_edit::easy as toml; @@ -44,11 +45,12 @@ } } -fn simple_logout_test(reg: Option<&str>, flag: &str) { +fn simple_logout_test(registry: &TestRegistry, reg: Option<&str>, flag: &str) { let msg = reg.unwrap_or("crates.io"); check_config_token(reg, true); cargo_process(&format!("logout -Z unstable-options {}", flag)) .masquerade_as_nightly_cargo(&["cargo-logout"]) + .replace_crates_io(registry.index_url()) .with_stderr(&format!( "\ [UPDATING] [..] @@ -61,6 +63,7 @@ cargo_process(&format!("logout -Z unstable-options {}", flag)) .masquerade_as_nightly_cargo(&["cargo-logout"]) + .replace_crates_io(registry.index_url()) .with_stderr(&format!( "\ [LOGOUT] not currently logged in to `{}` @@ -73,12 +76,12 @@ #[cargo_test] fn default_registry() { - registry::init(); - simple_logout_test(None, ""); + let registry = registry::init(); + simple_logout_test(®istry, None, ""); } #[cargo_test] fn other_registry() { - registry::alt_init(); - simple_logout_test(Some("alternative"), "--registry alternative"); + let registry = registry::alt_init(); + simple_logout_test(®istry, Some("alternative"), "--registry alternative"); } diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/lto.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/lto.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/lto.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/lto.rs 2023-01-10 13:41:19.000000000 +0000 @@ -482,7 +482,7 @@ fn cdylib_and_rlib() { let p = project_with_dep("'cdylib', 'rlib'"); let output = p.cargo("build --release -v").exec_with_output().unwrap(); - // `registry` is ObjectAndBitcode because because it needs Object for the + // `registry` is ObjectAndBitcode because it needs Object for the // rlib, and Bitcode for the cdylib (which doesn't support LTO). 
verify_lto( &output, diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/main.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/main.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/main.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/main.rs 2023-01-10 13:41:19.000000000 +0000 @@ -25,6 +25,7 @@ mod cargo_config; mod cargo_env_config; mod cargo_features; +mod cargo_remove; mod cargo_targets; mod cfg; mod check; @@ -117,6 +118,7 @@ mod rustflags; mod search; mod shell_quoting; +mod source_replacement; mod standard_lib; mod test; mod timings; diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/member_discovery.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/member_discovery.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/member_discovery.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/member_discovery.rs 2023-01-10 13:41:19.000000000 +0000 @@ -22,7 +22,7 @@ .file( "crates/bar/Cargo.toml", r#" - [project] + [package] name = "bar" version = "0.1.0" authors = [] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/member_errors.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/member_errors.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/member_errors.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/member_errors.rs 2023-01-10 13:41:19.000000000 +0000 @@ -17,7 +17,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.1.0" authors = [] @@ -32,7 +32,7 @@ .file( "bar/Cargo.toml", r#" - [project] + [package] name = "bar" version = "0.1.0" authors = [] @@ -66,7 +66,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.1.0" authors = [] @@ -81,7 +81,7 @@ .file( "bar/Cargo.toml", r#" - [project] + [package] name = "bar" version = "0.1.0" authors = [] @@ -116,7 +116,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.1.0" authors = [] @@ -131,7 +131,7 @@ .file( "bar/Cargo.toml", r#" - [project] + [package] name = "bar" version = "0.1.0" authors = [] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/metadata.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/metadata.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/metadata.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/metadata.rs 2023-01-10 13:41:19.000000000 +0000 @@ -287,7 +287,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" authors = [] @@ -1725,7 +1725,55 @@ r#"[ERROR] failed to parse manifest at `[..]` Caused by: - invalid type: string "", expected vector of strings for key `package.authors`"#, + invalid type: string "", expected a sequence for key `package.authors`"#, + ) + .run(); +} + +#[cargo_test] +fn cargo_metadata_with_invalid_version_field() { + let p = project() + .file("src/foo.rs", "") + .file( + "Cargo.toml", + r#" + [package] + version = 1 + "#, + ) + .build(); + + p.cargo("metadata") + .with_status(101) + .with_stderr( + r#"[ERROR] failed to parse manifest at `[..]` + +Caused by: + invalid type: integer `1`, expected SemVer version for key `package.version`"#, + ) + .run(); +} + +#[cargo_test] +fn cargo_metadata_with_invalid_publish_field() { + let p = project() + .file("src/foo.rs", "") + .file( + "Cargo.toml", + r#" + [package] + publish = "foo" + "#, + ) + .build(); + + p.cargo("metadata") + .with_status(101) + .with_stderr( + r#"[ERROR] failed to parse manifest at `[..]` + +Caused by: + 
invalid type: string "foo", expected a boolean or vector of strings for key `package.publish`"#, ) .run(); } @@ -1862,8 +1910,8 @@ .with_status(1) .with_stderr_contains( "\ -error: \"2\" isn't a valid value for '--format-version ' -[possible values: 1] +error: '2' isn't a valid value for '--format-version ' + [possible values: 1] ", ) .run(); @@ -2239,7 +2287,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.1.0" authors = ["wycats@example.com"] @@ -2259,7 +2307,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.1.0" authors = ["wycats@example.com"] @@ -2387,7 +2435,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -2619,7 +2667,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" links = "a" diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/net_config.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/net_config.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/net_config.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/net_config.rs 2023-01-10 13:41:19.000000000 +0000 @@ -8,7 +8,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -44,7 +44,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/new.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/new.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/new.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/new.rs 2023-01-10 13:41:19.000000000 +0000 @@ -107,7 +107,7 @@ .with_stderr_contains( "\ error: The following required arguments were not provided: - + ", ) .run(); @@ -380,7 +380,7 @@ #[cargo_test] fn new_with_bad_edition() { cargo_process("new --edition something_else foo") - .with_stderr_contains("error: \"something_else\" isn't a valid value[..]") + .with_stderr_contains("error: 'something_else' isn't a valid value[..]") .with_status(1) .run(); } diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/offline.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/offline.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/offline.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/offline.rs 2023-01-10 13:41:19.000000000 +0000 @@ -12,7 +12,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.1.0" [dependencies] @@ -40,7 +40,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.1.0" [dependencies] @@ -104,7 +104,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.1.0" @@ -121,7 +121,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "bar" version = "0.1.0" @@ -151,7 +151,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "bar" version = "0.1.0" @@ -204,7 +204,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.1.0" @@ -220,7 +220,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.1.0" @@ -257,7 +257,7 @@ "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" authors = ["chabapok@example.com"] @@ -298,7 +298,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" @@ -337,7 +337,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -393,7 +393,7 @@ "Cargo.toml", 
&format!( r#" - [project] + [package] name = "cache_git_dep" version = "0.5.0" @@ -413,7 +413,7 @@ "Cargo.toml", &format!( r#" - [project] + [package] name = "cache_git_dep" version = "0.5.0" @@ -432,7 +432,7 @@ "Cargo.toml", &format!( r#" - [project] + [package] name = "foo" version = "0.5.0" @@ -470,7 +470,7 @@ "Cargo.toml", &format!( r#" - [project] + [package] name = "foo" version = "0.5.0" @@ -591,7 +591,7 @@ "Cargo.toml", format!( r#" - [project] + [package] name = "foo" version = "0.1.0" @@ -609,7 +609,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.1.0" diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/out_dir.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/out_dir.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/out_dir.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/out_dir.rs 2023-01-10 13:41:19.000000000 +0000 @@ -31,7 +31,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -67,7 +67,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -104,7 +104,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -139,7 +139,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/owner.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/owner.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/owner.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/owner.rs 2023-01-10 13:41:19.000000000 +0000 @@ -16,7 +16,7 @@ #[cargo_test] fn simple_list() { - registry::init(); + let registry = registry::init(); let content = r#"{ "users": [ { @@ -36,7 +36,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -47,7 +47,8 @@ .file("src/main.rs", "fn main() {}") .build(); - p.cargo("owner -l --token sekrit") + p.cargo("owner -l") + .replace_crates_io(registry.index_url()) .with_stdout( "\ github:rust-lang:core (Core) @@ -59,14 +60,14 @@ #[cargo_test] fn simple_add() { - registry::init(); + let registry = registry::init(); setup("foo", None); let p = project() .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -77,10 +78,11 @@ .file("src/main.rs", "fn main() {}") .build(); - p.cargo("owner -a username --token sekrit") + p.cargo("owner -a username") + .replace_crates_io(registry.index_url()) .with_status(101) .with_stderr( - " Updating `[..]` index + " Updating crates.io index error: failed to invite owners to crate `foo` on registry at file://[..] Caused by: @@ -91,14 +93,14 @@ #[cargo_test] fn simple_remove() { - registry::init(); + let registry = registry::init(); setup("foo", None); let p = project() .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -109,10 +111,11 @@ .file("src/main.rs", "fn main() {}") .build(); - p.cargo("owner -r username --token sekrit") + p.cargo("owner -r username") + .replace_crates_io(registry.index_url()) .with_status(101) .with_stderr( - " Updating `[..]` index + " Updating crates.io index Owner removing [\"username\"] from crate foo error: failed to remove owners from crate `foo` on registry at file://[..] 
diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/package.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/package.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/package.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/package.rs 2023-01-10 13:41:19.000000000 +0000 @@ -17,7 +17,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -85,7 +85,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -111,7 +111,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -251,7 +251,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" description = "foo" version = "0.0.1" @@ -290,7 +290,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" description = "foo" version = "0.0.1" @@ -328,7 +328,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -367,7 +367,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -403,7 +403,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -525,7 +525,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -577,7 +577,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = ["foo@example.com"] @@ -680,7 +680,7 @@ #[cargo_test] fn ignore_nested() { let cargo_toml = r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -814,7 +814,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -865,7 +865,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -1022,7 +1022,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" license = "MIT" @@ -1039,7 +1039,7 @@ p.change_file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" license = "MIT" @@ -1131,7 +1131,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -1139,7 +1139,7 @@ license = "MIT" description = "foo" - [project.metadata] + [package.metadata] foo = 'bar' [workspace] @@ -1203,7 +1203,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" @@ -1257,7 +1257,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -1439,7 +1439,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -1468,7 +1468,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -1497,7 +1497,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -2071,7 +2071,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -2240,7 +2240,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -2331,7 +2331,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -2346,7 +2346,7 @@ .file( "bar/Cargo.toml", r#" - [project] + [package] name = "bar" version = "0.0.1" authors = [] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/patch.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/patch.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/patch.rs 
2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/patch.rs 2023-01-10 13:41:19.000000000 +0000 @@ -1485,7 +1485,7 @@ .file( "bar/Cargo.toml", r#" - [project] + [package] name = "bar" version = "0.5.0" authors = [] @@ -1501,7 +1501,7 @@ .file( "baz/Cargo.toml", r#" - [project] + [package] name = "baz" version = "1.1.0-pre.1" authors = [] @@ -1538,7 +1538,7 @@ .file( "bar/Cargo.toml", r#" - [project] + [package] name = "bar" version = "0.5.0" authors = [] @@ -1551,7 +1551,7 @@ .file( "baz/Cargo.toml", r#" - [project] + [package] name = "baz" version = "1.0.1" authors = [] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/path.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/path.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/path.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/path.rs 2023-01-10 13:41:19.000000000 +0000 @@ -15,7 +15,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" @@ -31,7 +31,7 @@ .file( "bar/Cargo.toml", r#" - [project] + [package] name = "bar" version = "0.5.0" @@ -109,7 +109,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" @@ -151,7 +151,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" @@ -199,7 +199,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" @@ -215,7 +215,7 @@ .file( "bar/Cargo.toml", r#" - [project] + [package] name = "bar" version = "0.5.0" @@ -260,7 +260,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" @@ -308,7 +308,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" @@ -322,7 +322,7 @@ .file( "bar/Cargo.toml", r#" - [project] + [package] name = "bar" version = "0.5.0" @@ -395,7 +395,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" @@ -411,7 +411,7 @@ .file( "bar/Cargo.toml", r#" - [project] + [package] name = "bar" version = "0.5.0" @@ -447,7 +447,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" @@ -492,7 +492,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" @@ -657,7 +657,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" @@ -673,7 +673,7 @@ .file( "bar/Cargo.toml", r#" - [project] + [package] name = "bar" version = "0.5.0" @@ -732,7 +732,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" authors = [] @@ -782,7 +782,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" authors = [] @@ -798,7 +798,7 @@ .file( "b/Cargo.toml", r#" - [project] + [package] name = "b" version = "0.5.0" authors = [] @@ -840,7 +840,7 @@ .file( "a/a1/Cargo.toml", r#" - [project] + [package] name = "a1" version = "0.5.0" authors = [] @@ -854,7 +854,7 @@ .file( "b/Cargo.toml", r#" - [project] + [package] name = "b" version = "0.5.0" authors = [] @@ -915,7 +915,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "top" version = "0.5.0" authors = [] @@ -930,7 +930,7 @@ .file( "foo/Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" authors = [] @@ -949,7 +949,7 @@ p.change_file( "foo/Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" authors = [] @@ -981,7 +981,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "top" version = "0.5.0" authors = [] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/profiles.rs 
cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/profiles.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/profiles.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/profiles.rs 2023-01-10 13:41:19.000000000 +0000 @@ -240,7 +240,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.1.0" authors = [] @@ -256,7 +256,7 @@ .file( "bar/Cargo.toml", r#" - [project] + [package] name = "bar" version = "0.1.0" authors = [] @@ -301,7 +301,7 @@ .file( "bar/Cargo.toml", r#" - [project] + [package] name = "bar" version = "0.1.0" authors = [] @@ -480,7 +480,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "top" version = "0.5.0" authors = [] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/publish.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/publish.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/publish.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/publish.rs 2023-01-10 13:41:19.000000000 +0000 @@ -5,6 +5,7 @@ use cargo_test_support::registry::{self, Package, Response}; use cargo_test_support::{basic_manifest, no_such_file_err_msg, project, publish}; use std::fs; +use std::sync::{Arc, Mutex}; const CLEAN_FOO_JSON: &str = r#" { @@ -84,29 +85,16 @@ ); } -fn validate_upload_foo_clean() { - publish::validate_upload( - CLEAN_FOO_JSON, - "foo-0.0.1.crate", - &[ - "Cargo.lock", - "Cargo.toml", - "Cargo.toml.orig", - "src/main.rs", - ".cargo_vcs_info.json", - ], - ); -} - #[cargo_test] fn simple() { - registry::init(); + // HACK below allows us to use a local registry + let registry = registry::init(); let p = project() .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -117,14 +105,25 @@ .file("src/main.rs", "fn main() {}") .build(); - p.cargo("publish --no-verify --token sekrit") + // HACK: Inject `foo` directly into the index so `publish` won't block for it to be in + // the index. + // + // This is to ensure we can verify the Summary we post to the registry as doing so precludes + // the registry from processing the publish. + Package::new("foo", "0.0.1") + .file("src/lib.rs", "") + .publish(); + + p.cargo("publish --no-verify") + .replace_crates_io(registry.index_url()) .with_stderr( "\ -[UPDATING] `dummy-registry` index +[UPDATING] crates.io index [WARNING] manifest has no documentation, [..] See [..] [PACKAGING] foo v0.0.1 ([CWD]) [UPLOADING] foo v0.0.1 ([CWD]) +[UPDATING] [..] ", ) .run(); @@ -132,17 +131,18 @@ validate_upload_foo(); } +// Check that the `token` key works at the root instead of under a +// `[registry]` table. #[cargo_test] fn old_token_location() { - // Check that the `token` key works at the root instead of under a - // `[registry]` table. - registry::init(); + // `publish` generally requires a remote registry + let registry = registry::RegistryBuilder::new().http_api().build(); let p = project() .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -158,6 +158,7 @@ // Verify can't publish without a token. 
p.cargo("publish --no-verify") + .replace_crates_io(registry.index_url()) .with_status(101) .with_stderr_contains( "[ERROR] no upload token found, \ @@ -165,35 +166,36 @@ ) .run(); - fs::write(&credentials, r#"token = "api-token""#).unwrap(); + fs::write(&credentials, format!(r#"token = "{}""#, registry.token())).unwrap(); p.cargo("publish --no-verify") + .replace_crates_io(registry.index_url()) .with_stderr( "\ -[UPDATING] `dummy-registry` index -[WARNING] using `registry.token` config value with source replacement is deprecated -This may become a hard error in the future[..] -Use the --token command-line flag to remove this warning. +[UPDATING] crates.io index [WARNING] manifest has no documentation, [..] See [..] [PACKAGING] foo v0.0.1 ([CWD]) [UPLOADING] foo v0.0.1 ([CWD]) +[UPDATING] [..] ", ) .run(); - validate_upload_foo(); + // Skip `validate_upload_foo` as we just cared we got far enough for verify the token behavior. + // Other tests will verify the endpoint gets the right payload. } #[cargo_test] fn simple_with_index() { - let registry = registry::init(); + // `publish` generally requires a remote registry + let registry = registry::RegistryBuilder::new().http_api().build(); let p = project() .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -204,22 +206,37 @@ .file("src/main.rs", "fn main() {}") .build(); - p.cargo("publish --no-verify --token sekrit --index") + p.cargo("publish --no-verify") + .arg("--token") + .arg(registry.token()) + .arg("--index") .arg(registry.index_url().as_str()) + .with_stderr( + "\ +[..] +[..] +[..] +[..] +[UPLOADING] foo v0.0.1 ([CWD]) +[UPDATING] [..] +", + ) .run(); - validate_upload_foo(); + // Skip `validate_upload_foo` as we just cared we got far enough for verify the VCS behavior. + // Other tests will verify the endpoint gets the right payload. } #[cargo_test] fn git_deps() { - registry::init(); + // Use local registry for faster test times since no publish will occur + let registry = registry::init(); let p = project() .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -233,7 +250,8 @@ .file("src/main.rs", "fn main() {}") .build(); - p.cargo("publish -v --no-verify --token sekrit") + p.cargo("publish -v --no-verify") + .replace_crates_io(registry.index_url()) .with_status(101) .with_stderr( "\ @@ -249,13 +267,14 @@ #[cargo_test] fn path_dependency_no_version() { - registry::init(); + // Use local registry for faster test times since no publish will occur + let registry = registry::init(); let p = project() .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -271,7 +290,8 @@ .file("bar/src/lib.rs", "") .build(); - p.cargo("publish --token sekrit") + p.cargo("publish") + .replace_crates_io(registry.index_url()) .with_status(101) .with_stderr( "\ @@ -287,13 +307,14 @@ #[cargo_test] fn unpublishable_crate() { + // Use local registry for faster test times since no publish will occur let registry = registry::init(); let p = project() .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -311,7 +332,7 @@ .with_stderr( "\ [ERROR] `foo` cannot be published. -The registry `crates-io` is not listed in the `publish` value in Cargo.toml. +`package.publish` is set to `false` or an empty list in Cargo.toml and prevents publishing. 
", ) .run(); @@ -319,14 +340,16 @@ #[cargo_test] fn dont_publish_dirty() { - registry::init(); + // Use local registry for faster test times since no publish will occur + let registry = registry::init(); + let p = project().file("bar", "").build(); let _ = git::repo(&paths::root().join("foo")) .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -340,11 +363,12 @@ .file("src/main.rs", "fn main() {}") .build(); - p.cargo("publish --token sekrit") + p.cargo("publish") + .replace_crates_io(registry.index_url()) .with_status(101) .with_stderr( "\ -[UPDATING] `[..]` index +[UPDATING] crates.io index error: 1 files in the working directory contain changes that were not yet \ committed into git: @@ -358,7 +382,8 @@ #[cargo_test] fn publish_clean() { - registry::init(); + // `publish` generally requires a remote registry + let registry = registry::RegistryBuilder::new().http_api().build(); let p = project().build(); @@ -366,7 +391,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -380,14 +405,29 @@ .file("src/main.rs", "fn main() {}") .build(); - p.cargo("publish --token sekrit").run(); + p.cargo("publish") + .replace_crates_io(registry.index_url()) + .with_stderr( + "\ +[..] +[..] +[VERIFYING] foo v0.0.1 ([CWD]) +[..] +[..] +[UPLOADING] foo v0.0.1 ([CWD]) +[UPDATING] [..] +", + ) + .run(); - validate_upload_foo_clean(); + // Skip `validate_upload_foo_clean` as we just cared we got far enough for verify the VCS behavior. + // Other tests will verify the endpoint gets the right payload. } #[cargo_test] fn publish_in_sub_repo() { - registry::init(); + // `publish` generally requires a remote registry + let registry = registry::RegistryBuilder::new().http_api().build(); let p = project().no_manifest().file("baz", "").build(); @@ -395,7 +435,7 @@ .file( "bar/Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -409,14 +449,30 @@ .file("bar/src/main.rs", "fn main() {}") .build(); - p.cargo("publish --token sekrit").cwd("bar").run(); + p.cargo("publish") + .replace_crates_io(registry.index_url()) + .cwd("bar") + .with_stderr( + "\ +[..] +[..] +[VERIFYING] foo v0.0.1 ([CWD]) +[..] +[..] +[UPLOADING] foo v0.0.1 ([CWD]) +[UPDATING] [..] +", + ) + .run(); - validate_upload_foo_clean(); + // Skip `validate_upload_foo_clean` as we just cared we got far enough for verify the VCS behavior. + // Other tests will verify the endpoint gets the right payload. } #[cargo_test] fn publish_when_ignored() { - registry::init(); + // `publish` generally requires a remote registry + let registry = registry::RegistryBuilder::new().http_api().build(); let p = project().file("baz", "").build(); @@ -424,7 +480,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -439,25 +495,29 @@ .file(".gitignore", "baz") .build(); - p.cargo("publish --token sekrit").run(); + p.cargo("publish") + .replace_crates_io(registry.index_url()) + .with_stderr( + "\ +[..] +[..] +[VERIFYING] foo v0.0.1 ([CWD]) +[..] +[..] +[UPLOADING] foo v0.0.1 ([CWD]) +[UPDATING] [..] +", + ) + .run(); - publish::validate_upload( - CLEAN_FOO_JSON, - "foo-0.0.1.crate", - &[ - "Cargo.lock", - "Cargo.toml", - "Cargo.toml.orig", - "src/main.rs", - ".gitignore", - ".cargo_vcs_info.json", - ], - ); + // Skip `validate_upload` as we just cared we got far enough for verify the VCS behavior. + // Other tests will verify the endpoint gets the right payload. 
} #[cargo_test] fn ignore_when_crate_ignored() { - registry::init(); + // `publish` generally requires a remote registry + let registry = registry::RegistryBuilder::new().http_api().build(); let p = project().no_manifest().file("bar/baz", "").build(); @@ -466,7 +526,7 @@ .nocommit_file( "bar/Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -478,24 +538,30 @@ "#, ) .nocommit_file("bar/src/main.rs", "fn main() {}"); - p.cargo("publish --token sekrit").cwd("bar").run(); + p.cargo("publish") + .replace_crates_io(registry.index_url()) + .cwd("bar") + .with_stderr( + "\ +[..] +[..] +[VERIFYING] foo v0.0.1 ([CWD]) +[..] +[..] +[UPLOADING] foo v0.0.1 ([CWD]) +[UPDATING] [..] +", + ) + .run(); - publish::validate_upload( - CLEAN_FOO_JSON, - "foo-0.0.1.crate", - &[ - "Cargo.lock", - "Cargo.toml", - "Cargo.toml.orig", - "src/main.rs", - "baz", - ], - ); + // Skip `validate_upload` as we just cared we got far enough for verify the VCS behavior. + // Other tests will verify the endpoint gets the right payload. } #[cargo_test] fn new_crate_rejected() { - registry::init(); + // Use local registry for faster test times since no publish will occur + let registry = registry::init(); let p = project().file("baz", "").build(); @@ -503,7 +569,7 @@ .nocommit_file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -515,7 +581,8 @@ "#, ) .nocommit_file("src/main.rs", "fn main() {}"); - p.cargo("publish --token sekrit") + p.cargo("publish") + .replace_crates_io(registry.index_url()) .with_status(101) .with_stderr_contains( "[ERROR] 3 files in the working directory contain \ @@ -526,13 +593,14 @@ #[cargo_test] fn dry_run() { + // Use local registry for faster test times since no publish will occur let registry = registry::init(); let p = project() .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -567,13 +635,11 @@ #[cargo_test] fn registry_not_in_publish_list() { - registry::init(); - let p = project() .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -594,7 +660,7 @@ .with_stderr( "\ [ERROR] `foo` cannot be published. -The registry `alternative` is not listed in the `publish` value in Cargo.toml. +The registry `alternative` is not listed in the `package.publish` value in Cargo.toml. ", ) .run(); @@ -602,13 +668,11 @@ #[cargo_test] fn publish_empty_list() { - registry::init(); - let p = project() .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -625,7 +689,7 @@ .with_stderr( "\ [ERROR] `foo` cannot be published. -The registry `alternative` is not listed in the `publish` value in Cargo.toml. +`package.publish` is set to `false` or an empty list in Cargo.toml and prevents publishing. ", ) .run(); @@ -633,6 +697,7 @@ #[cargo_test] fn publish_allowed_registry() { + // HACK below allows us to use a local registry registry::alt_init(); let p = project().build(); @@ -641,7 +706,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -656,7 +721,29 @@ .file("src/main.rs", "fn main() {}") .build(); - p.cargo("publish --registry alternative").run(); + // HACK: Inject `foo` directly into the index so `publish` won't block for it to be in + // the index. + // + // This is to ensure we can verify the Summary we post to the registry as doing so precludes + // the registry from processing the publish. 
+ Package::new("foo", "0.0.1") + .file("src/lib.rs", "") + .alternative(true) + .publish(); + + p.cargo("publish --registry alternative") + .with_stderr( + "\ +[..] +[..] +[VERIFYING] foo v0.0.1 ([CWD]) +[..] +[..] +[UPLOADING] foo v0.0.1 ([CWD]) +[UPDATING] `alternative` index +", + ) + .run(); publish::validate_alt_upload( CLEAN_FOO_JSON, @@ -673,6 +760,7 @@ #[cargo_test] fn publish_implicitly_to_only_allowed_registry() { + // HACK below allows us to use a local registry registry::alt_init(); let p = project().build(); @@ -681,7 +769,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -696,7 +784,30 @@ .file("src/main.rs", "fn main() {}") .build(); - p.cargo("publish").run(); + // HACK: Inject `foo` directly into the index so `publish` won't block for it to be in + // the index. + // + // This is to ensure we can verify the Summary we post to the registry as doing so precludes + // the registry from processing the publish. + Package::new("foo", "0.0.1") + .file("src/lib.rs", "") + .alternative(true) + .publish(); + + p.cargo("publish") + .with_stderr( + "\ +[NOTE] Found `alternative` as only allowed registry. Publishing to it automatically. +[UPDATING] `alternative` index +[..] +[VERIFYING] foo v0.0.1 ([CWD]) +[..] +[..] +[UPLOADING] foo v0.0.1 ([CWD]) +[UPDATING] `alternative` index +", + ) + .run(); publish::validate_alt_upload( CLEAN_FOO_JSON, @@ -713,15 +824,13 @@ #[cargo_test] fn publish_fail_with_no_registry_specified() { - registry::init(); - let p = project().build(); let _ = repo(&paths::root().join("foo")) .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -741,7 +850,7 @@ .with_stderr( "\ [ERROR] `foo` cannot be published. -The registry `crates-io` is not listed in the `publish` value in Cargo.toml. +The registry `crates-io` is not listed in the `package.publish` value in Cargo.toml. ", ) .run(); @@ -749,13 +858,11 @@ #[cargo_test] fn block_publish_no_registry() { - registry::init(); - let p = project() .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -772,22 +879,23 @@ .with_stderr( "\ [ERROR] `foo` cannot be published. -The registry `alternative` is not listed in the `publish` value in Cargo.toml. +`package.publish` is set to `false` or an empty list in Cargo.toml and prevents publishing. ", ) .run(); } +// Explicitly setting `crates-io` in the publish list. #[cargo_test] fn publish_with_crates_io_explicit() { - // Explicitly setting `crates-io` in the publish list. - registry::init(); + // `publish` generally requires a remote registry + let registry = registry::RegistryBuilder::new().http_api().build(); let p = project() .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -804,23 +912,39 @@ .with_stderr( "\ [ERROR] `foo` cannot be published. -The registry `alternative` is not listed in the `publish` value in Cargo.toml. +The registry `alternative` is not listed in the `package.publish` value in Cargo.toml. ", ) .run(); - p.cargo("publish").run(); + p.cargo("publish") + .replace_crates_io(registry.index_url()) + .with_stderr( + "\ +[UPDATING] [..] +[WARNING] [..] +[..] +[PACKAGING] [..] +[VERIFYING] foo v0.0.1 ([CWD]) +[..] +[..] 
+[UPLOADING] foo v0.0.1 ([CWD]) +[UPDATING] crates.io index +", + ) + .run(); } #[cargo_test] fn publish_with_select_features() { - registry::init(); + // `publish` generally requires a remote registry + let registry = registry::RegistryBuilder::new().http_api().build(); let p = project() .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -840,20 +964,34 @@ ) .build(); - p.cargo("publish --features required --token sekrit") - .with_stderr_contains("[UPLOADING] foo v0.0.1 ([CWD])") + p.cargo("publish --features required") + .replace_crates_io(registry.index_url()) + .with_stderr( + "\ +[..] +[..] +[..] +[..] +[VERIFYING] foo v0.0.1 ([CWD]) +[..] +[..] +[UPLOADING] foo v0.0.1 ([CWD]) +[UPDATING] crates.io index +", + ) .run(); } #[cargo_test] fn publish_with_all_features() { - registry::init(); + // `publish` generally requires a remote registry + let registry = registry::RegistryBuilder::new().http_api().build(); let p = project() .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -873,20 +1011,34 @@ ) .build(); - p.cargo("publish --all-features --token sekrit") - .with_stderr_contains("[UPLOADING] foo v0.0.1 ([CWD])") + p.cargo("publish --all-features") + .replace_crates_io(registry.index_url()) + .with_stderr( + "\ +[..] +[..] +[..] +[..] +[VERIFYING] foo v0.0.1 ([CWD]) +[..] +[..] +[UPLOADING] foo v0.0.1 ([CWD]) +[UPDATING] crates.io index +", + ) .run(); } #[cargo_test] fn publish_with_no_default_features() { - registry::init(); + // Use local registry for faster test times since no publish will occur + let registry = registry::init(); let p = project() .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -906,21 +1058,24 @@ ) .build(); - p.cargo("publish --no-default-features --token sekrit") - .with_stderr_contains("error: This crate requires `required` feature!") + p.cargo("publish --no-default-features") + .replace_crates_io(registry.index_url()) .with_status(101) + .with_stderr_contains("error: This crate requires `required` feature!") .run(); } #[cargo_test] fn publish_with_patch() { + // HACK below allows us to use a local registry + let registry = registry::init(); Package::new("bar", "1.0.0").publish(); let p = project() .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -947,19 +1102,42 @@ p.cargo("build").run(); // Check that verify fails with patched crate which has new functionality. - p.cargo("publish --token sekrit") - .with_stderr_contains("[..]newfunc[..]") + p.cargo("publish") + .replace_crates_io(registry.index_url()) .with_status(101) + .with_stderr_contains("[..]newfunc[..]") .run(); // Remove the usage of new functionality and try again. p.change_file("src/main.rs", "extern crate bar; pub fn main() {}"); - p.cargo("publish --token sekrit").run(); + // HACK: Inject `foo` directly into the index so `publish` won't block for it to be in + // the index. + // + // This is to ensure we can verify the Summary we post to the registry as doing so precludes + // the registry from processing the publish. + Package::new("foo", "0.0.1") + .file("src/lib.rs", "") + .publish(); + + p.cargo("publish") + .replace_crates_io(registry.index_url()) + .with_stderr( + "\ +[..] +[..] +[..] +[..] +[VERIFYING] foo v0.0.1 ([CWD]) +[..] +[..] +[..] 
+[UPLOADING] foo v0.0.1 ([CWD]) +[UPDATING] crates.io index +", + ) + .run(); - // Note, use of `registry` in the deps here is an artifact that this - // publishes to a fake, local registry that is pretending to be crates.io. - // Normal publishes would set it to null. publish::validate_upload( r#" { @@ -973,7 +1151,6 @@ "kind": "normal", "name": "bar", "optional": false, - "registry": "https://github.com/rust-lang/crates.io-index", "target": null, "version_req": "^1.0" } @@ -1000,13 +1177,15 @@ #[cargo_test] fn publish_checks_for_token_before_verify() { - registry::init(); + let registry = registry::RegistryBuilder::new() + .no_configure_token() + .build(); let p = project() .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -1017,11 +1196,9 @@ .file("src/main.rs", "fn main() {}") .build(); - let credentials = paths::home().join(".cargo/credentials"); - fs::remove_file(&credentials).unwrap(); - // Assert upload token error before the package is verified p.cargo("publish") + .replace_crates_io(registry.index_url()) .with_status(101) .with_stderr_contains( "[ERROR] no upload token found, \ @@ -1032,8 +1209,20 @@ // Assert package verified successfully on dry run p.cargo("publish --dry-run") - .with_status(0) - .with_stderr_contains("[VERIFYING] foo v0.0.1 ([CWD])") + .replace_crates_io(registry.index_url()) + .with_stderr( + "\ +[..] +[..] +[..] +[..] +[VERIFYING] foo v0.0.1 ([CWD]) +[..] +[..] +[UPLOADING] foo v0.0.1 [..] +[WARNING] aborting upload due to dry run +", + ) .run(); } @@ -1053,12 +1242,12 @@ .file("src/lib.rs", "") .build(); - p.cargo("publish --token sekrit") + p.cargo("publish") .with_status(101) .with_stderr( "\ -[ERROR] registry `[..]/foo/registry` does not support API commands. -Check for a source-replacement in .cargo/config. +[ERROR] crates-io is replaced with non-remote-registry source registry `[..]/foo/registry`; +include `--registry crates-io` to use crates.io ", ) .run(); @@ -1074,20 +1263,23 @@ "#, ); - p.cargo("publish --token sekrit") + p.cargo("publish") .with_status(101) .with_stderr( "\ -[ERROR] dir [..]/foo/vendor does not support API commands. -Check for a source-replacement in .cargo/config. +[ERROR] crates-io is replaced with non-remote-registry source dir [..]/foo/vendor; +include `--registry crates-io` to use crates.io ", ) .run(); } +// A dependency with both `git` and `version`. #[cargo_test] fn publish_git_with_version() { - // A dependency with both `git` and `version`. + // HACK below allows us to use a local registry + let registry = registry::init(); + Package::new("dep1", "1.0.1") .file("src/lib.rs", "pub fn f() -> i32 {1}") .publish(); @@ -1128,7 +1320,29 @@ .build(); p.cargo("run").with_stdout("2").run(); - p.cargo("publish --no-verify --token sekrit").run(); + + // HACK: Inject `foo` directly into the index so `publish` won't block for it to be in + // the index. + // + // This is to ensure we can verify the Summary we post to the registry as doing so precludes + // the registry from processing the publish. + Package::new("foo", "0.1.0") + .file("src/lib.rs", "") + .publish(); + + p.cargo("publish --no-verify") + .replace_crates_io(registry.index_url()) + .with_stderr( + "\ +[..] +[..] +[..] +[..] 
+[UPLOADING] foo v0.1.0 ([CWD]) +[UPDATING] crates.io index +", + ) + .run(); publish::validate_upload_with_contents( r#" @@ -1143,7 +1357,6 @@ "kind": "normal", "name": "dep1", "optional": false, - "registry": "https://github.com/rust-lang/crates.io-index", "target": null, "version_req": "^1.0" } @@ -1212,7 +1425,8 @@ #[cargo_test] fn publish_dev_dep_no_version() { - registry::init(); + // HACK below allows us to use a local registry + let registry = registry::init(); let p = project() .file( @@ -1237,12 +1451,23 @@ .file("bar/src/lib.rs", "") .build(); - p.cargo("publish --no-verify --token sekrit") + // HACK: Inject `foo` directly into the index so `publish` won't block for it to be in + // the index. + // + // This is to ensure we can verify the Summary we post to the registry as doing so precludes + // the registry from processing the publish. + Package::new("foo", "0.1.0") + .file("src/lib.rs", "") + .publish(); + + p.cargo("publish --no-verify") + .replace_crates_io(registry.index_url()) .with_stderr( "\ [UPDATING] [..] [PACKAGING] foo v0.1.0 [..] [UPLOADING] foo v0.1.0 [..] +[UPDATING] crates.io index ", ) .run(); @@ -1295,7 +1520,8 @@ #[cargo_test] fn credentials_ambiguous_filename() { - registry::init(); + // `publish` generally requires a remote registry + let registry = registry::RegistryBuilder::new().http_api().build(); let credentials_toml = paths::home().join(".cargo/credentials.toml"); fs::write(credentials_toml, r#"token = "api-token""#).unwrap(); @@ -1304,7 +1530,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -1315,22 +1541,29 @@ .file("src/main.rs", "fn main() {}") .build(); - p.cargo("publish --no-verify --token sekrit") - .with_stderr_contains( + p.cargo("publish --no-verify") + .replace_crates_io(registry.index_url()) + .with_stderr( "\ [WARNING] Both `[..]/credentials` and `[..]/credentials.toml` exist. Using `[..]/credentials` +[..] +[..] +[..] +[..] +[UPLOADING] foo v0.0.1 [..] +[UPDATING] crates.io index ", ) .run(); - - validate_upload_foo(); } +// --index will not load registry.token to avoid possibly leaking +// crates.io token to another server. #[cargo_test] fn index_requires_token() { - // --index will not load registry.token to avoid possibly leaking - // crates.io token to another server. + // Use local registry for faster test times since no publish will occur let registry = registry::init(); + let credentials = paths::home().join(".cargo/credentials"); fs::remove_file(&credentials).unwrap(); @@ -1361,16 +1594,15 @@ .run(); } +// publish with source replacement without --registry #[cargo_test] -fn registry_token_with_source_replacement() { - // publish with source replacement without --token +fn cratesio_source_replacement() { registry::init(); - let p = project() .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -1382,16 +1614,11 @@ .build(); p.cargo("publish --no-verify") + .with_status(101) .with_stderr( "\ -[UPDATING] [..] -[WARNING] using `registry.token` config value with source replacement is deprecated -This may become a hard error in the future[..] -Use the --token command-line flag to remove this warning. -[WARNING] manifest has no documentation, [..] -See [..] 
-[PACKAGING] foo v0.0.1 ([CWD]) -[UPLOADING] foo v0.0.1 ([CWD]) +[ERROR] crates-io is replaced with remote registry dummy-registry; +include `--registry dummy-registry` or `--registry crates-io` ", ) .run(); @@ -1399,7 +1626,9 @@ #[cargo_test] fn publish_with_missing_readme() { - registry::init(); + // Use local registry for faster test times since no publish will occur + let registry = registry::init(); + let p = project() .file( "Cargo.toml", @@ -1417,7 +1646,8 @@ .file("src/lib.rs", "") .build(); - p.cargo("publish --no-verify --token sekrit") + p.cargo("publish --no-verify") + .replace_crates_io(registry.index_url()) .with_status(101) .with_stderr(&format!( "\ @@ -1437,13 +1667,13 @@ .run(); } +// Registry returns an API error. #[cargo_test] fn api_error_json() { - // Registry returns an API error. let _registry = registry::RegistryBuilder::new() .alternative() .http_api() - .add_responder("/api/v1/crates/new", |_| Response { + .add_responder("/api/v1/crates/new", |_, _| Response { body: br#"{"errors": [{"detail": "you must be logged in"}]}"#.to_vec(), code: 403, headers: vec![], @@ -1454,7 +1684,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -1484,13 +1714,13 @@ .run(); } +// Registry returns an API error with a 200 status code. #[cargo_test] fn api_error_200() { - // Registry returns an API error with a 200 status code. let _registry = registry::RegistryBuilder::new() .alternative() .http_api() - .add_responder("/api/v1/crates/new", |_| Response { + .add_responder("/api/v1/crates/new", |_, _| Response { body: br#"{"errors": [{"detail": "max upload size is 123"}]}"#.to_vec(), code: 200, headers: vec![], @@ -1501,7 +1731,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -1531,13 +1761,13 @@ .run(); } +// Registry returns an error code without a JSON message. #[cargo_test] fn api_error_code() { - // Registry returns an error code without a JSON message. let _registry = registry::RegistryBuilder::new() .alternative() .http_api() - .add_responder("/api/v1/crates/new", |_| Response { + .add_responder("/api/v1/crates/new", |_, _| Response { body: br#"go away"#.to_vec(), code: 400, headers: vec![], @@ -1548,7 +1778,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -1584,13 +1814,13 @@ .run(); } +// Registry has a network error. #[cargo_test] fn api_curl_error() { - // Registry has a network error. let _registry = registry::RegistryBuilder::new() .alternative() .http_api() - .add_responder("/api/v1/crates/new", |_| { + .add_responder("/api/v1/crates/new", |_, _| { panic!("broke"); }) .build(); @@ -1598,7 +1828,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -1633,13 +1863,13 @@ .run(); } +// Registry returns an invalid response. #[cargo_test] fn api_other_error() { - // Registry returns an invalid response. 
let _registry = registry::RegistryBuilder::new() .alternative() .http_api() - .add_responder("/api/v1/crates/new", |_| Response { + .add_responder("/api/v1/crates/new", |_, _| Response { body: b"\xff".to_vec(), code: 200, headers: vec![], @@ -1650,7 +1880,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -1685,7 +1915,8 @@ #[cargo_test] fn in_package_workspace() { - registry::init(); + // HACK below allows us to use a local registry + let registry = registry::init(); let p = project() .file( @@ -1713,7 +1944,15 @@ .file("li/src/main.rs", "fn main() {}") .build(); - p.cargo("publish -p li --no-verify --token sekrit") + // HACK: Inject `foo` directly into the index so `publish` won't block for it to be in + // the index. + // + // This is to ensure we can verify the Summary we post to the registry as doing so precludes + // the registry from processing the publish. + Package::new("li", "0.0.1").file("src/lib.rs", "").publish(); + + p.cargo("publish -p li --no-verify") + .replace_crates_io(registry.index_url()) .with_stderr( "\ [UPDATING] [..] @@ -1721,6 +1960,7 @@ See [..] [PACKAGING] li v0.0.1 ([CWD]/li) [UPLOADING] li v0.0.1 ([CWD]/li) +[UPDATING] crates.io index ", ) .run(); @@ -1730,7 +1970,8 @@ #[cargo_test] fn with_duplicate_spec_in_members() { - registry::init(); + // Use local registry for faster test times since no publish will occur + let registry = registry::init(); let p = project() .file( @@ -1770,7 +2011,8 @@ .file("bar/src/main.rs", "fn main() {}") .build(); - p.cargo("publish --no-verify --token sekrit") + p.cargo("publish --no-verify") + .replace_crates_io(registry.index_url()) .with_status(101) .with_stderr( "error: the `-p` argument must be specified to select a single package to publish", @@ -1780,7 +2022,8 @@ #[cargo_test] fn in_package_workspace_with_members_with_features_old() { - registry::init(); + // HACK below allows us to use a local registry + let registry = registry::init(); let p = project() .file( @@ -1807,7 +2050,15 @@ .file("li/src/main.rs", "fn main() {}") .build(); - p.cargo("publish -p li --no-verify --token sekrit") + // HACK: Inject `foo` directly into the index so `publish` won't block for it to be in + // the index. + // + // This is to ensure we can verify the Summary we post to the registry as doing so precludes + // the registry from processing the publish. + Package::new("li", "0.0.1").file("src/lib.rs", "").publish(); + + p.cargo("publish -p li --no-verify") + .replace_crates_io(registry.index_url()) .with_stderr( "\ [UPDATING] [..] @@ -1815,6 +2066,7 @@ See [..] 
[PACKAGING] li v0.0.1 ([CWD]/li) [UPLOADING] li v0.0.1 ([CWD]/li) +[UPDATING] crates.io index ", ) .run(); @@ -1824,7 +2076,8 @@ #[cargo_test] fn in_virtual_workspace() { - registry::init(); + // Use local registry for faster test times since no publish will occur + let registry = registry::init(); let p = project() .file( @@ -1837,7 +2090,7 @@ .file( "foo/Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -1848,7 +2101,8 @@ .file("foo/src/main.rs", "fn main() {}") .build(); - p.cargo("publish --no-verify --token sekrit") + p.cargo("publish --no-verify") + .replace_crates_io(registry.index_url()) .with_status(101) .with_stderr( "error: the `-p` argument must be specified in the root of a virtual workspace", @@ -1858,7 +2112,8 @@ #[cargo_test] fn in_virtual_workspace_with_p() { - registry::init(); + // `publish` generally requires a remote registry + let registry = registry::RegistryBuilder::new().http_api().build(); let p = project() .file( @@ -1871,7 +2126,7 @@ .file( "foo/Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -1893,7 +2148,8 @@ .file("li/src/main.rs", "fn main() {}") .build(); - p.cargo("publish -p li --no-verify --token sekrit") + p.cargo("publish -p li --no-verify") + .replace_crates_io(registry.index_url()) .with_stderr( "\ [UPDATING] [..] @@ -1901,6 +2157,7 @@ See [..] [PACKAGING] li v0.0.1 ([CWD]/li) [UPLOADING] li v0.0.1 ([CWD]/li) +[UPDATING] crates.io index ", ) .run(); @@ -1908,7 +2165,8 @@ #[cargo_test] fn in_package_workspace_not_found() { - registry::init(); + // Use local registry for faster test times since no publish will occur + let registry = registry::init(); let p = project() .file( @@ -1937,7 +2195,8 @@ .file("li/src/main.rs", "fn main() {}") .build(); - p.cargo("publish -p li --no-verify --token sekrit ") + p.cargo("publish -p li --no-verify") + .replace_crates_io(registry.index_url()) .with_status(101) .with_stderr( "\ @@ -1951,7 +2210,8 @@ #[cargo_test] fn in_package_workspace_found_multiple() { - registry::init(); + // Use local registry for faster test times since no publish will occur + let registry = registry::init(); let p = project() .file( @@ -1994,7 +2254,8 @@ .file("lii/src/main.rs", "fn main() {}") .build(); - p.cargo("publish -p li* --no-verify --token sekrit ") + p.cargo("publish -p li* --no-verify") + .replace_crates_io(registry.index_url()) .with_status(101) .with_stderr( "\ @@ -2007,7 +2268,8 @@ #[cargo_test] // https://github.com/rust-lang/cargo/issues/10536 fn publish_path_dependency_without_workspace() { - registry::init(); + // Use local registry for faster test times since no publish will occur + let registry = registry::init(); let p = project() .file( @@ -2037,7 +2299,8 @@ .file("bar/src/main.rs", "fn main() {}") .build(); - p.cargo("publish -p bar --no-verify --token sekrit ") + p.cargo("publish -p bar --no-verify") + .replace_crates_io(registry.index_url()) .with_status(101) .with_stderr( "\ @@ -2047,4 +2310,281 @@ ", ) .run(); +} + +#[cargo_test] +fn http_api_not_noop() { + let registry = registry::RegistryBuilder::new().http_api().build(); + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + license = "MIT" + description = "foo" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + p.cargo("publish") + .replace_crates_io(registry.index_url()) + .with_stderr( + "\ +[..] +[..] +[..] +[..] +[VERIFYING] foo v0.0.1 ([CWD]) +[..] +[..] +[UPLOADING] foo v0.0.1 ([CWD]) +[UPDATING] [..] 
+", + ) + .run(); + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "bar" + version = "0.0.1" + authors = [] + license = "MIT" + description = "foo" + + [dependencies] + foo = "0.0.1" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + p.cargo("build").run(); +} + +#[cargo_test] +fn wait_for_publish() { + // Counter for number of tries before the package is "published" + let arc: Arc> = Arc::new(Mutex::new(0)); + let arc2 = arc.clone(); + + // Registry returns an invalid response. + let registry = registry::RegistryBuilder::new() + .http_index() + .http_api() + .add_responder("/index/de/la/delay", move |req, server| { + let mut lock = arc.lock().unwrap(); + *lock += 1; + // if the package name contains _ or - + if *lock <= 1 { + server.not_found(req) + } else { + server.index(req) + } + }) + .build(); + + // The sparse-registry test server does not know how to publish on its own. + // So let us call publish for it. + Package::new("delay", "0.0.1") + .file("src/lib.rs", "") + .publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "delay" + version = "0.0.1" + authors = [] + license = "MIT" + description = "foo" + + "#, + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("publish --no-verify -Z sparse-registry") + .masquerade_as_nightly_cargo(&["sparse-registry"]) + .replace_crates_io(registry.index_url()) + .with_status(0) + .with_stderr( + "\ +[UPDATING] crates.io index +[WARNING] manifest has no documentation, [..] +See [..] +[PACKAGING] delay v0.0.1 ([CWD]) +[UPLOADING] delay v0.0.1 ([CWD]) +[UPDATING] crates.io index +[WAITING] on `delay` to propagate to crates.io index (ctrl-c to wait asynchronously) +", + ) + .run(); + + // Verify the responder has been pinged + let lock = arc2.lock().unwrap(); + assert_eq!(*lock, 2); + drop(lock); + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + [dependencies] + delay = "0.0.1" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + p.cargo("build -Z sparse-registry") + .masquerade_as_nightly_cargo(&["sparse-registry"]) + .with_status(0) + .run(); +} + +/// A separate test is needed for package names with - or _ as they hit +/// the responder twice per cargo invocation. If that ever gets changed +/// this test will need to be changed accordingly. +#[cargo_test] +fn wait_for_publish_underscore() { + // Counter for number of tries before the package is "published" + let arc: Arc> = Arc::new(Mutex::new(0)); + let arc2 = arc.clone(); + + // Registry returns an invalid response. + let registry = registry::RegistryBuilder::new() + .http_index() + .http_api() + .add_responder("/index/de/la/delay_with_underscore", move |req, server| { + let mut lock = arc.lock().unwrap(); + *lock += 1; + // package names with - or _ hit the responder twice per cargo invocation + if *lock <= 2 { + server.not_found(req) + } else { + server.index(req) + } + }) + .build(); + + // The sparse-registry test server does not know how to publish on its own. + // So let us call publish for it. 
+ Package::new("delay_with_underscore", "0.0.1") + .file("src/lib.rs", "") + .publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "delay_with_underscore" + version = "0.0.1" + authors = [] + license = "MIT" + description = "foo" + + "#, + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("publish --no-verify -Z sparse-registry") + .masquerade_as_nightly_cargo(&["sparse-registry"]) + .replace_crates_io(registry.index_url()) + .with_status(0) + .with_stderr( + "\ +[UPDATING] crates.io index +[WARNING] manifest has no documentation, [..] +See [..] +[PACKAGING] delay_with_underscore v0.0.1 ([CWD]) +[UPLOADING] delay_with_underscore v0.0.1 ([CWD]) +[UPDATING] crates.io index +[WAITING] on `delay_with_underscore` to propagate to crates.io index (ctrl-c to wait asynchronously) +", + ) + .run(); + + // Verify the repsponder has been pinged + let lock = arc2.lock().unwrap(); + // NOTE: package names with - or _ hit the responder twice per cargo invocation + assert_eq!(*lock, 3); + drop(lock); + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + [dependencies] + delay_with_underscore = "0.0.1" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + p.cargo("build -Z sparse-registry") + .masquerade_as_nightly_cargo(&["sparse-registry"]) + .with_status(0) + .run(); +} + +#[cargo_test] +fn skip_wait_for_publish() { + // Intentionally using local registry so the crate never makes it to the index + let registry = registry::init(); + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + license = "MIT" + description = "foo" + "#, + ) + .file("src/main.rs", "fn main() {}") + .file( + ".cargo/config", + " + [publish] + timeout = 0 + ", + ) + .build(); + + p.cargo("publish --no-verify -Zpublish-timeout") + .replace_crates_io(registry.index_url()) + .masquerade_as_nightly_cargo(&["publish-timeout"]) + .with_stderr( + "\ +[UPDATING] crates.io index +[WARNING] manifest has no documentation, [..] +See [..] 
+[PACKAGING] foo v0.0.1 ([CWD]) +[UPLOADING] foo v0.0.1 ([CWD]) +", + ) + .run(); } diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/registry.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/registry.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/registry.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/registry.rs 2023-01-10 13:41:19.000000000 +0000 @@ -11,6 +11,7 @@ use cargo_util::paths::remove_dir_all; use std::fs::{self, File}; use std::path::Path; +use std::sync::Mutex; fn cargo_http(p: &Project, s: &str) -> Execs { let mut e = p.cargo(s); @@ -43,7 +44,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -102,7 +103,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -153,7 +154,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -196,7 +197,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -242,7 +243,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -286,7 +287,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -345,7 +346,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -395,7 +396,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -452,7 +453,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -520,7 +521,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -569,7 +570,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -622,7 +623,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -674,7 +675,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -720,7 +721,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -770,7 +771,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -834,7 +835,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -858,7 +859,7 @@ p.change_file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -888,7 +889,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -933,7 +934,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -1040,7 +1041,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -1073,10 +1074,12 @@ fn login_with_no_cargo_dir() { // Create a config in the root directory because `login` requires the // index to be updated, and we don't want to hit crates.io. 
- registry::init(); + let registry = registry::init(); fs::rename(paths::home().join(".cargo"), paths::root().join(".cargo")).unwrap(); paths::home().rm_rf(); - cargo_process("login foo -v").run(); + cargo_process("login foo -v") + .replace_crates_io(registry.index_url()) + .run(); let credentials = fs::read_to_string(paths::home().join(".cargo/credentials")).unwrap(); assert_eq!(credentials, "[registry]\ntoken = \"foo\"\n"); } @@ -1084,23 +1087,32 @@ #[cargo_test] fn login_with_differently_sized_token() { // Verify that the configuration file gets properly truncated. - registry::init(); + let registry = registry::init(); let credentials = paths::home().join(".cargo/credentials"); fs::remove_file(&credentials).unwrap(); - cargo_process("login lmaolmaolmao -v").run(); - cargo_process("login lmao -v").run(); - cargo_process("login lmaolmaolmao -v").run(); + cargo_process("login lmaolmaolmao -v") + .replace_crates_io(registry.index_url()) + .run(); + cargo_process("login lmao -v") + .replace_crates_io(registry.index_url()) + .run(); + cargo_process("login lmaolmaolmao -v") + .replace_crates_io(registry.index_url()) + .run(); let credentials = fs::read_to_string(&credentials).unwrap(); assert_eq!(credentials, "[registry]\ntoken = \"lmaolmaolmao\"\n"); } #[cargo_test] fn login_with_token_on_stdin() { - registry::init(); + let registry = registry::init(); let credentials = paths::home().join(".cargo/credentials"); fs::remove_file(&credentials).unwrap(); - cargo_process("login lmao -v").run(); + cargo_process("login lmao -v") + .replace_crates_io(registry.index_url()) + .run(); cargo_process("login") + .replace_crates_io(registry.index_url()) .with_stdout("please paste the API Token found on [..]/me below") .with_stdin("some token") .run(); @@ -1110,22 +1122,23 @@ #[cargo_test] fn bad_license_file_http() { - let _server = setup_http(); - bad_license_file(cargo_http); + let registry = setup_http(); + bad_license_file(cargo_http, ®istry); } #[cargo_test] fn bad_license_file_git() { - bad_license_file(cargo_stable); + let registry = registry::init(); + bad_license_file(cargo_stable, ®istry); } -fn bad_license_file(cargo: fn(&Project, &str) -> Execs) { +fn bad_license_file(cargo: fn(&Project, &str) -> Execs, registry: &TestRegistry) { Package::new("foo", "1.0.0").publish(); let p = project() .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -1136,7 +1149,8 @@ ) .file("src/main.rs", "fn main() {}") .build(); - cargo(&p, "publish -v --token sekrit") + cargo(&p, "publish -v") + .replace_crates_io(registry.index_url()) .with_status(101) .with_stderr_contains("[ERROR] the license file `foo` does not exist") .run(); @@ -1158,7 +1172,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -1171,7 +1185,7 @@ .file( "a/Cargo.toml", r#" - [project] + [package] name = "a" version = "0.0.1" authors = [] @@ -1209,7 +1223,7 @@ p.change_file( "a/Cargo.toml", r#" - [project] + [package] name = "a" version = "0.0.1" authors = [] @@ -1257,7 +1271,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "b" version = "0.0.1" authors = [] @@ -1273,7 +1287,7 @@ "Cargo.toml", &format!( r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -1331,7 +1345,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" authors = [] @@ -1359,7 +1373,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" authors = [] @@ -1411,7 +1425,7 @@ .file( "Cargo.toml", r#" 
- [project] + [package] name = "foo" version = "0.5.0" authors = [] @@ -1452,7 +1466,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" authors = [] @@ -1510,7 +1524,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" authors = [] @@ -1569,7 +1583,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" authors = [] @@ -1639,7 +1653,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" authors = [] @@ -1691,7 +1705,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "ugh" version = "0.5.0" authors = [] @@ -1728,7 +1742,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "bar" version = "0.5.0" authors = [] @@ -1768,7 +1782,7 @@ .file( "a/Cargo.toml", r#" - [project] + [package] name = "a" version = "0.1.1-alpha.0" authors = [] @@ -1777,7 +1791,7 @@ .file( "b/Cargo.toml", r#" - [project] + [package] name = "b" version = "0.1.0" authors = [] @@ -1821,7 +1835,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "bar" version = "0.5.0" authors = [] @@ -1871,7 +1885,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "bar" version = "0.5.0" authors = [] @@ -1907,7 +1921,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "bar" version = "0.5.0" authors = [] @@ -1935,7 +1949,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "bar" version = "0.5.0" authors = [] @@ -1970,7 +1984,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "bar" version = "0.5.0" authors = [] @@ -2017,7 +2031,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "bar" version = "0.5.0" authors = [] @@ -2030,7 +2044,7 @@ .file( "baz/Cargo.toml", r#" - [project] + [package] name = "baz" version = "0.5.0" authors = [] @@ -2049,7 +2063,7 @@ p.change_file( "Cargo.toml", r#" - [project] + [package] name = "bar" version = "0.5.0" authors = [] @@ -2086,7 +2100,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "bar" version = "0.5.0" authors = [] @@ -2099,7 +2113,7 @@ .file( "baz/Cargo.toml", r#" - [project] + [package] name = "baz" version = "0.5.0" authors = [] @@ -2118,7 +2132,7 @@ p.change_file( "Cargo.toml", r#" - [project] + [package] name = "bar" version = "0.6.0" authors = [] @@ -2156,7 +2170,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "bar" version = "0.3.0" authors = [] @@ -2172,7 +2186,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "bar" version = "0.5.0" authors = [] @@ -2210,7 +2224,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "fo" version = "0.5.0" authors = [] @@ -2245,7 +2259,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "fo" version = "0.5.0" authors = [] @@ -2295,7 +2309,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "fo" version = "0.5.0" authors = [] @@ -2369,7 +2383,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "a" version = "0.5.0" authors = [] @@ -2412,7 +2426,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "a" version = "0.5.0" authors = [] @@ -2446,7 +2460,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "a" version = "0.5.0" authors = [] @@ -2552,7 +2566,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -2584,6 +2598,47 @@ } #[cargo_test] +fn package_lock_as_a_symlink_inside_package_is_overwritten() { + let registry = registry::init(); + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + 
authors = [] + + [dependencies] + bar = ">= 0.0.0" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("bar", "0.0.1") + .file("src/lib.rs", "pub fn f() {}") + .symlink(".cargo-ok", "src/lib.rs") + .publish(); + + p.cargo("build").run(); + + let id = SourceId::for_registry(registry.index_url()).unwrap(); + let hash = cargo::util::hex::short_hash(&id); + let pkg_root = cargo_home() + .join("registry") + .join("src") + .join(format!("-{}", hash)) + .join("bar-0.0.1"); + let ok = pkg_root.join(".cargo-ok"); + let librs = pkg_root.join("src/lib.rs"); + + // Is correctly overwritten and doesn't affect the file linked to + assert_eq!(ok.metadata().unwrap().len(), 2); + assert_eq!(fs::read_to_string(librs).unwrap(), "pub fn f() {}"); +} + +#[cargo_test] fn ignores_unknown_index_version_http() { let _server = setup_http(); ignores_unknown_index_version(cargo_http); @@ -2630,7 +2685,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -2649,10 +2704,240 @@ } #[cargo_test] +fn protocol_sparse_requires_z_flag() { + cargo_process("install bar") + .with_status(101) + .env("CARGO_REGISTRIES_CRATES_IO_PROTOCOL", "sparse") + .with_stderr("[ERROR] usage of sparse registries requires `-Z sparse-registry`") + .run() +} + +#[cargo_test] +fn protocol() { + cargo_process("install bar") + .with_status(101) + .env("CARGO_REGISTRIES_CRATES_IO_PROTOCOL", "invalid") + .with_stderr("[ERROR] unsupported registry protocol `invalid` (defined in environment variable `CARGO_REGISTRIES_CRATES_IO_PROTOCOL`)") + .run() +} + +#[cargo_test] fn http_requires_trailing_slash() { - cargo_process("-Z sparse-registry install bar --index sparse+https://index.crates.io") + cargo_process("-Z sparse-registry install bar --index sparse+https://invalid.crates.io/test") .masquerade_as_nightly_cargo(&["sparse-registry"]) .with_status(101) - .with_stderr("[ERROR] registry url must end in a slash `/`: sparse+https://index.crates.io") + .with_stderr("[ERROR] sparse registry url must end in a slash `/`: sparse+https://invalid.crates.io/test") .run() } + +// Limit the test to debug builds so that `__CARGO_TEST_MAX_UNPACK_SIZE` will take affect. +#[cfg(debug_assertions)] +#[cargo_test] +fn reach_max_unpack_size() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + + [dependencies] + bar = ">= 0.0.0" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("bar", "0.0.1").publish(); + + p.cargo("build") + .env("__CARGO_TEST_MAX_UNPACK_SIZE", "8") // hit 8 bytes limit and boom! + .with_status(101) + .with_stderr( + "\ +[UPDATING] `dummy-registry` index +[DOWNLOADING] crates ... 
+[DOWNLOADED] bar v0.0.1 (registry `dummy-registry`) +[ERROR] failed to download replaced source registry `crates-io` + +Caused by: + failed to unpack package `bar v0.0.1 (registry `dummy-registry`)` + +Caused by: + failed to iterate over archive + +Caused by: + maximum limit reached when reading +", + ) + .run(); +} + +#[cargo_test] +fn sparse_retry() { + let fail_count = Mutex::new(0); + let _registry = RegistryBuilder::new() + .http_index() + .add_responder("/index/3/b/bar", move |req, server| { + let mut fail_count = fail_count.lock().unwrap(); + if *fail_count < 2 { + *fail_count += 1; + server.internal_server_error(req) + } else { + server.index(req) + } + }) + .build(); + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = ">= 0.0.0" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("bar", "0.0.1").publish(); + + cargo_http(&p, "build") + .with_stderr( + "\ +[UPDATING] `dummy-registry` index +warning: spurious network error (2 tries remaining): failed to get successful HTTP response from `[..]`, got 500 +body: +internal server error +warning: spurious network error (1 tries remaining): failed to get successful HTTP response from `[..]`, got 500 +body: +internal server error +[DOWNLOADING] crates ... +[DOWNLOADED] bar v0.0.1 (registry `dummy-registry`) +[COMPILING] bar v0.0.1 +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s +", + ) + .run(); +} + +#[cargo_test] +fn deleted_entry() { + // Checks the behavior when a package is removed from the index. + // This is done occasionally on crates.io to handle things like + // copyright takedowns. + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + + [dependencies] + bar = "0.1" + "#, + ) + .file("src/lib.rs", "") + .build(); + + // First, test removing a single version, but leaving an older version. + Package::new("bar", "0.1.0").publish(); + let bar_path = Path::new("3/b/bar"); + let bar_reg_path = registry_path().join(&bar_path); + let old_index = fs::read_to_string(&bar_reg_path).unwrap(); + Package::new("bar", "0.1.1").publish(); + p.cargo("tree") + .with_stderr( + "\ +[UPDATING] `dummy-registry` index +[DOWNLOADING] crates ... +[DOWNLOADED] bar v0.1.1 (registry `dummy-registry`) +", + ) + .with_stdout( + "\ +foo v0.1.0 ([ROOT]/foo) +└── bar v0.1.1 +", + ) + .run(); + + // Remove 0.1.1 + fs::remove_file(paths::root().join("dl/bar/0.1.1/download")).unwrap(); + let repo = git2::Repository::open(registry_path()).unwrap(); + let mut index = repo.index().unwrap(); + fs::write(&bar_reg_path, &old_index).unwrap(); + index.add_path(&bar_path).unwrap(); + index.write().unwrap(); + git::commit(&repo); + + // With `Cargo.lock` unchanged, it shouldn't have an impact. + p.cargo("tree") + .with_stderr("") + .with_stdout( + "\ +foo v0.1.0 ([ROOT]/foo) +└── bar v0.1.1 +", + ) + .run(); + + // Regenerating Cargo.lock should switch to old version. + fs::remove_file(p.root().join("Cargo.lock")).unwrap(); + p.cargo("tree") + .with_stderr( + "\ +[UPDATING] `dummy-registry` index +[DOWNLOADING] crates ... +[DOWNLOADED] bar v0.1.0 (registry `dummy-registry`) +", + ) + .with_stdout( + "\ +foo v0.1.0 ([ROOT]/foo) +└── bar v0.1.0 +", + ) + .run(); + + // Remove the package entirely. 
+ fs::remove_file(paths::root().join("dl/bar/0.1.0/download")).unwrap(); + let mut index = repo.index().unwrap(); + index.remove(&bar_path, 0).unwrap(); + index.write().unwrap(); + git::commit(&repo); + fs::remove_file(&bar_reg_path).unwrap(); + + // With `Cargo.lock` unchanged, it shouldn't have an impact. + p.cargo("tree") + .with_stderr("") + .with_stdout( + "\ +foo v0.1.0 ([ROOT]/foo) +└── bar v0.1.0 +", + ) + .run(); + + // Regenerating Cargo.lock should fail. + fs::remove_file(p.root().join("Cargo.lock")).unwrap(); + p.cargo("tree") + .with_stderr( + "\ +[UPDATING] `dummy-registry` index +error: no matching package named `bar` found +location searched: registry `crates-io` +required by package `foo v0.1.0 ([ROOT]/foo)` +", + ) + .with_status(101) + .run(); +} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/rename_deps.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/rename_deps.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/rename_deps.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/rename_deps.rs 2023-01-10 13:41:19.000000000 +0000 @@ -14,7 +14,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -36,7 +36,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -49,7 +49,7 @@ .file( "bar/Cargo.toml", r#" - [project] + [package] name = "bar" version = "0.0.1" authors = [] @@ -244,7 +244,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/required_features.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/required_features.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/required_features.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/required_features.rs 2023-01-10 13:41:19.000000000 +0000 @@ -13,7 +13,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -68,7 +68,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -94,7 +94,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -139,7 +139,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -176,7 +176,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -202,7 +202,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -272,7 +272,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -330,7 +330,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -363,7 +363,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -420,7 +420,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -488,7 +488,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -531,7 +531,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -608,7 +608,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -692,7 +692,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -719,7 +719,7 @@ .file( "Cargo.toml", r#" - [project] 
+ [package] name = "foo" version = "0.0.1" authors = [] @@ -837,7 +837,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -879,7 +879,7 @@ .file( "bar/Cargo.toml", r#" - [project] + [package] name = "bar" version = "0.0.1" authors = [] @@ -938,7 +938,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -990,7 +990,7 @@ .file( "bar/Cargo.toml", r#" - [project] + [package] name = "bar" version = "0.0.1" authors = [] @@ -1096,7 +1096,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -1162,7 +1162,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -1199,7 +1199,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/run.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/run.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/run.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/run.rs 2023-01-10 13:41:19.000000000 +0000 @@ -321,7 +321,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -344,7 +344,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -453,7 +453,7 @@ "Cargo.toml", &format!( r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -562,7 +562,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" autobins = false @@ -699,7 +699,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -804,7 +804,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -1078,7 +1078,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -1148,7 +1148,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -1207,7 +1207,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -1314,9 +1314,16 @@ cargo().with_stdout("foo").run(); - cargo().arg("-p").arg("d1").arg("-p").arg("d2") - .with_status(1) - .with_stderr_contains("error: The argument '--package [...]' was provided more than once, but cannot be used multiple times").run(); + cargo() + .arg("-p") + .arg("d1") + .arg("-p") + .arg("d2") + .with_status(1) + .with_stderr_contains( + "error: The argument '--package []' was provided more than once, but cannot be used multiple times", + ) + .run(); cargo() .arg("-p") @@ -1392,7 +1399,7 @@ .file( "a/Cargo.toml", r#" - [project] + [package] name = "a" version = "0.0.1" default-run = "a" @@ -1418,7 +1425,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" [lib] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/rustc.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/rustc.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/rustc.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/rustc.rs 2023-01-10 13:41:19.000000000 +0000 @@ -551,8 +551,7 @@ .with_status(1) .with_stderr_contains( "\ -error: The argument '--package [...]' was provided more than once, \ - but cannot be used multiple times +error: The argument '--package []' was provided more than once, but cannot be used multiple times ", ) .run(); diff -Nru 
cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/rust_version.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/rust_version.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/rust_version.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/rust_version.rs 2023-01-10 13:41:19.000000000 +0000 @@ -8,7 +8,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -30,7 +30,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -56,7 +56,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -82,7 +82,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -108,7 +108,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -174,7 +174,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/search.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/search.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/search.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/search.rs 2023-01-10 13:41:19.000000000 +0000 @@ -78,7 +78,7 @@ fn setup() -> RegistryBuilder { RegistryBuilder::new() .http_api() - .add_responder("/api/v1/crates", |_| Response { + .add_responder("/api/v1/crates", |_, _| Response { code: 200, headers: vec![], body: SEARCH_API_RESPONSE.to_vec(), @@ -106,6 +106,7 @@ drop(lock); cargo_process("search postgres") + .replace_crates_io(registry.index_url()) .with_stdout_contains(SEARCH_RESULTS) .with_stderr("") // without "Updating ... index" .run(); @@ -113,9 +114,10 @@ #[cargo_test] fn replace_default() { - let _server = setup().build(); + let registry = setup().build(); cargo_process("search postgres") + .replace_crates_io(registry.index_url()) .with_stdout_contains(SEARCH_RESULTS) .with_stderr_contains("[..]Updating [..] 
index") .run(); @@ -143,22 +145,25 @@ #[cargo_test] fn ignore_quiet() { - let _server = setup().build(); + let registry = setup().build(); cargo_process("search -q postgres") + .replace_crates_io(registry.index_url()) .with_stdout_contains(SEARCH_RESULTS) .run(); } #[cargo_test] fn colored_results() { - let _server = setup().build(); + let registry = setup().build(); cargo_process("search --color=never postgres") + .replace_crates_io(registry.index_url()) .with_stdout_does_not_contain("[..]\x1b[[..]") .run(); cargo_process("search --color=always postgres") + .replace_crates_io(registry.index_url()) .with_stdout_contains("[..]\x1b[[..]") .run(); } diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/shell_quoting.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/shell_quoting.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/shell_quoting.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/shell_quoting.rs 2023-01-10 13:41:19.000000000 +0000 @@ -10,7 +10,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.1.0" authors = ["mikeyhew@example.com"] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/source_replacement.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/source_replacement.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/source_replacement.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/source_replacement.rs 2023-01-10 13:41:19.000000000 +0000 @@ -0,0 +1,246 @@ +//! Tests for `[source]` table (source replacement). + +use std::fs; + +use cargo_test_support::registry::{Package, RegistryBuilder, TestRegistry}; +use cargo_test_support::{cargo_process, paths, project, t}; + +fn setup_replacement(config: &str) -> TestRegistry { + let crates_io = RegistryBuilder::new() + .no_configure_registry() + .http_api() + .build(); + + let root = paths::root(); + t!(fs::create_dir(&root.join(".cargo"))); + t!(fs::write(root.join(".cargo/config"), config,)); + crates_io +} + +#[cargo_test] +fn crates_io_token_not_sent_to_replacement() { + // verifies that the crates.io token is not sent to a replacement registry during publish. + let crates_io = setup_replacement( + r#" + [source.crates-io] + replace-with = 'alternative' + "#, + ); + let _alternative = RegistryBuilder::new() + .alternative() + .http_api() + .no_configure_token() + .build(); + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + license = "MIT" + description = "foo" + "#, + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("publish --no-verify --registry crates-io") + .replace_crates_io(crates_io.index_url()) + .with_stderr_contains("[UPDATING] crates.io index") + .run(); +} + +#[cargo_test] +fn token_sent_to_correct_registry() { + // verifies that the crates.io token is not sent to a replacement registry during yank. 
+ let crates_io = setup_replacement( + r#" + [source.crates-io] + replace-with = 'alternative' + "#, + ); + let _alternative = RegistryBuilder::new().alternative().http_api().build(); + + cargo_process("yank foo@0.0.1 --registry crates-io") + .replace_crates_io(crates_io.index_url()) + .with_stderr( + "\ +[UPDATING] crates.io index +[YANK] foo@0.0.1 +", + ) + .run(); + + cargo_process("yank foo@0.0.1 --registry alternative") + .replace_crates_io(crates_io.index_url()) + .with_stderr( + "\ +[UPDATING] `alternative` index +[YANK] foo@0.0.1 +", + ) + .run(); +} + +#[cargo_test] +fn ambiguous_registry() { + // verifies that an error is issued when a source-replacement is configured + // and no --registry argument is given. + let crates_io = setup_replacement( + r#" + [source.crates-io] + replace-with = 'alternative' + "#, + ); + let _alternative = RegistryBuilder::new() + .alternative() + .http_api() + .no_configure_token() + .build(); + + cargo_process("yank foo@0.0.1") + .replace_crates_io(crates_io.index_url()) + .with_status(101) + .with_stderr( + "\ +error: crates-io is replaced with remote registry alternative; +include `--registry alternative` or `--registry crates-io` +", + ) + .run(); +} + +#[cargo_test] +fn yank_with_default_crates_io() { + // verifies that no error is given when registry.default is used. + let crates_io = setup_replacement( + r#" + [source.crates-io] + replace-with = 'alternative' + + [registry] + default = 'crates-io' + "#, + ); + let _alternative = RegistryBuilder::new().alternative().http_api().build(); + + cargo_process("yank foo@0.0.1") + .replace_crates_io(crates_io.index_url()) + .with_stderr( + "\ +[UPDATING] crates.io index +[YANK] foo@0.0.1 +", + ) + .run(); +} + +#[cargo_test] +fn yank_with_default_alternative() { + // verifies that no error is given when registry.default is an alt registry. + let crates_io = setup_replacement( + r#" + [source.crates-io] + replace-with = 'alternative' + + [registry] + default = 'alternative' + "#, + ); + let _alternative = RegistryBuilder::new().alternative().http_api().build(); + + cargo_process("yank foo@0.0.1") + .replace_crates_io(crates_io.index_url()) + .with_stderr( + "\ +[UPDATING] `alternative` index +[YANK] foo@0.0.1 +", + ) + .run(); +} + +#[cargo_test] +fn publish_with_replacement() { + // verifies that the crates.io token is not sent to a replacement registry during publish. + let crates_io = setup_replacement( + r#" + [source.crates-io] + replace-with = 'alternative' + "#, + ); + let _alternative = RegistryBuilder::new() + .alternative() + .http_api() + .no_configure_token() + .build(); + + // Publish bar only to alternative. This tests that the publish verification build + // does uses the source replacement. + Package::new("bar", "1.0.0").alternative(true).publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + license = "MIT" + description = "foo" + + [dependencies] + bar = "1.0" + "#, + ) + .file("src/lib.rs", "") + .build(); + + // Verifies that the crates.io index is used to find the publishing endpoint + // and that the crate is sent to crates.io. The source replacement is only used + // for the verification step. + p.cargo("publish --registry crates-io") + .replace_crates_io(crates_io.index_url()) + .with_stderr( + "\ +[UPDATING] crates.io index +[WARNING] manifest has no documentation, homepage or repository. +See https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata for more info. 
+[PACKAGING] foo v0.0.1 ([..]) +[VERIFYING] foo v0.0.1 ([..]) +[UPDATING] `alternative` index +[DOWNLOADING] crates ... +[DOWNLOADED] bar v1.0.0 (registry `alternative`) +[COMPILING] bar v1.0.0 +[COMPILING] foo v0.0.1 ([..]foo-0.0.1) +[FINISHED] dev [..] +[UPLOADING] foo v0.0.1 ([..]) +[UPDATING] crates.io index +", + ) + .run(); +} + +#[cargo_test] +fn undefined_default() { + // verifies that no error is given when registry.default is used. + let crates_io = setup_replacement( + r#" + [registry] + default = 'undefined' + "#, + ); + + cargo_process("yank foo@0.0.1") + .replace_crates_io(crates_io.index_url()) + .with_status(101) + .with_stderr( + "[ERROR] no index found for registry: `undefined` +", + ) + .run(); +} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/test.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/test.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/test.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/test.rs 2023-01-10 13:41:19.000000000 +0000 @@ -493,7 +493,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -605,7 +605,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -656,7 +656,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -725,13 +725,101 @@ } #[cargo_test] -fn pass_through_command_line() { +fn pass_through_escaped() { + let p = project() + .file( + "src/lib.rs", + " + /// ```rust + /// assert!(foo::foo()); + /// ``` + pub fn foo() -> bool { + true + } + + /// ```rust + /// assert!(!foo::bar()); + /// ``` + pub fn bar() -> bool { + false + } + + #[test] fn test_foo() { + assert!(foo()); + } + #[test] fn test_bar() { + assert!(!bar()); + } + ", + ) + .build(); + + p.cargo("test -- bar") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] test [unoptimized + debuginfo] target(s) in [..] +[RUNNING] [..] (target/debug/deps/foo-[..][EXE]) +[DOCTEST] foo +", + ) + .with_stdout_contains("running 1 test") + .with_stdout_contains("test test_bar ... ok") + .run(); + + p.cargo("test -- foo") + .with_stderr( + "\ +[FINISHED] test [unoptimized + debuginfo] target(s) in [..] +[RUNNING] [..] (target/debug/deps/foo-[..][EXE]) +[DOCTEST] foo +", + ) + .with_stdout_contains("running 1 test") + .with_stdout_contains("test test_foo ... ok") + .run(); + + p.cargo("test -- foo bar") + .with_stderr( + "\ +[FINISHED] test [unoptimized + debuginfo] target(s) in [..] +[RUNNING] [..] (target/debug/deps/foo-[..][EXE]) +[DOCTEST] foo +", + ) + .with_stdout_contains("running 2 tests") + .with_stdout_contains("test test_foo ... ok") + .with_stdout_contains("test test_bar ... ok") + .run(); +} + +// Unlike `pass_through_escaped`, doctests won't run when using `testname` as an optimization +#[cargo_test] +fn pass_through_testname() { let p = project() .file( "src/lib.rs", " - #[test] fn foo() {} - #[test] fn bar() {} + /// ```rust + /// assert!(foo::foo()); + /// ``` + pub fn foo() -> bool { + true + } + + /// ```rust + /// assert!(!foo::bar()); + /// ``` + pub fn bar() -> bool { + false + } + + #[test] fn test_foo() { + assert!(foo()); + } + #[test] fn test_bar() { + assert!(!bar()); + } ", ) .build(); @@ -745,7 +833,7 @@ ", ) .with_stdout_contains("running 1 test") - .with_stdout_contains("test bar ... ok") + .with_stdout_contains("test test_bar ... 
ok") .run(); p.cargo("test foo") @@ -756,7 +844,19 @@ ", ) .with_stdout_contains("running 1 test") - .with_stdout_contains("test foo ... ok") + .with_stdout_contains("test test_foo ... ok") + .run(); + + p.cargo("test foo -- bar") + .with_stderr( + "\ +[FINISHED] test [unoptimized + debuginfo] target(s) in [..] +[RUNNING] [..] (target/debug/deps/foo-[..][EXE]) +", + ) + .with_stdout_contains("running 2 tests") + .with_stdout_contains("test test_foo ... ok") + .with_stdout_contains("test test_bar ... ok") .run(); } @@ -788,7 +888,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -1917,7 +2017,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -2921,7 +3021,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.1.0" @@ -2948,7 +3048,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.1.0" @@ -2977,7 +3077,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.1.0" @@ -3005,7 +3105,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.1.0" @@ -3034,7 +3134,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.1.0" @@ -3209,7 +3309,7 @@ .file( "a/Cargo.toml", r#" - [project] + [package] name = "a" version = "0.1.0" @@ -3233,7 +3333,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "a" version = "0.1.0" @@ -3319,7 +3419,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "a" version = "0.1.0" @@ -3344,7 +3444,7 @@ .file( "c/Cargo.toml", r#" - [project] + [package] name = "c" version = "0.1.0" @@ -3425,7 +3525,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.1.0" @@ -3446,7 +3546,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "testless" version = "0.1.0" exclude = ["tests/*"] @@ -3467,7 +3567,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.1.0" @@ -3495,7 +3595,7 @@ .file( "root/Cargo.toml", r#" - [project] + [package] name = "root" version = "0.1.0" authors = [] @@ -3517,7 +3617,7 @@ .file( "proc_macro_dep/Cargo.toml", r#" - [project] + [package] name = "proc_macro_dep" version = "0.1.0" authors = [] @@ -4415,7 +4515,7 @@ #[cargo_test] fn bin_env_for_test() { - // Test for the `CARGO_BIN_` environment variables for tests. + // Test for the `CARGO_BIN_EXE_` environment variables for tests. // // Note: The Unicode binary uses a `[[bin]]` definition because different // filesystems normalize utf-8 in different ways. For example, HFS uses diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/vendor.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/vendor.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/vendor.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/vendor.rs 2023-01-10 13:41:19.000000000 +0000 @@ -58,8 +58,7 @@ p.cargo("vendor --respect-source-config") .with_stdout( - r#" -[source.crates-io] + r#"[source.crates-io] replace-with = "vendored-sources" [source.vendored-sources] @@ -103,8 +102,7 @@ // path is normalized by `ops::vendor` on Windows. 
assert_eq!( &String::from_utf8(output.stdout).unwrap(), - r#" -[source.crates-io] + r#"[source.crates-io] replace-with = "vendored-sources" [source.vendored-sources] @@ -405,10 +403,9 @@ .with_stderr("\ error: Found argument 'test_vendor' which wasn't expected, or isn't valid in this context -USAGE: - cargo[EXE] vendor [OPTIONS] [path] +Usage: cargo[EXE] vendor [OPTIONS] [path] -For more information try --help", +For more information try '--help'", ) .with_status(1) .run(); diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/weak_dep_features.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/weak_dep_features.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/weak_dep_features.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/weak_dep_features.rs 2023-01-10 13:41:19.000000000 +0000 @@ -2,7 +2,7 @@ use super::features2::switch_to_resolver_2; use cargo_test_support::paths::CargoPathExt; -use cargo_test_support::registry::{Dependency, Package}; +use cargo_test_support::registry::{self, Dependency, Package}; use cargo_test_support::{project, publish}; use std::fmt::Write; @@ -523,6 +523,9 @@ #[cargo_test] fn publish() { + // HACK below allows us to use a local registry + let registry = registry::init(); + // Publish behavior with /? syntax. Package::new("bar", "1.0.0").feature("feat", &[]).publish(); let p = project() @@ -547,7 +550,17 @@ .file("src/lib.rs", "") .build(); - p.cargo("publish --token sekrit") + // HACK: Inject `foo` directly into the index so `publish` won't block for it to be in + // the index. + // + // This is to ensure we can verify the Summary we post to the registry as doing so precludes + // the registry from processing the publish. + Package::new("foo", "0.1.0") + .file("src/lib.rs", "") + .publish(); + + p.cargo("publish") + .replace_crates_io(registry.index_url()) .with_stderr( "\ [UPDATING] [..] @@ -556,6 +569,7 @@ [COMPILING] foo v0.1.0 [..] [FINISHED] [..] [UPLOADING] foo v0.1.0 [..] +[UPDATING] [..] 
", ) .run(); @@ -573,7 +587,6 @@ "kind": "normal", "name": "bar", "optional": true, - "registry": "https://github.com/rust-lang/crates.io-index", "target": null, "version_req": "^1.0" } diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/workspaces.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/workspaces.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/workspaces.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/workspaces.rs 2023-01-10 13:41:19.000000000 +0000 @@ -11,7 +11,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.1.0" authors = [] @@ -24,7 +24,7 @@ .file( "bar/Cargo.toml", r#" - [project] + [package] name = "bar" version = "0.1.0" authors = [] @@ -52,7 +52,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.1.0" authors = [] @@ -66,7 +66,7 @@ .file( "bar/Cargo.toml", r#" - [project] + [package] name = "bar" version = "0.1.0" authors = [] @@ -87,7 +87,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.1.0" authors = [] @@ -125,7 +125,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.1.0" authors = [] @@ -154,7 +154,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.1.0" authors = [] @@ -186,7 +186,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.1.0" authors = [] @@ -221,7 +221,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.1.0" authors = [] @@ -236,7 +236,7 @@ .file( "bar/Cargo.toml", r#" - [project] + [package] name = "bar" version = "0.1.0" authors = [] @@ -278,7 +278,7 @@ .file( "foo/Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.1.0" authors = [] @@ -293,7 +293,7 @@ .file( "bar/Cargo.toml", r#" - [project] + [package] name = "bar" version = "0.1.0" authors = [] @@ -316,7 +316,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.1.0" authors = [] @@ -329,7 +329,7 @@ .file( "bar/Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.1.0" authors = [] @@ -357,7 +357,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.1.0" authors = [] @@ -392,7 +392,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.1.0" authors = [] @@ -421,7 +421,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.1.0" authors = [] @@ -455,7 +455,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.1.0" authors = [] @@ -475,7 +475,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.1.0" authors = [] @@ -488,7 +488,7 @@ .file( "bar/Cargo.toml", r#" - [project] + [package] name = "bar" version = "0.1.0" authors = [] @@ -518,7 +518,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.1.0" authors = [] @@ -542,7 +542,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.1.0" authors = [] @@ -555,7 +555,7 @@ .file( "bar/Cargo.toml", r#" - [project] + [package] name = "bar" version = "0.1.0" authors = [] @@ -566,7 +566,7 @@ .file( "baz/Cargo.toml", r#" - [project] + [package] name = "baz" version = "0.1.0" authors = [] @@ -594,7 +594,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.1.0" authors = [] @@ -605,7 +605,7 @@ .file( "bar/Cargo.toml", r#" - [project] + [package] name = "bar" version = "0.1.0" authors = [] @@ -629,7 +629,7 @@ .file( "Cargo.toml", r#" - [project] + 
[package] name = "foo" version = "0.1.0" authors = [] @@ -645,7 +645,7 @@ .file( "bar/Cargo.toml", r#" - [project] + [package] name = "bar" version = "0.1.0" authors = [] @@ -680,7 +680,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.1.0" authors = [] @@ -693,7 +693,7 @@ .file( "bar/Cargo.toml", r#" - [project] + [package] name = "bar" version = "0.1.0" authors = [] @@ -724,7 +724,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.1.0" authors = [] @@ -740,7 +740,7 @@ .file( "bar/Cargo.toml", r#" - [project] + [package] name = "bar" version = "0.1.0" authors = [] @@ -968,7 +968,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "bar" version = "0.1.0" authors = [] @@ -1002,7 +1002,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.1.0" authors = [] @@ -1018,7 +1018,7 @@ .file( "p1/Cargo.toml", r#" - [project] + [package] name = "p1" version = "0.1.0" authors = [] @@ -1051,7 +1051,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.1.0" authors = [] @@ -1112,7 +1112,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.1.0" authors = [] @@ -1128,7 +1128,7 @@ .file( "baz/Cargo.toml", r#" - [project] + [package] name = "baz" version = "0.1.0" authors = [] @@ -1304,7 +1304,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.5.0" authors = ["mbrubeck@example.com"] @@ -1319,7 +1319,7 @@ .file( "bar/Cargo.toml", r#" - [project] + [package] name = "bar" version = "0.5.0" authors = ["mbrubeck@example.com"] @@ -1370,7 +1370,7 @@ .file( "foo/Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.1.0" authors = [] @@ -1383,7 +1383,7 @@ .file( "bar/Cargo.toml", r#" - [project] + [package] name = "bar" version = "0.1.0" authors = [] @@ -1403,7 +1403,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.1.0" authors = [] @@ -1433,7 +1433,7 @@ .file( "ws/Cargo.toml", r#" - [project] + [package] name = "ws" version = "0.1.0" authors = [] @@ -1459,7 +1459,7 @@ .file( "ws/Cargo.toml", r#" - [project] + [package] name = "ws" version = "0.1.0" authors = [] @@ -1475,7 +1475,7 @@ .file( "foo/Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.1.0" authors = [] @@ -1491,7 +1491,7 @@ .file( "bar/Cargo.toml", r#" - [project] + [package] workspace = "../ws" name = "bar" version = "0.1.0" @@ -1523,7 +1523,7 @@ .file( "ws/Cargo.toml", r#" - [project] + [package] name = "ws" version = "0.1.0" authors = [] @@ -1538,7 +1538,7 @@ .file( "foo/Cargo.toml", r#" - [project] + [package] workspace = "../ws" name = "foo" version = "0.1.0" @@ -1573,7 +1573,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "ws" version = "0.1.0" authors = [] @@ -1599,7 +1599,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "ws" version = "0.1.0" authors = [] @@ -1630,7 +1630,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "ws" version = "0.1.0" authors = [] @@ -1753,7 +1753,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.1.0" authors = [] @@ -1767,7 +1767,7 @@ .file( "crates/bar/Cargo.toml", r#" - [project] + [package] name = "bar" version = "0.1.0" authors = [] @@ -1778,7 +1778,7 @@ .file( "crates/baz/Cargo.toml", r#" - [project] + [package] name = "baz" version = "0.1.0" authors = [] @@ -1789,7 +1789,7 @@ .file( "crates/qux/Cargo.toml", r#" - [project] + [package] name = "qux" version = "0.1.0" authors = [] @@ -1825,7 +1825,7 @@ fn glob_syntax_2() 
{ let p = project() .file("Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.1.0" authors = [] @@ -1836,7 +1836,7 @@ "#) .file("src/main.rs", "fn main() {}") .file("crates/bar/Cargo.toml", r#" - [project] + [package] name = "bar" version = "0.1.0" authors = [] @@ -1844,7 +1844,7 @@ "#) .file("crates/bar/src/main.rs", "fn main() {}") .file("crates/baz/Cargo.toml", r#" - [project] + [package] name = "baz" version = "0.1.0" authors = [] @@ -1852,7 +1852,7 @@ "#) .file("crates/baz/src/main.rs", "fn main() {}") .file("crates/qux/Cargo.toml", r#" - [project] + [package] name = "qux" version = "0.1.0" authors = [] @@ -1889,7 +1889,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.1.0" authors = [] @@ -1939,7 +1939,7 @@ .file( "feat_lib/Cargo.toml", r#" - [project] + [package] name = "feat_lib" version = "0.1.0" authors = [] @@ -1952,7 +1952,7 @@ .file( "caller1/Cargo.toml", r#" - [project] + [package] name = "caller1" version = "0.1.0" authors = [] @@ -1966,7 +1966,7 @@ .file( "caller2/Cargo.toml", r#" - [project] + [package] name = "caller2" version = "0.1.0" authors = [] @@ -2365,7 +2365,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.1.0" @@ -2377,7 +2377,7 @@ .file( "bar/Cargo.toml", r#" - [project] + [package] name = "bar" version = "0.1.0" @@ -2420,7 +2420,7 @@ .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.1.0" authors = [] @@ -2433,7 +2433,7 @@ .file( "bar/Cargo.toml", r#" - [project] + [package] name = "bar" version = "0.1.0" authors = [] @@ -2492,7 +2492,7 @@ r#" [workspace] - [project] + [package] name = "bar" version = "0.1.0" authors = [] @@ -2509,7 +2509,7 @@ .file( "sub/foo/Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.1.0" authors = [] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/yank.rs cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/yank.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/tests/testsuite/yank.rs 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/tests/testsuite/yank.rs 2023-01-10 13:41:19.000000000 +0000 @@ -14,14 +14,14 @@ #[cargo_test] fn explicit_version() { - registry::init(); + let registry = registry::init(); setup("foo", "0.0.1"); let p = project() .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -32,12 +32,15 @@ .file("src/main.rs", "fn main() {}") .build(); - p.cargo("yank --version 0.0.1 --token sekrit").run(); + p.cargo("yank --version 0.0.1") + .replace_crates_io(registry.index_url()) + .run(); - p.cargo("yank --undo --version 0.0.1 --token sekrit") + p.cargo("yank --undo --version 0.0.1") + .replace_crates_io(registry.index_url()) .with_status(101) .with_stderr( - " Updating `[..]` index + " Updating crates.io index Unyank foo@0.0.1 error: failed to undo a yank from the registry at file:///[..] 
@@ -49,14 +52,14 @@ #[cargo_test] fn inline_version() { - registry::init(); + let registry = registry::init(); setup("foo", "0.0.1"); let p = project() .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -67,12 +70,15 @@ .file("src/main.rs", "fn main() {}") .build(); - p.cargo("yank foo@0.0.1 --token sekrit").run(); + p.cargo("yank foo@0.0.1") + .replace_crates_io(registry.index_url()) + .run(); - p.cargo("yank --undo foo@0.0.1 --token sekrit") + p.cargo("yank --undo foo@0.0.1") + .replace_crates_io(registry.index_url()) .with_status(101) .with_stderr( - " Updating `[..]` index + " Updating crates.io index Unyank foo@0.0.1 error: failed to undo a yank from the registry at file:///[..] @@ -84,14 +90,13 @@ #[cargo_test] fn version_required() { - registry::init(); setup("foo", "0.0.1"); let p = project() .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -102,7 +107,7 @@ .file("src/main.rs", "fn main() {}") .build(); - p.cargo("yank foo --token sekrit") + p.cargo("yank foo") .with_status(101) .with_stderr("error: `--version` is required") .run(); @@ -110,14 +115,13 @@ #[cargo_test] fn inline_version_without_name() { - registry::init(); setup("foo", "0.0.1"); let p = project() .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -128,7 +132,7 @@ .file("src/main.rs", "fn main() {}") .build(); - p.cargo("yank @0.0.1 --token sekrit") + p.cargo("yank @0.0.1") .with_status(101) .with_stderr("error: missing crate name for `@0.0.1`") .run(); @@ -136,14 +140,13 @@ #[cargo_test] fn inline_and_explicit_version() { - registry::init(); setup("foo", "0.0.1"); let p = project() .file( "Cargo.toml", r#" - [project] + [package] name = "foo" version = "0.0.1" authors = [] @@ -154,7 +157,7 @@ .file("src/main.rs", "fn main() {}") .build(); - p.cargo("yank foo@0.0.1 --version 0.0.1 --token sekrit") + p.cargo("yank foo@0.0.1 --version 0.0.1") .with_status(101) .with_stderr("error: cannot specify both `@0.0.1` and `--version`") .run(); diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/triagebot.toml cargo-0.67.1+ds0ubuntu0.libgit2/triagebot.toml --- cargo-0.66.0+ds0ubuntu0.libgit2/triagebot.toml 2022-10-20 06:00:42.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/triagebot.toml 2023-01-10 13:41:19.000000000 +0000 @@ -1,5 +1,3 @@ -[assign] - [ping.windows] message = """\ Hey Windows Group! This bug has been identified as a good "Windows candidate". 
@@ -13,3 +11,11 @@ [shortcut] +[autolabel."S-waiting-on-review"] +new_pr = true + +[assign] +contributing_url = "https://rust-lang.github.io/cargo/contrib/" + +[assign.owners] +"*" = ["@ehuss", "@epage", "@weihanglo"] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/.cargo-checksum.json cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/.cargo-checksum.json --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/.cargo-checksum.json 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/.cargo-checksum.json 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -{"files":{},"package":"ba3569f383e8f1598449f1a423e72e99569137b47740b1da11ef19af3d5c3223"} \ No newline at end of file diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/Cargo.lock cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/Cargo.lock --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/Cargo.lock 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/Cargo.lock 1970-01-01 00:00:00.000000000 +0000 @@ -1,136 +0,0 @@ -# This file is automatically @generated by Cargo. -# It is not intended for manual editing. -version = 3 - -[[package]] -name = "bstr" -version = "0.2.17" -dependencies = [ - "lazy_static", - "memchr", - "quickcheck", - "regex-automata", - "serde", - "ucd-parse", - "unicode-segmentation", -] - -[[package]] -name = "byteorder" -version = "1.3.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08c48aae112d48ed9f069b33538ea9e3e90aa263cfa3d1c24309612b1f7472de" - -[[package]] -name = "cfg-if" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" - -[[package]] -name = "getrandom" -version = "0.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7fcd999463524c52659517fe2cea98493cfe485d10565e7b0fb07dbba7ad2753" -dependencies = [ - "cfg-if", - "libc", - "wasi", -] - -[[package]] -name = "lazy_static" -version = "1.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" - -[[package]] -name = "libc" -version = "0.2.79" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2448f6066e80e3bfc792e9c98bf705b4b0fc6e8ef5b43e5889aff0eaa9c58743" - -[[package]] -name = "memchr" -version = "2.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "308cc39be01b73d0d18f82a0e7b2a3df85245f84af96fdddc5d202d27e47b86a" - -[[package]] -name = "quickcheck" -version = "1.0.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "588f6378e4dd99458b60ec275b4477add41ce4fa9f64dcba6f15adccb19b50d6" -dependencies = [ - "rand", -] - -[[package]] -name = "rand" -version = "0.8.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2e7573632e6454cf6b99d7aac4ccca54be06da05aca2ef7423d22d27d4d4bcd8" -dependencies = [ - "rand_core", -] - -[[package]] -name = "rand_core" -version = "0.6.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d34f1408f55294453790c48b2f1ebbb1c5b4b7563eb1f418bcfcfdbb06ebb4e7" -dependencies = [ - "getrandom", -] - -[[package]] -name = "regex" -version = "1.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8963b85b8ce3074fecffde43b4b0dded83ce2f367dc8d363afc56679f3ee820b" -dependencies = [ - "regex-syntax", -] - 
-[[package]] -name = "regex-automata" -version = "0.1.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae1ded71d66a4a97f5e961fd0cb25a5f366a42a41570d16a763a69c092c26ae4" -dependencies = [ - "byteorder", -] - -[[package]] -name = "regex-syntax" -version = "0.6.20" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8cab7a364d15cde1e505267766a2d3c4e22a843e1a601f0fa7564c0f82ced11c" - -[[package]] -name = "serde" -version = "1.0.117" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b88fa983de7720629c9387e9f517353ed404164b1e482c970a90c1a4aaf7dc1a" - -[[package]] -name = "ucd-parse" -version = "0.1.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5269f8d35df6b8b60758343a6d742ecf09e4bca13faee32af5503aebd1e11b7c" -dependencies = [ - "lazy_static", - "regex", -] - -[[package]] -name = "unicode-segmentation" -version = "1.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e83e153d1053cbb5a118eeff7fd5be06ed99153f00dbcd8ae310c5fb2b22edc0" - -[[package]] -name = "wasi" -version = "0.10.2+wasi-snapshot-preview1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd6fbd9a79829dd1ad0cc20627bf1ed606756a7f77edff7b66b7064f9cb327c6" diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/Cargo.toml cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/Cargo.toml --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/Cargo.toml 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/Cargo.toml 1970-01-01 00:00:00.000000000 +0000 @@ -1,63 +0,0 @@ -# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO -# -# When uploading crates to the registry Cargo will automatically -# "normalize" Cargo.toml files for maximal compatibility -# with all versions of Cargo and also rewrite `path` dependencies -# to registry (e.g., crates.io) dependencies. -# -# If you are reading this file be aware that the original Cargo.toml -# will likely look very different (and much more reasonable). -# See Cargo.toml.orig for the original contents. - -[package] -edition = "2018" -name = "bstr" -version = "0.2.17" -authors = ["Andrew Gallant "] -exclude = ["/.github"] -description = "A string type that is not required to be valid UTF-8." 
-homepage = "https://github.com/BurntSushi/bstr" -documentation = "https://docs.rs/bstr" -readme = "README.md" -keywords = ["string", "str", "byte", "bytes", "text"] -categories = ["text-processing", "encoding"] -license = "MIT OR Apache-2.0" -repository = "https://github.com/BurntSushi/bstr" -[profile.release] -debug = true - -[lib] -bench = false -[dependencies.lazy_static] -version = "1.2.0" -optional = true - -[dependencies.memchr] -version = "2.4.0" -default-features = false - -[dependencies.regex-automata] -version = "0.1.5" -optional = true -default-features = false - -[dependencies.serde] -version = "1.0.85" -optional = true -default-features = false -[dev-dependencies.quickcheck] -version = "1" -default-features = false - -[dev-dependencies.ucd-parse] -version = "0.1.3" - -[dev-dependencies.unicode-segmentation] -version = "1.2.1" - -[features] -default = ["std", "unicode"] -serde1 = ["std", "serde1-nostd", "serde/std"] -serde1-nostd = ["serde"] -std = ["memchr/std"] -unicode = ["lazy_static", "regex-automata"] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/COPYING cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/COPYING --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/COPYING 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/COPYING 1970-01-01 00:00:00.000000000 +0000 @@ -1,8 +0,0 @@ -This project is licensed under either of - - * Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or - http://www.apache.org/licenses/LICENSE-2.0) - * MIT license ([LICENSE-MIT](LICENSE-MIT) or - http://opensource.org/licenses/MIT) - -at your option. diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/examples/graphemes.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/examples/graphemes.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/examples/graphemes.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/examples/graphemes.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,22 +0,0 @@ -use std::error::Error; -use std::io::{self, Write}; - -use bstr::{io::BufReadExt, ByteSlice}; - -fn main() -> Result<(), Box> { - let stdin = io::stdin(); - let mut stdout = io::BufWriter::new(io::stdout()); - - stdin.lock().for_byte_line_with_terminator(|line| { - let end = line - .grapheme_indices() - .map(|(_, end, _)| end) - .take(10) - .last() - .unwrap_or(line.len()); - stdout.write_all(line[..end].trim_end())?; - stdout.write_all(b"\n")?; - Ok(true) - })?; - Ok(()) -} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/examples/graphemes-std.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/examples/graphemes-std.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/examples/graphemes-std.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/examples/graphemes-std.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,25 +0,0 @@ -use std::error::Error; -use std::io::{self, BufRead, Write}; - -use unicode_segmentation::UnicodeSegmentation; - -fn main() -> Result<(), Box> { - let stdin = io::stdin(); - let mut stdin = stdin.lock(); - let mut stdout = io::BufWriter::new(io::stdout()); - - let mut line = String::new(); - while stdin.read_line(&mut line)? 
> 0 { - let end = line - .grapheme_indices(true) - .map(|(start, g)| start + g.len()) - .take(10) - .last() - .unwrap_or(line.len()); - stdout.write_all(line[..end].trim_end().as_bytes())?; - stdout.write_all(b"\n")?; - - line.clear(); - } - Ok(()) -} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/examples/lines.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/examples/lines.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/examples/lines.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/examples/lines.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,17 +0,0 @@ -use std::error::Error; -use std::io::{self, Write}; - -use bstr::{io::BufReadExt, ByteSlice}; - -fn main() -> Result<(), Box> { - let stdin = io::stdin(); - let mut stdout = io::BufWriter::new(io::stdout()); - - stdin.lock().for_byte_line_with_terminator(|line| { - if line.contains_str("Dimension") { - stdout.write_all(line)?; - } - Ok(true) - })?; - Ok(()) -} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/examples/lines-std.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/examples/lines-std.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/examples/lines-std.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/examples/lines-std.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,17 +0,0 @@ -use std::error::Error; -use std::io::{self, BufRead, Write}; - -fn main() -> Result<(), Box> { - let stdin = io::stdin(); - let mut stdin = stdin.lock(); - let mut stdout = io::BufWriter::new(io::stdout()); - - let mut line = String::new(); - while stdin.read_line(&mut line)? > 0 { - if line.contains("Dimension") { - stdout.write_all(line.as_bytes())?; - } - line.clear(); - } - Ok(()) -} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/examples/uppercase.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/examples/uppercase.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/examples/uppercase.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/examples/uppercase.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,18 +0,0 @@ -use std::error::Error; -use std::io::{self, Write}; - -use bstr::{io::BufReadExt, ByteSlice}; - -fn main() -> Result<(), Box> { - let stdin = io::stdin(); - let mut stdout = io::BufWriter::new(io::stdout()); - - let mut upper = vec![]; - stdin.lock().for_byte_line_with_terminator(|line| { - upper.clear(); - line.to_uppercase_into(&mut upper); - stdout.write_all(&upper)?; - Ok(true) - })?; - Ok(()) -} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/examples/uppercase-std.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/examples/uppercase-std.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/examples/uppercase-std.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/examples/uppercase-std.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,15 +0,0 @@ -use std::error::Error; -use std::io::{self, BufRead, Write}; - -fn main() -> Result<(), Box> { - let stdin = io::stdin(); - let mut stdin = stdin.lock(); - let mut stdout = io::BufWriter::new(io::stdout()); - - let mut line = String::new(); - while stdin.read_line(&mut line)? 
> 0 { - stdout.write_all(line.to_uppercase().as_bytes())?; - line.clear(); - } - Ok(()) -} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/examples/words.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/examples/words.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/examples/words.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/examples/words.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,15 +0,0 @@ -use std::error::Error; -use std::io; - -use bstr::{io::BufReadExt, ByteSlice}; - -fn main() -> Result<(), Box> { - let stdin = io::stdin(); - let mut words = 0; - stdin.lock().for_byte_line_with_terminator(|line| { - words += line.words().count(); - Ok(true) - })?; - println!("{}", words); - Ok(()) -} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/examples/words-std.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/examples/words-std.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/examples/words-std.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/examples/words-std.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,18 +0,0 @@ -use std::error::Error; -use std::io::{self, BufRead}; - -use unicode_segmentation::UnicodeSegmentation; - -fn main() -> Result<(), Box> { - let stdin = io::stdin(); - let mut stdin = stdin.lock(); - - let mut words = 0; - let mut line = String::new(); - while stdin.read_line(&mut line)? > 0 { - words += line.unicode_words().count(); - line.clear(); - } - println!("{}", words); - Ok(()) -} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/LICENSE-APACHE cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/LICENSE-APACHE --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/LICENSE-APACHE 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/LICENSE-APACHE 1970-01-01 00:00:00.000000000 +0000 @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - -TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - -1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. 
- - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - -2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - -3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - -4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - -5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - -6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - -7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - -8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - -9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - -END OF TERMS AND CONDITIONS - -APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - -Copyright [yyyy] [name of copyright owner] - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/LICENSE-MIT cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/LICENSE-MIT --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/LICENSE-MIT 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/LICENSE-MIT 1970-01-01 00:00:00.000000000 +0000 @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2018-2019 Andrew Gallant - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. 
- -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/README.md cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/README.md --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/README.md 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/README.md 1970-01-01 00:00:00.000000000 +0000 @@ -1,251 +0,0 @@ -bstr -==== -This crate provides extension traits for `&[u8]` and `Vec` that enable -their use as byte strings, where byte strings are _conventionally_ UTF-8. This -differs from the standard library's `String` and `str` types in that they are -not required to be valid UTF-8, but may be fully or partially valid UTF-8. - -[![Build status](https://github.com/BurntSushi/bstr/workflows/ci/badge.svg)](https://github.com/BurntSushi/bstr/actions) -[![](https://meritbadge.herokuapp.com/bstr)](https://crates.io/crates/bstr) - - -### Documentation - -https://docs.rs/bstr - - -### When should I use byte strings? - -See this part of the documentation for more details: -https://docs.rs/bstr/0.2.*/bstr/#when-should-i-use-byte-strings. - -The short story is that byte strings are useful when it is inconvenient or -incorrect to require valid UTF-8. - - -### Usage - -Add this to your `Cargo.toml`: - -```toml -[dependencies] -bstr = "0.2" -``` - - -### Examples - -The following two examples exhibit both the API features of byte strings and -the I/O convenience functions provided for reading line-by-line quickly. - -This first example simply shows how to efficiently iterate over lines in -stdin, and print out lines containing a particular substring: - -```rust -use std::error::Error; -use std::io::{self, Write}; - -use bstr::{ByteSlice, io::BufReadExt}; - -fn main() -> Result<(), Box> { - let stdin = io::stdin(); - let mut stdout = io::BufWriter::new(io::stdout()); - - stdin.lock().for_byte_line_with_terminator(|line| { - if line.contains_str("Dimension") { - stdout.write_all(line)?; - } - Ok(true) - })?; - Ok(()) -} -``` - -This example shows how to count all of the words (Unicode-aware) in stdin, -line-by-line: - -```rust -use std::error::Error; -use std::io; - -use bstr::{ByteSlice, io::BufReadExt}; - -fn main() -> Result<(), Box> { - let stdin = io::stdin(); - let mut words = 0; - stdin.lock().for_byte_line_with_terminator(|line| { - words += line.words().count(); - Ok(true) - })?; - println!("{}", words); - Ok(()) -} -``` - -This example shows how to convert a stream on stdin to uppercase without -performing UTF-8 validation _and_ amortizing allocation. On standard ASCII -text, this is quite a bit faster than what you can (easily) do with standard -library APIs. (N.B. Any invalid UTF-8 bytes are passed through unchanged.) 
- -```rust -use std::error::Error; -use std::io::{self, Write}; - -use bstr::{ByteSlice, io::BufReadExt}; - -fn main() -> Result<(), Box> { - let stdin = io::stdin(); - let mut stdout = io::BufWriter::new(io::stdout()); - - let mut upper = vec![]; - stdin.lock().for_byte_line_with_terminator(|line| { - upper.clear(); - line.to_uppercase_into(&mut upper); - stdout.write_all(&upper)?; - Ok(true) - })?; - Ok(()) -} -``` - -This example shows how to extract the first 10 visual characters (as grapheme -clusters) from each line, where invalid UTF-8 sequences are generally treated -as a single character and are passed through correctly: - -```rust -use std::error::Error; -use std::io::{self, Write}; - -use bstr::{ByteSlice, io::BufReadExt}; - -fn main() -> Result<(), Box> { - let stdin = io::stdin(); - let mut stdout = io::BufWriter::new(io::stdout()); - - stdin.lock().for_byte_line_with_terminator(|line| { - let end = line - .grapheme_indices() - .map(|(_, end, _)| end) - .take(10) - .last() - .unwrap_or(line.len()); - stdout.write_all(line[..end].trim_end())?; - stdout.write_all(b"\n")?; - Ok(true) - })?; - Ok(()) -} -``` - - -### Cargo features - -This crates comes with a few features that control standard library, serde -and Unicode support. - -* `std` - **Enabled** by default. This provides APIs that require the standard - library, such as `Vec`. -* `unicode` - **Enabled** by default. This provides APIs that require sizable - Unicode data compiled into the binary. This includes, but is not limited to, - grapheme/word/sentence segmenters. When this is disabled, basic support such - as UTF-8 decoding is still included. -* `serde1` - **Disabled** by default. Enables implementations of serde traits - for the `BStr` and `BString` types. -* `serde1-nostd` - **Disabled** by default. Enables implementations of serde - traits for the `BStr` type only, intended for use without the standard - library. Generally, you either want `serde1` or `serde1-nostd`, not both. - - -### Minimum Rust version policy - -This crate's minimum supported `rustc` version (MSRV) is `1.41.1`. - -In general, this crate will be conservative with respect to the minimum -supported version of Rust. MSRV may be bumped in minor version releases. - - -### Future work - -Since this is meant to be a core crate, getting a `1.0` release is a priority. -My hope is to move to `1.0` within the next year and commit to its API so that -`bstr` can be used as a public dependency. - -A large part of the API surface area was taken from the standard library, so -from an API design perspective, a good portion of this crate should be on solid -ground already. The main differences from the standard library are in how the -various substring search routines work. The standard library provides generic -infrastructure for supporting different types of searches with a single method, -where as this library prefers to define new methods for each type of search and -drop the generic infrastructure. - -Some _probable_ future considerations for APIs include, but are not limited to: - -* A convenience layer on top of the `aho-corasick` crate. -* Unicode normalization. -* More sophisticated support for dealing with Unicode case, perhaps by - combining the use cases supported by [`caseless`](https://docs.rs/caseless) - and [`unicase`](https://docs.rs/unicase). -* Add facilities for dealing with OS strings and file paths, probably via - simple conversion routines. - -Here are some examples that are _probably_ out of scope for this crate: - -* Regular expressions. 
-* Unicode collation. - -The exact scope isn't quite clear, but I expect we can iterate on it. - -In general, as stated below, this crate brings lots of related APIs together -into a single crate while simultaneously attempting to keep the total number of -dependencies low. Indeed, every dependency of `bstr`, except for `memchr`, is -optional. - - -### High level motivation - -Strictly speaking, the `bstr` crate provides very little that can't already be -achieved with the standard library `Vec`/`&[u8]` APIs and the ecosystem of -library crates. For example: - -* The standard library's - [`Utf8Error`](https://doc.rust-lang.org/std/str/struct.Utf8Error.html) - can be used for incremental lossy decoding of `&[u8]`. -* The - [`unicode-segmentation`](https://unicode-rs.github.io/unicode-segmentation/unicode_segmentation/index.html) - crate can be used for iterating over graphemes (or words), but is only - implemented for `&str` types. One could use `Utf8Error` above to implement - grapheme iteration with the same semantics as what `bstr` provides (automatic - Unicode replacement codepoint substitution). -* The [`twoway`](https://docs.rs/twoway) crate can be used for - fast substring searching on `&[u8]`. - -So why create `bstr`? Part of the point of the `bstr` crate is to provide a -uniform API of coupled components instead of relying on users to piece together -loosely coupled components from the crate ecosystem. For example, if you wanted -to perform a search and replace in a `Vec`, then writing the code to do -that with the `twoway` crate is not that difficult, but it's still additional -glue code you have to write. This work adds up depending on what you're doing. -Consider, for example, trimming and splitting, along with their different -variants. - -In other words, `bstr` is partially a way of pushing back against the -micro-crate ecosystem that appears to be evolving. Namely, it is a goal of -`bstr` to keep its dependency list lightweight. For example, `serde` is an -optional dependency because there is no feasible alternative. In service of -this philosophy, currently, the only required dependency of `bstr` is `memchr`. - - -### License - -This project is licensed under either of - - * Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or - https://www.apache.org/licenses/LICENSE-2.0) - * MIT license ([LICENSE-MIT](LICENSE-MIT) or - https://opensource.org/licenses/MIT) - -at your option. - -The data in `src/unicode/data/` is licensed under the Unicode License Agreement -([LICENSE-UNICODE](https://www.unicode.org/copyright.html#License)), although -this data is only used in tests. 
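The removed README above describes `bstr`'s extension-trait API for treating `&[u8]` as a conventionally-UTF-8 byte string (lossy decoding, Unicode-aware segmentation on raw bytes). As a minimal sketch — not part of this patch, and assuming a downstream crate depending on `bstr` 0.2 with its default `std` and `unicode` features — the idea looks like this:

```rust
use bstr::ByteSlice;

fn main() {
    // A byte string that is not valid UTF-8; a `&str` could not hold this.
    let bytes = b"foo\xFFbar baz";

    // Lossy decoding substitutes U+FFFD for the invalid byte.
    println!("{}", bytes.to_str_lossy());

    // Unicode-aware word iteration directly on &[u8]
    // (relies on the default `unicode` feature described in the README).
    for word in bytes.words() {
        println!("{}", word);
    }
}
```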
diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/rustfmt.toml cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/rustfmt.toml --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/rustfmt.toml 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/rustfmt.toml 1970-01-01 00:00:00.000000000 +0000 @@ -1,2 +0,0 @@ -max_width = 79 -use_small_heuristics = "max" diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/scripts/generate-unicode-data cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/scripts/generate-unicode-data --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/scripts/generate-unicode-data 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/scripts/generate-unicode-data 1970-01-01 00:00:00.000000000 +0000 @@ -1,149 +0,0 @@ -#!/bin/sh - -set -e -D="$(dirname "$0")" - -# Convenience function for checking that a command exists. -requires() { - cmd="$1" - if ! command -v "$cmd" > /dev/null 2>&1; then - echo "DEPENDENCY MISSING: $cmd must be installed" >&2 - exit 1 - fi -} - -# Test if an array ($2) contains a particular element ($1). -array_exists() { - needle="$1" - shift - - for el in "$@"; do - if [ "$el" = "$needle" ]; then - return 0 - fi - done - return 1 -} - -graphemes() { - regex="$(sh "$D/regex/grapheme.sh")" - - echo "generating forward grapheme DFA" - ucd-generate dfa \ - --name GRAPHEME_BREAK_FWD \ - --sparse --minimize --anchored --state-size 2 \ - src/unicode/fsm/ \ - "$regex" - - echo "generating reverse grapheme DFA" - ucd-generate dfa \ - --name GRAPHEME_BREAK_REV \ - --reverse --longest \ - --sparse --minimize --anchored --state-size 2 \ - src/unicode/fsm/ \ - "$regex" -} - -words() { - regex="$(sh "$D/regex/word.sh")" - - echo "generating forward word DFA (this can take a while)" - ucd-generate dfa \ - --name WORD_BREAK_FWD \ - --sparse --minimize --anchored --state-size 4 \ - src/unicode/fsm/ \ - "$regex" -} - -sentences() { - regex="$(sh "$D/regex/sentence.sh")" - - echo "generating forward sentence DFA (this can take a while)" - ucd-generate dfa \ - --name SENTENCE_BREAK_FWD \ - --minimize \ - --sparse --anchored --state-size 4 \ - src/unicode/fsm/ \ - "$regex" -} - -regional_indicator() { - # For finding all occurrences of region indicators. This is used to handle - # regional indicators as a special case for the reverse grapheme iterator - # and the reverse word iterator. - echo "generating regional indicator DFA" - ucd-generate dfa \ - --name REGIONAL_INDICATOR_REV \ - --reverse \ - --classes --minimize --anchored --premultiply --state-size 1 \ - src/unicode/fsm/ \ - "\p{gcb=Regional_Indicator}" -} - -simple_word() { - echo "generating forward simple word DFA" - ucd-generate dfa \ - --name SIMPLE_WORD_FWD \ - --sparse --minimize --state-size 2 \ - src/unicode/fsm/ \ - "\w" -} - -whitespace() { - echo "generating forward whitespace DFA" - ucd-generate dfa \ - --name WHITESPACE_ANCHORED_FWD \ - --anchored --classes --premultiply --minimize --state-size 1 \ - src/unicode/fsm/ \ - "\s+" - - echo "generating reverse whitespace DFA" - ucd-generate dfa \ - --name WHITESPACE_ANCHORED_REV \ - --reverse \ - --anchored --classes --premultiply --minimize --state-size 2 \ - src/unicode/fsm/ \ - "\s+" -} - -main() { - if array_exists "-h" "$@" || array_exists "--help" "$@"; then - echo "Usage: $(basename "$0") [--list-commands] [] ..." 
>&2 - exit - fi - - commands=" - graphemes - sentences - words - regional-indicator - simple-word - whitespace - " - if array_exists "--list-commands" "$@"; then - for cmd in $commands; do - echo "$cmd" - done - exit - fi - - # ucd-generate is used to compile regexes into DFAs. - requires ucd-generate - - mkdir -p src/unicode/fsm/ - - cmds=$* - if [ $# -eq 0 ] || array_exists "all" "$@"; then - cmds=$commands - fi - for cmd in $cmds; do - if array_exists "$cmd" $commands; then - fun="$(echo "$cmd" | sed 's/-/_/g')" - eval "$fun" - else - echo "unrecognized command: $cmd" >&2 - fi - done -} - -main "$@" diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/scripts/regex/grapheme.sh cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/scripts/regex/grapheme.sh --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/scripts/regex/grapheme.sh 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/scripts/regex/grapheme.sh 1970-01-01 00:00:00.000000000 +0000 @@ -1,50 +0,0 @@ -#!/bin/sh - -# vim: indentexpr= nosmartindent autoindent -# vim: tabstop=2 shiftwidth=2 softtabstop=2 - -# This regex was manually written, derived from the rules in UAX #29. -# Particularly, from Table 1c, which lays out a regex for grapheme clusters. - -CR="\p{gcb=CR}" -LF="\p{gcb=LF}" -Control="\p{gcb=Control}" -Prepend="\p{gcb=Prepend}" -L="\p{gcb=L}" -V="\p{gcb=V}" -LV="\p{gcb=LV}" -LVT="\p{gcb=LVT}" -T="\p{gcb=T}" -RI="\p{gcb=RI}" -Extend="\p{gcb=Extend}" -ZWJ="\p{gcb=ZWJ}" -SpacingMark="\p{gcb=SpacingMark}" - -Any="\p{any}" -ExtendPict="\p{Extended_Pictographic}" - -echo "(?x) -$CR $LF -| -$Control -| -$Prepend* -( - ( - ($L* ($V+ | $LV $V* | $LVT) $T*) - | - $L+ - | - $T+ - ) - | - $RI $RI - | - $ExtendPict ($Extend* $ZWJ $ExtendPict)* - | - [^$Control $CR $LF] -) -[$Extend $ZWJ $SpacingMark]* -| -$Any -" diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/scripts/regex/sentence.sh cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/scripts/regex/sentence.sh --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/scripts/regex/sentence.sh 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/scripts/regex/sentence.sh 1970-01-01 00:00:00.000000000 +0000 @@ -1,176 +0,0 @@ -#!/bin/sh - -# vim: indentexpr= nosmartindent autoindent -# vim: tabstop=2 shiftwidth=2 softtabstop=2 - -# This is a regex that I reverse engineered from the sentence boundary chain -# rules in UAX #29. Unlike the grapheme regex, which is essentially provided -# for us in UAX #29, no such sentence regex exists. -# -# I looked into how ICU achieves this, since UAX #29 hints that producing -# finite state machines for grapheme/sentence/word/line breaking is possible, -# but only easy to do for graphemes. ICU does this by implementing their own -# DSL for describing the break algorithms in terms of the chaining rules -# directly. You can see an example for sentences in -# icu4c/source/data/brkitr/rules/sent.txt. ICU then builds a finite state -# machine from those rules in a mostly standard way, but implements the -# "chaining" aspect of the rules by connecting overlapping end and start -# states. For example, given SB7: -# -# (Upper | Lower) ATerm x Upper -# -# Then the naive way to convert this into a regex would be something like -# -# [\p{sb=Upper}\p{sb=Lower}]\p{sb=ATerm}\p{sb=Upper} -# -# Unfortunately, this is incorrect. Why? Well, consider an example like so: -# -# U.S.A. 
-# -# A correct implementation of the sentence breaking algorithm should not insert -# any breaks here, exactly in accordance with repeatedly applying rule SB7 as -# given above. Our regex fails to do this because it will first match `U.S` -# without breaking them---which is correct---but will then start looking for -# its next rule beginning with a full stop (in ATerm) and followed by an -# uppercase letter (A). This will wind up triggering rule SB11 (without -# matching `A`), which inserts a break. -# -# The reason why this happens is because our initial application of rule SB7 -# "consumes" the next uppercase letter (S), which we want to reuse as a prefix -# in the next rule application. A natural way to express this would be with -# look-around, although it's not clear that works in every case since you -# ultimately might want to consume that ending uppercase letter. In any case, -# we can't use look-around in our truly regular regexes, so we must fix this. -# The approach we take is to explicitly repeat rules when a suffix of a rule -# is a prefix of another rule. In the case of SB7, the end of the rule, an -# uppercase letter, also happens to match the beginning of the rule. This can -# in turn be repeated indefinitely. Thus, our actual translation to a regex is: -# -# [\p{sb=Upper}\p{sb=Lower}]\p{sb=ATerm}\p{sb=Upper}(\p{sb=ATerm}\p{sb=Upper}* -# -# It turns out that this is exactly what ICU does, but in their case, they do -# it automatically. In our case, we connect the chaining rules manually. It's -# tedious. With that said, we do no implement Unicode line breaking with this -# approach, which is a far scarier beast. In that case, it would probably be -# worth writing the code to do what ICU does. -# -# In the case of sentence breaks, there aren't *too* many overlaps of this -# nature. We list them out exhaustively to make this clear, because it's -# essentially impossible to easily observe this in the regex. (It took me a -# full day to figure all of this out.) Rules marked with N/A mean that they -# specify a break, and this strategy only really applies to stringing together -# non-breaks. -# -# SB1 - N/A -# SB2 - N/A -# SB3 - None -# SB4 - N/A -# SB5 - None -# SB6 - None -# SB7 - End overlaps with beginning of SB7 -# SB8 - End overlaps with beginning of SB7 -# SB8a - End overlaps with beginning of SB6, SB8, SB8a, SB9, SB10, SB11 -# SB9 - None -# SB10 - None -# SB11 - None -# SB998 - N/A -# -# SB8a is in particular quite tricky to get right without look-ahead, since it -# allows ping-ponging between match rules SB8a and SB9-11, where SB9-11 -# otherwise indicate that a break has been found. In the regex below, we tackle -# this by only permitting part of SB8a to match inside our core non-breaking -# repetition. In particular, we only allow the parts of SB8a to match that -# permit the non-breaking components to continue. If a part of SB8a matches -# that guarantees a pop out to SB9-11, (like `STerm STerm`), then we let it -# happen. This still isn't correct because an SContinue might be seen which -# would allow moving back into SB998 and thus the non-breaking repetition, so -# we handle that case as well. -# -# Finally, the last complication here is the sprinkling of $Ex* everywhere. -# This essentially corresponds to the implementation of SB5 by following -# UAX #29's recommendation in S6.2. Essentially, we use it avoid ever breaking -# in the middle of a grapheme cluster. 
- -CR="\p{sb=CR}" -LF="\p{sb=LF}" -Sep="\p{sb=Sep}" -Close="\p{sb=Close}" -Sp="\p{sb=Sp}" -STerm="\p{sb=STerm}" -ATerm="\p{sb=ATerm}" -SContinue="\p{sb=SContinue}" -Numeric="\p{sb=Numeric}" -Upper="\p{sb=Upper}" -Lower="\p{sb=Lower}" -OLetter="\p{sb=OLetter}" - -Ex="[\p{sb=Extend}\p{sb=Format}]" -ParaSep="[$Sep $CR $LF]" -SATerm="[$STerm $ATerm]" - -LetterSepTerm="[$OLetter $Upper $Lower $ParaSep $SATerm]" - -echo "(?x) -( - # SB6 - $ATerm $Ex* - $Numeric - | - # SB7 - [$Upper $Lower] $Ex* $ATerm $Ex* - $Upper $Ex* - # overlap with SB7 - ($ATerm $Ex* $Upper $Ex*)* - | - # SB8 - $ATerm $Ex* $Close* $Ex* $Sp* $Ex* - ([^$LetterSepTerm] $Ex*)* $Lower $Ex* - # overlap with SB7 - ($ATerm $Ex* $Upper $Ex*)* - | - # SB8a - $SATerm $Ex* $Close* $Ex* $Sp* $Ex* - ( - $SContinue - | - $ATerm $Ex* - # Permit repetition of SB8a - (($Close $Ex*)* ($Sp $Ex*)* $SATerm)* - # In order to continue non-breaking matching, we now must observe - # a match with a rule that keeps us in SB6-8a. Otherwise, we've entered - # one of SB9-11 and know that a break must follow. - ( - # overlap with SB6 - $Numeric - | - # overlap with SB8 - ($Close $Ex*)* ($Sp $Ex*)* - ([^$LetterSepTerm] $Ex*)* $Lower $Ex* - # overlap with SB7 - ($ATerm $Ex* $Upper $Ex*)* - | - # overlap with SB8a - ($Close $Ex*)* ($Sp $Ex*)* $SContinue - ) - | - $STerm $Ex* - # Permit repetition of SB8a - (($Close $Ex*)* ($Sp $Ex*)* $SATerm)* - # As with ATerm above, in order to continue non-breaking matching, we - # must now observe a match with a rule that keeps us out of SB9-11. - # For STerm, the only such possibility is to see an SContinue. Anything - # else will result in a break. - ($Close $Ex*)* ($Sp $Ex*)* $SContinue - ) - | - # SB998 - # The logic behind this catch-all is that if we get to this point and - # see a Sep, CR, LF, STerm or ATerm, then it has to fall into one of - # SB9, SB10 or SB11. In the cases of SB9-11, we always find a break since - # SB11 acts as a catch-all to induce a break following a SATerm that isn't - # handled by rules SB6-SB8a. - [^$ParaSep $SATerm] -)* -# The following collapses rules SB3, SB4, part of SB8a, SB9, SB10 and SB11. -($SATerm $Ex* ($Close $Ex*)* ($Sp $Ex*)*)* ($CR $LF | $ParaSep)? -" diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/scripts/regex/word.sh cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/scripts/regex/word.sh --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/scripts/regex/word.sh 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/scripts/regex/word.sh 1970-01-01 00:00:00.000000000 +0000 @@ -1,111 +0,0 @@ -#!/bin/sh - -# vim: indentexpr= nosmartindent autoindent -# vim: tabstop=2 shiftwidth=2 softtabstop=2 - -# See the comments in regex/sentence.sh for the general approach to how this -# regex was written. -# -# Writing the regex for this was *hard*. It took me two days of hacking to get -# this far, and that was after I had finished the sentence regex, so my brain -# was fully cached on this. Unlike the sentence regex, the rules in the regex -# below don't correspond as nicely to the rules in UAX #29. In particular, the -# UAX #29 rules have a ton of overlap with each other, which requires crazy -# stuff in the regex. I'm not even sure the regex below is 100% correct or even -# minimal, however, I did compare this with the ICU word segmenter on a few -# different corpora, and it produces identical results. (In addition to of -# course passing the UCD tests.) -# -# In general, I consider this approach to be a failure. 
Firstly, this is -# clearly a write-only regex. Secondly, building the minimized DFA for this is -# incredibly slow. Thirdly, the DFA is itself very large (~240KB). Fourthly, -# reversing this regex (for reverse word iteration) results in a >19MB DFA. -# Yes. That's MB. Wat. And it took 5 minutes to build. -# -# I think we might consider changing our approach to this problem. The normal -# path I've seen, I think, is to decode codepoints one at a time, and then -# thread them through a state machine in the code itself. We could take this -# approach, or possibly combine it with a DFA that tells us which Word_Break -# value a codepoint has. I'd prefer the latter approach, but it requires adding -# RegexSet support to regex-automata. Something that should definitely be done, -# but is a fair amount of work. -# -# Gah. - -CR="\p{wb=CR}" -LF="\p{wb=LF}" -Newline="\p{wb=Newline}" -ZWJ="\p{wb=ZWJ}" -RI="\p{wb=Regional_Indicator}" -Katakana="\p{wb=Katakana}" -HebrewLet="\p{wb=HebrewLetter}" -ALetter="\p{wb=ALetter}" -SingleQuote="\p{wb=SingleQuote}" -DoubleQuote="\p{wb=DoubleQuote}" -MidNumLet="\p{wb=MidNumLet}" -MidLetter="\p{wb=MidLetter}" -MidNum="\p{wb=MidNum}" -Numeric="\p{wb=Numeric}" -ExtendNumLet="\p{wb=ExtendNumLet}" -WSegSpace="\p{wb=WSegSpace}" - -Any="\p{any}" -Ex="[\p{wb=Extend} \p{wb=Format} $ZWJ]" -ExtendPict="\p{Extended_Pictographic}" -AHLetter="[$ALetter $HebrewLet]" -MidNumLetQ="[$MidNumLet $SingleQuote]" - -AHLetterRepeat="$AHLetter $Ex* ([$MidLetter $MidNumLetQ] $Ex* $AHLetter $Ex*)*" -NumericRepeat="$Numeric $Ex* ([$MidNum $MidNumLetQ] $Ex* $Numeric $Ex*)*" - -echo "(?x) -$CR $LF -| -[$Newline $CR $LF] -| -$WSegSpace $WSegSpace+ -| -( - ([^$Newline $CR $LF]? $Ex* $ZWJ $ExtendPict $Ex*)+ - | - ($ExtendNumLet $Ex*)* $AHLetter $Ex* - ( - ( - ($NumericRepeat | $ExtendNumLet $Ex*)* - | - [$MidLetter $MidNumLetQ] $Ex* - ) - $AHLetter $Ex* - )+ - ($NumericRepeat | $ExtendNumLet $Ex*)* - | - ($ExtendNumLet $Ex*)* $AHLetter $Ex* ($NumericRepeat | $ExtendNumLet $Ex*)+ - | - ($ExtendNumLet $Ex*)* $Numeric $Ex* - ( - ( - ($AHLetterRepeat | $ExtendNumLet $Ex*)* - | - [$MidNum $MidNumLetQ] $Ex* - ) - $Numeric $Ex* - )+ - ($AHLetterRepeat | $ExtendNumLet $Ex*)* - | - ($ExtendNumLet $Ex*)* $Numeric $Ex* ($AHLetterRepeat | $ExtendNumLet $Ex*)+ - | - $Katakana $Ex* - (($Katakana | $ExtendNumLet) $Ex*)+ - | - $ExtendNumLet $Ex* - (($ExtendNumLet | $AHLetter | $Numeric | $Katakana) $Ex*)+ -)+ -| -$HebrewLet $Ex* $SingleQuote $Ex* -| -($HebrewLet $Ex* $DoubleQuote $Ex*)+ $HebrewLet $Ex* -| -$RI $Ex* $RI $Ex* -| -$Any $Ex* -" diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/ascii.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/ascii.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/ascii.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/ascii.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,336 +0,0 @@ -use core::mem; - -// The following ~400 lines of code exists for exactly one purpose, which is -// to optimize this code: -// -// byte_slice.iter().position(|&b| b > 0x7F).unwrap_or(byte_slice.len()) -// -// Yes... Overengineered is a word that comes to mind, but this is effectively -// a very similar problem to memchr, and virtually nobody has been able to -// resist optimizing the crap out of that (except for perhaps the BSD and MUSL -// folks). In particular, this routine makes a very common case (ASCII) very -// fast, which seems worth it. 
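As a side note on the scalar path: the word-at-a-time routine below is a hedged sketch of the high-bit test described above (OR eight bytes into a `u64` and check them against `0x8080...80`). It is illustrative only; the module's actual code also handles alignment and adds an SSE2 path.

```
use std::convert::TryInto;

// Naive baseline quoted in the comment above.
fn first_non_ascii_naive(bytes: &[u8]) -> usize {
    bytes.iter().position(|&b| b > 0x7F).unwrap_or(bytes.len())
}

// Word-at-a-time sketch: test eight bytes per iteration by checking whether
// any of them has its most significant bit set.
fn first_non_ascii_wordwise(bytes: &[u8]) -> usize {
    const ASCII_MASK: u64 = 0x8080_8080_8080_8080;
    let mut i = 0;
    while i + 8 <= bytes.len() {
        let chunk = u64::from_le_bytes(bytes[i..i + 8].try_into().unwrap());
        if chunk & ASCII_MASK != 0 {
            // Some byte in this chunk is non-ASCII; rescan it byte-wise.
            return i + bytes[i..i + 8].iter().position(|&b| b > 0x7F).unwrap();
        }
        i += 8;
    }
    i + first_non_ascii_naive(&bytes[i..])
}

fn main() {
    let s = b"hello, world \xE2\x98\x83";
    assert_eq!(first_non_ascii_naive(s), 13);
    assert_eq!(first_non_ascii_wordwise(s), 13);
}
```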
We do stop short of adding AVX variants of the -// code below in order to retain our sanity and also to avoid needing to deal -// with runtime target feature detection. RESIST! -// -// In order to understand the SIMD version below, it would be good to read this -// comment describing how my memchr routine works: -// https://github.com/BurntSushi/rust-memchr/blob/b0a29f267f4a7fad8ffcc8fe8377a06498202883/src/x86/sse2.rs#L19-L106 -// -// The primary difference with memchr is that for ASCII, we can do a bit less -// work. In particular, we don't need to detect the presence of a specific -// byte, but rather, whether any byte has its most significant bit set. That -// means we can effectively skip the _mm_cmpeq_epi8 step and jump straight to -// _mm_movemask_epi8. - -#[cfg(any(test, not(target_arch = "x86_64")))] -const USIZE_BYTES: usize = mem::size_of::(); -#[cfg(any(test, not(target_arch = "x86_64")))] -const FALLBACK_LOOP_SIZE: usize = 2 * USIZE_BYTES; - -// This is a mask where the most significant bit of each byte in the usize -// is set. We test this bit to determine whether a character is ASCII or not. -// Namely, a single byte is regarded as an ASCII codepoint if and only if it's -// most significant bit is not set. -#[cfg(any(test, not(target_arch = "x86_64")))] -const ASCII_MASK_U64: u64 = 0x8080808080808080; -#[cfg(any(test, not(target_arch = "x86_64")))] -const ASCII_MASK: usize = ASCII_MASK_U64 as usize; - -/// Returns the index of the first non ASCII byte in the given slice. -/// -/// If slice only contains ASCII bytes, then the length of the slice is -/// returned. -pub fn first_non_ascii_byte(slice: &[u8]) -> usize { - #[cfg(not(target_arch = "x86_64"))] - { - first_non_ascii_byte_fallback(slice) - } - - #[cfg(target_arch = "x86_64")] - { - first_non_ascii_byte_sse2(slice) - } -} - -#[cfg(any(test, not(target_arch = "x86_64")))] -fn first_non_ascii_byte_fallback(slice: &[u8]) -> usize { - let align = USIZE_BYTES - 1; - let start_ptr = slice.as_ptr(); - let end_ptr = slice[slice.len()..].as_ptr(); - let mut ptr = start_ptr; - - unsafe { - if slice.len() < USIZE_BYTES { - return first_non_ascii_byte_slow(start_ptr, end_ptr, ptr); - } - - let chunk = read_unaligned_usize(ptr); - let mask = chunk & ASCII_MASK; - if mask != 0 { - return first_non_ascii_byte_mask(mask); - } - - ptr = ptr_add(ptr, USIZE_BYTES - (start_ptr as usize & align)); - debug_assert!(ptr > start_ptr); - debug_assert!(ptr_sub(end_ptr, USIZE_BYTES) >= start_ptr); - if slice.len() >= FALLBACK_LOOP_SIZE { - while ptr <= ptr_sub(end_ptr, FALLBACK_LOOP_SIZE) { - debug_assert_eq!(0, (ptr as usize) % USIZE_BYTES); - - let a = *(ptr as *const usize); - let b = *(ptr_add(ptr, USIZE_BYTES) as *const usize); - if (a | b) & ASCII_MASK != 0 { - // What a kludge. We wrap the position finding code into - // a non-inlineable function, which makes the codegen in - // the tight loop above a bit better by avoiding a - // couple extra movs. We pay for it by two additional - // stores, but only in the case of finding a non-ASCII - // byte. 
- #[inline(never)] - unsafe fn findpos( - start_ptr: *const u8, - ptr: *const u8, - ) -> usize { - let a = *(ptr as *const usize); - let b = *(ptr_add(ptr, USIZE_BYTES) as *const usize); - - let mut at = sub(ptr, start_ptr); - let maska = a & ASCII_MASK; - if maska != 0 { - return at + first_non_ascii_byte_mask(maska); - } - - at += USIZE_BYTES; - let maskb = b & ASCII_MASK; - debug_assert!(maskb != 0); - return at + first_non_ascii_byte_mask(maskb); - } - return findpos(start_ptr, ptr); - } - ptr = ptr_add(ptr, FALLBACK_LOOP_SIZE); - } - } - first_non_ascii_byte_slow(start_ptr, end_ptr, ptr) - } -} - -#[cfg(target_arch = "x86_64")] -fn first_non_ascii_byte_sse2(slice: &[u8]) -> usize { - use core::arch::x86_64::*; - - const VECTOR_SIZE: usize = mem::size_of::<__m128i>(); - const VECTOR_ALIGN: usize = VECTOR_SIZE - 1; - const VECTOR_LOOP_SIZE: usize = 4 * VECTOR_SIZE; - - let start_ptr = slice.as_ptr(); - let end_ptr = slice[slice.len()..].as_ptr(); - let mut ptr = start_ptr; - - unsafe { - if slice.len() < VECTOR_SIZE { - return first_non_ascii_byte_slow(start_ptr, end_ptr, ptr); - } - - let chunk = _mm_loadu_si128(ptr as *const __m128i); - let mask = _mm_movemask_epi8(chunk); - if mask != 0 { - return mask.trailing_zeros() as usize; - } - - ptr = ptr.add(VECTOR_SIZE - (start_ptr as usize & VECTOR_ALIGN)); - debug_assert!(ptr > start_ptr); - debug_assert!(end_ptr.sub(VECTOR_SIZE) >= start_ptr); - if slice.len() >= VECTOR_LOOP_SIZE { - while ptr <= ptr_sub(end_ptr, VECTOR_LOOP_SIZE) { - debug_assert_eq!(0, (ptr as usize) % VECTOR_SIZE); - - let a = _mm_load_si128(ptr as *const __m128i); - let b = _mm_load_si128(ptr.add(VECTOR_SIZE) as *const __m128i); - let c = - _mm_load_si128(ptr.add(2 * VECTOR_SIZE) as *const __m128i); - let d = - _mm_load_si128(ptr.add(3 * VECTOR_SIZE) as *const __m128i); - - let or1 = _mm_or_si128(a, b); - let or2 = _mm_or_si128(c, d); - let or3 = _mm_or_si128(or1, or2); - if _mm_movemask_epi8(or3) != 0 { - let mut at = sub(ptr, start_ptr); - let mask = _mm_movemask_epi8(a); - if mask != 0 { - return at + mask.trailing_zeros() as usize; - } - - at += VECTOR_SIZE; - let mask = _mm_movemask_epi8(b); - if mask != 0 { - return at + mask.trailing_zeros() as usize; - } - - at += VECTOR_SIZE; - let mask = _mm_movemask_epi8(c); - if mask != 0 { - return at + mask.trailing_zeros() as usize; - } - - at += VECTOR_SIZE; - let mask = _mm_movemask_epi8(d); - debug_assert!(mask != 0); - return at + mask.trailing_zeros() as usize; - } - ptr = ptr_add(ptr, VECTOR_LOOP_SIZE); - } - } - while ptr <= end_ptr.sub(VECTOR_SIZE) { - debug_assert!(sub(end_ptr, ptr) >= VECTOR_SIZE); - - let chunk = _mm_loadu_si128(ptr as *const __m128i); - let mask = _mm_movemask_epi8(chunk); - if mask != 0 { - return sub(ptr, start_ptr) + mask.trailing_zeros() as usize; - } - ptr = ptr.add(VECTOR_SIZE); - } - first_non_ascii_byte_slow(start_ptr, end_ptr, ptr) - } -} - -#[inline(always)] -unsafe fn first_non_ascii_byte_slow( - start_ptr: *const u8, - end_ptr: *const u8, - mut ptr: *const u8, -) -> usize { - debug_assert!(start_ptr <= ptr); - debug_assert!(ptr <= end_ptr); - - while ptr < end_ptr { - if *ptr > 0x7F { - return sub(ptr, start_ptr); - } - ptr = ptr.offset(1); - } - sub(end_ptr, start_ptr) -} - -/// Compute the position of the first ASCII byte in the given mask. -/// -/// The mask should be computed by `chunk & ASCII_MASK`, where `chunk` is -/// 8 contiguous bytes of the slice being checked where *at least* one of those -/// bytes is not an ASCII byte. 
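A tiny illustrative sketch of the mask-to-offset computation used here, assuming a little-endian load via `u64::from_le_bytes`: the lowest set high bit corresponds to the first non-ASCII byte, so `trailing_zeros() / 8` recovers its offset.

```
fn main() {
    const ASCII_MASK: u64 = 0x8080_8080_8080_8080;
    // Bytes 0..4 are ASCII; byte 4 is 0xC3, the first non-ASCII byte.
    let chunk = u64::from_le_bytes(*b"abcd\xC3\xA9!!");
    let mask = chunk & ASCII_MASK;
    // The lowest set bit sits in byte 4 of the chunk (bit 39), so dividing
    // the trailing-zero count by 8 yields the byte offset.
    assert_eq!(mask.trailing_zeros() as usize / 8, 4);
}
```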
-/// -/// The position returned is always in the inclusive range [0, 7]. -#[cfg(any(test, not(target_arch = "x86_64")))] -fn first_non_ascii_byte_mask(mask: usize) -> usize { - #[cfg(target_endian = "little")] - { - mask.trailing_zeros() as usize / 8 - } - #[cfg(target_endian = "big")] - { - mask.leading_zeros() as usize / 8 - } -} - -/// Increment the given pointer by the given amount. -unsafe fn ptr_add(ptr: *const u8, amt: usize) -> *const u8 { - debug_assert!(amt < ::core::isize::MAX as usize); - ptr.offset(amt as isize) -} - -/// Decrement the given pointer by the given amount. -unsafe fn ptr_sub(ptr: *const u8, amt: usize) -> *const u8 { - debug_assert!(amt < ::core::isize::MAX as usize); - ptr.offset((amt as isize).wrapping_neg()) -} - -#[cfg(any(test, not(target_arch = "x86_64")))] -unsafe fn read_unaligned_usize(ptr: *const u8) -> usize { - use core::ptr; - - let mut n: usize = 0; - ptr::copy_nonoverlapping(ptr, &mut n as *mut _ as *mut u8, USIZE_BYTES); - n -} - -/// Subtract `b` from `a` and return the difference. `a` should be greater than -/// or equal to `b`. -fn sub(a: *const u8, b: *const u8) -> usize { - debug_assert!(a >= b); - (a as usize) - (b as usize) -} - -#[cfg(test)] -mod tests { - use super::*; - - // Our testing approach here is to try and exhaustively test every case. - // This includes the position at which a non-ASCII byte occurs in addition - // to the alignment of the slice that we're searching. - - #[test] - fn positive_fallback_forward() { - for i in 0..517 { - let s = "a".repeat(i); - assert_eq!( - i, - first_non_ascii_byte_fallback(s.as_bytes()), - "i: {:?}, len: {:?}, s: {:?}", - i, - s.len(), - s - ); - } - } - - #[test] - #[cfg(target_arch = "x86_64")] - fn positive_sse2_forward() { - for i in 0..517 { - let b = "a".repeat(i).into_bytes(); - assert_eq!(b.len(), first_non_ascii_byte_sse2(&b)); - } - } - - #[test] - fn negative_fallback_forward() { - for i in 0..517 { - for align in 0..65 { - let mut s = "a".repeat(i); - s.push_str("☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃"); - let s = s.get(align..).unwrap_or(""); - assert_eq!( - i.saturating_sub(align), - first_non_ascii_byte_fallback(s.as_bytes()), - "i: {:?}, align: {:?}, len: {:?}, s: {:?}", - i, - align, - s.len(), - s - ); - } - } - } - - #[test] - #[cfg(target_arch = "x86_64")] - fn negative_sse2_forward() { - for i in 0..517 { - for align in 0..65 { - let mut s = "a".repeat(i); - s.push_str("☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃"); - let s = s.get(align..).unwrap_or(""); - assert_eq!( - i.saturating_sub(align), - first_non_ascii_byte_sse2(s.as_bytes()), - "i: {:?}, align: {:?}, len: {:?}, s: {:?}", - i, - align, - s.len(), - s - ); - } - } - } -} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/bstring.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/bstring.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/bstring.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/bstring.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,59 +0,0 @@ -use crate::bstr::BStr; - -/// A wrapper for `Vec` that provides convenient string oriented trait -/// impls. -/// -/// A `BString` has ownership over its contents and corresponds to -/// a growable or shrinkable buffer. Its borrowed counterpart is a -/// [`BStr`](struct.BStr.html), called a byte string slice. -/// -/// Using a `BString` is just like using a `Vec`, since `BString` -/// implements `Deref` to `Vec`. So all methods available on `Vec` -/// are also available on `BString`. 
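As an aside, a minimal sketch of that `Deref`-based ergonomics using the public bstr 0.2 API; the values below are illustrative only.

```
use bstr::BString;

fn main() {
    let mut s = BString::from("Hello");
    s.push(b',');                   // Vec<u8>::push, reached through DerefMut
    s.extend_from_slice(b" world"); // likewise a Vec<u8> method
    assert_eq!(&s[..], &b"Hello, world"[..]);
}
```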
-/// -/// # Examples -/// -/// You can create a new `BString` from a `Vec` via a `From` impl: -/// -/// ``` -/// use bstr::BString; -/// -/// let s = BString::from("Hello, world!"); -/// ``` -/// -/// # Deref -/// -/// The `BString` type implements `Deref` and `DerefMut`, where the target -/// types are `&Vec` and `&mut Vec`, respectively. `Deref` permits all of the -/// methods defined on `Vec` to be implicitly callable on any `BString`. -/// -/// For more information about how deref works, see the documentation for the -/// [`std::ops::Deref`](https://doc.rust-lang.org/std/ops/trait.Deref.html) -/// trait. -/// -/// # Representation -/// -/// A `BString` has the same representation as a `Vec` and a `String`. -/// That is, it is made up of three word sized components: a pointer to a -/// region of memory containing the bytes, a length and a capacity. -#[derive(Clone, Hash)] -pub struct BString { - pub(crate) bytes: Vec, -} - -impl BString { - #[inline] - pub(crate) fn as_bytes(&self) -> &[u8] { - &self.bytes - } - - #[inline] - pub(crate) fn as_bstr(&self) -> &BStr { - BStr::new(&self.bytes) - } - - #[inline] - pub(crate) fn as_mut_bstr(&mut self) -> &mut BStr { - BStr::new_mut(&mut self.bytes) - } -} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/bstr.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/bstr.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/bstr.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/bstr.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,74 +0,0 @@ -use core::mem; - -/// A wrapper for `&[u8]` that provides convenient string oriented trait impls. -/// -/// If you need ownership or a growable byte string buffer, then use -/// [`BString`](struct.BString.html). -/// -/// Using a `&BStr` is just like using a `&[u8]`, since `BStr` -/// implements `Deref` to `[u8]`. So all methods available on `[u8]` -/// are also available on `BStr`. -/// -/// # Representation -/// -/// A `&BStr` has the same representation as a `&str`. That is, a `&BStr` is -/// a fat pointer which consists of a pointer to some bytes and a length. -/// -/// # Trait implementations -/// -/// The `BStr` type has a number of trait implementations, and in particular, -/// defines equality and ordinal comparisons between `&BStr`, `&str` and -/// `&[u8]` for convenience. -/// -/// The `Debug` implementation for `BStr` shows its bytes as a normal string. -/// For invalid UTF-8, hex escape sequences are used. -/// -/// The `Display` implementation behaves as if `BStr` were first lossily -/// converted to a `str`. Invalid UTF-8 bytes are substituted with the Unicode -/// replacement codepoint, which looks like this: �. 
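A short, hedged sketch of the `Debug`/`Display` behavior described above, using the public `B` and `ByteSlice` helpers rather than the crate-internal constructors; the exact `Debug` escape formatting is only printed, not asserted.

```
use bstr::{ByteSlice, B};

fn main() {
    let bytes = B(b"abc\xFFxyz");

    // Debug renders the bytes like a normal string, hex-escaping the
    // invalid UTF-8 byte.
    println!("{:?}", bytes.as_bstr());

    // Display is lossy: the invalid byte becomes the replacement codepoint.
    assert_eq!(format!("{}", bytes.as_bstr()), "abc\u{FFFD}xyz");
}
```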
-#[derive(Hash)] -#[repr(transparent)] -pub struct BStr { - pub(crate) bytes: [u8], -} - -impl BStr { - #[inline] - pub(crate) fn new>(bytes: &B) -> &BStr { - BStr::from_bytes(bytes.as_ref()) - } - - #[inline] - pub(crate) fn new_mut>( - bytes: &mut B, - ) -> &mut BStr { - BStr::from_bytes_mut(bytes.as_mut()) - } - - #[inline] - pub(crate) fn from_bytes(slice: &[u8]) -> &BStr { - unsafe { mem::transmute(slice) } - } - - #[inline] - pub(crate) fn from_bytes_mut(slice: &mut [u8]) -> &mut BStr { - unsafe { mem::transmute(slice) } - } - - #[inline] - #[cfg(feature = "std")] - pub(crate) fn from_boxed_bytes(slice: Box<[u8]>) -> Box { - unsafe { Box::from_raw(Box::into_raw(slice) as _) } - } - - #[inline] - #[cfg(feature = "std")] - pub(crate) fn into_boxed_bytes(slice: Box) -> Box<[u8]> { - unsafe { Box::from_raw(Box::into_raw(slice) as _) } - } - - #[inline] - pub(crate) fn as_bytes(&self) -> &[u8] { - &self.bytes - } -} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/byteset/mod.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/byteset/mod.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/byteset/mod.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/byteset/mod.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,114 +0,0 @@ -use memchr::{memchr, memchr2, memchr3, memrchr, memrchr2, memrchr3}; -mod scalar; - -#[inline] -fn build_table(byteset: &[u8]) -> [u8; 256] { - let mut table = [0u8; 256]; - for &b in byteset { - table[b as usize] = 1; - } - table -} - -#[inline] -pub(crate) fn find(haystack: &[u8], byteset: &[u8]) -> Option { - match byteset.len() { - 0 => return None, - 1 => memchr(byteset[0], haystack), - 2 => memchr2(byteset[0], byteset[1], haystack), - 3 => memchr3(byteset[0], byteset[1], byteset[2], haystack), - _ => { - let table = build_table(byteset); - scalar::forward_search_bytes(haystack, |b| table[b as usize] != 0) - } - } -} - -#[inline] -pub(crate) fn rfind(haystack: &[u8], byteset: &[u8]) -> Option { - match byteset.len() { - 0 => return None, - 1 => memrchr(byteset[0], haystack), - 2 => memrchr2(byteset[0], byteset[1], haystack), - 3 => memrchr3(byteset[0], byteset[1], byteset[2], haystack), - _ => { - let table = build_table(byteset); - scalar::reverse_search_bytes(haystack, |b| table[b as usize] != 0) - } - } -} - -#[inline] -pub(crate) fn find_not(haystack: &[u8], byteset: &[u8]) -> Option { - if haystack.is_empty() { - return None; - } - match byteset.len() { - 0 => return Some(0), - 1 => scalar::inv_memchr(byteset[0], haystack), - 2 => scalar::forward_search_bytes(haystack, |b| { - b != byteset[0] && b != byteset[1] - }), - 3 => scalar::forward_search_bytes(haystack, |b| { - b != byteset[0] && b != byteset[1] && b != byteset[2] - }), - _ => { - let table = build_table(byteset); - scalar::forward_search_bytes(haystack, |b| table[b as usize] == 0) - } - } -} -#[inline] -pub(crate) fn rfind_not(haystack: &[u8], byteset: &[u8]) -> Option { - if haystack.is_empty() { - return None; - } - match byteset.len() { - 0 => return Some(haystack.len() - 1), - 1 => scalar::inv_memrchr(byteset[0], haystack), - 2 => scalar::reverse_search_bytes(haystack, |b| { - b != byteset[0] && b != byteset[1] - }), - 3 => scalar::reverse_search_bytes(haystack, |b| { - b != byteset[0] && b != byteset[1] && b != byteset[2] - }), - _ => { - let table = build_table(byteset); - scalar::reverse_search_bytes(haystack, |b| table[b as usize] == 0) - } - } -} - -#[cfg(test)] -mod tests { - quickcheck::quickcheck! 
{ - fn qc_byteset_forward_matches_naive( - haystack: Vec, - needles: Vec - ) -> bool { - super::find(&haystack, &needles) - == haystack.iter().position(|b| needles.contains(b)) - } - fn qc_byteset_backwards_matches_naive( - haystack: Vec, - needles: Vec - ) -> bool { - super::rfind(&haystack, &needles) - == haystack.iter().rposition(|b| needles.contains(b)) - } - fn qc_byteset_forward_not_matches_naive( - haystack: Vec, - needles: Vec - ) -> bool { - super::find_not(&haystack, &needles) - == haystack.iter().position(|b| !needles.contains(b)) - } - fn qc_byteset_backwards_not_matches_naive( - haystack: Vec, - needles: Vec - ) -> bool { - super::rfind_not(&haystack, &needles) - == haystack.iter().rposition(|b| !needles.contains(b)) - } - } -} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/byteset/scalar.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/byteset/scalar.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/byteset/scalar.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/byteset/scalar.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,295 +0,0 @@ -// This is adapted from `fallback.rs` from rust-memchr. It's modified to return -// the 'inverse' query of memchr, e.g. finding the first byte not in the provided -// set. This is simple for the 1-byte case. - -use core::cmp; -use core::usize; - -#[cfg(target_pointer_width = "32")] -const USIZE_BYTES: usize = 4; - -#[cfg(target_pointer_width = "64")] -const USIZE_BYTES: usize = 8; - -// The number of bytes to loop at in one iteration of memchr/memrchr. -const LOOP_SIZE: usize = 2 * USIZE_BYTES; - -/// Repeat the given byte into a word size number. That is, every 8 bits -/// is equivalent to the given byte. For example, if `b` is `\x4E` or -/// `01001110` in binary, then the returned value on a 32-bit system would be: -/// `01001110_01001110_01001110_01001110`. -#[inline(always)] -fn repeat_byte(b: u8) -> usize { - (b as usize) * (usize::MAX / 255) -} - -pub fn inv_memchr(n1: u8, haystack: &[u8]) -> Option { - let vn1 = repeat_byte(n1); - let confirm = |byte| byte != n1; - let loop_size = cmp::min(LOOP_SIZE, haystack.len()); - let align = USIZE_BYTES - 1; - let start_ptr = haystack.as_ptr(); - let end_ptr = haystack[haystack.len()..].as_ptr(); - let mut ptr = start_ptr; - - unsafe { - if haystack.len() < USIZE_BYTES { - return forward_search(start_ptr, end_ptr, ptr, confirm); - } - - let chunk = read_unaligned_usize(ptr); - if (chunk ^ vn1) != 0 { - return forward_search(start_ptr, end_ptr, ptr, confirm); - } - - ptr = ptr.add(USIZE_BYTES - (start_ptr as usize & align)); - debug_assert!(ptr > start_ptr); - debug_assert!(end_ptr.sub(USIZE_BYTES) >= start_ptr); - while loop_size == LOOP_SIZE && ptr <= end_ptr.sub(loop_size) { - debug_assert_eq!(0, (ptr as usize) % USIZE_BYTES); - - let a = *(ptr as *const usize); - let b = *(ptr.add(USIZE_BYTES) as *const usize); - let eqa = (a ^ vn1) != 0; - let eqb = (b ^ vn1) != 0; - if eqa || eqb { - break; - } - ptr = ptr.add(LOOP_SIZE); - } - forward_search(start_ptr, end_ptr, ptr, confirm) - } -} - -/// Return the last index not matching the byte `x` in `text`. 
-pub fn inv_memrchr(n1: u8, haystack: &[u8]) -> Option { - let vn1 = repeat_byte(n1); - let confirm = |byte| byte != n1; - let loop_size = cmp::min(LOOP_SIZE, haystack.len()); - let align = USIZE_BYTES - 1; - let start_ptr = haystack.as_ptr(); - let end_ptr = haystack[haystack.len()..].as_ptr(); - let mut ptr = end_ptr; - - unsafe { - if haystack.len() < USIZE_BYTES { - return reverse_search(start_ptr, end_ptr, ptr, confirm); - } - - let chunk = read_unaligned_usize(ptr.sub(USIZE_BYTES)); - if (chunk ^ vn1) != 0 { - return reverse_search(start_ptr, end_ptr, ptr, confirm); - } - - ptr = (end_ptr as usize & !align) as *const u8; - debug_assert!(start_ptr <= ptr && ptr <= end_ptr); - while loop_size == LOOP_SIZE && ptr >= start_ptr.add(loop_size) { - debug_assert_eq!(0, (ptr as usize) % USIZE_BYTES); - - let a = *(ptr.sub(2 * USIZE_BYTES) as *const usize); - let b = *(ptr.sub(1 * USIZE_BYTES) as *const usize); - let eqa = (a ^ vn1) != 0; - let eqb = (b ^ vn1) != 0; - if eqa || eqb { - break; - } - ptr = ptr.sub(loop_size); - } - reverse_search(start_ptr, end_ptr, ptr, confirm) - } -} - -#[inline(always)] -unsafe fn forward_search bool>( - start_ptr: *const u8, - end_ptr: *const u8, - mut ptr: *const u8, - confirm: F, -) -> Option { - debug_assert!(start_ptr <= ptr); - debug_assert!(ptr <= end_ptr); - - while ptr < end_ptr { - if confirm(*ptr) { - return Some(sub(ptr, start_ptr)); - } - ptr = ptr.offset(1); - } - None -} - -#[inline(always)] -unsafe fn reverse_search bool>( - start_ptr: *const u8, - end_ptr: *const u8, - mut ptr: *const u8, - confirm: F, -) -> Option { - debug_assert!(start_ptr <= ptr); - debug_assert!(ptr <= end_ptr); - - while ptr > start_ptr { - ptr = ptr.offset(-1); - if confirm(*ptr) { - return Some(sub(ptr, start_ptr)); - } - } - None -} - -unsafe fn read_unaligned_usize(ptr: *const u8) -> usize { - (ptr as *const usize).read_unaligned() -} - -/// Subtract `b` from `a` and return the difference. `a` should be greater than -/// or equal to `b`. -fn sub(a: *const u8, b: *const u8) -> usize { - debug_assert!(a >= b); - (a as usize) - (b as usize) -} - -/// Safe wrapper around `forward_search` -#[inline] -pub(crate) fn forward_search_bytes bool>( - s: &[u8], - confirm: F, -) -> Option { - unsafe { - let start = s.as_ptr(); - let end = start.add(s.len()); - forward_search(start, end, start, confirm) - } -} - -/// Safe wrapper around `reverse_search` -#[inline] -pub(crate) fn reverse_search_bytes bool>( - s: &[u8], - confirm: F, -) -> Option { - unsafe { - let start = s.as_ptr(); - let end = start.add(s.len()); - reverse_search(start, end, end, confirm) - } -} - -#[cfg(test)] -mod tests { - use super::{inv_memchr, inv_memrchr}; - // search string, search byte, inv_memchr result, inv_memrchr result. - // these are expanded into a much larger set of tests in build_tests - const TESTS: &[(&[u8], u8, usize, usize)] = &[ - (b"z", b'a', 0, 0), - (b"zz", b'a', 0, 1), - (b"aza", b'a', 1, 1), - (b"zaz", b'a', 0, 2), - (b"zza", b'a', 0, 1), - (b"zaa", b'a', 0, 0), - (b"zzz", b'a', 0, 2), - ]; - - type TestCase = (Vec, u8, Option<(usize, usize)>); - - fn build_tests() -> Vec { - let mut result = vec![]; - for &(search, byte, fwd_pos, rev_pos) in TESTS { - result.push((search.to_vec(), byte, Some((fwd_pos, rev_pos)))); - for i in 1..515 { - // add a bunch of copies of the search byte to the end. 
- let mut suffixed: Vec = search.into(); - suffixed.extend(std::iter::repeat(byte).take(i)); - result.push((suffixed, byte, Some((fwd_pos, rev_pos)))); - - // add a bunch of copies of the search byte to the start. - let mut prefixed: Vec = - std::iter::repeat(byte).take(i).collect(); - prefixed.extend(search); - result.push(( - prefixed, - byte, - Some((fwd_pos + i, rev_pos + i)), - )); - - // add a bunch of copies of the search byte to both ends. - let mut surrounded: Vec = - std::iter::repeat(byte).take(i).collect(); - surrounded.extend(search); - surrounded.extend(std::iter::repeat(byte).take(i)); - result.push(( - surrounded, - byte, - Some((fwd_pos + i, rev_pos + i)), - )); - } - } - - // build non-matching tests for several sizes - for i in 0..515 { - result.push(( - std::iter::repeat(b'\0').take(i).collect(), - b'\0', - None, - )); - } - - result - } - - #[test] - fn test_inv_memchr() { - use crate::{ByteSlice, B}; - for (search, byte, matching) in build_tests() { - assert_eq!( - inv_memchr(byte, &search), - matching.map(|m| m.0), - "inv_memchr when searching for {:?} in {:?}", - byte as char, - // better printing - B(&search).as_bstr(), - ); - assert_eq!( - inv_memrchr(byte, &search), - matching.map(|m| m.1), - "inv_memrchr when searching for {:?} in {:?}", - byte as char, - // better printing - B(&search).as_bstr(), - ); - // Test a rather large number off offsets for potential alignment issues - for offset in 1..130 { - if offset >= search.len() { - break; - } - // If this would cause us to shift the results off the end, skip - // it so that we don't have to recompute them. - if let Some((f, r)) = matching { - if offset > f || offset > r { - break; - } - } - let realigned = &search[offset..]; - - let forward_pos = matching.map(|m| m.0 - offset); - let reverse_pos = matching.map(|m| m.1 - offset); - - assert_eq!( - inv_memchr(byte, &realigned), - forward_pos, - "inv_memchr when searching (realigned by {}) for {:?} in {:?}", - offset, - byte as char, - realigned.as_bstr(), - ); - assert_eq!( - inv_memrchr(byte, &realigned), - reverse_pos, - "inv_memrchr when searching (realigned by {}) for {:?} in {:?}", - offset, - byte as char, - realigned.as_bstr(), - ); - } - } - } -} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/ext_slice.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/ext_slice.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/ext_slice.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/ext_slice.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,3655 +0,0 @@ -#[cfg(feature = "std")] -use std::borrow::Cow; -#[cfg(feature = "std")] -use std::ffi::OsStr; -#[cfg(feature = "std")] -use std::path::Path; - -use core::{iter, ops, ptr, slice, str}; -use memchr::{memchr, memmem, memrchr}; - -use crate::ascii; -use crate::bstr::BStr; -use crate::byteset; -#[cfg(feature = "std")] -use crate::ext_vec::ByteVec; -#[cfg(feature = "unicode")] -use crate::unicode::{ - whitespace_len_fwd, whitespace_len_rev, GraphemeIndices, Graphemes, - SentenceIndices, Sentences, WordIndices, Words, WordsWithBreakIndices, - WordsWithBreaks, -}; -use crate::utf8::{self, CharIndices, Chars, Utf8Chunks, Utf8Error}; - -/// A short-hand constructor for building a `&[u8]`. -/// -/// This idiosyncratic constructor is useful for concisely building byte string -/// slices. Its primary utility is in conveniently writing byte string literals -/// in a uniform way. 
For example, consider this code that does not compile: -/// -/// ```ignore -/// let strs = vec![b"a", b"xy"]; -/// ``` -/// -/// The above code doesn't compile because the type of the byte string literal -/// `b"a"` is `&'static [u8; 1]`, and the type of `b"xy"` is -/// `&'static [u8; 2]`. Since their types aren't the same, they can't be stored -/// in the same `Vec`. (This is dissimilar from normal Unicode string slices, -/// where both `"a"` and `"xy"` have the same type of `&'static str`.) -/// -/// One way of getting the above code to compile is to convert byte strings to -/// slices. You might try this: -/// -/// ```ignore -/// let strs = vec![&b"a", &b"xy"]; -/// ``` -/// -/// But this just creates values with type `& &'static [u8; 1]` and -/// `& &'static [u8; 2]`. Instead, you need to force the issue like so: -/// -/// ``` -/// let strs = vec![&b"a"[..], &b"xy"[..]]; -/// // or -/// let strs = vec![b"a".as_ref(), b"xy".as_ref()]; -/// ``` -/// -/// But neither of these are particularly convenient to type, especially when -/// it's something as common as a string literal. Thus, this constructor -/// permits writing the following instead: -/// -/// ``` -/// use bstr::B; -/// -/// let strs = vec![B("a"), B(b"xy")]; -/// ``` -/// -/// Notice that this also lets you mix and match both string literals and byte -/// string literals. This can be quite convenient! -#[allow(non_snake_case)] -#[inline] -pub fn B<'a, B: ?Sized + AsRef<[u8]>>(bytes: &'a B) -> &'a [u8] { - bytes.as_ref() -} - -impl ByteSlice for [u8] { - #[inline] - fn as_bytes(&self) -> &[u8] { - self - } - - #[inline] - fn as_bytes_mut(&mut self) -> &mut [u8] { - self - } -} - -/// Ensure that callers cannot implement `ByteSlice` by making an -/// umplementable trait its super trait. -pub trait Sealed {} -impl Sealed for [u8] {} - -/// A trait that extends `&[u8]` with string oriented methods. -pub trait ByteSlice: Sealed { - /// A method for accessing the raw bytes of this type. This is always a - /// no-op and callers shouldn't care about it. This only exists for making - /// the extension trait work. - #[doc(hidden)] - fn as_bytes(&self) -> &[u8]; - - /// A method for accessing the raw bytes of this type, mutably. This is - /// always a no-op and callers shouldn't care about it. This only exists - /// for making the extension trait work. - #[doc(hidden)] - fn as_bytes_mut(&mut self) -> &mut [u8]; - - /// Return this byte slice as a `&BStr`. - /// - /// Use `&BStr` is useful because of its `fmt::Debug` representation - /// and various other trait implementations (such as `PartialEq` and - /// `PartialOrd`). In particular, the `Debug` implementation for `BStr` - /// shows its bytes as a normal string. For invalid UTF-8, hex escape - /// sequences are used. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use bstr::ByteSlice; - /// - /// println!("{:?}", b"foo\xFFbar".as_bstr()); - /// ``` - #[inline] - fn as_bstr(&self) -> &BStr { - BStr::new(self.as_bytes()) - } - - /// Return this byte slice as a `&mut BStr`. - /// - /// Use `&mut BStr` is useful because of its `fmt::Debug` representation - /// and various other trait implementations (such as `PartialEq` and - /// `PartialOrd`). In particular, the `Debug` implementation for `BStr` - /// shows its bytes as a normal string. For invalid UTF-8, hex escape - /// sequences are used. 
- /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use bstr::ByteSlice; - /// - /// let mut bytes = *b"foo\xFFbar"; - /// println!("{:?}", &mut bytes.as_bstr_mut()); - /// ``` - #[inline] - fn as_bstr_mut(&mut self) -> &mut BStr { - BStr::new_mut(self.as_bytes_mut()) - } - - /// Create an immutable byte string from an OS string slice. - /// - /// On Unix, this always succeeds and is zero cost. On non-Unix systems, - /// this returns `None` if the given OS string is not valid UTF-8. (For - /// example, on Windows, file paths are allowed to be a sequence of - /// arbitrary 16-bit integers. Not all such sequences can be transcoded to - /// valid UTF-8.) - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use std::ffi::OsStr; - /// - /// use bstr::{B, ByteSlice}; - /// - /// let os_str = OsStr::new("foo"); - /// let bs = <[u8]>::from_os_str(os_str).expect("should be valid UTF-8"); - /// assert_eq!(bs, B("foo")); - /// ``` - #[cfg(feature = "std")] - #[inline] - fn from_os_str(os_str: &OsStr) -> Option<&[u8]> { - #[cfg(unix)] - #[inline] - fn imp(os_str: &OsStr) -> Option<&[u8]> { - use std::os::unix::ffi::OsStrExt; - - Some(os_str.as_bytes()) - } - - #[cfg(not(unix))] - #[inline] - fn imp(os_str: &OsStr) -> Option<&[u8]> { - os_str.to_str().map(|s| s.as_bytes()) - } - - imp(os_str) - } - - /// Create an immutable byte string from a file path. - /// - /// On Unix, this always succeeds and is zero cost. On non-Unix systems, - /// this returns `None` if the given path is not valid UTF-8. (For example, - /// on Windows, file paths are allowed to be a sequence of arbitrary 16-bit - /// integers. Not all such sequences can be transcoded to valid UTF-8.) - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use std::path::Path; - /// - /// use bstr::{B, ByteSlice}; - /// - /// let path = Path::new("foo"); - /// let bs = <[u8]>::from_path(path).expect("should be valid UTF-8"); - /// assert_eq!(bs, B("foo")); - /// ``` - #[cfg(feature = "std")] - #[inline] - fn from_path(path: &Path) -> Option<&[u8]> { - Self::from_os_str(path.as_os_str()) - } - - /// Safely convert this byte string into a `&str` if it's valid UTF-8. - /// - /// If this byte string is not valid UTF-8, then an error is returned. The - /// error returned indicates the first invalid byte found and the length - /// of the error. - /// - /// In cases where a lossy conversion to `&str` is acceptable, then use one - /// of the [`to_str_lossy`](trait.ByteSlice.html#method.to_str_lossy) or - /// [`to_str_lossy_into`](trait.ByteSlice.html#method.to_str_lossy_into) - /// methods. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use bstr::{B, ByteSlice, ByteVec}; - /// - /// # fn example() -> Result<(), bstr::Utf8Error> { - /// let s = B("☃βツ").to_str()?; - /// assert_eq!("☃βツ", s); - /// - /// let mut bstring = >::from("☃βツ"); - /// bstring.push(b'\xFF'); - /// let err = bstring.to_str().unwrap_err(); - /// assert_eq!(8, err.valid_up_to()); - /// # Ok(()) }; example().unwrap() - /// ``` - #[inline] - fn to_str(&self) -> Result<&str, Utf8Error> { - utf8::validate(self.as_bytes()).map(|_| { - // SAFETY: This is safe because of the guarantees provided by - // utf8::validate. - unsafe { str::from_utf8_unchecked(self.as_bytes()) } - }) - } - - /// Unsafely convert this byte string into a `&str`, without checking for - /// valid UTF-8. - /// - /// # Safety - /// - /// Callers *must* ensure that this byte string is valid UTF-8 before - /// calling this method. 
Converting a byte string into a `&str` that is - /// not valid UTF-8 is considered undefined behavior. - /// - /// This routine is useful in performance sensitive contexts where the - /// UTF-8 validity of the byte string is already known and it is - /// undesirable to pay the cost of an additional UTF-8 validation check - /// that [`to_str`](trait.ByteSlice.html#method.to_str) performs. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use bstr::{B, ByteSlice}; - /// - /// // SAFETY: This is safe because string literals are guaranteed to be - /// // valid UTF-8 by the Rust compiler. - /// let s = unsafe { B("☃βツ").to_str_unchecked() }; - /// assert_eq!("☃βツ", s); - /// ``` - #[inline] - unsafe fn to_str_unchecked(&self) -> &str { - str::from_utf8_unchecked(self.as_bytes()) - } - - /// Convert this byte string to a valid UTF-8 string by replacing invalid - /// UTF-8 bytes with the Unicode replacement codepoint (`U+FFFD`). - /// - /// If the byte string is already valid UTF-8, then no copying or - /// allocation is performed and a borrrowed string slice is returned. If - /// the byte string is not valid UTF-8, then an owned string buffer is - /// returned with invalid bytes replaced by the replacement codepoint. - /// - /// This method uses the "substitution of maximal subparts" (Unicode - /// Standard, Chapter 3, Section 9) strategy for inserting the replacement - /// codepoint. Specifically, a replacement codepoint is inserted whenever a - /// byte is found that cannot possibly lead to a valid code unit sequence. - /// If there were previous bytes that represented a prefix of a well-formed - /// code unit sequence, then all of those bytes are substituted with a - /// single replacement codepoint. The "substitution of maximal subparts" - /// strategy is the same strategy used by - /// [W3C's Encoding standard](https://www.w3.org/TR/encoding/). - /// For a more precise description of the maximal subpart strategy, see - /// the Unicode Standard, Chapter 3, Section 9. See also - /// [Public Review Issue #121](http://www.unicode.org/review/pr-121.html). - /// - /// N.B. Rust's standard library also appears to use the same strategy, - /// but it does not appear to be an API guarantee. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use std::borrow::Cow; - /// - /// use bstr::ByteSlice; - /// - /// let mut bstring = >::from("☃βツ"); - /// assert_eq!(Cow::Borrowed("☃βツ"), bstring.to_str_lossy()); - /// - /// // Add a byte that makes the sequence invalid. - /// bstring.push(b'\xFF'); - /// assert_eq!(Cow::Borrowed("☃βツ\u{FFFD}"), bstring.to_str_lossy()); - /// ``` - /// - /// This demonstrates the "maximal subpart" substitution logic. - /// - /// ``` - /// use bstr::{B, ByteSlice}; - /// - /// // \x61 is the ASCII codepoint for 'a'. - /// // \xF1\x80\x80 is a valid 3-byte code unit prefix. - /// // \xE1\x80 is a valid 2-byte code unit prefix. - /// // \xC2 is a valid 1-byte code unit prefix. - /// // \x62 is the ASCII codepoint for 'b'. - /// // - /// // In sum, each of the prefixes is replaced by a single replacement - /// // codepoint since none of the prefixes are properly completed. This - /// // is in contrast to other strategies that might insert a replacement - /// // codepoint for every single byte. 
- /// let bs = B(b"\x61\xF1\x80\x80\xE1\x80\xC2\x62"); - /// assert_eq!("a\u{FFFD}\u{FFFD}\u{FFFD}b", bs.to_str_lossy()); - /// ``` - #[cfg(feature = "std")] - #[inline] - fn to_str_lossy(&self) -> Cow<'_, str> { - match utf8::validate(self.as_bytes()) { - Ok(()) => { - // SAFETY: This is safe because of the guarantees provided by - // utf8::validate. - unsafe { - Cow::Borrowed(str::from_utf8_unchecked(self.as_bytes())) - } - } - Err(err) => { - let mut lossy = String::with_capacity(self.as_bytes().len()); - let (valid, after) = - self.as_bytes().split_at(err.valid_up_to()); - // SAFETY: This is safe because utf8::validate guarantees - // that all of `valid` is valid UTF-8. - lossy.push_str(unsafe { str::from_utf8_unchecked(valid) }); - lossy.push_str("\u{FFFD}"); - if let Some(len) = err.error_len() { - after[len..].to_str_lossy_into(&mut lossy); - } - Cow::Owned(lossy) - } - } - } - - /// Copy the contents of this byte string into the given owned string - /// buffer, while replacing invalid UTF-8 code unit sequences with the - /// Unicode replacement codepoint (`U+FFFD`). - /// - /// This method uses the same "substitution of maximal subparts" strategy - /// for inserting the replacement codepoint as the - /// [`to_str_lossy`](trait.ByteSlice.html#method.to_str_lossy) method. - /// - /// This routine is useful for amortizing allocation. However, unlike - /// `to_str_lossy`, this routine will _always_ copy the contents of this - /// byte string into the destination buffer, even if this byte string is - /// valid UTF-8. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use std::borrow::Cow; - /// - /// use bstr::ByteSlice; - /// - /// let mut bstring = >::from("☃βツ"); - /// // Add a byte that makes the sequence invalid. - /// bstring.push(b'\xFF'); - /// - /// let mut dest = String::new(); - /// bstring.to_str_lossy_into(&mut dest); - /// assert_eq!("☃βツ\u{FFFD}", dest); - /// ``` - #[cfg(feature = "std")] - #[inline] - fn to_str_lossy_into(&self, dest: &mut String) { - let mut bytes = self.as_bytes(); - dest.reserve(bytes.len()); - loop { - match utf8::validate(bytes) { - Ok(()) => { - // SAFETY: This is safe because utf8::validate guarantees - // that all of `bytes` is valid UTF-8. - dest.push_str(unsafe { str::from_utf8_unchecked(bytes) }); - break; - } - Err(err) => { - let (valid, after) = bytes.split_at(err.valid_up_to()); - // SAFETY: This is safe because utf8::validate guarantees - // that all of `valid` is valid UTF-8. - dest.push_str(unsafe { str::from_utf8_unchecked(valid) }); - dest.push_str("\u{FFFD}"); - match err.error_len() { - None => break, - Some(len) => bytes = &after[len..], - } - } - } - } - } - - /// Create an OS string slice from this byte string. - /// - /// On Unix, this always succeeds and is zero cost. On non-Unix systems, - /// this returns a UTF-8 decoding error if this byte string is not valid - /// UTF-8. (For example, on Windows, file paths are allowed to be a - /// sequence of arbitrary 16-bit integers. There is no obvious mapping from - /// an arbitrary sequence of 8-bit integers to an arbitrary sequence of - /// 16-bit integers.) 
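As an aside, a minimal sketch contrasting the strict and lossy conversions documented above (public bstr 0.2 API, default `std` feature assumed).

```
use bstr::{ByteSlice, B};

fn main() {
    let bad = B(b"foo\xFFbar");

    // Strict conversion: the error reports how many leading bytes were valid.
    let err = bad.to_str().unwrap_err();
    assert_eq!(err.valid_up_to(), 3);

    // Lossy conversion: the single invalid byte becomes one U+FFFD.
    assert_eq!(bad.to_str_lossy(), "foo\u{FFFD}bar");
}
```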
- /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use bstr::{B, ByteSlice}; - /// - /// let os_str = b"foo".to_os_str().expect("should be valid UTF-8"); - /// assert_eq!(os_str, "foo"); - /// ``` - #[cfg(feature = "std")] - #[inline] - fn to_os_str(&self) -> Result<&OsStr, Utf8Error> { - #[cfg(unix)] - #[inline] - fn imp(bytes: &[u8]) -> Result<&OsStr, Utf8Error> { - use std::os::unix::ffi::OsStrExt; - - Ok(OsStr::from_bytes(bytes)) - } - - #[cfg(not(unix))] - #[inline] - fn imp(bytes: &[u8]) -> Result<&OsStr, Utf8Error> { - bytes.to_str().map(OsStr::new) - } - - imp(self.as_bytes()) - } - - /// Lossily create an OS string slice from this byte string. - /// - /// On Unix, this always succeeds and is zero cost. On non-Unix systems, - /// this will perform a UTF-8 check and lossily convert this byte string - /// into valid UTF-8 using the Unicode replacement codepoint. - /// - /// Note that this can prevent the correct roundtripping of file paths on - /// non-Unix systems such as Windows, where file paths are an arbitrary - /// sequence of 16-bit integers. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use bstr::ByteSlice; - /// - /// let os_str = b"foo\xFFbar".to_os_str_lossy(); - /// assert_eq!(os_str.to_string_lossy(), "foo\u{FFFD}bar"); - /// ``` - #[cfg(feature = "std")] - #[inline] - fn to_os_str_lossy(&self) -> Cow<'_, OsStr> { - #[cfg(unix)] - #[inline] - fn imp(bytes: &[u8]) -> Cow<'_, OsStr> { - use std::os::unix::ffi::OsStrExt; - - Cow::Borrowed(OsStr::from_bytes(bytes)) - } - - #[cfg(not(unix))] - #[inline] - fn imp(bytes: &[u8]) -> Cow { - use std::ffi::OsString; - - match bytes.to_str_lossy() { - Cow::Borrowed(x) => Cow::Borrowed(OsStr::new(x)), - Cow::Owned(x) => Cow::Owned(OsString::from(x)), - } - } - - imp(self.as_bytes()) - } - - /// Create a path slice from this byte string. - /// - /// On Unix, this always succeeds and is zero cost. On non-Unix systems, - /// this returns a UTF-8 decoding error if this byte string is not valid - /// UTF-8. (For example, on Windows, file paths are allowed to be a - /// sequence of arbitrary 16-bit integers. There is no obvious mapping from - /// an arbitrary sequence of 8-bit integers to an arbitrary sequence of - /// 16-bit integers.) - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use bstr::ByteSlice; - /// - /// let path = b"foo".to_path().expect("should be valid UTF-8"); - /// assert_eq!(path.as_os_str(), "foo"); - /// ``` - #[cfg(feature = "std")] - #[inline] - fn to_path(&self) -> Result<&Path, Utf8Error> { - self.to_os_str().map(Path::new) - } - - /// Lossily create a path slice from this byte string. - /// - /// On Unix, this always succeeds and is zero cost. On non-Unix systems, - /// this will perform a UTF-8 check and lossily convert this byte string - /// into valid UTF-8 using the Unicode replacement codepoint. - /// - /// Note that this can prevent the correct roundtripping of file paths on - /// non-Unix systems such as Windows, where file paths are an arbitrary - /// sequence of 16-bit integers. 
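A small, hedged sketch of the path round-trip described above; `/tmp/data.txt` is just an illustrative valid-UTF-8 path, and on Unix both directions are zero-cost.

```
use std::path::Path;
use bstr::ByteSlice;

fn main() {
    let path = Path::new("/tmp/data.txt");

    // Path -> bytes (always succeeds on Unix; needs valid UTF-8 elsewhere).
    let bytes = <[u8]>::from_path(path).expect("valid UTF-8 path");

    // Bytes -> Path again.
    let back = bytes.to_path().expect("valid UTF-8 bytes");
    assert_eq!(back, path);
}
```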
- /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use bstr::ByteSlice; - /// - /// let bs = b"foo\xFFbar"; - /// let path = bs.to_path_lossy(); - /// assert_eq!(path.to_string_lossy(), "foo\u{FFFD}bar"); - /// ``` - #[cfg(feature = "std")] - #[inline] - fn to_path_lossy(&self) -> Cow<'_, Path> { - use std::path::PathBuf; - - match self.to_os_str_lossy() { - Cow::Borrowed(x) => Cow::Borrowed(Path::new(x)), - Cow::Owned(x) => Cow::Owned(PathBuf::from(x)), - } - } - - /// Create a new byte string by repeating this byte string `n` times. - /// - /// # Panics - /// - /// This function panics if the capacity of the new byte string would - /// overflow. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use bstr::{B, ByteSlice}; - /// - /// assert_eq!(b"foo".repeatn(4), B("foofoofoofoo")); - /// assert_eq!(b"foo".repeatn(0), B("")); - /// ``` - #[cfg(feature = "std")] - #[inline] - fn repeatn(&self, n: usize) -> Vec { - let bs = self.as_bytes(); - let mut dst = vec![0; bs.len() * n]; - for i in 0..n { - dst[i * bs.len()..(i + 1) * bs.len()].copy_from_slice(bs); - } - dst - } - - /// Returns true if and only if this byte string contains the given needle. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use bstr::ByteSlice; - /// - /// assert!(b"foo bar".contains_str("foo")); - /// assert!(b"foo bar".contains_str("bar")); - /// assert!(!b"foo".contains_str("foobar")); - /// ``` - #[inline] - fn contains_str>(&self, needle: B) -> bool { - self.find(needle).is_some() - } - - /// Returns true if and only if this byte string has the given prefix. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use bstr::ByteSlice; - /// - /// assert!(b"foo bar".starts_with_str("foo")); - /// assert!(!b"foo bar".starts_with_str("bar")); - /// assert!(!b"foo".starts_with_str("foobar")); - /// ``` - #[inline] - fn starts_with_str>(&self, prefix: B) -> bool { - self.as_bytes().starts_with(prefix.as_ref()) - } - - /// Returns true if and only if this byte string has the given suffix. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use bstr::ByteSlice; - /// - /// assert!(b"foo bar".ends_with_str("bar")); - /// assert!(!b"foo bar".ends_with_str("foo")); - /// assert!(!b"bar".ends_with_str("foobar")); - /// ``` - #[inline] - fn ends_with_str>(&self, suffix: B) -> bool { - self.as_bytes().ends_with(suffix.as_ref()) - } - - /// Returns the index of the first occurrence of the given needle. - /// - /// The needle may be any type that can be cheaply converted into a - /// `&[u8]`. This includes, but is not limited to, `&str` and `&[u8]`. - /// - /// Note that if you're are searching for the same needle in many - /// different small haystacks, it may be faster to initialize a - /// [`Finder`](struct.Finder.html) once, and reuse it for each search. - /// - /// # Complexity - /// - /// This routine is guaranteed to have worst case linear time complexity - /// with respect to both the needle and the haystack. That is, this runs - /// in `O(needle.len() + haystack.len())` time. - /// - /// This routine is also guaranteed to have worst case constant space - /// complexity. 
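As an aside on the `Finder` reuse suggested above: a minimal sketch, assuming the `Finder::new`/`Finder::find` API exposed by bstr 0.2, that builds the searcher once and then runs it over several small haystacks.

```
use bstr::Finder;

fn main() {
    // Build the searcher once...
    let finder = Finder::new("error");

    // ...then reuse it across many small haystacks.
    let haystacks: [&[u8]; 3] =
        [b"all good", b"an error occurred", b"error: boom"];
    let offsets: Vec<Option<usize>> =
        haystacks.iter().map(|h| finder.find(h)).collect();
    assert_eq!(offsets, vec![None, Some(3), Some(0)]);
}
```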
- /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use bstr::ByteSlice; - /// - /// let s = b"foo bar baz"; - /// assert_eq!(Some(0), s.find("foo")); - /// assert_eq!(Some(4), s.find("bar")); - /// assert_eq!(None, s.find("quux")); - /// ``` - #[inline] - fn find>(&self, needle: B) -> Option { - Finder::new(needle.as_ref()).find(self.as_bytes()) - } - - /// Returns the index of the last occurrence of the given needle. - /// - /// The needle may be any type that can be cheaply converted into a - /// `&[u8]`. This includes, but is not limited to, `&str` and `&[u8]`. - /// - /// Note that if you're are searching for the same needle in many - /// different small haystacks, it may be faster to initialize a - /// [`FinderReverse`](struct.FinderReverse.html) once, and reuse it for - /// each search. - /// - /// # Complexity - /// - /// This routine is guaranteed to have worst case linear time complexity - /// with respect to both the needle and the haystack. That is, this runs - /// in `O(needle.len() + haystack.len())` time. - /// - /// This routine is also guaranteed to have worst case constant space - /// complexity. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use bstr::ByteSlice; - /// - /// let s = b"foo bar baz"; - /// assert_eq!(Some(0), s.rfind("foo")); - /// assert_eq!(Some(4), s.rfind("bar")); - /// assert_eq!(Some(8), s.rfind("ba")); - /// assert_eq!(None, s.rfind("quux")); - /// ``` - #[inline] - fn rfind>(&self, needle: B) -> Option { - FinderReverse::new(needle.as_ref()).rfind(self.as_bytes()) - } - - /// Returns an iterator of the non-overlapping occurrences of the given - /// needle. The iterator yields byte offset positions indicating the start - /// of each match. - /// - /// # Complexity - /// - /// This routine is guaranteed to have worst case linear time complexity - /// with respect to both the needle and the haystack. That is, this runs - /// in `O(needle.len() + haystack.len())` time. - /// - /// This routine is also guaranteed to have worst case constant space - /// complexity. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use bstr::ByteSlice; - /// - /// let s = b"foo bar foo foo quux foo"; - /// let matches: Vec = s.find_iter("foo").collect(); - /// assert_eq!(matches, vec![0, 8, 12, 21]); - /// ``` - /// - /// An empty string matches at every position, including the position - /// immediately following the last byte: - /// - /// ``` - /// use bstr::ByteSlice; - /// - /// let matches: Vec = b"foo".find_iter("").collect(); - /// assert_eq!(matches, vec![0, 1, 2, 3]); - /// - /// let matches: Vec = b"".find_iter("").collect(); - /// assert_eq!(matches, vec![0]); - /// ``` - #[inline] - fn find_iter<'a, B: ?Sized + AsRef<[u8]>>( - &'a self, - needle: &'a B, - ) -> Find<'a> { - Find::new(self.as_bytes(), needle.as_ref()) - } - - /// Returns an iterator of the non-overlapping occurrences of the given - /// needle in reverse. The iterator yields byte offset positions indicating - /// the start of each match. - /// - /// # Complexity - /// - /// This routine is guaranteed to have worst case linear time complexity - /// with respect to both the needle and the haystack. That is, this runs - /// in `O(needle.len() + haystack.len())` time. - /// - /// This routine is also guaranteed to have worst case constant space - /// complexity. 
- /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use bstr::ByteSlice; - /// - /// let s = b"foo bar foo foo quux foo"; - /// let matches: Vec = s.rfind_iter("foo").collect(); - /// assert_eq!(matches, vec![21, 12, 8, 0]); - /// ``` - /// - /// An empty string matches at every position, including the position - /// immediately following the last byte: - /// - /// ``` - /// use bstr::ByteSlice; - /// - /// let matches: Vec = b"foo".rfind_iter("").collect(); - /// assert_eq!(matches, vec![3, 2, 1, 0]); - /// - /// let matches: Vec = b"".rfind_iter("").collect(); - /// assert_eq!(matches, vec![0]); - /// ``` - #[inline] - fn rfind_iter<'a, B: ?Sized + AsRef<[u8]>>( - &'a self, - needle: &'a B, - ) -> FindReverse<'a> { - FindReverse::new(self.as_bytes(), needle.as_ref()) - } - - /// Returns the index of the first occurrence of the given byte. If the - /// byte does not occur in this byte string, then `None` is returned. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use bstr::ByteSlice; - /// - /// assert_eq!(Some(10), b"foo bar baz".find_byte(b'z')); - /// assert_eq!(None, b"foo bar baz".find_byte(b'y')); - /// ``` - #[inline] - fn find_byte(&self, byte: u8) -> Option { - memchr(byte, self.as_bytes()) - } - - /// Returns the index of the last occurrence of the given byte. If the - /// byte does not occur in this byte string, then `None` is returned. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use bstr::ByteSlice; - /// - /// assert_eq!(Some(10), b"foo bar baz".rfind_byte(b'z')); - /// assert_eq!(None, b"foo bar baz".rfind_byte(b'y')); - /// ``` - #[inline] - fn rfind_byte(&self, byte: u8) -> Option { - memrchr(byte, self.as_bytes()) - } - - /// Returns the index of the first occurrence of the given codepoint. - /// If the codepoint does not occur in this byte string, then `None` is - /// returned. - /// - /// Note that if one searches for the replacement codepoint, `\u{FFFD}`, - /// then only explicit occurrences of that encoding will be found. Invalid - /// UTF-8 sequences will not be matched. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use bstr::{B, ByteSlice}; - /// - /// assert_eq!(Some(10), b"foo bar baz".find_char('z')); - /// assert_eq!(Some(4), B("αβγγδ").find_char('γ')); - /// assert_eq!(None, b"foo bar baz".find_char('y')); - /// ``` - #[inline] - fn find_char(&self, ch: char) -> Option { - self.find(ch.encode_utf8(&mut [0; 4])) - } - - /// Returns the index of the last occurrence of the given codepoint. - /// If the codepoint does not occur in this byte string, then `None` is - /// returned. - /// - /// Note that if one searches for the replacement codepoint, `\u{FFFD}`, - /// then only explicit occurrences of that encoding will be found. Invalid - /// UTF-8 sequences will not be matched. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use bstr::{B, ByteSlice}; - /// - /// assert_eq!(Some(10), b"foo bar baz".rfind_char('z')); - /// assert_eq!(Some(6), B("αβγγδ").rfind_char('γ')); - /// assert_eq!(None, b"foo bar baz".rfind_char('y')); - /// ``` - #[inline] - fn rfind_char(&self, ch: char) -> Option { - self.rfind(ch.encode_utf8(&mut [0; 4])) - } - - /// Returns the index of the first occurrence of any of the bytes in the - /// provided set. - /// - /// The `byteset` may be any type that can be cheaply converted into a - /// `&[u8]`. 
This includes, but is not limited to, `&str` and `&[u8]`, but - /// note that passing a `&str` which contains multibyte characters may not - /// behave as you expect: each byte in the `&str` is treated as an - /// individual member of the byte set. - /// - /// Note that order is irrelevant for the `byteset` parameter, and - /// duplicate bytes present in its body are ignored. - /// - /// # Complexity - /// - /// This routine is guaranteed to have worst case linear time complexity - /// with respect to both the set of bytes and the haystack. That is, this - /// runs in `O(byteset.len() + haystack.len())` time. - /// - /// This routine is also guaranteed to have worst case constant space - /// complexity. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use bstr::ByteSlice; - /// - /// assert_eq!(b"foo bar baz".find_byteset(b"zr"), Some(6)); - /// assert_eq!(b"foo baz bar".find_byteset(b"bzr"), Some(4)); - /// assert_eq!(None, b"foo baz bar".find_byteset(b"\t\n")); - /// ``` - #[inline] - fn find_byteset>(&self, byteset: B) -> Option { - byteset::find(self.as_bytes(), byteset.as_ref()) - } - - /// Returns the index of the first occurrence of a byte that is not a member - /// of the provided set. - /// - /// The `byteset` may be any type that can be cheaply converted into a - /// `&[u8]`. This includes, but is not limited to, `&str` and `&[u8]`, but - /// note that passing a `&str` which contains multibyte characters may not - /// behave as you expect: each byte in the `&str` is treated as an - /// individual member of the byte set. - /// - /// Note that order is irrelevant for the `byteset` parameter, and - /// duplicate bytes present in its body are ignored. - /// - /// # Complexity - /// - /// This routine is guaranteed to have worst case linear time complexity - /// with respect to both the set of bytes and the haystack. That is, this - /// runs in `O(byteset.len() + haystack.len())` time. - /// - /// This routine is also guaranteed to have worst case constant space - /// complexity. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use bstr::ByteSlice; - /// - /// assert_eq!(b"foo bar baz".find_not_byteset(b"fo "), Some(4)); - /// assert_eq!(b"\t\tbaz bar".find_not_byteset(b" \t\r\n"), Some(2)); - /// assert_eq!(b"foo\nbaz\tbar".find_not_byteset(b"\t\n"), Some(0)); - /// ``` - #[inline] - fn find_not_byteset>(&self, byteset: B) -> Option { - byteset::find_not(self.as_bytes(), byteset.as_ref()) - } - - /// Returns the index of the last occurrence of any of the bytes in the - /// provided set. - /// - /// The `byteset` may be any type that can be cheaply converted into a - /// `&[u8]`. This includes, but is not limited to, `&str` and `&[u8]`, but - /// note that passing a `&str` which contains multibyte characters may not - /// behave as you expect: each byte in the `&str` is treated as an - /// individual member of the byte set. - /// - /// Note that order is irrelevant for the `byteset` parameter, and duplicate - /// bytes present in its body are ignored. - /// - /// # Complexity - /// - /// This routine is guaranteed to have worst case linear time complexity - /// with respect to both the set of bytes and the haystack. That is, this - /// runs in `O(byteset.len() + haystack.len())` time. - /// - /// This routine is also guaranteed to have worst case constant space - /// complexity. 
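A short, hedged sketch showing the byteset searches above used together to locate a payload between leading and trailing whitespace (public bstr 0.2 API; the input bytes are illustrative).

```
use bstr::ByteSlice;

fn main() {
    let line = b"\t\t  value 42  \r\n";

    // First byte that is not whitespace, and one past the last such byte.
    let start = line.find_not_byteset(b" \t\r\n").unwrap();
    let end = line.rfind_not_byteset(b" \t\r\n").unwrap() + 1;

    assert_eq!(&line[start..end], b"value 42");
}
```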
- /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use bstr::ByteSlice; - /// - /// assert_eq!(b"foo bar baz".rfind_byteset(b"agb"), Some(9)); - /// assert_eq!(b"foo baz bar".rfind_byteset(b"rabz "), Some(10)); - /// assert_eq!(b"foo baz bar".rfind_byteset(b"\n123"), None); - /// ``` - #[inline] - fn rfind_byteset>(&self, byteset: B) -> Option { - byteset::rfind(self.as_bytes(), byteset.as_ref()) - } - - /// Returns the index of the last occurrence of a byte that is not a member - /// of the provided set. - /// - /// The `byteset` may be any type that can be cheaply converted into a - /// `&[u8]`. This includes, but is not limited to, `&str` and `&[u8]`, but - /// note that passing a `&str` which contains multibyte characters may not - /// behave as you expect: each byte in the `&str` is treated as an - /// individual member of the byte set. - /// - /// Note that order is irrelevant for the `byteset` parameter, and - /// duplicate bytes present in its body are ignored. - /// - /// # Complexity - /// - /// This routine is guaranteed to have worst case linear time complexity - /// with respect to both the set of bytes and the haystack. That is, this - /// runs in `O(byteset.len() + haystack.len())` time. - /// - /// This routine is also guaranteed to have worst case constant space - /// complexity. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use bstr::ByteSlice; - /// - /// assert_eq!(b"foo bar baz,\t".rfind_not_byteset(b",\t"), Some(10)); - /// assert_eq!(b"foo baz bar".rfind_not_byteset(b"rabz "), Some(2)); - /// assert_eq!(None, b"foo baz bar".rfind_not_byteset(b"barfoz ")); - /// ``` - #[inline] - fn rfind_not_byteset>(&self, byteset: B) -> Option { - byteset::rfind_not(self.as_bytes(), byteset.as_ref()) - } - - /// Returns an iterator over the fields in a byte string, separated by - /// contiguous whitespace. - /// - /// # Example - /// - /// Basic usage: - /// - /// ``` - /// use bstr::{B, ByteSlice}; - /// - /// let s = B(" foo\tbar\t\u{2003}\nquux \n"); - /// let fields: Vec<&[u8]> = s.fields().collect(); - /// assert_eq!(fields, vec![B("foo"), B("bar"), B("quux")]); - /// ``` - /// - /// A byte string consisting of just whitespace yields no elements: - /// - /// ``` - /// use bstr::{B, ByteSlice}; - /// - /// assert_eq!(0, B(" \n\t\u{2003}\n \t").fields().count()); - /// ``` - #[inline] - fn fields(&self) -> Fields<'_> { - Fields::new(self.as_bytes()) - } - - /// Returns an iterator over the fields in a byte string, separated by - /// contiguous codepoints satisfying the given predicate. - /// - /// If this byte string is not valid UTF-8, then the given closure will - /// be called with a Unicode replacement codepoint when invalid UTF-8 - /// bytes are seen. - /// - /// # Example - /// - /// Basic usage: - /// - /// ``` - /// use bstr::{B, ByteSlice}; - /// - /// let s = b"123foo999999bar1quux123456"; - /// let fields: Vec<&[u8]> = s.fields_with(|c| c.is_numeric()).collect(); - /// assert_eq!(fields, vec![B("foo"), B("bar"), B("quux")]); - /// ``` - /// - /// A byte string consisting of all codepoints satisfying the predicate - /// yields no elements: - /// - /// ``` - /// use bstr::ByteSlice; - /// - /// assert_eq!(0, b"1911354563".fields_with(|c| c.is_numeric()).count()); - /// ``` - #[inline] - fn fields_with bool>(&self, f: F) -> FieldsWith<'_, F> { - FieldsWith::new(self.as_bytes(), f) - } - - /// Returns an iterator over substrings of this byte string, separated - /// by the given byte string. 
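A small sketch of `fields` in the same spirit as the doc examples above; the sample report line is made up, and the point is simply that any mix of leading, trailing, and interior whitespace is absorbed:

```
use bstr::{B, ByteSlice};

// `fields` tokenizes on contiguous whitespace, so irregular spacing
// and a trailing newline do not produce empty tokens.
let report: &[u8] = b"  3 warnings\t0 errors \n";
let tokens: Vec<&[u8]> = report.fields().collect();
assert_eq!(tokens, vec![B("3"), B("warnings"), B("0"), B("errors")]);
```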
Each element yielded is guaranteed not to - /// include the splitter substring. - /// - /// The splitter may be any type that can be cheaply converted into a - /// `&[u8]`. This includes, but is not limited to, `&str` and `&[u8]`. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use bstr::{B, ByteSlice}; - /// - /// let x: Vec<&[u8]> = b"Mary had a little lamb".split_str(" ").collect(); - /// assert_eq!(x, vec![ - /// B("Mary"), B("had"), B("a"), B("little"), B("lamb"), - /// ]); - /// - /// let x: Vec<&[u8]> = b"".split_str("X").collect(); - /// assert_eq!(x, vec![b""]); - /// - /// let x: Vec<&[u8]> = b"lionXXtigerXleopard".split_str("X").collect(); - /// assert_eq!(x, vec![B("lion"), B(""), B("tiger"), B("leopard")]); - /// - /// let x: Vec<&[u8]> = b"lion::tiger::leopard".split_str("::").collect(); - /// assert_eq!(x, vec![B("lion"), B("tiger"), B("leopard")]); - /// ``` - /// - /// If a string contains multiple contiguous separators, you will end up - /// with empty strings yielded by the iterator: - /// - /// ``` - /// use bstr::{B, ByteSlice}; - /// - /// let x: Vec<&[u8]> = b"||||a||b|c".split_str("|").collect(); - /// assert_eq!(x, vec![ - /// B(""), B(""), B(""), B(""), B("a"), B(""), B("b"), B("c"), - /// ]); - /// - /// let x: Vec<&[u8]> = b"(///)".split_str("/").collect(); - /// assert_eq!(x, vec![B("("), B(""), B(""), B(")")]); - /// ``` - /// - /// Separators at the start or end of a string are neighbored by empty - /// strings. - /// - /// ``` - /// use bstr::{B, ByteSlice}; - /// - /// let x: Vec<&[u8]> = b"010".split_str("0").collect(); - /// assert_eq!(x, vec![B(""), B("1"), B("")]); - /// ``` - /// - /// When the empty string is used as a separator, it splits every **byte** - /// in the byte string, along with the beginning and end of the byte - /// string. - /// - /// ``` - /// use bstr::{B, ByteSlice}; - /// - /// let x: Vec<&[u8]> = b"rust".split_str("").collect(); - /// assert_eq!(x, vec![ - /// B(""), B("r"), B("u"), B("s"), B("t"), B(""), - /// ]); - /// - /// // Splitting by an empty string is not UTF-8 aware. Elements yielded - /// // may not be valid UTF-8! - /// let x: Vec<&[u8]> = B("☃").split_str("").collect(); - /// assert_eq!(x, vec![ - /// B(""), B(b"\xE2"), B(b"\x98"), B(b"\x83"), B(""), - /// ]); - /// ``` - /// - /// Contiguous separators, especially whitespace, can lead to possibly - /// surprising behavior. For example, this code is correct: - /// - /// ``` - /// use bstr::{B, ByteSlice}; - /// - /// let x: Vec<&[u8]> = b" a b c".split_str(" ").collect(); - /// assert_eq!(x, vec![ - /// B(""), B(""), B(""), B(""), B("a"), B(""), B("b"), B("c"), - /// ]); - /// ``` - /// - /// It does *not* give you `["a", "b", "c"]`. For that behavior, use - /// [`fields`](#method.fields) instead. - #[inline] - fn split_str<'a, B: ?Sized + AsRef<[u8]>>( - &'a self, - splitter: &'a B, - ) -> Split<'a> { - Split::new(self.as_bytes(), splitter.as_ref()) - } - - /// Returns an iterator over substrings of this byte string, separated by - /// the given byte string, in reverse. Each element yielded is guaranteed - /// not to include the splitter substring. - /// - /// The splitter may be any type that can be cheaply converted into a - /// `&[u8]`. This includes, but is not limited to, `&str` and `&[u8]`. 
- /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use bstr::{B, ByteSlice}; - /// - /// let x: Vec<&[u8]> = - /// b"Mary had a little lamb".rsplit_str(" ").collect(); - /// assert_eq!(x, vec![ - /// B("lamb"), B("little"), B("a"), B("had"), B("Mary"), - /// ]); - /// - /// let x: Vec<&[u8]> = b"".rsplit_str("X").collect(); - /// assert_eq!(x, vec![b""]); - /// - /// let x: Vec<&[u8]> = b"lionXXtigerXleopard".rsplit_str("X").collect(); - /// assert_eq!(x, vec![B("leopard"), B("tiger"), B(""), B("lion")]); - /// - /// let x: Vec<&[u8]> = b"lion::tiger::leopard".rsplit_str("::").collect(); - /// assert_eq!(x, vec![B("leopard"), B("tiger"), B("lion")]); - /// ``` - /// - /// If a string contains multiple contiguous separators, you will end up - /// with empty strings yielded by the iterator: - /// - /// ``` - /// use bstr::{B, ByteSlice}; - /// - /// let x: Vec<&[u8]> = b"||||a||b|c".rsplit_str("|").collect(); - /// assert_eq!(x, vec![ - /// B("c"), B("b"), B(""), B("a"), B(""), B(""), B(""), B(""), - /// ]); - /// - /// let x: Vec<&[u8]> = b"(///)".rsplit_str("/").collect(); - /// assert_eq!(x, vec![B(")"), B(""), B(""), B("(")]); - /// ``` - /// - /// Separators at the start or end of a string are neighbored by empty - /// strings. - /// - /// ``` - /// use bstr::{B, ByteSlice}; - /// - /// let x: Vec<&[u8]> = b"010".rsplit_str("0").collect(); - /// assert_eq!(x, vec![B(""), B("1"), B("")]); - /// ``` - /// - /// When the empty string is used as a separator, it splits every **byte** - /// in the byte string, along with the beginning and end of the byte - /// string. - /// - /// ``` - /// use bstr::{B, ByteSlice}; - /// - /// let x: Vec<&[u8]> = b"rust".rsplit_str("").collect(); - /// assert_eq!(x, vec![ - /// B(""), B("t"), B("s"), B("u"), B("r"), B(""), - /// ]); - /// - /// // Splitting by an empty string is not UTF-8 aware. Elements yielded - /// // may not be valid UTF-8! - /// let x: Vec<&[u8]> = B("☃").rsplit_str("").collect(); - /// assert_eq!(x, vec![B(""), B(b"\x83"), B(b"\x98"), B(b"\xE2"), B("")]); - /// ``` - /// - /// Contiguous separators, especially whitespace, can lead to possibly - /// surprising behavior. For example, this code is correct: - /// - /// ``` - /// use bstr::{B, ByteSlice}; - /// - /// let x: Vec<&[u8]> = b" a b c".rsplit_str(" ").collect(); - /// assert_eq!(x, vec![ - /// B("c"), B("b"), B(""), B("a"), B(""), B(""), B(""), B(""), - /// ]); - /// ``` - /// - /// It does *not* give you `["a", "b", "c"]`. - #[inline] - fn rsplit_str<'a, B: ?Sized + AsRef<[u8]>>( - &'a self, - splitter: &'a B, - ) -> SplitReverse<'a> { - SplitReverse::new(self.as_bytes(), splitter.as_ref()) - } - - /// Returns an iterator of at most `limit` substrings of this byte string, - /// separated by the given byte string. If `limit` substrings are yielded, - /// then the last substring will contain the remainder of this byte string. - /// - /// The needle may be any type that can be cheaply converted into a - /// `&[u8]`. This includes, but is not limited to, `&str` and `&[u8]`. 
- /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use bstr::{B, ByteSlice}; - /// - /// let x: Vec<_> = b"Mary had a little lamb".splitn_str(3, " ").collect(); - /// assert_eq!(x, vec![B("Mary"), B("had"), B("a little lamb")]); - /// - /// let x: Vec<_> = b"".splitn_str(3, "X").collect(); - /// assert_eq!(x, vec![b""]); - /// - /// let x: Vec<_> = b"lionXXtigerXleopard".splitn_str(3, "X").collect(); - /// assert_eq!(x, vec![B("lion"), B(""), B("tigerXleopard")]); - /// - /// let x: Vec<_> = b"lion::tiger::leopard".splitn_str(2, "::").collect(); - /// assert_eq!(x, vec![B("lion"), B("tiger::leopard")]); - /// - /// let x: Vec<_> = b"abcXdef".splitn_str(1, "X").collect(); - /// assert_eq!(x, vec![B("abcXdef")]); - /// - /// let x: Vec<_> = b"abcdef".splitn_str(2, "X").collect(); - /// assert_eq!(x, vec![B("abcdef")]); - /// - /// let x: Vec<_> = b"abcXdef".splitn_str(0, "X").collect(); - /// assert!(x.is_empty()); - /// ``` - #[inline] - fn splitn_str<'a, B: ?Sized + AsRef<[u8]>>( - &'a self, - limit: usize, - splitter: &'a B, - ) -> SplitN<'a> { - SplitN::new(self.as_bytes(), splitter.as_ref(), limit) - } - - /// Returns an iterator of at most `limit` substrings of this byte string, - /// separated by the given byte string, in reverse. If `limit` substrings - /// are yielded, then the last substring will contain the remainder of this - /// byte string. - /// - /// The needle may be any type that can be cheaply converted into a - /// `&[u8]`. This includes, but is not limited to, `&str` and `&[u8]`. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use bstr::{B, ByteSlice}; - /// - /// let x: Vec<_> = - /// b"Mary had a little lamb".rsplitn_str(3, " ").collect(); - /// assert_eq!(x, vec![B("lamb"), B("little"), B("Mary had a")]); - /// - /// let x: Vec<_> = b"".rsplitn_str(3, "X").collect(); - /// assert_eq!(x, vec![b""]); - /// - /// let x: Vec<_> = b"lionXXtigerXleopard".rsplitn_str(3, "X").collect(); - /// assert_eq!(x, vec![B("leopard"), B("tiger"), B("lionX")]); - /// - /// let x: Vec<_> = b"lion::tiger::leopard".rsplitn_str(2, "::").collect(); - /// assert_eq!(x, vec![B("leopard"), B("lion::tiger")]); - /// - /// let x: Vec<_> = b"abcXdef".rsplitn_str(1, "X").collect(); - /// assert_eq!(x, vec![B("abcXdef")]); - /// - /// let x: Vec<_> = b"abcdef".rsplitn_str(2, "X").collect(); - /// assert_eq!(x, vec![B("abcdef")]); - /// - /// let x: Vec<_> = b"abcXdef".rsplitn_str(0, "X").collect(); - /// assert!(x.is_empty()); - /// ``` - #[inline] - fn rsplitn_str<'a, B: ?Sized + AsRef<[u8]>>( - &'a self, - limit: usize, - splitter: &'a B, - ) -> SplitNReverse<'a> { - SplitNReverse::new(self.as_bytes(), splitter.as_ref(), limit) - } - - /// Replace all matches of the given needle with the given replacement, and - /// the result as a new `Vec`. - /// - /// This routine is useful as a convenience. If you need to reuse an - /// allocation, use [`replace_into`](#method.replace_into) instead. 
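The `limit` parameter is what makes `splitn_str` useful for "split only at the first occurrence" jobs. A hedged sketch with an invented header line:

```
use bstr::{B, ByteSlice};

// Split an HTTP-style header into name and value at the first ": " only;
// any later separators stay inside the second element.
let header: &[u8] = b"Content-Type: text/plain; charset=utf-8";
let parts: Vec<&[u8]> = header.splitn_str(2, ": ").collect();
assert_eq!(parts, vec![B("Content-Type"), B("text/plain; charset=utf-8")]);
```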
- /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use bstr::ByteSlice; - /// - /// let s = b"this is old".replace("old", "new"); - /// assert_eq!(s, "this is new".as_bytes()); - /// ``` - /// - /// When the pattern doesn't match: - /// - /// ``` - /// use bstr::ByteSlice; - /// - /// let s = b"this is old".replace("nada nada", "limonada"); - /// assert_eq!(s, "this is old".as_bytes()); - /// ``` - /// - /// When the needle is an empty string: - /// - /// ``` - /// use bstr::ByteSlice; - /// - /// let s = b"foo".replace("", "Z"); - /// assert_eq!(s, "ZfZoZoZ".as_bytes()); - /// ``` - #[cfg(feature = "std")] - #[inline] - fn replace, R: AsRef<[u8]>>( - &self, - needle: N, - replacement: R, - ) -> Vec { - let mut dest = Vec::with_capacity(self.as_bytes().len()); - self.replace_into(needle, replacement, &mut dest); - dest - } - - /// Replace up to `limit` matches of the given needle with the given - /// replacement, and the result as a new `Vec`. - /// - /// This routine is useful as a convenience. If you need to reuse an - /// allocation, use [`replacen_into`](#method.replacen_into) instead. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use bstr::ByteSlice; - /// - /// let s = b"foofoo".replacen("o", "z", 2); - /// assert_eq!(s, "fzzfoo".as_bytes()); - /// ``` - /// - /// When the pattern doesn't match: - /// - /// ``` - /// use bstr::ByteSlice; - /// - /// let s = b"foofoo".replacen("a", "z", 2); - /// assert_eq!(s, "foofoo".as_bytes()); - /// ``` - /// - /// When the needle is an empty string: - /// - /// ``` - /// use bstr::ByteSlice; - /// - /// let s = b"foo".replacen("", "Z", 2); - /// assert_eq!(s, "ZfZoo".as_bytes()); - /// ``` - #[cfg(feature = "std")] - #[inline] - fn replacen, R: AsRef<[u8]>>( - &self, - needle: N, - replacement: R, - limit: usize, - ) -> Vec { - let mut dest = Vec::with_capacity(self.as_bytes().len()); - self.replacen_into(needle, replacement, limit, &mut dest); - dest - } - - /// Replace all matches of the given needle with the given replacement, - /// and write the result into the provided `Vec`. - /// - /// This does **not** clear `dest` before writing to it. - /// - /// This routine is useful for reusing allocation. For a more convenient - /// API, use [`replace`](#method.replace) instead. 
- /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use bstr::ByteSlice; - /// - /// let s = b"this is old"; - /// - /// let mut dest = vec![]; - /// s.replace_into("old", "new", &mut dest); - /// assert_eq!(dest, "this is new".as_bytes()); - /// ``` - /// - /// When the pattern doesn't match: - /// - /// ``` - /// use bstr::ByteSlice; - /// - /// let s = b"this is old"; - /// - /// let mut dest = vec![]; - /// s.replace_into("nada nada", "limonada", &mut dest); - /// assert_eq!(dest, "this is old".as_bytes()); - /// ``` - /// - /// When the needle is an empty string: - /// - /// ``` - /// use bstr::ByteSlice; - /// - /// let s = b"foo"; - /// - /// let mut dest = vec![]; - /// s.replace_into("", "Z", &mut dest); - /// assert_eq!(dest, "ZfZoZoZ".as_bytes()); - /// ``` - #[cfg(feature = "std")] - #[inline] - fn replace_into, R: AsRef<[u8]>>( - &self, - needle: N, - replacement: R, - dest: &mut Vec, - ) { - let (needle, replacement) = (needle.as_ref(), replacement.as_ref()); - - let mut last = 0; - for start in self.find_iter(needle) { - dest.push_str(&self.as_bytes()[last..start]); - dest.push_str(replacement); - last = start + needle.len(); - } - dest.push_str(&self.as_bytes()[last..]); - } - - /// Replace up to `limit` matches of the given needle with the given - /// replacement, and write the result into the provided `Vec`. - /// - /// This does **not** clear `dest` before writing to it. - /// - /// This routine is useful for reusing allocation. For a more convenient - /// API, use [`replacen`](#method.replacen) instead. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use bstr::ByteSlice; - /// - /// let s = b"foofoo"; - /// - /// let mut dest = vec![]; - /// s.replacen_into("o", "z", 2, &mut dest); - /// assert_eq!(dest, "fzzfoo".as_bytes()); - /// ``` - /// - /// When the pattern doesn't match: - /// - /// ``` - /// use bstr::ByteSlice; - /// - /// let s = b"foofoo"; - /// - /// let mut dest = vec![]; - /// s.replacen_into("a", "z", 2, &mut dest); - /// assert_eq!(dest, "foofoo".as_bytes()); - /// ``` - /// - /// When the needle is an empty string: - /// - /// ``` - /// use bstr::ByteSlice; - /// - /// let s = b"foo"; - /// - /// let mut dest = vec![]; - /// s.replacen_into("", "Z", 2, &mut dest); - /// assert_eq!(dest, "ZfZoo".as_bytes()); - /// ``` - #[cfg(feature = "std")] - #[inline] - fn replacen_into, R: AsRef<[u8]>>( - &self, - needle: N, - replacement: R, - limit: usize, - dest: &mut Vec, - ) { - let (needle, replacement) = (needle.as_ref(), replacement.as_ref()); - - let mut last = 0; - for start in self.find_iter(needle).take(limit) { - dest.push_str(&self.as_bytes()[last..start]); - dest.push_str(replacement); - last = start + needle.len(); - } - dest.push_str(&self.as_bytes()[last..]); - } - - /// Returns an iterator over the bytes in this byte string. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use bstr::ByteSlice; - /// - /// let bs = b"foobar"; - /// let bytes: Vec = bs.bytes().collect(); - /// assert_eq!(bytes, bs); - /// ``` - #[inline] - fn bytes(&self) -> Bytes<'_> { - Bytes { it: self.as_bytes().iter() } - } - - /// Returns an iterator over the Unicode scalar values in this byte string. - /// If invalid UTF-8 is encountered, then the Unicode replacement codepoint - /// is yielded instead. 
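The `_into` variants exist to amortize allocation. A minimal sketch of that pattern (the inputs are invented); because `replace_into` appends rather than overwrites, the shared buffer has to be cleared between uses:

```
use bstr::ByteSlice;

// Reuse one output buffer across many replacements.
let inputs: &[&[u8]] = &[b"one foo", b"two foo", b"three"];
let mut buf = Vec::new();
let mut outputs = Vec::new();
for input in inputs {
    buf.clear(); // `replace_into` does not clear the buffer itself.
    input.replace_into("foo", "bar", &mut buf);
    outputs.push(buf.clone());
}
assert_eq!(outputs[0], "one bar".as_bytes());
assert_eq!(outputs[2], "three".as_bytes());
```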
- /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use bstr::ByteSlice; - /// - /// let bs = b"\xE2\x98\x83\xFF\xF0\x9D\x9E\x83\xE2\x98\x61"; - /// let chars: Vec = bs.chars().collect(); - /// assert_eq!(vec!['☃', '\u{FFFD}', '𝞃', '\u{FFFD}', 'a'], chars); - /// ``` - /// - /// Codepoints can also be iterated over in reverse: - /// - /// ``` - /// use bstr::ByteSlice; - /// - /// let bs = b"\xE2\x98\x83\xFF\xF0\x9D\x9E\x83\xE2\x98\x61"; - /// let chars: Vec = bs.chars().rev().collect(); - /// assert_eq!(vec!['a', '\u{FFFD}', '𝞃', '\u{FFFD}', '☃'], chars); - /// ``` - #[inline] - fn chars(&self) -> Chars<'_> { - Chars::new(self.as_bytes()) - } - - /// Returns an iterator over the Unicode scalar values in this byte string - /// along with their starting and ending byte index positions. If invalid - /// UTF-8 is encountered, then the Unicode replacement codepoint is yielded - /// instead. - /// - /// Note that this is slightly different from the `CharIndices` iterator - /// provided by the standard library. Aside from working on possibly - /// invalid UTF-8, this iterator provides both the corresponding starting - /// and ending byte indices of each codepoint yielded. The ending position - /// is necessary to slice the original byte string when invalid UTF-8 bytes - /// are converted into a Unicode replacement codepoint, since a single - /// replacement codepoint can substitute anywhere from 1 to 3 invalid bytes - /// (inclusive). - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use bstr::ByteSlice; - /// - /// let bs = b"\xE2\x98\x83\xFF\xF0\x9D\x9E\x83\xE2\x98\x61"; - /// let chars: Vec<(usize, usize, char)> = bs.char_indices().collect(); - /// assert_eq!(chars, vec![ - /// (0, 3, '☃'), - /// (3, 4, '\u{FFFD}'), - /// (4, 8, '𝞃'), - /// (8, 10, '\u{FFFD}'), - /// (10, 11, 'a'), - /// ]); - /// ``` - /// - /// Codepoints can also be iterated over in reverse: - /// - /// ``` - /// use bstr::ByteSlice; - /// - /// let bs = b"\xE2\x98\x83\xFF\xF0\x9D\x9E\x83\xE2\x98\x61"; - /// let chars: Vec<(usize, usize, char)> = bs - /// .char_indices() - /// .rev() - /// .collect(); - /// assert_eq!(chars, vec![ - /// (10, 11, 'a'), - /// (8, 10, '\u{FFFD}'), - /// (4, 8, '𝞃'), - /// (3, 4, '\u{FFFD}'), - /// (0, 3, '☃'), - /// ]); - /// ``` - #[inline] - fn char_indices(&self) -> CharIndices<'_> { - CharIndices::new(self.as_bytes()) - } - - /// Iterate over chunks of valid UTF-8. - /// - /// The iterator returned yields chunks of valid UTF-8 separated by invalid - /// UTF-8 bytes, if they exist. Invalid UTF-8 bytes are always 1-3 bytes, - /// which are determined via the "substitution of maximal subparts" - /// strategy described in the docs for the - /// [`ByteSlice::to_str_lossy`](trait.ByteSlice.html#method.to_str_lossy) - /// method. 
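A short sketch of why `char_indices` reports an end offset as well: it lets the caller slice the original bytes that a replacement codepoint stands in for (the input bytes here are invented):

```
use bstr::{B, ByteSlice};

// For each substituted `\u{FFFD}`, the (start, end) pair still indexes
// the original, invalid bytes.
let bs: &[u8] = b"a\xFFz";
for (start, end, ch) in bs.char_indices() {
    if ch == '\u{FFFD}' {
        assert_eq!(&bs[start..end], B(b"\xFF"));
    }
}
```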
- /// - /// # Examples - /// - /// This example shows how to gather all valid and invalid chunks from a - /// byte slice: - /// - /// ``` - /// use bstr::{ByteSlice, Utf8Chunk}; - /// - /// let bytes = b"foo\xFD\xFEbar\xFF"; - /// - /// let (mut valid_chunks, mut invalid_chunks) = (vec![], vec![]); - /// for chunk in bytes.utf8_chunks() { - /// if !chunk.valid().is_empty() { - /// valid_chunks.push(chunk.valid()); - /// } - /// if !chunk.invalid().is_empty() { - /// invalid_chunks.push(chunk.invalid()); - /// } - /// } - /// - /// assert_eq!(valid_chunks, vec!["foo", "bar"]); - /// assert_eq!(invalid_chunks, vec![b"\xFD", b"\xFE", b"\xFF"]); - /// ``` - #[inline] - fn utf8_chunks(&self) -> Utf8Chunks<'_> { - Utf8Chunks { bytes: self.as_bytes() } - } - - /// Returns an iterator over the grapheme clusters in this byte string. - /// If invalid UTF-8 is encountered, then the Unicode replacement codepoint - /// is yielded instead. - /// - /// # Examples - /// - /// This example shows how multiple codepoints can combine to form a - /// single grapheme cluster: - /// - /// ``` - /// use bstr::ByteSlice; - /// - /// let bs = "a\u{0300}\u{0316}\u{1F1FA}\u{1F1F8}".as_bytes(); - /// let graphemes: Vec<&str> = bs.graphemes().collect(); - /// assert_eq!(vec!["à̖", "🇺🇸"], graphemes); - /// ``` - /// - /// This shows that graphemes can be iterated over in reverse: - /// - /// ``` - /// use bstr::ByteSlice; - /// - /// let bs = "a\u{0300}\u{0316}\u{1F1FA}\u{1F1F8}".as_bytes(); - /// let graphemes: Vec<&str> = bs.graphemes().rev().collect(); - /// assert_eq!(vec!["🇺🇸", "à̖"], graphemes); - /// ``` - #[cfg(feature = "unicode")] - #[inline] - fn graphemes(&self) -> Graphemes<'_> { - Graphemes::new(self.as_bytes()) - } - - /// Returns an iterator over the grapheme clusters in this byte string - /// along with their starting and ending byte index positions. If invalid - /// UTF-8 is encountered, then the Unicode replacement codepoint is yielded - /// instead. - /// - /// # Examples - /// - /// This example shows how to get the byte offsets of each individual - /// grapheme cluster: - /// - /// ``` - /// use bstr::ByteSlice; - /// - /// let bs = "a\u{0300}\u{0316}\u{1F1FA}\u{1F1F8}".as_bytes(); - /// let graphemes: Vec<(usize, usize, &str)> = - /// bs.grapheme_indices().collect(); - /// assert_eq!(vec![(0, 5, "à̖"), (5, 13, "🇺🇸")], graphemes); - /// ``` - /// - /// This example shows what happens when invalid UTF-8 is enountered. Note - /// that the offsets are valid indices into the original string, and do - /// not necessarily correspond to the length of the `&str` returned! - /// - /// ``` - /// use bstr::{ByteSlice, ByteVec}; - /// - /// let mut bytes = vec![]; - /// bytes.push_str("a\u{0300}\u{0316}"); - /// bytes.push(b'\xFF'); - /// bytes.push_str("\u{1F1FA}\u{1F1F8}"); - /// - /// let graphemes: Vec<(usize, usize, &str)> = - /// bytes.grapheme_indices().collect(); - /// assert_eq!( - /// graphemes, - /// vec![(0, 5, "à̖"), (5, 6, "\u{FFFD}"), (6, 14, "🇺🇸")] - /// ); - /// ``` - #[cfg(feature = "unicode")] - #[inline] - fn grapheme_indices(&self) -> GraphemeIndices<'_> { - GraphemeIndices::new(self.as_bytes()) - } - - /// Returns an iterator over the words in this byte string. If invalid - /// UTF-8 is encountered, then the Unicode replacement codepoint is yielded - /// instead. - /// - /// This is similar to - /// [`words_with_breaks`](trait.ByteSlice.html#method.words_with_breaks), - /// except it only returns elements that contain a "word" character. 
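If I read the chunking contract right, every input byte lands in exactly one chunk, in either its valid or its invalid part, so the original bytes can be rebuilt from the iterator. A hedged sketch:

```
use bstr::ByteSlice;

// Concatenating each chunk's valid and invalid parts, in order,
// reconstructs the original input byte-for-byte.
let bytes: &[u8] = b"foo\xFD\xFEbar\xFF";
let mut rebuilt = Vec::new();
for chunk in bytes.utf8_chunks() {
    rebuilt.extend_from_slice(chunk.valid().as_bytes());
    rebuilt.extend_from_slice(chunk.invalid());
}
assert_eq!(rebuilt, bytes);
```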
A word - /// character is defined by UTS #18 (Annex C) to be the combination of the - /// `Alphabetic` and `Join_Control` properties, along with the - /// `Decimal_Number`, `Mark` and `Connector_Punctuation` general - /// categories. - /// - /// Since words are made up of one or more codepoints, this iterator - /// yields `&str` elements. When invalid UTF-8 is encountered, replacement - /// codepoints are [substituted](index.html#handling-of-invalid-utf-8). - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use bstr::ByteSlice; - /// - /// let bs = br#"The quick ("brown") fox can't jump 32.3 feet, right?"#; - /// let words: Vec<&str> = bs.words().collect(); - /// assert_eq!(words, vec![ - /// "The", "quick", "brown", "fox", "can't", - /// "jump", "32.3", "feet", "right", - /// ]); - /// ``` - #[cfg(feature = "unicode")] - #[inline] - fn words(&self) -> Words<'_> { - Words::new(self.as_bytes()) - } - - /// Returns an iterator over the words in this byte string along with - /// their starting and ending byte index positions. - /// - /// This is similar to - /// [`words_with_break_indices`](trait.ByteSlice.html#method.words_with_break_indices), - /// except it only returns elements that contain a "word" character. A word - /// character is defined by UTS #18 (Annex C) to be the combination of the - /// `Alphabetic` and `Join_Control` properties, along with the - /// `Decimal_Number`, `Mark` and `Connector_Punctuation` general - /// categories. - /// - /// Since words are made up of one or more codepoints, this iterator - /// yields `&str` elements. When invalid UTF-8 is encountered, replacement - /// codepoints are [substituted](index.html#handling-of-invalid-utf-8). - /// - /// # Examples - /// - /// This example shows how to get the byte offsets of each individual - /// word: - /// - /// ``` - /// use bstr::ByteSlice; - /// - /// let bs = b"can't jump 32.3 feet"; - /// let words: Vec<(usize, usize, &str)> = bs.word_indices().collect(); - /// assert_eq!(words, vec![ - /// (0, 5, "can't"), - /// (6, 10, "jump"), - /// (11, 15, "32.3"), - /// (16, 20, "feet"), - /// ]); - /// ``` - #[cfg(feature = "unicode")] - #[inline] - fn word_indices(&self) -> WordIndices<'_> { - WordIndices::new(self.as_bytes()) - } - - /// Returns an iterator over the words in this byte string, along with - /// all breaks between the words. Concatenating all elements yielded by - /// the iterator results in the original string (modulo Unicode replacement - /// codepoint substitutions if invalid UTF-8 is encountered). - /// - /// Since words are made up of one or more codepoints, this iterator - /// yields `&str` elements. When invalid UTF-8 is encountered, replacement - /// codepoints are [substituted](index.html#handling-of-invalid-utf-8). - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use bstr::ByteSlice; - /// - /// let bs = br#"The quick ("brown") fox can't jump 32.3 feet, right?"#; - /// let words: Vec<&str> = bs.words_with_breaks().collect(); - /// assert_eq!(words, vec![ - /// "The", " ", "quick", " ", "(", "\"", "brown", "\"", ")", - /// " ", "fox", " ", "can't", " ", "jump", " ", "32.3", " ", "feet", - /// ",", " ", "right", "?", - /// ]); - /// ``` - #[cfg(feature = "unicode")] - #[inline] - fn words_with_breaks(&self) -> WordsWithBreaks<'_> { - WordsWithBreaks::new(self.as_bytes()) - } - - /// Returns an iterator over the words and their byte offsets in this - /// byte string, along with all breaks between the words. 
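Since `words_with_breaks` keeps the breaks, it is lossless for valid UTF-8 input; a minimal sketch (assuming `bstr` is built with its default `unicode` feature):

```
use bstr::ByteSlice;

// For valid UTF-8 input, concatenating everything yielded by
// `words_with_breaks` reproduces the original string.
let text = "can't jump 32.3 feet";
let rebuilt: String = text.as_bytes().words_with_breaks().collect();
assert_eq!(rebuilt, text);
```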
Concatenating - /// all elements yielded by the iterator results in the original string - /// (modulo Unicode replacement codepoint substitutions if invalid UTF-8 is - /// encountered). - /// - /// Since words are made up of one or more codepoints, this iterator - /// yields `&str` elements. When invalid UTF-8 is encountered, replacement - /// codepoints are [substituted](index.html#handling-of-invalid-utf-8). - /// - /// # Examples - /// - /// This example shows how to get the byte offsets of each individual - /// word: - /// - /// ``` - /// use bstr::ByteSlice; - /// - /// let bs = b"can't jump 32.3 feet"; - /// let words: Vec<(usize, usize, &str)> = - /// bs.words_with_break_indices().collect(); - /// assert_eq!(words, vec![ - /// (0, 5, "can't"), - /// (5, 6, " "), - /// (6, 10, "jump"), - /// (10, 11, " "), - /// (11, 15, "32.3"), - /// (15, 16, " "), - /// (16, 20, "feet"), - /// ]); - /// ``` - #[cfg(feature = "unicode")] - #[inline] - fn words_with_break_indices(&self) -> WordsWithBreakIndices<'_> { - WordsWithBreakIndices::new(self.as_bytes()) - } - - /// Returns an iterator over the sentences in this byte string. - /// - /// Typically, a sentence will include its trailing punctuation and - /// whitespace. Concatenating all elements yielded by the iterator - /// results in the original string (modulo Unicode replacement codepoint - /// substitutions if invalid UTF-8 is encountered). - /// - /// Since sentences are made up of one or more codepoints, this iterator - /// yields `&str` elements. When invalid UTF-8 is encountered, replacement - /// codepoints are [substituted](index.html#handling-of-invalid-utf-8). - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use bstr::ByteSlice; - /// - /// let bs = b"I want this. Not that. Right now."; - /// let sentences: Vec<&str> = bs.sentences().collect(); - /// assert_eq!(sentences, vec![ - /// "I want this. ", - /// "Not that. ", - /// "Right now.", - /// ]); - /// ``` - #[cfg(feature = "unicode")] - #[inline] - fn sentences(&self) -> Sentences<'_> { - Sentences::new(self.as_bytes()) - } - - /// Returns an iterator over the sentences in this byte string along with - /// their starting and ending byte index positions. - /// - /// Typically, a sentence will include its trailing punctuation and - /// whitespace. Concatenating all elements yielded by the iterator - /// results in the original string (modulo Unicode replacement codepoint - /// substitutions if invalid UTF-8 is encountered). - /// - /// Since sentences are made up of one or more codepoints, this iterator - /// yields `&str` elements. When invalid UTF-8 is encountered, replacement - /// codepoints are [substituted](index.html#handling-of-invalid-utf-8). - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use bstr::ByteSlice; - /// - /// let bs = b"I want this. Not that. Right now."; - /// let sentences: Vec<(usize, usize, &str)> = - /// bs.sentence_indices().collect(); - /// assert_eq!(sentences, vec![ - /// (0, 13, "I want this. "), - /// (13, 23, "Not that. "), - /// (23, 33, "Right now."), - /// ]); - /// ``` - #[cfg(feature = "unicode")] - #[inline] - fn sentence_indices(&self) -> SentenceIndices<'_> { - SentenceIndices::new(self.as_bytes()) - } - - /// An iterator over all lines in a byte string, without their - /// terminators. - /// - /// For this iterator, the only line terminators recognized are `\r\n` and - /// `\n`. 
- /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use bstr::{B, ByteSlice}; - /// - /// let s = b"\ - /// foo - /// - /// bar\r - /// baz - /// - /// - /// quux"; - /// let lines: Vec<&[u8]> = s.lines().collect(); - /// assert_eq!(lines, vec![ - /// B("foo"), B(""), B("bar"), B("baz"), B(""), B(""), B("quux"), - /// ]); - /// ``` - #[inline] - fn lines(&self) -> Lines<'_> { - Lines::new(self.as_bytes()) - } - - /// An iterator over all lines in a byte string, including their - /// terminators. - /// - /// For this iterator, the only line terminator recognized is `\n`. (Since - /// line terminators are included, this also handles `\r\n` line endings.) - /// - /// Line terminators are only included if they are present in the original - /// byte string. For example, the last line in a byte string may not end - /// with a line terminator. - /// - /// Concatenating all elements yielded by this iterator is guaranteed to - /// yield the original byte string. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use bstr::{B, ByteSlice}; - /// - /// let s = b"\ - /// foo - /// - /// bar\r - /// baz - /// - /// - /// quux"; - /// let lines: Vec<&[u8]> = s.lines_with_terminator().collect(); - /// assert_eq!(lines, vec![ - /// B("foo\n"), - /// B("\n"), - /// B("bar\r\n"), - /// B("baz\n"), - /// B("\n"), - /// B("\n"), - /// B("quux"), - /// ]); - /// ``` - #[inline] - fn lines_with_terminator(&self) -> LinesWithTerminator<'_> { - LinesWithTerminator::new(self.as_bytes()) - } - - /// Return a byte string slice with leading and trailing whitespace - /// removed. - /// - /// Whitespace is defined according to the terms of the `White_Space` - /// Unicode property. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use bstr::{B, ByteSlice}; - /// - /// let s = B(" foo\tbar\t\u{2003}\n"); - /// assert_eq!(s.trim(), B("foo\tbar")); - /// ``` - #[cfg(feature = "unicode")] - #[inline] - fn trim(&self) -> &[u8] { - self.trim_start().trim_end() - } - - /// Return a byte string slice with leading whitespace removed. - /// - /// Whitespace is defined according to the terms of the `White_Space` - /// Unicode property. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use bstr::{B, ByteSlice}; - /// - /// let s = B(" foo\tbar\t\u{2003}\n"); - /// assert_eq!(s.trim_start(), B("foo\tbar\t\u{2003}\n")); - /// ``` - #[cfg(feature = "unicode")] - #[inline] - fn trim_start(&self) -> &[u8] { - let start = whitespace_len_fwd(self.as_bytes()); - &self.as_bytes()[start..] - } - - /// Return a byte string slice with trailing whitespace removed. - /// - /// Whitespace is defined according to the terms of the `White_Space` - /// Unicode property. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use bstr::{B, ByteSlice}; - /// - /// let s = B(" foo\tbar\t\u{2003}\n"); - /// assert_eq!(s.trim_end(), B(" foo\tbar")); - /// ``` - #[cfg(feature = "unicode")] - #[inline] - fn trim_end(&self) -> &[u8] { - let end = whitespace_len_rev(self.as_bytes()); - &self.as_bytes()[..end] - } - - /// Return a byte string slice with leading and trailing characters - /// satisfying the given predicate removed. 
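The round-trip guarantee stated above for `lines_with_terminator` is easy to check directly; a small sketch with an invented mixed-endings buffer:

```
use bstr::ByteSlice;

// Unlike `lines`, `lines_with_terminator` is lossless: concatenating
// the yielded lines reproduces the original byte string exactly.
let s: &[u8] = b"foo\nbar\r\nbaz";
let mut rebuilt = Vec::new();
for line in s.lines_with_terminator() {
    rebuilt.extend_from_slice(line);
}
assert_eq!(rebuilt, s);
```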
- /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use bstr::{B, ByteSlice}; - /// - /// let s = b"123foo5bar789"; - /// assert_eq!(s.trim_with(|c| c.is_numeric()), B("foo5bar")); - /// ``` - #[inline] - fn trim_with bool>(&self, mut trim: F) -> &[u8] { - self.trim_start_with(&mut trim).trim_end_with(&mut trim) - } - - /// Return a byte string slice with leading characters satisfying the given - /// predicate removed. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use bstr::{B, ByteSlice}; - /// - /// let s = b"123foo5bar789"; - /// assert_eq!(s.trim_start_with(|c| c.is_numeric()), B("foo5bar789")); - /// ``` - #[inline] - fn trim_start_with bool>(&self, mut trim: F) -> &[u8] { - for (s, _, ch) in self.char_indices() { - if !trim(ch) { - return &self.as_bytes()[s..]; - } - } - b"" - } - - /// Return a byte string slice with trailing characters satisfying the - /// given predicate removed. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use bstr::{B, ByteSlice}; - /// - /// let s = b"123foo5bar789"; - /// assert_eq!(s.trim_end_with(|c| c.is_numeric()), B("123foo5bar")); - /// ``` - #[inline] - fn trim_end_with bool>(&self, mut trim: F) -> &[u8] { - for (_, e, ch) in self.char_indices().rev() { - if !trim(ch) { - return &self.as_bytes()[..e]; - } - } - b"" - } - - /// Returns a new `Vec` containing the lowercase equivalent of this - /// byte string. - /// - /// In this case, lowercase is defined according to the `Lowercase` Unicode - /// property. - /// - /// If invalid UTF-8 is seen, or if a character has no lowercase variant, - /// then it is written to the given buffer unchanged. - /// - /// Note that some characters in this byte string may expand into multiple - /// characters when changing the case, so the number of bytes written to - /// the given byte string may not be equivalent to the number of bytes in - /// this byte string. - /// - /// If you'd like to reuse an allocation for performance reasons, then use - /// [`to_lowercase_into`](#method.to_lowercase_into) instead. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use bstr::{B, ByteSlice}; - /// - /// let s = B("HELLO Β"); - /// assert_eq!("hello β".as_bytes(), s.to_lowercase().as_bytes()); - /// ``` - /// - /// Scripts without case are not changed: - /// - /// ``` - /// use bstr::{B, ByteSlice}; - /// - /// let s = B("农历新年"); - /// assert_eq!("农历新年".as_bytes(), s.to_lowercase().as_bytes()); - /// ``` - /// - /// Invalid UTF-8 remains as is: - /// - /// ``` - /// use bstr::{B, ByteSlice}; - /// - /// let s = B(b"FOO\xFFBAR\xE2\x98BAZ"); - /// assert_eq!(B(b"foo\xFFbar\xE2\x98baz"), s.to_lowercase().as_bytes()); - /// ``` - #[cfg(all(feature = "std", feature = "unicode"))] - #[inline] - fn to_lowercase(&self) -> Vec { - let mut buf = vec![]; - self.to_lowercase_into(&mut buf); - buf - } - - /// Writes the lowercase equivalent of this byte string into the given - /// buffer. The buffer is not cleared before written to. - /// - /// In this case, lowercase is defined according to the `Lowercase` - /// Unicode property. - /// - /// If invalid UTF-8 is seen, or if a character has no lowercase variant, - /// then it is written to the given buffer unchanged. - /// - /// Note that some characters in this byte string may expand into multiple - /// characters when changing the case, so the number of bytes written to - /// the given byte string may not be equivalent to the number of bytes in - /// this byte string. 
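A hedged sketch of `trim_with` with a custom predicate (the quoted sample text is invented); the predicate sees codepoints, with `\u{FFFD}` substituted for any invalid UTF-8:

```
use bstr::{B, ByteSlice};

// Strip surrounding ASCII punctuation, but keep punctuation that is
// not at the edges of the string.
let s: &[u8] = b"--\"hello, world\"!--";
assert_eq!(s.trim_with(|c| c.is_ascii_punctuation()), B("hello, world"));
```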
- /// - /// If you don't need to amortize allocation and instead prefer - /// convenience, then use [`to_lowercase`](#method.to_lowercase) instead. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use bstr::{B, ByteSlice}; - /// - /// let s = B("HELLO Β"); - /// - /// let mut buf = vec![]; - /// s.to_lowercase_into(&mut buf); - /// assert_eq!("hello β".as_bytes(), buf.as_bytes()); - /// ``` - /// - /// Scripts without case are not changed: - /// - /// ``` - /// use bstr::{B, ByteSlice}; - /// - /// let s = B("农历新年"); - /// - /// let mut buf = vec![]; - /// s.to_lowercase_into(&mut buf); - /// assert_eq!("农历新年".as_bytes(), buf.as_bytes()); - /// ``` - /// - /// Invalid UTF-8 remains as is: - /// - /// ``` - /// use bstr::{B, ByteSlice}; - /// - /// let s = B(b"FOO\xFFBAR\xE2\x98BAZ"); - /// - /// let mut buf = vec![]; - /// s.to_lowercase_into(&mut buf); - /// assert_eq!(B(b"foo\xFFbar\xE2\x98baz"), buf.as_bytes()); - /// ``` - #[cfg(all(feature = "std", feature = "unicode"))] - #[inline] - fn to_lowercase_into(&self, buf: &mut Vec) { - // TODO: This is the best we can do given what std exposes I think. - // If we roll our own case handling, then we might be able to do this - // a bit faster. We shouldn't roll our own case handling unless we - // need to, e.g., for doing caseless matching or case folding. - - // TODO(BUG): This doesn't handle any special casing rules. - - buf.reserve(self.as_bytes().len()); - for (s, e, ch) in self.char_indices() { - if ch == '\u{FFFD}' { - buf.push_str(&self.as_bytes()[s..e]); - } else if ch.is_ascii() { - buf.push_char(ch.to_ascii_lowercase()); - } else { - for upper in ch.to_lowercase() { - buf.push_char(upper); - } - } - } - } - - /// Returns a new `Vec` containing the ASCII lowercase equivalent of - /// this byte string. - /// - /// In this case, lowercase is only defined in ASCII letters. Namely, the - /// letters `A-Z` are converted to `a-z`. All other bytes remain unchanged. - /// In particular, the length of the byte string returned is always - /// equivalent to the length of this byte string. - /// - /// If you'd like to reuse an allocation for performance reasons, then use - /// [`make_ascii_lowercase`](#method.make_ascii_lowercase) to perform - /// the conversion in place. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use bstr::{B, ByteSlice}; - /// - /// let s = B("HELLO Β"); - /// assert_eq!("hello Β".as_bytes(), s.to_ascii_lowercase().as_bytes()); - /// ``` - /// - /// Invalid UTF-8 remains as is: - /// - /// ``` - /// use bstr::{B, ByteSlice}; - /// - /// let s = B(b"FOO\xFFBAR\xE2\x98BAZ"); - /// assert_eq!(s.to_ascii_lowercase(), B(b"foo\xFFbar\xE2\x98baz")); - /// ``` - #[cfg(feature = "std")] - #[inline] - fn to_ascii_lowercase(&self) -> Vec { - self.as_bytes().to_ascii_lowercase() - } - - /// Convert this byte string to its lowercase ASCII equivalent in place. - /// - /// In this case, lowercase is only defined in ASCII letters. Namely, the - /// letters `A-Z` are converted to `a-z`. All other bytes remain unchanged. - /// - /// If you don't need to do the conversion in - /// place and instead prefer convenience, then use - /// [`to_ascii_lowercase`](#method.to_ascii_lowercase) instead. 
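The same amortization idea as `replace_into` applies here; a minimal sketch (inputs invented, and assuming the default `std` and `unicode` features) that reuses one buffer for several case conversions:

```
use bstr::{B, ByteSlice};

// `to_lowercase_into` appends to the buffer, so clear it between inputs.
let inputs: &[&[u8]] = &[B("HELLO"), B("WORLD")];
let mut buf = Vec::new();
let mut lowered = Vec::new();
for input in inputs {
    buf.clear();
    input.to_lowercase_into(&mut buf);
    lowered.push(buf.clone());
}
assert_eq!(lowered[0], "hello".as_bytes());
assert_eq!(lowered[1], "world".as_bytes());
```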
- /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use bstr::ByteSlice; - /// - /// let mut s = >::from("HELLO Β"); - /// s.make_ascii_lowercase(); - /// assert_eq!(s, "hello Β".as_bytes()); - /// ``` - /// - /// Invalid UTF-8 remains as is: - /// - /// ``` - /// use bstr::{B, ByteSlice, ByteVec}; - /// - /// let mut s = >::from_slice(b"FOO\xFFBAR\xE2\x98BAZ"); - /// s.make_ascii_lowercase(); - /// assert_eq!(s, B(b"foo\xFFbar\xE2\x98baz")); - /// ``` - #[inline] - fn make_ascii_lowercase(&mut self) { - self.as_bytes_mut().make_ascii_lowercase(); - } - - /// Returns a new `Vec` containing the uppercase equivalent of this - /// byte string. - /// - /// In this case, uppercase is defined according to the `Uppercase` - /// Unicode property. - /// - /// If invalid UTF-8 is seen, or if a character has no uppercase variant, - /// then it is written to the given buffer unchanged. - /// - /// Note that some characters in this byte string may expand into multiple - /// characters when changing the case, so the number of bytes written to - /// the given byte string may not be equivalent to the number of bytes in - /// this byte string. - /// - /// If you'd like to reuse an allocation for performance reasons, then use - /// [`to_uppercase_into`](#method.to_uppercase_into) instead. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use bstr::{B, ByteSlice}; - /// - /// let s = B("hello β"); - /// assert_eq!(s.to_uppercase(), B("HELLO Β")); - /// ``` - /// - /// Scripts without case are not changed: - /// - /// ``` - /// use bstr::{B, ByteSlice}; - /// - /// let s = B("农历新年"); - /// assert_eq!(s.to_uppercase(), B("农历新年")); - /// ``` - /// - /// Invalid UTF-8 remains as is: - /// - /// ``` - /// use bstr::{B, ByteSlice}; - /// - /// let s = B(b"foo\xFFbar\xE2\x98baz"); - /// assert_eq!(s.to_uppercase(), B(b"FOO\xFFBAR\xE2\x98BAZ")); - /// ``` - #[cfg(all(feature = "std", feature = "unicode"))] - #[inline] - fn to_uppercase(&self) -> Vec { - let mut buf = vec![]; - self.to_uppercase_into(&mut buf); - buf - } - - /// Writes the uppercase equivalent of this byte string into the given - /// buffer. The buffer is not cleared before written to. - /// - /// In this case, uppercase is defined according to the `Uppercase` - /// Unicode property. - /// - /// If invalid UTF-8 is seen, or if a character has no uppercase variant, - /// then it is written to the given buffer unchanged. - /// - /// Note that some characters in this byte string may expand into multiple - /// characters when changing the case, so the number of bytes written to - /// the given byte string may not be equivalent to the number of bytes in - /// this byte string. - /// - /// If you don't need to amortize allocation and instead prefer - /// convenience, then use [`to_uppercase`](#method.to_uppercase) instead. 
- /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use bstr::{B, ByteSlice}; - /// - /// let s = B("hello β"); - /// - /// let mut buf = vec![]; - /// s.to_uppercase_into(&mut buf); - /// assert_eq!(buf, B("HELLO Β")); - /// ``` - /// - /// Scripts without case are not changed: - /// - /// ``` - /// use bstr::{B, ByteSlice}; - /// - /// let s = B("农历新年"); - /// - /// let mut buf = vec![]; - /// s.to_uppercase_into(&mut buf); - /// assert_eq!(buf, B("农历新年")); - /// ``` - /// - /// Invalid UTF-8 remains as is: - /// - /// ``` - /// use bstr::{B, ByteSlice}; - /// - /// let s = B(b"foo\xFFbar\xE2\x98baz"); - /// - /// let mut buf = vec![]; - /// s.to_uppercase_into(&mut buf); - /// assert_eq!(buf, B(b"FOO\xFFBAR\xE2\x98BAZ")); - /// ``` - #[cfg(all(feature = "std", feature = "unicode"))] - #[inline] - fn to_uppercase_into(&self, buf: &mut Vec) { - // TODO: This is the best we can do given what std exposes I think. - // If we roll our own case handling, then we might be able to do this - // a bit faster. We shouldn't roll our own case handling unless we - // need to, e.g., for doing caseless matching or case folding. - buf.reserve(self.as_bytes().len()); - for (s, e, ch) in self.char_indices() { - if ch == '\u{FFFD}' { - buf.push_str(&self.as_bytes()[s..e]); - } else if ch.is_ascii() { - buf.push_char(ch.to_ascii_uppercase()); - } else { - for upper in ch.to_uppercase() { - buf.push_char(upper); - } - } - } - } - - /// Returns a new `Vec` containing the ASCII uppercase equivalent of - /// this byte string. - /// - /// In this case, uppercase is only defined in ASCII letters. Namely, the - /// letters `a-z` are converted to `A-Z`. All other bytes remain unchanged. - /// In particular, the length of the byte string returned is always - /// equivalent to the length of this byte string. - /// - /// If you'd like to reuse an allocation for performance reasons, then use - /// [`make_ascii_uppercase`](#method.make_ascii_uppercase) to perform - /// the conversion in place. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use bstr::{B, ByteSlice}; - /// - /// let s = B("hello β"); - /// assert_eq!(s.to_ascii_uppercase(), B("HELLO β")); - /// ``` - /// - /// Invalid UTF-8 remains as is: - /// - /// ``` - /// use bstr::{B, ByteSlice}; - /// - /// let s = B(b"foo\xFFbar\xE2\x98baz"); - /// assert_eq!(s.to_ascii_uppercase(), B(b"FOO\xFFBAR\xE2\x98BAZ")); - /// ``` - #[cfg(feature = "std")] - #[inline] - fn to_ascii_uppercase(&self) -> Vec { - self.as_bytes().to_ascii_uppercase() - } - - /// Convert this byte string to its uppercase ASCII equivalent in place. - /// - /// In this case, uppercase is only defined in ASCII letters. Namely, the - /// letters `a-z` are converted to `A-Z`. All other bytes remain unchanged. - /// - /// If you don't need to do the conversion in - /// place and instead prefer convenience, then use - /// [`to_ascii_uppercase`](#method.to_ascii_uppercase) instead. 
- /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use bstr::{B, ByteSlice}; - /// - /// let mut s = >::from("hello β"); - /// s.make_ascii_uppercase(); - /// assert_eq!(s, B("HELLO β")); - /// ``` - /// - /// Invalid UTF-8 remains as is: - /// - /// ``` - /// use bstr::{B, ByteSlice, ByteVec}; - /// - /// let mut s = >::from_slice(b"foo\xFFbar\xE2\x98baz"); - /// s.make_ascii_uppercase(); - /// assert_eq!(s, B(b"FOO\xFFBAR\xE2\x98BAZ")); - /// ``` - #[inline] - fn make_ascii_uppercase(&mut self) { - self.as_bytes_mut().make_ascii_uppercase(); - } - - /// Reverse the bytes in this string, in place. - /// - /// This is not necessarily a well formed operation! For example, if this - /// byte string contains valid UTF-8 that isn't ASCII, then reversing the - /// string will likely result in invalid UTF-8 and otherwise non-sensical - /// content. - /// - /// Note that this is equivalent to the generic `[u8]::reverse` method. - /// This method is provided to permit callers to explicitly differentiate - /// between reversing bytes, codepoints and graphemes. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use bstr::ByteSlice; - /// - /// let mut s = >::from("hello"); - /// s.reverse_bytes(); - /// assert_eq!(s, "olleh".as_bytes()); - /// ``` - #[inline] - fn reverse_bytes(&mut self) { - self.as_bytes_mut().reverse(); - } - - /// Reverse the codepoints in this string, in place. - /// - /// If this byte string is valid UTF-8, then its reversal by codepoint - /// is also guaranteed to be valid UTF-8. - /// - /// This operation is equivalent to the following, but without allocating: - /// - /// ``` - /// use bstr::ByteSlice; - /// - /// let mut s = >::from("foo☃bar"); - /// - /// let mut chars: Vec = s.chars().collect(); - /// chars.reverse(); - /// - /// let reversed: String = chars.into_iter().collect(); - /// assert_eq!(reversed, "rab☃oof"); - /// ``` - /// - /// Note that this is not necessarily a well formed operation. For example, - /// if this byte string contains grapheme clusters with more than one - /// codepoint, then those grapheme clusters will not necessarily be - /// preserved. If you'd like to preserve grapheme clusters, then use - /// [`reverse_graphemes`](#method.reverse_graphemes) instead. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use bstr::ByteSlice; - /// - /// let mut s = >::from("foo☃bar"); - /// s.reverse_chars(); - /// assert_eq!(s, "rab☃oof".as_bytes()); - /// ``` - /// - /// This example shows that not all reversals lead to a well formed string. - /// For example, in this case, combining marks are used to put accents over - /// some letters, and those accent marks must appear after the codepoints - /// they modify. - /// - /// ``` - /// use bstr::{B, ByteSlice}; - /// - /// let mut s = >::from("résumé"); - /// s.reverse_chars(); - /// assert_eq!(s, B(b"\xCC\x81emus\xCC\x81er")); - /// ``` - /// - /// A word of warning: the above example relies on the fact that - /// `résumé` is in decomposed normal form, which means there are separate - /// codepoints for the accents above `e`. If it is instead in composed - /// normal form, then the example works: - /// - /// ``` - /// use bstr::{B, ByteSlice}; - /// - /// let mut s = >::from("résumé"); - /// s.reverse_chars(); - /// assert_eq!(s, B("émusér")); - /// ``` - /// - /// The point here is to be cautious and not assume that just because - /// `reverse_chars` works in one case, that it therefore works in all - /// cases. 
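One more way to see the difference spelled out above: reversing by codepoint preserves UTF-8 validity, while reversing raw bytes generally does not. A hedged sketch with a short Greek sample string:

```
use bstr::ByteSlice;

// `reverse_chars` keeps each codepoint's encoding intact...
let mut by_char = Vec::from("αβγ".as_bytes());
by_char.reverse_chars();
assert!(by_char.is_utf8());
assert_eq!(by_char, "γβα".as_bytes());

// ...while `reverse_bytes` scrambles multi-byte sequences.
let mut by_byte = Vec::from("αβγ".as_bytes());
by_byte.reverse_bytes();
assert!(!by_byte.is_utf8());
```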
- #[inline] - fn reverse_chars(&mut self) { - let mut i = 0; - loop { - let (_, size) = utf8::decode(&self.as_bytes()[i..]); - if size == 0 { - break; - } - if size > 1 { - self.as_bytes_mut()[i..i + size].reverse_bytes(); - } - i += size; - } - self.reverse_bytes(); - } - - /// Reverse the graphemes in this string, in place. - /// - /// If this byte string is valid UTF-8, then its reversal by grapheme - /// is also guaranteed to be valid UTF-8. - /// - /// This operation is equivalent to the following, but without allocating: - /// - /// ``` - /// use bstr::ByteSlice; - /// - /// let mut s = >::from("foo☃bar"); - /// - /// let mut graphemes: Vec<&str> = s.graphemes().collect(); - /// graphemes.reverse(); - /// - /// let reversed = graphemes.concat(); - /// assert_eq!(reversed, "rab☃oof"); - /// ``` - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use bstr::ByteSlice; - /// - /// let mut s = >::from("foo☃bar"); - /// s.reverse_graphemes(); - /// assert_eq!(s, "rab☃oof".as_bytes()); - /// ``` - /// - /// This example shows how this correctly handles grapheme clusters, - /// unlike `reverse_chars`. - /// - /// ``` - /// use bstr::ByteSlice; - /// - /// let mut s = >::from("résumé"); - /// s.reverse_graphemes(); - /// assert_eq!(s, "émusér".as_bytes()); - /// ``` - #[cfg(feature = "unicode")] - #[inline] - fn reverse_graphemes(&mut self) { - use crate::unicode::decode_grapheme; - - let mut i = 0; - loop { - let (_, size) = decode_grapheme(&self.as_bytes()[i..]); - if size == 0 { - break; - } - if size > 1 { - self.as_bytes_mut()[i..i + size].reverse_bytes(); - } - i += size; - } - self.reverse_bytes(); - } - - /// Returns true if and only if every byte in this byte string is ASCII. - /// - /// ASCII is an encoding that defines 128 codepoints. A byte corresponds to - /// an ASCII codepoint if and only if it is in the inclusive range - /// `[0, 127]`. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use bstr::{B, ByteSlice}; - /// - /// assert!(B("abc").is_ascii()); - /// assert!(!B("☃βツ").is_ascii()); - /// assert!(!B(b"\xFF").is_ascii()); - /// ``` - #[inline] - fn is_ascii(&self) -> bool { - ascii::first_non_ascii_byte(self.as_bytes()) == self.as_bytes().len() - } - - /// Returns true if and only if the entire byte string is valid UTF-8. - /// - /// If you need location information about where a byte string's first - /// invalid UTF-8 byte is, then use the [`to_str`](#method.to_str) method. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use bstr::{B, ByteSlice}; - /// - /// assert!(B("abc").is_utf8()); - /// assert!(B("☃βツ").is_utf8()); - /// // invalid bytes - /// assert!(!B(b"abc\xFF").is_utf8()); - /// // surrogate encoding - /// assert!(!B(b"\xED\xA0\x80").is_utf8()); - /// // incomplete sequence - /// assert!(!B(b"\xF0\x9D\x9Ca").is_utf8()); - /// // overlong sequence - /// assert!(!B(b"\xF0\x82\x82\xAC").is_utf8()); - /// ``` - #[inline] - fn is_utf8(&self) -> bool { - utf8::validate(self.as_bytes()).is_ok() - } - - /// Returns the last byte in this byte string, if it's non-empty. If this - /// byte string is empty, this returns `None`. - /// - /// Note that this is like the generic `[u8]::last`, except this returns - /// the byte by value instead of a reference to the byte. 
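A common pattern these predicates enable is a cheap ASCII fast path in front of the Unicode-aware routine; a minimal sketch (the helper name `lower` is invented for illustration, and the default `std` and `unicode` features are assumed):

```
use bstr::ByteSlice;

// Take the byte-wise ASCII conversion when possible, otherwise fall
// back to the full Unicode lowercasing.
fn lower(bytes: &[u8]) -> Vec<u8> {
    if bytes.is_ascii() {
        bytes.to_ascii_lowercase()
    } else {
        bytes.to_lowercase()
    }
}

assert_eq!(lower(b"FOO"), "foo".as_bytes());
assert_eq!(lower("ΒΕΤΑ".as_bytes()), "βετα".as_bytes());
```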
- /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use bstr::ByteSlice; - /// - /// assert_eq!(Some(b'z'), b"baz".last_byte()); - /// assert_eq!(None, b"".last_byte()); - /// ``` - #[inline] - fn last_byte(&self) -> Option { - let bytes = self.as_bytes(); - bytes.get(bytes.len().saturating_sub(1)).map(|&b| b) - } - - /// Returns the index of the first non-ASCII byte in this byte string (if - /// any such indices exist). Specifically, it returns the index of the - /// first byte with a value greater than or equal to `0x80`. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use bstr::{ByteSlice, B}; - /// - /// assert_eq!(Some(3), b"abc\xff".find_non_ascii_byte()); - /// assert_eq!(None, b"abcde".find_non_ascii_byte()); - /// assert_eq!(Some(0), B("😀").find_non_ascii_byte()); - /// ``` - #[inline] - fn find_non_ascii_byte(&self) -> Option { - let index = ascii::first_non_ascii_byte(self.as_bytes()); - if index == self.as_bytes().len() { - None - } else { - Some(index) - } - } - - /// Copies elements from one part of the slice to another part of itself, - /// where the parts may be overlapping. - /// - /// `src` is the range within this byte string to copy from, while `dest` - /// is the starting index of the range within this byte string to copy to. - /// The length indicated by `src` must be less than or equal to the number - /// of bytes from `dest` to the end of the byte string. - /// - /// # Panics - /// - /// Panics if either range is out of bounds, or if `src` is too big to fit - /// into `dest`, or if the end of `src` is before the start. - /// - /// # Examples - /// - /// Copying four bytes within a byte string: - /// - /// ``` - /// use bstr::{B, ByteSlice}; - /// - /// let mut buf = *b"Hello, World!"; - /// let s = &mut buf; - /// s.copy_within_str(1..5, 8); - /// assert_eq!(s, B("Hello, Wello!")); - /// ``` - #[inline] - fn copy_within_str(&mut self, src: R, dest: usize) - where - R: ops::RangeBounds, - { - // TODO: Deprecate this once slice::copy_within stabilizes. - let src_start = match src.start_bound() { - ops::Bound::Included(&n) => n, - ops::Bound::Excluded(&n) => { - n.checked_add(1).expect("attempted to index slice beyond max") - } - ops::Bound::Unbounded => 0, - }; - let src_end = match src.end_bound() { - ops::Bound::Included(&n) => { - n.checked_add(1).expect("attempted to index slice beyond max") - } - ops::Bound::Excluded(&n) => n, - ops::Bound::Unbounded => self.as_bytes().len(), - }; - assert!(src_start <= src_end, "src end is before src start"); - assert!(src_end <= self.as_bytes().len(), "src is out of bounds"); - let count = src_end - src_start; - assert!( - dest <= self.as_bytes().len() - count, - "dest is out of bounds", - ); - - // SAFETY: This is safe because we use ptr::copy to handle overlapping - // copies, and is also safe because we've checked all the bounds above. - // Finally, we are only dealing with u8 data, which is Copy, which - // means we can copy without worrying about ownership/destructors. - unsafe { - ptr::copy( - self.as_bytes().get_unchecked(src_start), - self.as_bytes_mut().get_unchecked_mut(dest), - count, - ); - } - } -} - -/// A single substring searcher fixed to a particular needle. -/// -/// The purpose of this type is to permit callers to construct a substring -/// searcher that can be used to search haystacks without the overhead of -/// constructing the searcher in the first place. 
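The construction-cost argument above is easiest to see when one needle is run over many haystacks; a hedged sketch with invented log lines:

```
use bstr::Finder;

// Build the searcher once, then reuse it across every haystack.
let finder = Finder::new("error");
let logs = ["all good", "error: disk full", "warning"];
let hits: Vec<Option<usize>> =
    logs.iter().map(|&line| finder.find(line)).collect();
assert_eq!(hits, vec![None, Some(0), None]);
```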
This is a somewhat niche -/// concern when it's necessary to re-use the same needle to search multiple -/// different haystacks with as little overhead as possible. In general, using -/// [`ByteSlice::find`](trait.ByteSlice.html#method.find) -/// or -/// [`ByteSlice::find_iter`](trait.ByteSlice.html#method.find_iter) -/// is good enough, but `Finder` is useful when you can meaningfully observe -/// searcher construction time in a profile. -/// -/// When the `std` feature is enabled, then this type has an `into_owned` -/// version which permits building a `Finder` that is not connected to the -/// lifetime of its needle. -#[derive(Clone, Debug)] -pub struct Finder<'a>(memmem::Finder<'a>); - -impl<'a> Finder<'a> { - /// Create a new finder for the given needle. - #[inline] - pub fn new>(needle: &'a B) -> Finder<'a> { - Finder(memmem::Finder::new(needle.as_ref())) - } - - /// Convert this finder into its owned variant, such that it no longer - /// borrows the needle. - /// - /// If this is already an owned finder, then this is a no-op. Otherwise, - /// this copies the needle. - /// - /// This is only available when the `std` feature is enabled. - #[cfg(feature = "std")] - #[inline] - pub fn into_owned(self) -> Finder<'static> { - Finder(self.0.into_owned()) - } - - /// Returns the needle that this finder searches for. - /// - /// Note that the lifetime of the needle returned is tied to the lifetime - /// of the finder, and may be shorter than the `'a` lifetime. Namely, a - /// finder's needle can be either borrowed or owned, so the lifetime of the - /// needle returned must necessarily be the shorter of the two. - #[inline] - pub fn needle(&self) -> &[u8] { - self.0.needle() - } - - /// Returns the index of the first occurrence of this needle in the given - /// haystack. - /// - /// The haystack may be any type that can be cheaply converted into a - /// `&[u8]`. This includes, but is not limited to, `&str` and `&[u8]`. - /// - /// # Complexity - /// - /// This routine is guaranteed to have worst case linear time complexity - /// with respect to both the needle and the haystack. That is, this runs - /// in `O(needle.len() + haystack.len())` time. - /// - /// This routine is also guaranteed to have worst case constant space - /// complexity. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use bstr::Finder; - /// - /// let haystack = "foo bar baz"; - /// assert_eq!(Some(0), Finder::new("foo").find(haystack)); - /// assert_eq!(Some(4), Finder::new("bar").find(haystack)); - /// assert_eq!(None, Finder::new("quux").find(haystack)); - /// ``` - #[inline] - pub fn find>(&self, haystack: B) -> Option { - self.0.find(haystack.as_ref()) - } -} - -/// A single substring reverse searcher fixed to a particular needle. -/// -/// The purpose of this type is to permit callers to construct a substring -/// searcher that can be used to search haystacks without the overhead of -/// constructing the searcher in the first place. This is a somewhat niche -/// concern when it's necessary to re-use the same needle to search multiple -/// different haystacks with as little overhead as possible. In general, using -/// [`ByteSlice::rfind`](trait.ByteSlice.html#method.rfind) -/// or -/// [`ByteSlice::rfind_iter`](trait.ByteSlice.html#method.rfind_iter) -/// is good enough, but `FinderReverse` is useful when you can meaningfully -/// observe searcher construction time in a profile. 
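To make that intent concrete, here is a minimal sketch of the construct-once, search-many pattern that `Finder` and `FinderReverse` are designed for (not taken from the removed bstr sources; the needle and haystacks are invented for illustration):

```
use bstr::{Finder, FinderReverse};

// Build each searcher once, then reuse it across many haystacks so the
// construction cost is paid only a single time.
let fwd = Finder::new("ab");
let rev = FinderReverse::new("ab");
for hay in ["abcab", "xyz", "ab"] {
    // `find` reports the leftmost match, `rfind` the rightmost, if any.
    assert_eq!(fwd.find(hay).is_some(), rev.rfind(hay).is_some());
}
assert_eq!(Some(0), fwd.find("abcab"));
assert_eq!(Some(3), rev.rfind("abcab"));
```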
-/// -/// When the `std` feature is enabled, then this type has an `into_owned` -/// version which permits building a `FinderReverse` that is not connected to -/// the lifetime of its needle. -#[derive(Clone, Debug)] -pub struct FinderReverse<'a>(memmem::FinderRev<'a>); - -impl<'a> FinderReverse<'a> { - /// Create a new reverse finder for the given needle. - #[inline] - pub fn new>(needle: &'a B) -> FinderReverse<'a> { - FinderReverse(memmem::FinderRev::new(needle.as_ref())) - } - - /// Convert this finder into its owned variant, such that it no longer - /// borrows the needle. - /// - /// If this is already an owned finder, then this is a no-op. Otherwise, - /// this copies the needle. - /// - /// This is only available when the `std` feature is enabled. - #[cfg(feature = "std")] - #[inline] - pub fn into_owned(self) -> FinderReverse<'static> { - FinderReverse(self.0.into_owned()) - } - - /// Returns the needle that this finder searches for. - /// - /// Note that the lifetime of the needle returned is tied to the lifetime - /// of this finder, and may be shorter than the `'a` lifetime. Namely, - /// a finder's needle can be either borrowed or owned, so the lifetime of - /// the needle returned must necessarily be the shorter of the two. - #[inline] - pub fn needle(&self) -> &[u8] { - self.0.needle() - } - - /// Returns the index of the last occurrence of this needle in the given - /// haystack. - /// - /// The haystack may be any type that can be cheaply converted into a - /// `&[u8]`. This includes, but is not limited to, `&str` and `&[u8]`. - /// - /// # Complexity - /// - /// This routine is guaranteed to have worst case linear time complexity - /// with respect to both the needle and the haystack. That is, this runs - /// in `O(needle.len() + haystack.len())` time. - /// - /// This routine is also guaranteed to have worst case constant space - /// complexity. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use bstr::FinderReverse; - /// - /// let haystack = "foo bar baz"; - /// assert_eq!(Some(0), FinderReverse::new("foo").rfind(haystack)); - /// assert_eq!(Some(4), FinderReverse::new("bar").rfind(haystack)); - /// assert_eq!(None, FinderReverse::new("quux").rfind(haystack)); - /// ``` - #[inline] - pub fn rfind>(&self, haystack: B) -> Option { - self.0.rfind(haystack.as_ref()) - } -} - -/// An iterator over non-overlapping substring matches. -/// -/// Matches are reported by the byte offset at which they begin. -/// -/// `'a` is the shorter of two lifetimes: the byte string being searched or the -/// byte string being looked for. -#[derive(Debug)] -pub struct Find<'a> { - it: memmem::FindIter<'a, 'a>, - haystack: &'a [u8], - needle: &'a [u8], -} - -impl<'a> Find<'a> { - fn new(haystack: &'a [u8], needle: &'a [u8]) -> Find<'a> { - Find { it: memmem::find_iter(haystack, needle), haystack, needle } - } -} - -impl<'a> Iterator for Find<'a> { - type Item = usize; - - #[inline] - fn next(&mut self) -> Option { - self.it.next() - } -} - -/// An iterator over non-overlapping substring matches in reverse. -/// -/// Matches are reported by the byte offset at which they begin. -/// -/// `'a` is the shorter of two lifetimes: the byte string being searched or the -/// byte string being looked for. 
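As an illustration of the "non-overlapping matches reported by starting offset" behaviour described here, a small sketch (not from the removed sources) using the `find_iter`/`rfind_iter` entry points that construct these iterators:

```
use bstr::ByteSlice;

// Matches never overlap and are reported by the byte offset where they begin.
let forward: Vec<usize> = b"aaaa".find_iter("aa").collect();
assert_eq!(forward, vec![0, 2]);

// The reverse iterator yields the same non-overlapping matches, last first.
let backward: Vec<usize> = b"aaaa".rfind_iter("aa").collect();
assert_eq!(backward, vec![2, 0]);
```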
-#[derive(Debug)] -pub struct FindReverse<'a> { - it: memmem::FindRevIter<'a, 'a>, - haystack: &'a [u8], - needle: &'a [u8], -} - -impl<'a> FindReverse<'a> { - fn new(haystack: &'a [u8], needle: &'a [u8]) -> FindReverse<'a> { - FindReverse { - it: memmem::rfind_iter(haystack, needle), - haystack, - needle, - } - } - - fn haystack(&self) -> &'a [u8] { - self.haystack - } - - fn needle(&self) -> &[u8] { - self.needle - } -} - -impl<'a> Iterator for FindReverse<'a> { - type Item = usize; - - #[inline] - fn next(&mut self) -> Option { - self.it.next() - } -} - -/// An iterator over the bytes in a byte string. -/// -/// `'a` is the lifetime of the byte string being traversed. -#[derive(Clone, Debug)] -pub struct Bytes<'a> { - it: slice::Iter<'a, u8>, -} - -impl<'a> Bytes<'a> { - /// Views the remaining underlying data as a subslice of the original data. - /// This has the same lifetime as the original slice, - /// and so the iterator can continue to be used while this exists. - #[inline] - pub fn as_slice(&self) -> &'a [u8] { - self.it.as_slice() - } -} - -impl<'a> Iterator for Bytes<'a> { - type Item = u8; - - #[inline] - fn next(&mut self) -> Option { - self.it.next().map(|&b| b) - } - - #[inline] - fn size_hint(&self) -> (usize, Option) { - self.it.size_hint() - } -} - -impl<'a> DoubleEndedIterator for Bytes<'a> { - #[inline] - fn next_back(&mut self) -> Option { - self.it.next_back().map(|&b| b) - } -} - -impl<'a> ExactSizeIterator for Bytes<'a> { - #[inline] - fn len(&self) -> usize { - self.it.len() - } -} - -impl<'a> iter::FusedIterator for Bytes<'a> {} - -/// An iterator over the fields in a byte string, separated by whitespace. -/// -/// This iterator splits on contiguous runs of whitespace, such that the fields -/// in `foo\t\t\n \nbar` are `foo` and `bar`. -/// -/// `'a` is the lifetime of the byte string being split. -#[derive(Debug)] -pub struct Fields<'a> { - it: FieldsWith<'a, fn(char) -> bool>, -} - -impl<'a> Fields<'a> { - fn new(bytes: &'a [u8]) -> Fields<'a> { - Fields { it: bytes.fields_with(|ch| ch.is_whitespace()) } - } -} - -impl<'a> Iterator for Fields<'a> { - type Item = &'a [u8]; - - #[inline] - fn next(&mut self) -> Option<&'a [u8]> { - self.it.next() - } -} - -/// An iterator over fields in the byte string, separated by a predicate over -/// codepoints. -/// -/// This iterator splits a byte string based on its predicate function such -/// that the elements returned are separated by contiguous runs of codepoints -/// for which the predicate returns true. -/// -/// `'a` is the lifetime of the byte string being split, while `F` is the type -/// of the predicate, i.e., `FnMut(char) -> bool`. -#[derive(Debug)] -pub struct FieldsWith<'a, F> { - f: F, - bytes: &'a [u8], - chars: CharIndices<'a>, -} - -impl<'a, F: FnMut(char) -> bool> FieldsWith<'a, F> { - fn new(bytes: &'a [u8], f: F) -> FieldsWith<'a, F> { - FieldsWith { f, bytes, chars: bytes.char_indices() } - } -} - -impl<'a, F: FnMut(char) -> bool> Iterator for FieldsWith<'a, F> { - type Item = &'a [u8]; - - #[inline] - fn next(&mut self) -> Option<&'a [u8]> { - let (start, mut end); - loop { - match self.chars.next() { - None => return None, - Some((s, e, ch)) => { - if !(self.f)(ch) { - start = s; - end = e; - break; - } - } - } - } - while let Some((_, e, ch)) = self.chars.next() { - if (self.f)(ch) { - break; - } - end = e; - } - Some(&self.bytes[start..end]) - } -} - -/// An iterator over substrings in a byte string, split by a separator. -/// -/// `'a` is the lifetime of the byte string being split. 
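A brief sketch (not part of the removed sources) of the predicate-based splitting that `FieldsWith` implements, via the `fields_with` method documented in this module:

```
use bstr::{B, ByteSlice};

// Runs of codepoints for which the predicate returns true act as separators;
// here, runs of ASCII digits separate the fields.
let fields: Vec<&[u8]> =
    b"foo1bar22baz".fields_with(|ch| ch.is_numeric()).collect();
assert_eq!(fields, vec![B("foo"), B("bar"), B("baz")]);
```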
-#[derive(Debug)] -pub struct Split<'a> { - finder: Find<'a>, - /// The end position of the previous match of our splitter. The element - /// we yield corresponds to the substring starting at `last` up to the - /// beginning of the next match of the splitter. - last: usize, - /// Only set when iteration is complete. A corner case here is when a - /// splitter is matched at the end of the haystack. At that point, we still - /// need to yield an empty string following it. - done: bool, -} - -impl<'a> Split<'a> { - fn new(haystack: &'a [u8], splitter: &'a [u8]) -> Split<'a> { - let finder = haystack.find_iter(splitter); - Split { finder, last: 0, done: false } - } -} - -impl<'a> Iterator for Split<'a> { - type Item = &'a [u8]; - - #[inline] - fn next(&mut self) -> Option<&'a [u8]> { - let haystack = self.finder.haystack; - match self.finder.next() { - Some(start) => { - let next = &haystack[self.last..start]; - self.last = start + self.finder.needle.len(); - Some(next) - } - None => { - if self.last >= haystack.len() { - if !self.done { - self.done = true; - Some(b"") - } else { - None - } - } else { - let s = &haystack[self.last..]; - self.last = haystack.len(); - self.done = true; - Some(s) - } - } - } - } -} - -/// An iterator over substrings in a byte string, split by a separator, in -/// reverse. -/// -/// `'a` is the lifetime of the byte string being split, while `F` is the type -/// of the predicate, i.e., `FnMut(char) -> bool`. -#[derive(Debug)] -pub struct SplitReverse<'a> { - finder: FindReverse<'a>, - /// The end position of the previous match of our splitter. The element - /// we yield corresponds to the substring starting at `last` up to the - /// beginning of the next match of the splitter. - last: usize, - /// Only set when iteration is complete. A corner case here is when a - /// splitter is matched at the end of the haystack. At that point, we still - /// need to yield an empty string following it. - done: bool, -} - -impl<'a> SplitReverse<'a> { - fn new(haystack: &'a [u8], splitter: &'a [u8]) -> SplitReverse<'a> { - let finder = haystack.rfind_iter(splitter); - SplitReverse { finder, last: haystack.len(), done: false } - } -} - -impl<'a> Iterator for SplitReverse<'a> { - type Item = &'a [u8]; - - #[inline] - fn next(&mut self) -> Option<&'a [u8]> { - let haystack = self.finder.haystack(); - match self.finder.next() { - Some(start) => { - let nlen = self.finder.needle().len(); - let next = &haystack[start + nlen..self.last]; - self.last = start; - Some(next) - } - None => { - if self.last == 0 { - if !self.done { - self.done = true; - Some(b"") - } else { - None - } - } else { - let s = &haystack[..self.last]; - self.last = 0; - self.done = true; - Some(s) - } - } - } - } -} - -/// An iterator over at most `n` substrings in a byte string, split by a -/// separator. -/// -/// `'a` is the lifetime of the byte string being split, while `F` is the type -/// of the predicate, i.e., `FnMut(char) -> bool`. 
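The corner case called out in the `Split` implementation above (a trailing empty element) and the `SplitN` limit behave as in this sketch (not from the removed sources), using the `split_str` and `splitn_str` methods that construct these iterators:

```
use bstr::{B, ByteSlice};

// A separator match at the very end still yields a trailing empty element.
let parts: Vec<&[u8]> = b"a,b,".split_str(",").collect();
assert_eq!(parts, vec![B("a"), B("b"), B("")]);

// With a limit, the final element carries the unsplit remainder.
let limited: Vec<&[u8]> = b"a,b,c,d".splitn_str(3, ",").collect();
assert_eq!(limited, vec![B("a"), B("b"), B("c,d")]);
```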
-#[derive(Debug)] -pub struct SplitN<'a> { - split: Split<'a>, - limit: usize, - count: usize, -} - -impl<'a> SplitN<'a> { - fn new( - haystack: &'a [u8], - splitter: &'a [u8], - limit: usize, - ) -> SplitN<'a> { - let split = haystack.split_str(splitter); - SplitN { split, limit, count: 0 } - } -} - -impl<'a> Iterator for SplitN<'a> { - type Item = &'a [u8]; - - #[inline] - fn next(&mut self) -> Option<&'a [u8]> { - self.count += 1; - if self.count > self.limit || self.split.done { - None - } else if self.count == self.limit { - Some(&self.split.finder.haystack[self.split.last..]) - } else { - self.split.next() - } - } -} - -/// An iterator over at most `n` substrings in a byte string, split by a -/// separator, in reverse. -/// -/// `'a` is the lifetime of the byte string being split, while `F` is the type -/// of the predicate, i.e., `FnMut(char) -> bool`. -#[derive(Debug)] -pub struct SplitNReverse<'a> { - split: SplitReverse<'a>, - limit: usize, - count: usize, -} - -impl<'a> SplitNReverse<'a> { - fn new( - haystack: &'a [u8], - splitter: &'a [u8], - limit: usize, - ) -> SplitNReverse<'a> { - let split = haystack.rsplit_str(splitter); - SplitNReverse { split, limit, count: 0 } - } -} - -impl<'a> Iterator for SplitNReverse<'a> { - type Item = &'a [u8]; - - #[inline] - fn next(&mut self) -> Option<&'a [u8]> { - self.count += 1; - if self.count > self.limit || self.split.done { - None - } else if self.count == self.limit { - Some(&self.split.finder.haystack()[..self.split.last]) - } else { - self.split.next() - } - } -} - -/// An iterator over all lines in a byte string, without their terminators. -/// -/// For this iterator, the only line terminators recognized are `\r\n` and -/// `\n`. -/// -/// `'a` is the lifetime of the byte string being iterated over. -pub struct Lines<'a> { - it: LinesWithTerminator<'a>, -} - -impl<'a> Lines<'a> { - fn new(bytes: &'a [u8]) -> Lines<'a> { - Lines { it: LinesWithTerminator::new(bytes) } - } -} - -impl<'a> Iterator for Lines<'a> { - type Item = &'a [u8]; - - #[inline] - fn next(&mut self) -> Option<&'a [u8]> { - let mut line = self.it.next()?; - if line.last_byte() == Some(b'\n') { - line = &line[..line.len() - 1]; - if line.last_byte() == Some(b'\r') { - line = &line[..line.len() - 1]; - } - } - Some(line) - } -} - -/// An iterator over all lines in a byte string, including their terminators. -/// -/// For this iterator, the only line terminator recognized is `\n`. (Since -/// line terminators are included, this also handles `\r\n` line endings.) -/// -/// Line terminators are only included if they are present in the original -/// byte string. For example, the last line in a byte string may not end with -/// a line terminator. -/// -/// Concatenating all elements yielded by this iterator is guaranteed to yield -/// the original byte string. -/// -/// `'a` is the lifetime of the byte string being iterated over. 
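A short sketch (not from the removed sources) contrasting `Lines` with `LinesWithTerminator`, including the concatenation guarantee noted above:

```
use bstr::{B, ByteSlice};

let text = b"foo\r\nbar\nbaz";

// `lines` strips `\n` and `\r\n` terminators...
let stripped: Vec<&[u8]> = text.lines().collect();
assert_eq!(stripped, vec![B("foo"), B("bar"), B("baz")]);

// ...while `lines_with_terminator` keeps them, so concatenating its elements
// reproduces the original byte string exactly.
let rejoined = bstr::concat(text.lines_with_terminator());
assert_eq!(rejoined, &text[..]);
```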
-pub struct LinesWithTerminator<'a> { - bytes: &'a [u8], -} - -impl<'a> LinesWithTerminator<'a> { - fn new(bytes: &'a [u8]) -> LinesWithTerminator<'a> { - LinesWithTerminator { bytes } - } -} - -impl<'a> Iterator for LinesWithTerminator<'a> { - type Item = &'a [u8]; - - #[inline] - fn next(&mut self) -> Option<&'a [u8]> { - match self.bytes.find_byte(b'\n') { - None if self.bytes.is_empty() => None, - None => { - let line = self.bytes; - self.bytes = b""; - Some(line) - } - Some(end) => { - let line = &self.bytes[..end + 1]; - self.bytes = &self.bytes[end + 1..]; - Some(line) - } - } - } -} - -#[cfg(test)] -mod tests { - use crate::ext_slice::{ByteSlice, B}; - use crate::tests::LOSSY_TESTS; - - #[test] - fn to_str_lossy() { - for (i, &(expected, input)) in LOSSY_TESTS.iter().enumerate() { - let got = B(input).to_str_lossy(); - assert_eq!( - expected.as_bytes(), - got.as_bytes(), - "to_str_lossy(ith: {:?}, given: {:?})", - i, - input, - ); - - let mut got = String::new(); - B(input).to_str_lossy_into(&mut got); - assert_eq!( - expected.as_bytes(), - got.as_bytes(), - "to_str_lossy_into", - ); - - let got = String::from_utf8_lossy(input); - assert_eq!(expected.as_bytes(), got.as_bytes(), "std"); - } - } - - #[test] - #[should_panic] - fn copy_within_fail1() { - let mut buf = *b"foobar"; - let s = &mut buf; - s.copy_within_str(0..2, 5); - } - - #[test] - #[should_panic] - fn copy_within_fail2() { - let mut buf = *b"foobar"; - let s = &mut buf; - s.copy_within_str(3..2, 0); - } - - #[test] - #[should_panic] - fn copy_within_fail3() { - let mut buf = *b"foobar"; - let s = &mut buf; - s.copy_within_str(5..7, 0); - } - - #[test] - #[should_panic] - fn copy_within_fail4() { - let mut buf = *b"foobar"; - let s = &mut buf; - s.copy_within_str(0..1, 6); - } -} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/ext_vec.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/ext_vec.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/ext_vec.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/ext_vec.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,1105 +0,0 @@ -use std::borrow::Cow; -use std::error; -use std::ffi::{OsStr, OsString}; -use std::fmt; -use std::iter; -use std::ops; -use std::path::{Path, PathBuf}; -use std::ptr; -use std::str; -use std::vec; - -use crate::ext_slice::ByteSlice; -use crate::utf8::{self, Utf8Error}; - -/// Concatenate the elements given by the iterator together into a single -/// `Vec`. -/// -/// The elements may be any type that can be cheaply converted into an `&[u8]`. -/// This includes, but is not limited to, `&str`, `&BStr` and `&[u8]` itself. -/// -/// # Examples -/// -/// Basic usage: -/// -/// ``` -/// use bstr; -/// -/// let s = bstr::concat(&["foo", "bar", "baz"]); -/// assert_eq!(s, "foobarbaz".as_bytes()); -/// ``` -#[inline] -pub fn concat(elements: I) -> Vec -where - T: AsRef<[u8]>, - I: IntoIterator, -{ - let mut dest = vec![]; - for element in elements { - dest.push_str(element); - } - dest -} - -/// Join the elements given by the iterator with the given separator into a -/// single `Vec`. -/// -/// Both the separator and the elements may be any type that can be cheaply -/// converted into an `&[u8]`. This includes, but is not limited to, -/// `&str`, `&BStr` and `&[u8]` itself. 
-/// -/// # Examples -/// -/// Basic usage: -/// -/// ``` -/// use bstr; -/// -/// let s = bstr::join(",", &["foo", "bar", "baz"]); -/// assert_eq!(s, "foo,bar,baz".as_bytes()); -/// ``` -#[inline] -pub fn join(separator: B, elements: I) -> Vec -where - B: AsRef<[u8]>, - T: AsRef<[u8]>, - I: IntoIterator, -{ - let mut it = elements.into_iter(); - let mut dest = vec![]; - match it.next() { - None => return dest, - Some(first) => { - dest.push_str(first); - } - } - for element in it { - dest.push_str(&separator); - dest.push_str(element); - } - dest -} - -impl ByteVec for Vec { - #[inline] - fn as_vec(&self) -> &Vec { - self - } - - #[inline] - fn as_vec_mut(&mut self) -> &mut Vec { - self - } - - #[inline] - fn into_vec(self) -> Vec { - self - } -} - -/// Ensure that callers cannot implement `ByteSlice` by making an -/// umplementable trait its super trait. -pub trait Sealed {} -impl Sealed for Vec {} - -/// A trait that extends `Vec` with string oriented methods. -/// -/// Note that when using the constructor methods, such as -/// `ByteVec::from_slice`, one should actually call them using the concrete -/// type. For example: -/// -/// ``` -/// use bstr::{B, ByteVec}; -/// -/// let s = Vec::from_slice(b"abc"); // NOT ByteVec::from_slice("...") -/// assert_eq!(s, B("abc")); -/// ``` -pub trait ByteVec: Sealed { - /// A method for accessing the raw vector bytes of this type. This is - /// always a no-op and callers shouldn't care about it. This only exists - /// for making the extension trait work. - #[doc(hidden)] - fn as_vec(&self) -> &Vec; - - /// A method for accessing the raw vector bytes of this type, mutably. This - /// is always a no-op and callers shouldn't care about it. This only exists - /// for making the extension trait work. - #[doc(hidden)] - fn as_vec_mut(&mut self) -> &mut Vec; - - /// A method for consuming ownership of this vector. This is always a no-op - /// and callers shouldn't care about it. This only exists for making the - /// extension trait work. - #[doc(hidden)] - fn into_vec(self) -> Vec - where - Self: Sized; - - /// Create a new owned byte string from the given byte slice. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use bstr::{B, ByteVec}; - /// - /// let s = Vec::from_slice(b"abc"); - /// assert_eq!(s, B("abc")); - /// ``` - #[inline] - fn from_slice>(bytes: B) -> Vec { - bytes.as_ref().to_vec() - } - - /// Create a new byte string from an owned OS string. - /// - /// On Unix, this always succeeds and is zero cost. On non-Unix systems, - /// this returns the original OS string if it is not valid UTF-8. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use std::ffi::OsString; - /// - /// use bstr::{B, ByteVec}; - /// - /// let os_str = OsString::from("foo"); - /// let bs = Vec::from_os_string(os_str).expect("valid UTF-8"); - /// assert_eq!(bs, B("foo")); - /// ``` - #[inline] - fn from_os_string(os_str: OsString) -> Result, OsString> { - #[cfg(unix)] - #[inline] - fn imp(os_str: OsString) -> Result, OsString> { - use std::os::unix::ffi::OsStringExt; - - Ok(Vec::from(os_str.into_vec())) - } - - #[cfg(not(unix))] - #[inline] - fn imp(os_str: OsString) -> Result, OsString> { - os_str.into_string().map(Vec::from) - } - - imp(os_str) - } - - /// Lossily create a new byte string from an OS string slice. - /// - /// On Unix, this always succeeds, is zero cost and always returns a slice. - /// On non-Unix systems, this does a UTF-8 check. 
If the given OS string - /// slice is not valid UTF-8, then it is lossily decoded into valid UTF-8 - /// (with invalid bytes replaced by the Unicode replacement codepoint). - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use std::ffi::OsStr; - /// - /// use bstr::{B, ByteVec}; - /// - /// let os_str = OsStr::new("foo"); - /// let bs = Vec::from_os_str_lossy(os_str); - /// assert_eq!(bs, B("foo")); - /// ``` - #[inline] - fn from_os_str_lossy<'a>(os_str: &'a OsStr) -> Cow<'a, [u8]> { - #[cfg(unix)] - #[inline] - fn imp<'a>(os_str: &'a OsStr) -> Cow<'a, [u8]> { - use std::os::unix::ffi::OsStrExt; - - Cow::Borrowed(os_str.as_bytes()) - } - - #[cfg(not(unix))] - #[inline] - fn imp<'a>(os_str: &'a OsStr) -> Cow<'a, [u8]> { - match os_str.to_string_lossy() { - Cow::Borrowed(x) => Cow::Borrowed(x.as_bytes()), - Cow::Owned(x) => Cow::Owned(Vec::from(x)), - } - } - - imp(os_str) - } - - /// Create a new byte string from an owned file path. - /// - /// On Unix, this always succeeds and is zero cost. On non-Unix systems, - /// this returns the original path if it is not valid UTF-8. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use std::path::PathBuf; - /// - /// use bstr::{B, ByteVec}; - /// - /// let path = PathBuf::from("foo"); - /// let bs = Vec::from_path_buf(path).expect("must be valid UTF-8"); - /// assert_eq!(bs, B("foo")); - /// ``` - #[inline] - fn from_path_buf(path: PathBuf) -> Result, PathBuf> { - Vec::from_os_string(path.into_os_string()).map_err(PathBuf::from) - } - - /// Lossily create a new byte string from a file path. - /// - /// On Unix, this always succeeds, is zero cost and always returns a slice. - /// On non-Unix systems, this does a UTF-8 check. If the given path is not - /// valid UTF-8, then it is lossily decoded into valid UTF-8 (with invalid - /// bytes replaced by the Unicode replacement codepoint). - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use std::path::Path; - /// - /// use bstr::{B, ByteVec}; - /// - /// let path = Path::new("foo"); - /// let bs = Vec::from_path_lossy(path); - /// assert_eq!(bs, B("foo")); - /// ``` - #[inline] - fn from_path_lossy<'a>(path: &'a Path) -> Cow<'a, [u8]> { - Vec::from_os_str_lossy(path.as_os_str()) - } - - /// Appends the given byte to the end of this byte string. - /// - /// Note that this is equivalent to the generic `Vec::push` method. This - /// method is provided to permit callers to explicitly differentiate - /// between pushing bytes, codepoints and strings. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use bstr::ByteVec; - /// - /// let mut s = >::from("abc"); - /// s.push_byte(b'\xE2'); - /// s.push_byte(b'\x98'); - /// s.push_byte(b'\x83'); - /// assert_eq!(s, "abc☃".as_bytes()); - /// ``` - #[inline] - fn push_byte(&mut self, byte: u8) { - self.as_vec_mut().push(byte); - } - - /// Appends the given `char` to the end of this byte string. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use bstr::ByteVec; - /// - /// let mut s = >::from("abc"); - /// s.push_char('1'); - /// s.push_char('2'); - /// s.push_char('3'); - /// assert_eq!(s, "abc123".as_bytes()); - /// ``` - #[inline] - fn push_char(&mut self, ch: char) { - if ch.len_utf8() == 1 { - self.push_byte(ch as u8); - return; - } - self.as_vec_mut() - .extend_from_slice(ch.encode_utf8(&mut [0; 4]).as_bytes()); - } - - /// Appends the given slice to the end of this byte string. This accepts - /// any type that be converted to a `&[u8]`. 
This includes, but is not - /// limited to, `&str`, `&BStr`, and of course, `&[u8]` itself. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use bstr::ByteVec; - /// - /// let mut s = >::from("abc"); - /// s.push_str(b"123"); - /// assert_eq!(s, "abc123".as_bytes()); - /// ``` - #[inline] - fn push_str>(&mut self, bytes: B) { - self.as_vec_mut().extend_from_slice(bytes.as_ref()); - } - - /// Converts a `Vec` into a `String` if and only if this byte string is - /// valid UTF-8. - /// - /// If it is not valid UTF-8, then a - /// [`FromUtf8Error`](struct.FromUtf8Error.html) - /// is returned. (This error can be used to examine why UTF-8 validation - /// failed, or to regain the original byte string.) - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use bstr::ByteVec; - /// - /// # fn example() -> Result<(), Box> { - /// let bytes = Vec::from("hello"); - /// let string = bytes.into_string()?; - /// - /// assert_eq!("hello", string); - /// # Ok(()) }; example().unwrap() - /// ``` - /// - /// If this byte string is not valid UTF-8, then an error will be returned. - /// That error can then be used to inspect the location at which invalid - /// UTF-8 was found, or to regain the original byte string: - /// - /// ``` - /// use bstr::{B, ByteVec}; - /// - /// let bytes = Vec::from_slice(b"foo\xFFbar"); - /// let err = bytes.into_string().unwrap_err(); - /// - /// assert_eq!(err.utf8_error().valid_up_to(), 3); - /// assert_eq!(err.utf8_error().error_len(), Some(1)); - /// - /// // At no point in this example is an allocation performed. - /// let bytes = Vec::from(err.into_vec()); - /// assert_eq!(bytes, B(b"foo\xFFbar")); - /// ``` - #[inline] - fn into_string(self) -> Result - where - Self: Sized, - { - match utf8::validate(self.as_vec()) { - Err(err) => Err(FromUtf8Error { original: self.into_vec(), err }), - Ok(()) => { - // SAFETY: This is safe because of the guarantees provided by - // utf8::validate. - unsafe { Ok(self.into_string_unchecked()) } - } - } - } - - /// Lossily converts a `Vec` into a `String`. If this byte string - /// contains invalid UTF-8, then the invalid bytes are replaced with the - /// Unicode replacement codepoint. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use bstr::ByteVec; - /// - /// let bytes = Vec::from_slice(b"foo\xFFbar"); - /// let string = bytes.into_string_lossy(); - /// assert_eq!(string, "foo\u{FFFD}bar"); - /// ``` - #[inline] - fn into_string_lossy(self) -> String - where - Self: Sized, - { - match self.as_vec().to_str_lossy() { - Cow::Borrowed(_) => { - // SAFETY: to_str_lossy() returning a Cow::Borrowed guarantees - // the entire string is valid utf8. - unsafe { self.into_string_unchecked() } - } - Cow::Owned(s) => s, - } - } - - /// Unsafely convert this byte string into a `String`, without checking for - /// valid UTF-8. - /// - /// # Safety - /// - /// Callers *must* ensure that this byte string is valid UTF-8 before - /// calling this method. Converting a byte string into a `String` that is - /// not valid UTF-8 is considered undefined behavior. - /// - /// This routine is useful in performance sensitive contexts where the - /// UTF-8 validity of the byte string is already known and it is - /// undesirable to pay the cost of an additional UTF-8 validation check - /// that [`into_string`](#method.into_string) performs. 
- /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use bstr::ByteVec; - /// - /// // SAFETY: This is safe because string literals are guaranteed to be - /// // valid UTF-8 by the Rust compiler. - /// let s = unsafe { Vec::from("☃βツ").into_string_unchecked() }; - /// assert_eq!("☃βツ", s); - /// ``` - #[inline] - unsafe fn into_string_unchecked(self) -> String - where - Self: Sized, - { - String::from_utf8_unchecked(self.into_vec()) - } - - /// Converts this byte string into an OS string, in place. - /// - /// On Unix, this always succeeds and is zero cost. On non-Unix systems, - /// this returns the original byte string if it is not valid UTF-8. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use std::ffi::OsStr; - /// - /// use bstr::ByteVec; - /// - /// let bs = Vec::from("foo"); - /// let os_str = bs.into_os_string().expect("should be valid UTF-8"); - /// assert_eq!(os_str, OsStr::new("foo")); - /// ``` - #[inline] - fn into_os_string(self) -> Result> - where - Self: Sized, - { - #[cfg(unix)] - #[inline] - fn imp(v: Vec) -> Result> { - use std::os::unix::ffi::OsStringExt; - - Ok(OsString::from_vec(v)) - } - - #[cfg(not(unix))] - #[inline] - fn imp(v: Vec) -> Result> { - match v.into_string() { - Ok(s) => Ok(OsString::from(s)), - Err(err) => Err(err.into_vec()), - } - } - - imp(self.into_vec()) - } - - /// Lossily converts this byte string into an OS string, in place. - /// - /// On Unix, this always succeeds and is zero cost. On non-Unix systems, - /// this will perform a UTF-8 check and lossily convert this byte string - /// into valid UTF-8 using the Unicode replacement codepoint. - /// - /// Note that this can prevent the correct roundtripping of file paths on - /// non-Unix systems such as Windows, where file paths are an arbitrary - /// sequence of 16-bit integers. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use bstr::ByteVec; - /// - /// let bs = Vec::from_slice(b"foo\xFFbar"); - /// let os_str = bs.into_os_string_lossy(); - /// assert_eq!(os_str.to_string_lossy(), "foo\u{FFFD}bar"); - /// ``` - #[inline] - fn into_os_string_lossy(self) -> OsString - where - Self: Sized, - { - #[cfg(unix)] - #[inline] - fn imp(v: Vec) -> OsString { - use std::os::unix::ffi::OsStringExt; - - OsString::from_vec(v) - } - - #[cfg(not(unix))] - #[inline] - fn imp(v: Vec) -> OsString { - OsString::from(v.into_string_lossy()) - } - - imp(self.into_vec()) - } - - /// Converts this byte string into an owned file path, in place. - /// - /// On Unix, this always succeeds and is zero cost. On non-Unix systems, - /// this returns the original byte string if it is not valid UTF-8. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use bstr::ByteVec; - /// - /// let bs = Vec::from("foo"); - /// let path = bs.into_path_buf().expect("should be valid UTF-8"); - /// assert_eq!(path.as_os_str(), "foo"); - /// ``` - #[inline] - fn into_path_buf(self) -> Result> - where - Self: Sized, - { - self.into_os_string().map(PathBuf::from) - } - - /// Lossily converts this byte string into an owned file path, in place. - /// - /// On Unix, this always succeeds and is zero cost. On non-Unix systems, - /// this will perform a UTF-8 check and lossily convert this byte string - /// into valid UTF-8 using the Unicode replacement codepoint. - /// - /// Note that this can prevent the correct roundtripping of file paths on - /// non-Unix systems such as Windows, where file paths are an arbitrary - /// sequence of 16-bit integers. 
- /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use bstr::ByteVec; - /// - /// let bs = Vec::from_slice(b"foo\xFFbar"); - /// let path = bs.into_path_buf_lossy(); - /// assert_eq!(path.to_string_lossy(), "foo\u{FFFD}bar"); - /// ``` - #[inline] - fn into_path_buf_lossy(self) -> PathBuf - where - Self: Sized, - { - PathBuf::from(self.into_os_string_lossy()) - } - - /// Removes the last byte from this `Vec` and returns it. - /// - /// If this byte string is empty, then `None` is returned. - /// - /// If the last codepoint in this byte string is not ASCII, then removing - /// the last byte could make this byte string contain invalid UTF-8. - /// - /// Note that this is equivalent to the generic `Vec::pop` method. This - /// method is provided to permit callers to explicitly differentiate - /// between popping bytes and codepoints. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use bstr::ByteVec; - /// - /// let mut s = Vec::from("foo"); - /// assert_eq!(s.pop_byte(), Some(b'o')); - /// assert_eq!(s.pop_byte(), Some(b'o')); - /// assert_eq!(s.pop_byte(), Some(b'f')); - /// assert_eq!(s.pop_byte(), None); - /// ``` - #[inline] - fn pop_byte(&mut self) -> Option { - self.as_vec_mut().pop() - } - - /// Removes the last codepoint from this `Vec` and returns it. - /// - /// If this byte string is empty, then `None` is returned. If the last - /// bytes of this byte string do not correspond to a valid UTF-8 code unit - /// sequence, then the Unicode replacement codepoint is yielded instead in - /// accordance with the - /// [replacement codepoint substitution policy](index.html#handling-of-invalid-utf8-8). - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use bstr::ByteVec; - /// - /// let mut s = Vec::from("foo"); - /// assert_eq!(s.pop_char(), Some('o')); - /// assert_eq!(s.pop_char(), Some('o')); - /// assert_eq!(s.pop_char(), Some('f')); - /// assert_eq!(s.pop_char(), None); - /// ``` - /// - /// This shows the replacement codepoint substitution policy. Note that - /// the first pop yields a replacement codepoint but actually removes two - /// bytes. This is in contrast with subsequent pops when encountering - /// `\xFF` since `\xFF` is never a valid prefix for any valid UTF-8 - /// code unit sequence. - /// - /// ``` - /// use bstr::ByteVec; - /// - /// let mut s = Vec::from_slice(b"f\xFF\xFF\xFFoo\xE2\x98"); - /// assert_eq!(s.pop_char(), Some('\u{FFFD}')); - /// assert_eq!(s.pop_char(), Some('o')); - /// assert_eq!(s.pop_char(), Some('o')); - /// assert_eq!(s.pop_char(), Some('\u{FFFD}')); - /// assert_eq!(s.pop_char(), Some('\u{FFFD}')); - /// assert_eq!(s.pop_char(), Some('\u{FFFD}')); - /// assert_eq!(s.pop_char(), Some('f')); - /// assert_eq!(s.pop_char(), None); - /// ``` - #[inline] - fn pop_char(&mut self) -> Option { - let (ch, size) = utf8::decode_last_lossy(self.as_vec()); - if size == 0 { - return None; - } - let new_len = self.as_vec().len() - size; - self.as_vec_mut().truncate(new_len); - Some(ch) - } - - /// Removes a `char` from this `Vec` at the given byte position and - /// returns it. - /// - /// If the bytes at the given position do not lead to a valid UTF-8 code - /// unit sequence, then a - /// [replacement codepoint is returned instead](index.html#handling-of-invalid-utf8-8). - /// - /// # Panics - /// - /// Panics if `at` is larger than or equal to this byte string's length. 
- /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use bstr::ByteVec; - /// - /// let mut s = Vec::from("foo☃bar"); - /// assert_eq!(s.remove_char(3), '☃'); - /// assert_eq!(s, b"foobar"); - /// ``` - /// - /// This example shows how the Unicode replacement codepoint policy is - /// used: - /// - /// ``` - /// use bstr::ByteVec; - /// - /// let mut s = Vec::from_slice(b"foo\xFFbar"); - /// assert_eq!(s.remove_char(3), '\u{FFFD}'); - /// assert_eq!(s, b"foobar"); - /// ``` - #[inline] - fn remove_char(&mut self, at: usize) -> char { - let (ch, size) = utf8::decode_lossy(&self.as_vec()[at..]); - assert!( - size > 0, - "expected {} to be less than {}", - at, - self.as_vec().len(), - ); - self.as_vec_mut().drain(at..at + size); - ch - } - - /// Inserts the given codepoint into this `Vec` at a particular byte - /// position. - /// - /// This is an `O(n)` operation as it may copy a number of elements in this - /// byte string proportional to its length. - /// - /// # Panics - /// - /// Panics if `at` is larger than the byte string's length. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use bstr::ByteVec; - /// - /// let mut s = Vec::from("foobar"); - /// s.insert_char(3, '☃'); - /// assert_eq!(s, "foo☃bar".as_bytes()); - /// ``` - #[inline] - fn insert_char(&mut self, at: usize, ch: char) { - self.insert_str(at, ch.encode_utf8(&mut [0; 4]).as_bytes()); - } - - /// Inserts the given byte string into this byte string at a particular - /// byte position. - /// - /// This is an `O(n)` operation as it may copy a number of elements in this - /// byte string proportional to its length. - /// - /// The given byte string may be any type that can be cheaply converted - /// into a `&[u8]`. This includes, but is not limited to, `&str` and - /// `&[u8]`. - /// - /// # Panics - /// - /// Panics if `at` is larger than the byte string's length. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use bstr::ByteVec; - /// - /// let mut s = Vec::from("foobar"); - /// s.insert_str(3, "☃☃☃"); - /// assert_eq!(s, "foo☃☃☃bar".as_bytes()); - /// ``` - #[inline] - fn insert_str>(&mut self, at: usize, bytes: B) { - let bytes = bytes.as_ref(); - let len = self.as_vec().len(); - assert!(at <= len, "expected {} to be <= {}", at, len); - - // SAFETY: We'd like to efficiently splice in the given bytes into - // this byte string. Since we are only working with `u8` elements here, - // we only need to consider whether our bounds are correct and whether - // our byte string has enough space. - self.as_vec_mut().reserve(bytes.len()); - unsafe { - // Shift bytes after `at` over by the length of `bytes` to make - // room for it. This requires referencing two regions of memory - // that may overlap, so we use ptr::copy. - ptr::copy( - self.as_vec().as_ptr().add(at), - self.as_vec_mut().as_mut_ptr().add(at + bytes.len()), - len - at, - ); - // Now copy the bytes given into the room we made above. In this - // case, we know that the given bytes cannot possibly overlap - // with this byte string since we have a mutable borrow of the - // latter. Thus, we can use a nonoverlapping copy. - ptr::copy_nonoverlapping( - bytes.as_ptr(), - self.as_vec_mut().as_mut_ptr().add(at), - bytes.len(), - ); - self.as_vec_mut().set_len(len + bytes.len()); - } - } - - /// Removes the specified range in this byte string and replaces it with - /// the given bytes. The given bytes do not need to have the same length - /// as the range provided. 
- /// - /// # Panics - /// - /// Panics if the given range is invalid. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use bstr::ByteVec; - /// - /// let mut s = Vec::from("foobar"); - /// s.replace_range(2..4, "xxxxx"); - /// assert_eq!(s, "foxxxxxar".as_bytes()); - /// ``` - #[inline] - fn replace_range(&mut self, range: R, replace_with: B) - where - R: ops::RangeBounds, - B: AsRef<[u8]>, - { - self.as_vec_mut().splice(range, replace_with.as_ref().iter().cloned()); - } - - /// Creates a draining iterator that removes the specified range in this - /// `Vec` and yields each of the removed bytes. - /// - /// Note that the elements specified by the given range are removed - /// regardless of whether the returned iterator is fully exhausted. - /// - /// Also note that is is unspecified how many bytes are removed from the - /// `Vec` if the `DrainBytes` iterator is leaked. - /// - /// # Panics - /// - /// Panics if the given range is not valid. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use bstr::ByteVec; - /// - /// let mut s = Vec::from("foobar"); - /// { - /// let mut drainer = s.drain_bytes(2..4); - /// assert_eq!(drainer.next(), Some(b'o')); - /// assert_eq!(drainer.next(), Some(b'b')); - /// assert_eq!(drainer.next(), None); - /// } - /// assert_eq!(s, "foar".as_bytes()); - /// ``` - #[inline] - fn drain_bytes(&mut self, range: R) -> DrainBytes<'_> - where - R: ops::RangeBounds, - { - DrainBytes { it: self.as_vec_mut().drain(range) } - } -} - -/// A draining byte oriented iterator for `Vec`. -/// -/// This iterator is created by -/// [`ByteVec::drain_bytes`](trait.ByteVec.html#method.drain_bytes). -/// -/// # Examples -/// -/// Basic usage: -/// -/// ``` -/// use bstr::ByteVec; -/// -/// let mut s = Vec::from("foobar"); -/// { -/// let mut drainer = s.drain_bytes(2..4); -/// assert_eq!(drainer.next(), Some(b'o')); -/// assert_eq!(drainer.next(), Some(b'b')); -/// assert_eq!(drainer.next(), None); -/// } -/// assert_eq!(s, "foar".as_bytes()); -/// ``` -#[derive(Debug)] -pub struct DrainBytes<'a> { - it: vec::Drain<'a, u8>, -} - -impl<'a> iter::FusedIterator for DrainBytes<'a> {} - -impl<'a> Iterator for DrainBytes<'a> { - type Item = u8; - - #[inline] - fn next(&mut self) -> Option { - self.it.next() - } -} - -impl<'a> DoubleEndedIterator for DrainBytes<'a> { - #[inline] - fn next_back(&mut self) -> Option { - self.it.next_back() - } -} - -impl<'a> ExactSizeIterator for DrainBytes<'a> { - #[inline] - fn len(&self) -> usize { - self.it.len() - } -} - -/// An error that may occur when converting a `Vec` to a `String`. -/// -/// This error includes the original `Vec` that failed to convert to a -/// `String`. This permits callers to recover the allocation used even if it -/// it not valid UTF-8. -/// -/// # Examples -/// -/// Basic usage: -/// -/// ``` -/// use bstr::{B, ByteVec}; -/// -/// let bytes = Vec::from_slice(b"foo\xFFbar"); -/// let err = bytes.into_string().unwrap_err(); -/// -/// assert_eq!(err.utf8_error().valid_up_to(), 3); -/// assert_eq!(err.utf8_error().error_len(), Some(1)); -/// -/// // At no point in this example is an allocation performed. -/// let bytes = Vec::from(err.into_vec()); -/// assert_eq!(bytes, B(b"foo\xFFbar")); -/// ``` -#[derive(Debug, Eq, PartialEq)] -pub struct FromUtf8Error { - original: Vec, - err: Utf8Error, -} - -impl FromUtf8Error { - /// Return the original bytes as a slice that failed to convert to a - /// `String`. 
- /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use bstr::{B, ByteVec}; - /// - /// let bytes = Vec::from_slice(b"foo\xFFbar"); - /// let err = bytes.into_string().unwrap_err(); - /// - /// // At no point in this example is an allocation performed. - /// assert_eq!(err.as_bytes(), B(b"foo\xFFbar")); - /// ``` - #[inline] - pub fn as_bytes(&self) -> &[u8] { - &self.original - } - - /// Consume this error and return the original byte string that failed to - /// convert to a `String`. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use bstr::{B, ByteVec}; - /// - /// let bytes = Vec::from_slice(b"foo\xFFbar"); - /// let err = bytes.into_string().unwrap_err(); - /// let original = err.into_vec(); - /// - /// // At no point in this example is an allocation performed. - /// assert_eq!(original, B(b"foo\xFFbar")); - /// ``` - #[inline] - pub fn into_vec(self) -> Vec { - self.original - } - - /// Return the underlying UTF-8 error that occurred. This error provides - /// information on the nature and location of the invalid UTF-8 detected. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use bstr::{B, ByteVec}; - /// - /// let bytes = Vec::from_slice(b"foo\xFFbar"); - /// let err = bytes.into_string().unwrap_err(); - /// - /// assert_eq!(err.utf8_error().valid_up_to(), 3); - /// assert_eq!(err.utf8_error().error_len(), Some(1)); - /// ``` - #[inline] - pub fn utf8_error(&self) -> &Utf8Error { - &self.err - } -} - -impl error::Error for FromUtf8Error { - #[inline] - fn description(&self) -> &str { - "invalid UTF-8 vector" - } -} - -impl fmt::Display for FromUtf8Error { - #[inline] - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "{}", self.err) - } -} - -#[cfg(test)] -mod tests { - use crate::ext_vec::ByteVec; - - #[test] - fn insert() { - let mut s = vec![]; - s.insert_str(0, "foo"); - assert_eq!(s, "foo".as_bytes()); - - let mut s = Vec::from("a"); - s.insert_str(0, "foo"); - assert_eq!(s, "fooa".as_bytes()); - - let mut s = Vec::from("a"); - s.insert_str(1, "foo"); - assert_eq!(s, "afoo".as_bytes()); - - let mut s = Vec::from("foobar"); - s.insert_str(3, "quux"); - assert_eq!(s, "fooquuxbar".as_bytes()); - - let mut s = Vec::from("foobar"); - s.insert_str(3, "x"); - assert_eq!(s, "fooxbar".as_bytes()); - - let mut s = Vec::from("foobar"); - s.insert_str(0, "x"); - assert_eq!(s, "xfoobar".as_bytes()); - - let mut s = Vec::from("foobar"); - s.insert_str(6, "x"); - assert_eq!(s, "foobarx".as_bytes()); - - let mut s = Vec::from("foobar"); - s.insert_str(3, "quuxbazquux"); - assert_eq!(s, "fooquuxbazquuxbar".as_bytes()); - } - - #[test] - #[should_panic] - fn insert_fail1() { - let mut s = vec![]; - s.insert_str(1, "foo"); - } - - #[test] - #[should_panic] - fn insert_fail2() { - let mut s = Vec::from("a"); - s.insert_str(2, "foo"); - } - - #[test] - #[should_panic] - fn insert_fail3() { - let mut s = Vec::from("foobar"); - s.insert_str(7, "foo"); - } -} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/impls.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/impls.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/impls.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/impls.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,987 +0,0 @@ -macro_rules! 
impl_partial_eq { - ($lhs:ty, $rhs:ty) => { - impl<'a, 'b> PartialEq<$rhs> for $lhs { - #[inline] - fn eq(&self, other: &$rhs) -> bool { - let other: &[u8] = other.as_ref(); - PartialEq::eq(self.as_bytes(), other) - } - } - - impl<'a, 'b> PartialEq<$lhs> for $rhs { - #[inline] - fn eq(&self, other: &$lhs) -> bool { - let this: &[u8] = self.as_ref(); - PartialEq::eq(this, other.as_bytes()) - } - } - }; -} - -#[cfg(feature = "std")] -macro_rules! impl_partial_eq_cow { - ($lhs:ty, $rhs:ty) => { - impl<'a, 'b> PartialEq<$rhs> for $lhs { - #[inline] - fn eq(&self, other: &$rhs) -> bool { - let other: &[u8] = (&**other).as_ref(); - PartialEq::eq(self.as_bytes(), other) - } - } - - impl<'a, 'b> PartialEq<$lhs> for $rhs { - #[inline] - fn eq(&self, other: &$lhs) -> bool { - let this: &[u8] = (&**other).as_ref(); - PartialEq::eq(this, self.as_bytes()) - } - } - }; -} - -macro_rules! impl_partial_ord { - ($lhs:ty, $rhs:ty) => { - impl<'a, 'b> PartialOrd<$rhs> for $lhs { - #[inline] - fn partial_cmp(&self, other: &$rhs) -> Option { - let other: &[u8] = other.as_ref(); - PartialOrd::partial_cmp(self.as_bytes(), other) - } - } - - impl<'a, 'b> PartialOrd<$lhs> for $rhs { - #[inline] - fn partial_cmp(&self, other: &$lhs) -> Option { - let this: &[u8] = self.as_ref(); - PartialOrd::partial_cmp(this, other.as_bytes()) - } - } - }; -} - -#[cfg(feature = "std")] -mod bstring { - use std::borrow::{Borrow, Cow, ToOwned}; - use std::cmp::Ordering; - use std::fmt; - use std::iter::FromIterator; - use std::ops; - - use crate::bstr::BStr; - use crate::bstring::BString; - use crate::ext_vec::ByteVec; - - impl fmt::Display for BString { - #[inline] - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - fmt::Display::fmt(self.as_bstr(), f) - } - } - - impl fmt::Debug for BString { - #[inline] - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - fmt::Debug::fmt(self.as_bstr(), f) - } - } - - impl ops::Deref for BString { - type Target = Vec; - - #[inline] - fn deref(&self) -> &Vec { - &self.bytes - } - } - - impl ops::DerefMut for BString { - #[inline] - fn deref_mut(&mut self) -> &mut Vec { - &mut self.bytes - } - } - - impl AsRef<[u8]> for BString { - #[inline] - fn as_ref(&self) -> &[u8] { - &self.bytes - } - } - - impl AsRef for BString { - #[inline] - fn as_ref(&self) -> &BStr { - self.as_bstr() - } - } - - impl AsMut<[u8]> for BString { - #[inline] - fn as_mut(&mut self) -> &mut [u8] { - &mut self.bytes - } - } - - impl AsMut for BString { - #[inline] - fn as_mut(&mut self) -> &mut BStr { - self.as_mut_bstr() - } - } - - impl Borrow for BString { - #[inline] - fn borrow(&self) -> &BStr { - self.as_bstr() - } - } - - impl ToOwned for BStr { - type Owned = BString; - - #[inline] - fn to_owned(&self) -> BString { - BString::from(self) - } - } - - impl Default for BString { - fn default() -> BString { - BString::from(vec![]) - } - } - - impl<'a> From<&'a [u8]> for BString { - #[inline] - fn from(s: &'a [u8]) -> BString { - BString::from(s.to_vec()) - } - } - - impl From> for BString { - #[inline] - fn from(s: Vec) -> BString { - BString { bytes: s } - } - } - - impl From for Vec { - #[inline] - fn from(s: BString) -> Vec { - s.bytes - } - } - - impl<'a> From<&'a str> for BString { - #[inline] - fn from(s: &'a str) -> BString { - BString::from(s.as_bytes().to_vec()) - } - } - - impl From for BString { - #[inline] - fn from(s: String) -> BString { - BString::from(s.into_bytes()) - } - } - - impl<'a> From<&'a BStr> for BString { - #[inline] - fn from(s: &'a BStr) -> BString { - 
BString::from(s.bytes.to_vec()) - } - } - - impl<'a> From for Cow<'a, BStr> { - #[inline] - fn from(s: BString) -> Cow<'a, BStr> { - Cow::Owned(s) - } - } - - impl FromIterator for BString { - #[inline] - fn from_iter>(iter: T) -> BString { - BString::from(iter.into_iter().collect::()) - } - } - - impl FromIterator for BString { - #[inline] - fn from_iter>(iter: T) -> BString { - BString::from(iter.into_iter().collect::>()) - } - } - - impl<'a> FromIterator<&'a str> for BString { - #[inline] - fn from_iter>(iter: T) -> BString { - let mut buf = vec![]; - for b in iter { - buf.push_str(b); - } - BString::from(buf) - } - } - - impl<'a> FromIterator<&'a [u8]> for BString { - #[inline] - fn from_iter>(iter: T) -> BString { - let mut buf = vec![]; - for b in iter { - buf.push_str(b); - } - BString::from(buf) - } - } - - impl<'a> FromIterator<&'a BStr> for BString { - #[inline] - fn from_iter>(iter: T) -> BString { - let mut buf = vec![]; - for b in iter { - buf.push_str(b); - } - BString::from(buf) - } - } - - impl FromIterator for BString { - #[inline] - fn from_iter>(iter: T) -> BString { - let mut buf = vec![]; - for b in iter { - buf.push_str(b); - } - BString::from(buf) - } - } - - impl Eq for BString {} - - impl PartialEq for BString { - #[inline] - fn eq(&self, other: &BString) -> bool { - &self[..] == &other[..] - } - } - - impl_partial_eq!(BString, Vec); - impl_partial_eq!(BString, [u8]); - impl_partial_eq!(BString, &'a [u8]); - impl_partial_eq!(BString, String); - impl_partial_eq!(BString, str); - impl_partial_eq!(BString, &'a str); - impl_partial_eq!(BString, BStr); - impl_partial_eq!(BString, &'a BStr); - - impl PartialOrd for BString { - #[inline] - fn partial_cmp(&self, other: &BString) -> Option { - PartialOrd::partial_cmp(&self.bytes, &other.bytes) - } - } - - impl Ord for BString { - #[inline] - fn cmp(&self, other: &BString) -> Ordering { - self.partial_cmp(other).unwrap() - } - } - - impl_partial_ord!(BString, Vec); - impl_partial_ord!(BString, [u8]); - impl_partial_ord!(BString, &'a [u8]); - impl_partial_ord!(BString, String); - impl_partial_ord!(BString, str); - impl_partial_ord!(BString, &'a str); - impl_partial_ord!(BString, BStr); - impl_partial_ord!(BString, &'a BStr); -} - -mod bstr { - #[cfg(feature = "std")] - use std::borrow::Cow; - - use core::cmp::Ordering; - use core::fmt; - use core::ops; - - use crate::bstr::BStr; - use crate::ext_slice::ByteSlice; - - impl fmt::Display for BStr { - #[inline] - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - /// Write the given bstr (lossily) to the given formatter. - fn write_bstr( - f: &mut fmt::Formatter<'_>, - bstr: &BStr, - ) -> Result<(), fmt::Error> { - for chunk in bstr.utf8_chunks() { - f.write_str(chunk.valid())?; - if !chunk.invalid().is_empty() { - f.write_str("\u{FFFD}")?; - } - } - Ok(()) - } - - /// Write 'num' fill characters to the given formatter. 
- fn write_pads( - f: &mut fmt::Formatter<'_>, - num: usize, - ) -> fmt::Result { - let fill = f.fill(); - for _ in 0..num { - f.write_fmt(format_args!("{}", fill))?; - } - Ok(()) - } - - if let Some(align) = f.align() { - let width = f.width().unwrap_or(0); - let nchars = self.chars().count(); - let remaining_pads = width.saturating_sub(nchars); - match align { - fmt::Alignment::Left => { - write_bstr(f, self)?; - write_pads(f, remaining_pads)?; - } - fmt::Alignment::Right => { - write_pads(f, remaining_pads)?; - write_bstr(f, self)?; - } - fmt::Alignment::Center => { - let half = remaining_pads / 2; - let second_half = if remaining_pads % 2 == 0 { - half - } else { - half + 1 - }; - write_pads(f, half)?; - write_bstr(f, self)?; - write_pads(f, second_half)?; - } - } - Ok(()) - } else { - write_bstr(f, self)?; - Ok(()) - } - } - } - - impl fmt::Debug for BStr { - #[inline] - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "\"")?; - for (s, e, ch) in self.char_indices() { - match ch { - '\0' => write!(f, "\\0")?, - '\u{FFFD}' => { - let bytes = self[s..e].as_bytes(); - if bytes == b"\xEF\xBF\xBD" { - write!(f, "{}", ch.escape_debug())?; - } else { - for &b in self[s..e].as_bytes() { - write!(f, r"\x{:02X}", b)?; - } - } - } - // ASCII control characters except \0, \n, \r, \t - '\x01'..='\x08' - | '\x0b' - | '\x0c' - | '\x0e'..='\x19' - | '\x7f' => { - write!(f, "\\x{:02x}", ch as u32)?; - } - '\n' | '\r' | '\t' | _ => { - write!(f, "{}", ch.escape_debug())?; - } - } - } - write!(f, "\"")?; - Ok(()) - } - } - - impl ops::Deref for BStr { - type Target = [u8]; - - #[inline] - fn deref(&self) -> &[u8] { - &self.bytes - } - } - - impl ops::DerefMut for BStr { - #[inline] - fn deref_mut(&mut self) -> &mut [u8] { - &mut self.bytes - } - } - - impl ops::Index for BStr { - type Output = u8; - - #[inline] - fn index(&self, idx: usize) -> &u8 { - &self.as_bytes()[idx] - } - } - - impl ops::Index for BStr { - type Output = BStr; - - #[inline] - fn index(&self, _: ops::RangeFull) -> &BStr { - self - } - } - - impl ops::Index> for BStr { - type Output = BStr; - - #[inline] - fn index(&self, r: ops::Range) -> &BStr { - BStr::new(&self.as_bytes()[r.start..r.end]) - } - } - - impl ops::Index> for BStr { - type Output = BStr; - - #[inline] - fn index(&self, r: ops::RangeInclusive) -> &BStr { - BStr::new(&self.as_bytes()[*r.start()..=*r.end()]) - } - } - - impl ops::Index> for BStr { - type Output = BStr; - - #[inline] - fn index(&self, r: ops::RangeFrom) -> &BStr { - BStr::new(&self.as_bytes()[r.start..]) - } - } - - impl ops::Index> for BStr { - type Output = BStr; - - #[inline] - fn index(&self, r: ops::RangeTo) -> &BStr { - BStr::new(&self.as_bytes()[..r.end]) - } - } - - impl ops::Index> for BStr { - type Output = BStr; - - #[inline] - fn index(&self, r: ops::RangeToInclusive) -> &BStr { - BStr::new(&self.as_bytes()[..=r.end]) - } - } - - impl ops::IndexMut for BStr { - #[inline] - fn index_mut(&mut self, idx: usize) -> &mut u8 { - &mut self.bytes[idx] - } - } - - impl ops::IndexMut for BStr { - #[inline] - fn index_mut(&mut self, _: ops::RangeFull) -> &mut BStr { - self - } - } - - impl ops::IndexMut> for BStr { - #[inline] - fn index_mut(&mut self, r: ops::Range) -> &mut BStr { - BStr::from_bytes_mut(&mut self.bytes[r.start..r.end]) - } - } - - impl ops::IndexMut> for BStr { - #[inline] - fn index_mut(&mut self, r: ops::RangeInclusive) -> &mut BStr { - BStr::from_bytes_mut(&mut self.bytes[*r.start()..=*r.end()]) - } - } - - impl ops::IndexMut> for BStr { - #[inline] - fn 
index_mut(&mut self, r: ops::RangeFrom) -> &mut BStr { - BStr::from_bytes_mut(&mut self.bytes[r.start..]) - } - } - - impl ops::IndexMut> for BStr { - #[inline] - fn index_mut(&mut self, r: ops::RangeTo) -> &mut BStr { - BStr::from_bytes_mut(&mut self.bytes[..r.end]) - } - } - - impl ops::IndexMut> for BStr { - #[inline] - fn index_mut(&mut self, r: ops::RangeToInclusive) -> &mut BStr { - BStr::from_bytes_mut(&mut self.bytes[..=r.end]) - } - } - - impl AsRef<[u8]> for BStr { - #[inline] - fn as_ref(&self) -> &[u8] { - self.as_bytes() - } - } - - impl AsRef for [u8] { - #[inline] - fn as_ref(&self) -> &BStr { - BStr::new(self) - } - } - - impl AsRef for str { - #[inline] - fn as_ref(&self) -> &BStr { - BStr::new(self) - } - } - - impl AsMut<[u8]> for BStr { - #[inline] - fn as_mut(&mut self) -> &mut [u8] { - &mut self.bytes - } - } - - impl AsMut for [u8] { - #[inline] - fn as_mut(&mut self) -> &mut BStr { - BStr::new_mut(self) - } - } - - impl<'a> Default for &'a BStr { - fn default() -> &'a BStr { - BStr::from_bytes(b"") - } - } - - impl<'a> Default for &'a mut BStr { - fn default() -> &'a mut BStr { - BStr::from_bytes_mut(&mut []) - } - } - - impl<'a> From<&'a [u8]> for &'a BStr { - #[inline] - fn from(s: &'a [u8]) -> &'a BStr { - BStr::from_bytes(s) - } - } - - impl<'a> From<&'a str> for &'a BStr { - #[inline] - fn from(s: &'a str) -> &'a BStr { - BStr::from_bytes(s.as_bytes()) - } - } - - #[cfg(feature = "std")] - impl<'a> From<&'a BStr> for Cow<'a, BStr> { - #[inline] - fn from(s: &'a BStr) -> Cow<'a, BStr> { - Cow::Borrowed(s) - } - } - - #[cfg(feature = "std")] - impl From> for Box { - #[inline] - fn from(s: Box<[u8]>) -> Box { - BStr::from_boxed_bytes(s) - } - } - - #[cfg(feature = "std")] - impl From> for Box<[u8]> { - #[inline] - fn from(s: Box) -> Box<[u8]> { - BStr::into_boxed_bytes(s) - } - } - - impl Eq for BStr {} - - impl PartialEq for BStr { - #[inline] - fn eq(&self, other: &BStr) -> bool { - self.as_bytes() == other.as_bytes() - } - } - - impl_partial_eq!(BStr, [u8]); - impl_partial_eq!(BStr, &'a [u8]); - impl_partial_eq!(BStr, str); - impl_partial_eq!(BStr, &'a str); - - #[cfg(feature = "std")] - impl_partial_eq!(BStr, Vec); - #[cfg(feature = "std")] - impl_partial_eq!(&'a BStr, Vec); - #[cfg(feature = "std")] - impl_partial_eq!(BStr, String); - #[cfg(feature = "std")] - impl_partial_eq!(&'a BStr, String); - #[cfg(feature = "std")] - impl_partial_eq_cow!(&'a BStr, Cow<'a, BStr>); - #[cfg(feature = "std")] - impl_partial_eq_cow!(&'a BStr, Cow<'a, str>); - #[cfg(feature = "std")] - impl_partial_eq_cow!(&'a BStr, Cow<'a, [u8]>); - - impl PartialOrd for BStr { - #[inline] - fn partial_cmp(&self, other: &BStr) -> Option { - PartialOrd::partial_cmp(self.as_bytes(), other.as_bytes()) - } - } - - impl Ord for BStr { - #[inline] - fn cmp(&self, other: &BStr) -> Ordering { - self.partial_cmp(other).unwrap() - } - } - - impl_partial_ord!(BStr, [u8]); - impl_partial_ord!(BStr, &'a [u8]); - impl_partial_ord!(BStr, str); - impl_partial_ord!(BStr, &'a str); - - #[cfg(feature = "std")] - impl_partial_ord!(BStr, Vec); - #[cfg(feature = "std")] - impl_partial_ord!(&'a BStr, Vec); - #[cfg(feature = "std")] - impl_partial_ord!(BStr, String); - #[cfg(feature = "std")] - impl_partial_ord!(&'a BStr, String); -} - -#[cfg(feature = "serde1-nostd")] -mod bstr_serde { - use core::fmt; - - use serde::{ - de::Error, de::Visitor, Deserialize, Deserializer, Serialize, - Serializer, - }; - - use crate::bstr::BStr; - - impl Serialize for BStr { - #[inline] - fn serialize(&self, serializer: S) -> Result 
- where - S: Serializer, - { - serializer.serialize_bytes(self.as_bytes()) - } - } - - impl<'a, 'de: 'a> Deserialize<'de> for &'a BStr { - #[inline] - fn deserialize(deserializer: D) -> Result<&'a BStr, D::Error> - where - D: Deserializer<'de>, - { - struct BStrVisitor; - - impl<'de> Visitor<'de> for BStrVisitor { - type Value = &'de BStr; - - fn expecting(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.write_str("a borrowed byte string") - } - - #[inline] - fn visit_borrowed_bytes( - self, - value: &'de [u8], - ) -> Result<&'de BStr, E> { - Ok(BStr::new(value)) - } - - #[inline] - fn visit_borrowed_str( - self, - value: &'de str, - ) -> Result<&'de BStr, E> { - Ok(BStr::new(value)) - } - } - - deserializer.deserialize_bytes(BStrVisitor) - } - } -} - -#[cfg(feature = "serde1")] -mod bstring_serde { - use std::cmp; - use std::fmt; - - use serde::{ - de::Error, de::SeqAccess, de::Visitor, Deserialize, Deserializer, - Serialize, Serializer, - }; - - use crate::bstring::BString; - - impl Serialize for BString { - #[inline] - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - serializer.serialize_bytes(self.as_bytes()) - } - } - - impl<'de> Deserialize<'de> for BString { - #[inline] - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - struct BStringVisitor; - - impl<'de> Visitor<'de> for BStringVisitor { - type Value = BString; - - fn expecting(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.write_str("a byte string") - } - - #[inline] - fn visit_seq>( - self, - mut visitor: V, - ) -> Result { - let len = cmp::min(visitor.size_hint().unwrap_or(0), 256); - let mut bytes = Vec::with_capacity(len); - while let Some(v) = visitor.next_element()? { - bytes.push(v); - } - Ok(BString::from(bytes)) - } - - #[inline] - fn visit_bytes( - self, - value: &[u8], - ) -> Result { - Ok(BString::from(value)) - } - - #[inline] - fn visit_byte_buf( - self, - value: Vec, - ) -> Result { - Ok(BString::from(value)) - } - - #[inline] - fn visit_str( - self, - value: &str, - ) -> Result { - Ok(BString::from(value)) - } - - #[inline] - fn visit_string( - self, - value: String, - ) -> Result { - Ok(BString::from(value)) - } - } - - deserializer.deserialize_byte_buf(BStringVisitor) - } - } -} - -#[cfg(test)] -mod display { - use crate::bstring::BString; - use crate::ByteSlice; - - #[test] - fn clean() { - assert_eq!(&format!("{}", &b"abc".as_bstr()), "abc"); - assert_eq!(&format!("{}", &b"\xf0\x28\x8c\xbc".as_bstr()), "�(��"); - } - - #[test] - fn width_bigger_than_bstr() { - assert_eq!(&format!("{:<7}!", &b"abc".as_bstr()), "abc !"); - assert_eq!(&format!("{:>7}!", &b"abc".as_bstr()), " abc!"); - assert_eq!(&format!("{:^7}!", &b"abc".as_bstr()), " abc !"); - assert_eq!(&format!("{:^6}!", &b"abc".as_bstr()), " abc !"); - assert_eq!(&format!("{:-<7}!", &b"abc".as_bstr()), "abc----!"); - assert_eq!(&format!("{:->7}!", &b"abc".as_bstr()), "----abc!"); - assert_eq!(&format!("{:-^7}!", &b"abc".as_bstr()), "--abc--!"); - assert_eq!(&format!("{:-^6}!", &b"abc".as_bstr()), "-abc--!"); - - assert_eq!( - &format!("{:<7}!", &b"\xf0\x28\x8c\xbc".as_bstr()), - "�(�� !" - ); - assert_eq!( - &format!("{:>7}!", &b"\xf0\x28\x8c\xbc".as_bstr()), - " �(��!" - ); - assert_eq!( - &format!("{:^7}!", &b"\xf0\x28\x8c\xbc".as_bstr()), - " �(�� !" - ); - assert_eq!( - &format!("{:^6}!", &b"\xf0\x28\x8c\xbc".as_bstr()), - " �(�� !" - ); - - assert_eq!( - &format!("{:-<7}!", &b"\xf0\x28\x8c\xbc".as_bstr()), - "�(��---!" 
- ); - assert_eq!( - &format!("{:->7}!", &b"\xf0\x28\x8c\xbc".as_bstr()), - "---�(��!" - ); - assert_eq!( - &format!("{:-^7}!", &b"\xf0\x28\x8c\xbc".as_bstr()), - "-�(��--!" - ); - assert_eq!( - &format!("{:-^6}!", &b"\xf0\x28\x8c\xbc".as_bstr()), - "-�(��-!" - ); - } - - #[test] - fn width_lesser_than_bstr() { - assert_eq!(&format!("{:<2}!", &b"abc".as_bstr()), "abc!"); - assert_eq!(&format!("{:>2}!", &b"abc".as_bstr()), "abc!"); - assert_eq!(&format!("{:^2}!", &b"abc".as_bstr()), "abc!"); - assert_eq!(&format!("{:-<2}!", &b"abc".as_bstr()), "abc!"); - assert_eq!(&format!("{:->2}!", &b"abc".as_bstr()), "abc!"); - assert_eq!(&format!("{:-^2}!", &b"abc".as_bstr()), "abc!"); - - assert_eq!( - &format!("{:<3}!", &b"\xf0\x28\x8c\xbc".as_bstr()), - "�(��!" - ); - assert_eq!( - &format!("{:>3}!", &b"\xf0\x28\x8c\xbc".as_bstr()), - "�(��!" - ); - assert_eq!( - &format!("{:^3}!", &b"\xf0\x28\x8c\xbc".as_bstr()), - "�(��!" - ); - assert_eq!( - &format!("{:^2}!", &b"\xf0\x28\x8c\xbc".as_bstr()), - "�(��!" - ); - - assert_eq!( - &format!("{:-<3}!", &b"\xf0\x28\x8c\xbc".as_bstr()), - "�(��!" - ); - assert_eq!( - &format!("{:->3}!", &b"\xf0\x28\x8c\xbc".as_bstr()), - "�(��!" - ); - assert_eq!( - &format!("{:-^3}!", &b"\xf0\x28\x8c\xbc".as_bstr()), - "�(��!" - ); - assert_eq!( - &format!("{:-^2}!", &b"\xf0\x28\x8c\xbc".as_bstr()), - "�(��!" - ); - } - - quickcheck::quickcheck! { - fn total_length(bstr: BString) -> bool { - let size = bstr.chars().count(); - format!("{:<1$}", bstr.as_bstr(), size).chars().count() >= size - } - } -} - -#[cfg(test)] -mod bstring_arbitrary { - use crate::bstring::BString; - - use quickcheck::{Arbitrary, Gen}; - - impl Arbitrary for BString { - fn arbitrary(g: &mut Gen) -> BString { - BString::from(Vec::::arbitrary(g)) - } - - fn shrink(&self) -> Box> { - Box::new(self.bytes.shrink().map(BString::from)) - } - } -} - -#[test] -fn test_debug() { - use crate::{ByteSlice, B}; - - assert_eq!( - r#""\0\0\0 ftypisom\0\0\x02\0isomiso2avc1mp""#, - format!("{:?}", b"\0\0\0 ftypisom\0\0\x02\0isomiso2avc1mp".as_bstr()), - ); - - // Tests that if the underlying bytes contain the UTF-8 encoding of the - // replacement codepoint, then we emit the codepoint just like other - // non-printable Unicode characters. - assert_eq!( - b"\"\\xFF\xEF\xBF\xBD\\xFF\"".as_bstr(), - // Before fixing #72, the output here would be: - // \\xFF\\xEF\\xBF\\xBD\\xFF - B(&format!("{:?}", b"\xFF\xEF\xBF\xBD\xFF".as_bstr())).as_bstr(), - ); -} - -// See: https://github.com/BurntSushi/bstr/issues/82 -#[test] -fn test_cows_regression() { - use crate::ByteSlice; - use std::borrow::Cow; - - let c1 = Cow::from(b"hello bstr".as_bstr()); - let c2 = b"goodbye bstr".as_bstr(); - assert_ne!(c1, c2); - - let c3 = Cow::from("hello str"); - let c4 = "goodbye str"; - assert_ne!(c3, c4); -} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/io.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/io.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/io.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/io.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,514 +0,0 @@ -/*! -Utilities for working with I/O using byte strings. - -This module currently only exports a single trait, `BufReadExt`, which provides -facilities for conveniently and efficiently working with lines as byte strings. - -More APIs may be added in the future. 
-*/ - -use std::io; - -use crate::ext_slice::ByteSlice; -use crate::ext_vec::ByteVec; - -/// An extention trait for -/// [`std::io::BufRead`](https://doc.rust-lang.org/std/io/trait.BufRead.html) -/// which provides convenience APIs for dealing with byte strings. -pub trait BufReadExt: io::BufRead { - /// Returns an iterator over the lines of this reader, where each line - /// is represented as a byte string. - /// - /// Each item yielded by this iterator is a `io::Result>`, where - /// an error is yielded if there was a problem reading from the underlying - /// reader. - /// - /// On success, the next line in the iterator is returned. The line does - /// *not* contain a trailing `\n` or `\r\n`. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use std::io; - /// - /// use bstr::io::BufReadExt; - /// - /// # fn example() -> Result<(), io::Error> { - /// let cursor = io::Cursor::new(b"lorem\nipsum\r\ndolor"); - /// - /// let mut lines = vec![]; - /// for result in cursor.byte_lines() { - /// let line = result?; - /// lines.push(line); - /// } - /// assert_eq!(lines.len(), 3); - /// assert_eq!(lines[0], "lorem".as_bytes()); - /// assert_eq!(lines[1], "ipsum".as_bytes()); - /// assert_eq!(lines[2], "dolor".as_bytes()); - /// # Ok(()) }; example().unwrap() - /// ``` - fn byte_lines(self) -> ByteLines - where - Self: Sized, - { - ByteLines { buf: self } - } - - /// Returns an iterator over byte-terminated records of this reader, where - /// each record is represented as a byte string. - /// - /// Each item yielded by this iterator is a `io::Result>`, where - /// an error is yielded if there was a problem reading from the underlying - /// reader. - /// - /// On success, the next record in the iterator is returned. The record - /// does *not* contain its trailing terminator. - /// - /// Note that calling `byte_records(b'\n')` differs from `byte_lines()` in - /// that it has no special handling for `\r`. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use std::io; - /// - /// use bstr::io::BufReadExt; - /// - /// # fn example() -> Result<(), io::Error> { - /// let cursor = io::Cursor::new(b"lorem\x00ipsum\x00dolor"); - /// - /// let mut records = vec![]; - /// for result in cursor.byte_records(b'\x00') { - /// let record = result?; - /// records.push(record); - /// } - /// assert_eq!(records.len(), 3); - /// assert_eq!(records[0], "lorem".as_bytes()); - /// assert_eq!(records[1], "ipsum".as_bytes()); - /// assert_eq!(records[2], "dolor".as_bytes()); - /// # Ok(()) }; example().unwrap() - /// ``` - fn byte_records(self, terminator: u8) -> ByteRecords - where - Self: Sized, - { - ByteRecords { terminator, buf: self } - } - - /// Executes the given closure on each line in the underlying reader. - /// - /// If the closure returns an error (or if the underlying reader returns an - /// error), then iteration is stopped and the error is returned. If false - /// is returned, then iteration is stopped and no error is returned. - /// - /// The closure given is called on exactly the same values as yielded by - /// the [`byte_lines`](trait.BufReadExt.html#method.byte_lines) - /// iterator. Namely, lines do _not_ contain trailing `\n` or `\r\n` bytes. - /// - /// This routine is useful for iterating over lines as quickly as - /// possible. Namely, a single allocation is reused for each line. 
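
A minimal usage sketch (not taken from the vendored file) of the closure-based, buffer-reusing style described above, counting non-empty lines with `for_byte_line`:

```
use std::io;

use bstr::io::BufReadExt;

fn main() -> io::Result<()> {
    let cursor = io::Cursor::new(b"lorem\n\nipsum\r\ndolor");

    // Unlike `byte_lines`, which yields an owned Vec<u8> per line, the
    // closure here only borrows each line (terminator already trimmed).
    let mut non_empty = 0;
    cursor.for_byte_line(|line| {
        if !line.is_empty() {
            non_empty += 1;
        }
        Ok(true) // keep iterating
    })?;
    assert_eq!(non_empty, 3);
    Ok(())
}
```
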
- /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use std::io; - /// - /// use bstr::io::BufReadExt; - /// - /// # fn example() -> Result<(), io::Error> { - /// let cursor = io::Cursor::new(b"lorem\nipsum\r\ndolor"); - /// - /// let mut lines = vec![]; - /// cursor.for_byte_line(|line| { - /// lines.push(line.to_vec()); - /// Ok(true) - /// })?; - /// assert_eq!(lines.len(), 3); - /// assert_eq!(lines[0], "lorem".as_bytes()); - /// assert_eq!(lines[1], "ipsum".as_bytes()); - /// assert_eq!(lines[2], "dolor".as_bytes()); - /// # Ok(()) }; example().unwrap() - /// ``` - fn for_byte_line(self, mut for_each_line: F) -> io::Result<()> - where - Self: Sized, - F: FnMut(&[u8]) -> io::Result, - { - self.for_byte_line_with_terminator(|line| { - for_each_line(&trim_line_slice(&line)) - }) - } - - /// Executes the given closure on each byte-terminated record in the - /// underlying reader. - /// - /// If the closure returns an error (or if the underlying reader returns an - /// error), then iteration is stopped and the error is returned. If false - /// is returned, then iteration is stopped and no error is returned. - /// - /// The closure given is called on exactly the same values as yielded by - /// the [`byte_records`](trait.BufReadExt.html#method.byte_records) - /// iterator. Namely, records do _not_ contain a trailing terminator byte. - /// - /// This routine is useful for iterating over records as quickly as - /// possible. Namely, a single allocation is reused for each record. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use std::io; - /// - /// use bstr::io::BufReadExt; - /// - /// # fn example() -> Result<(), io::Error> { - /// let cursor = io::Cursor::new(b"lorem\x00ipsum\x00dolor"); - /// - /// let mut records = vec![]; - /// cursor.for_byte_record(b'\x00', |record| { - /// records.push(record.to_vec()); - /// Ok(true) - /// })?; - /// assert_eq!(records.len(), 3); - /// assert_eq!(records[0], "lorem".as_bytes()); - /// assert_eq!(records[1], "ipsum".as_bytes()); - /// assert_eq!(records[2], "dolor".as_bytes()); - /// # Ok(()) }; example().unwrap() - /// ``` - fn for_byte_record( - self, - terminator: u8, - mut for_each_record: F, - ) -> io::Result<()> - where - Self: Sized, - F: FnMut(&[u8]) -> io::Result, - { - self.for_byte_record_with_terminator(terminator, |chunk| { - for_each_record(&trim_record_slice(&chunk, terminator)) - }) - } - - /// Executes the given closure on each line in the underlying reader. - /// - /// If the closure returns an error (or if the underlying reader returns an - /// error), then iteration is stopped and the error is returned. If false - /// is returned, then iteration is stopped and no error is returned. - /// - /// Unlike - /// [`for_byte_line`](trait.BufReadExt.html#method.for_byte_line), - /// the lines given to the closure *do* include the line terminator, if one - /// exists. - /// - /// This routine is useful for iterating over lines as quickly as - /// possible. Namely, a single allocation is reused for each line. - /// - /// This is identical to `for_byte_record_with_terminator` with a - /// terminator of `\n`. 
- /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use std::io; - /// - /// use bstr::io::BufReadExt; - /// - /// # fn example() -> Result<(), io::Error> { - /// let cursor = io::Cursor::new(b"lorem\nipsum\r\ndolor"); - /// - /// let mut lines = vec![]; - /// cursor.for_byte_line_with_terminator(|line| { - /// lines.push(line.to_vec()); - /// Ok(true) - /// })?; - /// assert_eq!(lines.len(), 3); - /// assert_eq!(lines[0], "lorem\n".as_bytes()); - /// assert_eq!(lines[1], "ipsum\r\n".as_bytes()); - /// assert_eq!(lines[2], "dolor".as_bytes()); - /// # Ok(()) }; example().unwrap() - /// ``` - fn for_byte_line_with_terminator( - self, - for_each_line: F, - ) -> io::Result<()> - where - Self: Sized, - F: FnMut(&[u8]) -> io::Result, - { - self.for_byte_record_with_terminator(b'\n', for_each_line) - } - - /// Executes the given closure on each byte-terminated record in the - /// underlying reader. - /// - /// If the closure returns an error (or if the underlying reader returns an - /// error), then iteration is stopped and the error is returned. If false - /// is returned, then iteration is stopped and no error is returned. - /// - /// Unlike - /// [`for_byte_record`](trait.BufReadExt.html#method.for_byte_record), - /// the lines given to the closure *do* include the record terminator, if - /// one exists. - /// - /// This routine is useful for iterating over records as quickly as - /// possible. Namely, a single allocation is reused for each record. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use std::io; - /// - /// use bstr::B; - /// use bstr::io::BufReadExt; - /// - /// # fn example() -> Result<(), io::Error> { - /// let cursor = io::Cursor::new(b"lorem\x00ipsum\x00dolor"); - /// - /// let mut records = vec![]; - /// cursor.for_byte_record_with_terminator(b'\x00', |record| { - /// records.push(record.to_vec()); - /// Ok(true) - /// })?; - /// assert_eq!(records.len(), 3); - /// assert_eq!(records[0], B(b"lorem\x00")); - /// assert_eq!(records[1], B("ipsum\x00")); - /// assert_eq!(records[2], B("dolor")); - /// # Ok(()) }; example().unwrap() - /// ``` - fn for_byte_record_with_terminator( - mut self, - terminator: u8, - mut for_each_record: F, - ) -> io::Result<()> - where - Self: Sized, - F: FnMut(&[u8]) -> io::Result, - { - let mut bytes = vec![]; - let mut res = Ok(()); - let mut consumed = 0; - 'outer: loop { - // Lend out complete record slices from our buffer - { - let mut buf = self.fill_buf()?; - while let Some(index) = buf.find_byte(terminator) { - let (record, rest) = buf.split_at(index + 1); - buf = rest; - consumed += record.len(); - match for_each_record(&record) { - Ok(false) => break 'outer, - Err(err) => { - res = Err(err); - break 'outer; - } - _ => (), - } - } - - // Copy the final record fragment to our local buffer. This - // saves read_until() from re-scanning a buffer we know - // contains no remaining terminators. - bytes.extend_from_slice(&buf); - consumed += buf.len(); - } - - self.consume(consumed); - consumed = 0; - - // N.B. read_until uses a different version of memchr that may - // be slower than the memchr crate that bstr uses. However, this - // should only run for a fairly small number of records, assuming a - // decent buffer size. - self.read_until(terminator, &mut bytes)?; - if bytes.is_empty() || !for_each_record(&bytes)? 
{ - break; - } - bytes.clear(); - } - self.consume(consumed); - res - } -} - -impl BufReadExt for B {} - -/// An iterator over lines from an instance of -/// [`std::io::BufRead`](https://doc.rust-lang.org/std/io/trait.BufRead.html). -/// -/// This iterator is generally created by calling the -/// [`byte_lines`](trait.BufReadExt.html#method.byte_lines) -/// method on the -/// [`BufReadExt`](trait.BufReadExt.html) -/// trait. -#[derive(Debug)] -pub struct ByteLines { - buf: B, -} - -/// An iterator over records from an instance of -/// [`std::io::BufRead`](https://doc.rust-lang.org/std/io/trait.BufRead.html). -/// -/// A byte record is any sequence of bytes terminated by a particular byte -/// chosen by the caller. For example, NUL separated byte strings are said to -/// be NUL-terminated byte records. -/// -/// This iterator is generally created by calling the -/// [`byte_records`](trait.BufReadExt.html#method.byte_records) -/// method on the -/// [`BufReadExt`](trait.BufReadExt.html) -/// trait. -#[derive(Debug)] -pub struct ByteRecords { - buf: B, - terminator: u8, -} - -impl Iterator for ByteLines { - type Item = io::Result>; - - fn next(&mut self) -> Option>> { - let mut bytes = vec![]; - match self.buf.read_until(b'\n', &mut bytes) { - Err(e) => Some(Err(e)), - Ok(0) => None, - Ok(_) => { - trim_line(&mut bytes); - Some(Ok(bytes)) - } - } - } -} - -impl Iterator for ByteRecords { - type Item = io::Result>; - - fn next(&mut self) -> Option>> { - let mut bytes = vec![]; - match self.buf.read_until(self.terminator, &mut bytes) { - Err(e) => Some(Err(e)), - Ok(0) => None, - Ok(_) => { - trim_record(&mut bytes, self.terminator); - Some(Ok(bytes)) - } - } - } -} - -fn trim_line(line: &mut Vec) { - if line.last_byte() == Some(b'\n') { - line.pop_byte(); - if line.last_byte() == Some(b'\r') { - line.pop_byte(); - } - } -} - -fn trim_line_slice(mut line: &[u8]) -> &[u8] { - if line.last_byte() == Some(b'\n') { - line = &line[..line.len() - 1]; - if line.last_byte() == Some(b'\r') { - line = &line[..line.len() - 1]; - } - } - line -} - -fn trim_record(record: &mut Vec, terminator: u8) { - if record.last_byte() == Some(terminator) { - record.pop_byte(); - } -} - -fn trim_record_slice(mut record: &[u8], terminator: u8) -> &[u8] { - if record.last_byte() == Some(terminator) { - record = &record[..record.len() - 1]; - } - record -} - -#[cfg(test)] -mod tests { - use super::BufReadExt; - use crate::bstring::BString; - - fn collect_lines>(slice: B) -> Vec { - let mut lines = vec![]; - slice - .as_ref() - .for_byte_line(|line| { - lines.push(BString::from(line.to_vec())); - Ok(true) - }) - .unwrap(); - lines - } - - fn collect_lines_term>(slice: B) -> Vec { - let mut lines = vec![]; - slice - .as_ref() - .for_byte_line_with_terminator(|line| { - lines.push(BString::from(line.to_vec())); - Ok(true) - }) - .unwrap(); - lines - } - - #[test] - fn lines_without_terminator() { - assert_eq!(collect_lines(""), Vec::::new()); - - assert_eq!(collect_lines("\n"), vec![""]); - assert_eq!(collect_lines("\n\n"), vec!["", ""]); - assert_eq!(collect_lines("a\nb\n"), vec!["a", "b"]); - assert_eq!(collect_lines("a\nb"), vec!["a", "b"]); - assert_eq!(collect_lines("abc\nxyz\n"), vec!["abc", "xyz"]); - assert_eq!(collect_lines("abc\nxyz"), vec!["abc", "xyz"]); - - assert_eq!(collect_lines("\r\n"), vec![""]); - assert_eq!(collect_lines("\r\n\r\n"), vec!["", ""]); - assert_eq!(collect_lines("a\r\nb\r\n"), vec!["a", "b"]); - assert_eq!(collect_lines("a\r\nb"), vec!["a", "b"]); - assert_eq!(collect_lines("abc\r\nxyz\r\n"), 
vec!["abc", "xyz"]); - assert_eq!(collect_lines("abc\r\nxyz"), vec!["abc", "xyz"]); - - assert_eq!(collect_lines("abc\rxyz"), vec!["abc\rxyz"]); - } - - #[test] - fn lines_with_terminator() { - assert_eq!(collect_lines_term(""), Vec::::new()); - - assert_eq!(collect_lines_term("\n"), vec!["\n"]); - assert_eq!(collect_lines_term("\n\n"), vec!["\n", "\n"]); - assert_eq!(collect_lines_term("a\nb\n"), vec!["a\n", "b\n"]); - assert_eq!(collect_lines_term("a\nb"), vec!["a\n", "b"]); - assert_eq!(collect_lines_term("abc\nxyz\n"), vec!["abc\n", "xyz\n"]); - assert_eq!(collect_lines_term("abc\nxyz"), vec!["abc\n", "xyz"]); - - assert_eq!(collect_lines_term("\r\n"), vec!["\r\n"]); - assert_eq!(collect_lines_term("\r\n\r\n"), vec!["\r\n", "\r\n"]); - assert_eq!(collect_lines_term("a\r\nb\r\n"), vec!["a\r\n", "b\r\n"]); - assert_eq!(collect_lines_term("a\r\nb"), vec!["a\r\n", "b"]); - assert_eq!( - collect_lines_term("abc\r\nxyz\r\n"), - vec!["abc\r\n", "xyz\r\n"] - ); - assert_eq!(collect_lines_term("abc\r\nxyz"), vec!["abc\r\n", "xyz"]); - - assert_eq!(collect_lines_term("abc\rxyz"), vec!["abc\rxyz"]); - } -} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/lib.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/lib.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/lib.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/lib.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,437 +0,0 @@ -/*! -A byte string library. - -Byte strings are just like standard Unicode strings with one very important -difference: byte strings are only *conventionally* UTF-8 while Rust's standard -Unicode strings are *guaranteed* to be valid UTF-8. The primary motivation for -byte strings is for handling arbitrary bytes that are mostly UTF-8. - -# Overview - -This crate provides two important traits that provide string oriented methods -on `&[u8]` and `Vec` types: - -* [`ByteSlice`](trait.ByteSlice.html) extends the `[u8]` type with additional - string oriented methods. -* [`ByteVec`](trait.ByteVec.html) extends the `Vec` type with additional - string oriented methods. - -Additionally, this crate provides two concrete byte string types that deref to -`[u8]` and `Vec`. These are useful for storing byte string types, and come -with convenient `std::fmt::Debug` implementations: - -* [`BStr`](struct.BStr.html) is a byte string slice, analogous to `str`. -* [`BString`](struct.BString.html) is an owned growable byte string buffer, - analogous to `String`. - -Additionally, the free function [`B`](fn.B.html) serves as a convenient short -hand for writing byte string literals. - -# Quick examples - -Byte strings build on the existing APIs for `Vec` and `&[u8]`, with -additional string oriented methods. Operations such as iterating over -graphemes, searching for substrings, replacing substrings, trimming and case -conversion are examples of things not provided on the standard library `&[u8]` -APIs but are provided by this crate. 
For example, this code iterates over all -of occurrences of a subtring: - -``` -use bstr::ByteSlice; - -let s = b"foo bar foo foo quux foo"; - -let mut matches = vec![]; -for start in s.find_iter("foo") { - matches.push(start); -} -assert_eq!(matches, [0, 8, 12, 21]); -``` - -Here's another example showing how to do a search and replace (and also showing -use of the `B` function): - -``` -use bstr::{B, ByteSlice}; - -let old = B("foo ☃☃☃ foo foo quux foo"); -let new = old.replace("foo", "hello"); -assert_eq!(new, B("hello ☃☃☃ hello hello quux hello")); -``` - -And here's an example that shows case conversion, even in the presence of -invalid UTF-8: - -``` -use bstr::{ByteSlice, ByteVec}; - -let mut lower = Vec::from("hello β"); -lower[0] = b'\xFF'; -// lowercase β is uppercased to Β -assert_eq!(lower.to_uppercase(), b"\xFFELLO \xCE\x92"); -``` - -# Convenient debug representation - -When working with byte strings, it is often useful to be able to print them -as if they were byte strings and not sequences of integers. While this crate -cannot affect the `std::fmt::Debug` implementations for `[u8]` and `Vec`, -this crate does provide the `BStr` and `BString` types which have convenient -`std::fmt::Debug` implementations. - -For example, this - -``` -use bstr::ByteSlice; - -let mut bytes = Vec::from("hello β"); -bytes[0] = b'\xFF'; - -println!("{:?}", bytes.as_bstr()); -``` - -will output `"\xFFello β"`. - -This example works because the -[`ByteSlice::as_bstr`](trait.ByteSlice.html#method.as_bstr) -method converts any `&[u8]` to a `&BStr`. - -# When should I use byte strings? - -This library reflects my hypothesis that UTF-8 by convention is a better trade -off in some circumstances than guaranteed UTF-8. It's possible, perhaps even -likely, that this is a niche concern for folks working closely with core text -primitives. - -The first time this idea hit me was in the implementation of Rust's regex -engine. In particular, very little of the internal implementation cares at all -about searching valid UTF-8 encoded strings. Indeed, internally, the -implementation converts `&str` from the API to `&[u8]` fairly quickly and -just deals with raw bytes. UTF-8 match boundaries are then guaranteed by the -finite state machine itself rather than any specific string type. This makes it -possible to not only run regexes on `&str` values, but also on `&[u8]` values. - -Why would you ever want to run a regex on a `&[u8]` though? Well, `&[u8]` is -the fundamental way at which one reads data from all sorts of streams, via the -standard library's [`Read`](https://doc.rust-lang.org/std/io/trait.Read.html) -trait. In particular, there is no platform independent way to determine whether -what you're reading from is some binary file or a human readable text file. -Therefore, if you're writing a program to search files, you probably need to -deal with `&[u8]` directly unless you're okay with first converting it to a -`&str` and dropping any bytes that aren't valid UTF-8. (Or otherwise determine -the encoding---which is often impractical---and perform a transcoding step.) -Often, the simplest and most robust way to approach this is to simply treat the -contents of a file as if it were mostly valid UTF-8 and pass through invalid -UTF-8 untouched. This may not be the most correct approach though! - -One case in particular exacerbates these issues, and that's memory mapping -a file. When you memory map a file, that file may be gigabytes big, but all -you get is a `&[u8]`. 
Converting that to a `&str` all in one go is generally -not a good idea because of the costs associated with doing so, and also -because it generally causes one to do two passes over the data instead of -one, which is quite undesirable. It is of course usually possible to do it an -incremental way by only parsing chunks at a time, but this is often complex to -do or impractical. For example, many regex engines only accept one contiguous -sequence of bytes at a time with no way to perform incremental matching. - -In summary, conventional UTF-8 byte strings provided by this library are -definitely useful in some limited circumstances, but how useful they are more -broadly isn't clear yet. - -# `bstr` in public APIs - -Since this library is not yet `1.0`, you should not use it in the public API of -your crates until it hits `1.0` (unless you're OK with with tracking breaking -releases of `bstr`). It is expected that `bstr 1.0` will be released before -2022. - -In general, it should be possible to avoid putting anything in this crate into -your public APIs. Namely, you should never need to use the `ByteSlice` or -`ByteVec` traits as bounds on public APIs, since their only purpose is to -extend the methods on the concrete types `[u8]` and `Vec`, respectively. -Similarly, it should not be necessary to put either the `BStr` or `BString` -types into public APIs. If you want to use them internally, then they can -be converted to/from `[u8]`/`Vec` as needed. - -# Differences with standard strings - -The primary difference between `[u8]` and `str` is that the former is -conventionally UTF-8 while the latter is guaranteed to be UTF-8. The phrase -"conventionally UTF-8" means that a `[u8]` may contain bytes that do not form -a valid UTF-8 sequence, but operations defined on the type in this crate are -generally most useful on valid UTF-8 sequences. For example, iterating over -Unicode codepoints or grapheme clusters is an operation that is only defined -on valid UTF-8. Therefore, when invalid UTF-8 is encountered, the Unicode -replacement codepoint is substituted. Thus, a byte string that is not UTF-8 at -all is of limited utility when using these crate. - -However, not all operations on byte strings are specifically Unicode aware. For -example, substring search has no specific Unicode semantics ascribed to it. It -works just as well for byte strings that are completely valid UTF-8 as for byte -strings that contain no valid UTF-8 at all. Similarly for replacements and -various other operations that do not need any Unicode specific tailoring. - -Aside from the difference in how UTF-8 is handled, the APIs between `[u8]` and -`str` (and `Vec` and `String`) are intentionally very similar, including -maintaining the same behavior for corner cases in things like substring -splitting. There are, however, some differences: - -* Substring search is not done with `matches`, but instead, `find_iter`. - In general, this crate does not define any generic - [`Pattern`](https://doc.rust-lang.org/std/str/pattern/trait.Pattern.html) - infrastructure, and instead prefers adding new methods for different - argument types. For example, `matches` can search by a `char` or a `&str`, - where as `find_iter` can only search by a byte string. `find_char` can be - used for searching by a `char`. -* Since `SliceConcatExt` in the standard library is unstable, it is not - possible to reuse that to implement `join` and `concat` methods. 
Instead, - [`join`](fn.join.html) and [`concat`](fn.concat.html) are provided as free - functions that perform a similar task. -* This library bundles in a few more Unicode operations, such as grapheme, - word and sentence iterators. More operations, such as normalization and - case folding, may be provided in the future. -* Some `String`/`str` APIs will panic if a particular index was not on a valid - UTF-8 code unit sequence boundary. Conversely, no such checking is performed - in this crate, as is consistent with treating byte strings as a sequence of - bytes. This means callers are responsible for maintaining a UTF-8 invariant - if that's important. -* Some routines provided by this crate, such as `starts_with_str`, have a - `_str` suffix to differentiate them from similar routines already defined - on the `[u8]` type. The difference is that `starts_with` requires its - parameter to be a `&[u8]`, where as `starts_with_str` permits its parameter - to by anything that implements `AsRef<[u8]>`, which is more flexible. This - means you can write `bytes.starts_with_str("☃")` instead of - `bytes.starts_with("☃".as_bytes())`. - -Otherwise, you should find most of the APIs between this crate and the standard -library string APIs to be very similar, if not identical. - -# Handling of invalid UTF-8 - -Since byte strings are only *conventionally* UTF-8, there is no guarantee -that byte strings contain valid UTF-8. Indeed, it is perfectly legal for a -byte string to contain arbitrary bytes. However, since this library defines -a *string* type, it provides many operations specified by Unicode. These -operations are typically only defined over codepoints, and thus have no real -meaning on bytes that are invalid UTF-8 because they do not map to a particular -codepoint. - -For this reason, whenever operations defined only on codepoints are used, this -library will automatically convert invalid UTF-8 to the Unicode replacement -codepoint, `U+FFFD`, which looks like this: `�`. For example, an -[iterator over codepoints](struct.Chars.html) will yield a Unicode -replacement codepoint whenever it comes across bytes that are not valid UTF-8: - -``` -use bstr::ByteSlice; - -let bs = b"a\xFF\xFFz"; -let chars: Vec = bs.chars().collect(); -assert_eq!(vec!['a', '\u{FFFD}', '\u{FFFD}', 'z'], chars); -``` - -There are a few ways in which invalid bytes can be substituted with a Unicode -replacement codepoint. One way, not used by this crate, is to replace every -individual invalid byte with a single replacement codepoint. In contrast, the -approach this crate uses is called the "substitution of maximal subparts," as -specified by the Unicode Standard (Chapter 3, Section 9). (This approach is -also used by [W3C's Encoding Standard](https://www.w3.org/TR/encoding/).) In -this strategy, a replacement codepoint is inserted whenever a byte is found -that cannot possibly lead to a valid UTF-8 code unit sequence. If there were -previous bytes that represented a *prefix* of a well-formed UTF-8 code unit -sequence, then all of those bytes (up to 3) are substituted with a single -replacement codepoint. For example: - -``` -use bstr::ByteSlice; - -let bs = b"a\xF0\x9F\x87z"; -let chars: Vec = bs.chars().collect(); -// The bytes \xF0\x9F\x87 could lead to a valid UTF-8 sequence, but 3 of them -// on their own are invalid. Only one replacement codepoint is substituted, -// which demonstrates the "substitution of maximal subparts" strategy. 
-assert_eq!(vec!['a', '\u{FFFD}', 'z'], chars); -``` - -If you do need to access the raw bytes for some reason in an iterator like -`Chars`, then you should use the iterator's "indices" variant, which gives -the byte offsets containing the invalid UTF-8 bytes that were substituted with -the replacement codepoint. For example: - -``` -use bstr::{B, ByteSlice}; - -let bs = b"a\xE2\x98z"; -let chars: Vec<(usize, usize, char)> = bs.char_indices().collect(); -// Even though the replacement codepoint is encoded as 3 bytes itself, the -// byte range given here is only two bytes, corresponding to the original -// raw bytes. -assert_eq!(vec![(0, 1, 'a'), (1, 3, '\u{FFFD}'), (3, 4, 'z')], chars); - -// Thus, getting the original raw bytes is as simple as slicing the original -// byte string: -let chars: Vec<&[u8]> = bs.char_indices().map(|(s, e, _)| &bs[s..e]).collect(); -assert_eq!(vec![B("a"), B(b"\xE2\x98"), B("z")], chars); -``` - -# File paths and OS strings - -One of the premiere features of Rust's standard library is how it handles file -paths. In particular, it makes it very hard to write incorrect code while -simultaneously providing a correct cross platform abstraction for manipulating -file paths. The key challenge that one faces with file paths across platforms -is derived from the following observations: - -* On most Unix-like systems, file paths are an arbitrary sequence of bytes. -* On Windows, file paths are an arbitrary sequence of 16-bit integers. - -(In both cases, certain sequences aren't allowed. For example a `NUL` byte is -not allowed in either case. But we can ignore this for the purposes of this -section.) - -Byte strings, like the ones provided in this crate, line up really well with -file paths on Unix like systems, which are themselves just arbitrary sequences -of bytes. It turns out that if you treat them as "mostly UTF-8," then things -work out pretty well. On the contrary, byte strings _don't_ really work -that well on Windows because it's not possible to correctly roundtrip file -paths between 16-bit integers and something that looks like UTF-8 _without_ -explicitly defining an encoding to do this for you, which is anathema to byte -strings, which are just bytes. - -Rust's standard library elegantly solves this problem by specifying an -internal encoding for file paths that's only used on Windows called -[WTF-8](https://simonsapin.github.io/wtf-8/). Its key properties are that they -permit losslessly roundtripping file paths on Windows by extending UTF-8 to -support an encoding of surrogate codepoints, while simultaneously supporting -zero-cost conversion from Rust's Unicode strings to file paths. (Since UTF-8 is -a proper subset of WTF-8.) - -The fundamental point at which the above strategy fails is when you want to -treat file paths as things that look like strings in a zero cost way. In most -cases, this is actually the wrong thing to do, but some cases call for it, -for example, glob or regex matching on file paths. This is because WTF-8 is -treated as an internal implementation detail, and there is no way to access -those bytes via a public API. Therefore, such consumers are limited in what -they can do: - -1. One could re-implement WTF-8 and re-encode file paths on Windows to WTF-8 - by accessing their underlying 16-bit integer representation. Unfortunately, - this isn't zero cost (it introduces a second WTF-8 decoding step) and it's - not clear this is a good thing to do, since WTF-8 should ideally remain an - internal implementation detail. -2. 
One could instead declare that they will not handle paths on Windows that - are not valid UTF-16, and return an error when one is encountered. -3. Like (2), but instead of returning an error, lossily decode the file path - on Windows that isn't valid UTF-16 into UTF-16 by replacing invalid bytes - with the Unicode replacement codepoint. - -While this library may provide facilities for (1) in the future, currently, -this library only provides facilities for (2) and (3). In particular, a suite -of conversion functions are provided that permit converting between byte -strings, OS strings and file paths. For owned byte strings, they are: - -* [`ByteVec::from_os_string`](trait.ByteVec.html#method.from_os_string) -* [`ByteVec::from_os_str_lossy`](trait.ByteVec.html#method.from_os_str_lossy) -* [`ByteVec::from_path_buf`](trait.ByteVec.html#method.from_path_buf) -* [`ByteVec::from_path_lossy`](trait.ByteVec.html#method.from_path_lossy) -* [`ByteVec::into_os_string`](trait.ByteVec.html#method.into_os_string) -* [`ByteVec::into_os_string_lossy`](trait.ByteVec.html#method.into_os_string_lossy) -* [`ByteVec::into_path_buf`](trait.ByteVec.html#method.into_path_buf) -* [`ByteVec::into_path_buf_lossy`](trait.ByteVec.html#method.into_path_buf_lossy) - -For byte string slices, they are: - -* [`ByteSlice::from_os_str`](trait.ByteSlice.html#method.from_os_str) -* [`ByteSlice::from_path`](trait.ByteSlice.html#method.from_path) -* [`ByteSlice::to_os_str`](trait.ByteSlice.html#method.to_os_str) -* [`ByteSlice::to_os_str_lossy`](trait.ByteSlice.html#method.to_os_str_lossy) -* [`ByteSlice::to_path`](trait.ByteSlice.html#method.to_path) -* [`ByteSlice::to_path_lossy`](trait.ByteSlice.html#method.to_path_lossy) - -On Unix, all of these conversions are rigorously zero cost, which gives one -a way to ergonomically deal with raw file paths exactly as they are using -normal string-related functions. On Windows, these conversion routines perform -a UTF-8 check and either return an error or lossily decode the file path -into valid UTF-8, depending on which function you use. This means that you -cannot roundtrip all file paths on Windows correctly using these conversion -routines. However, this may be an acceptable downside since such file paths -are exceptionally rare. Moreover, roundtripping isn't always necessary, for -example, if all you're doing is filtering based on file paths. - -The reason why using byte strings for this is potentially superior than the -standard library's approach is that a lot of Rust code is already lossily -converting file paths to Rust's Unicode strings, which are required to be valid -UTF-8, and thus contain latent bugs on Unix where paths with invalid UTF-8 are -not terribly uncommon. If you instead use byte strings, then you're guaranteed -to write correct code for Unix, at the cost of getting a corner case wrong on -Windows. 
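
A short sketch of the Unix-oriented workflow described above; it assumes `ByteSlice::from_os_str` returns `Option<&[u8]>` (always `Some` on Unix, `None` on Windows when the name is not valid UTF-8), and the helper name is invented here for illustration:

```
use std::path::Path;

use bstr::ByteSlice;

// Hypothetical helper: is the final path component a dotfile? The check works
// on raw bytes, so file names that are not valid UTF-8 are handled too.
fn is_dotfile(path: &Path) -> bool {
    path.file_name()
        .and_then(|name| <[u8]>::from_os_str(name))
        .map_or(false, |bytes| bytes.starts_with_str("."))
}

fn main() {
    assert!(is_dotfile(Path::new("/home/user/.config")));
    assert!(!is_dotfile(Path::new("/home/user/notes.txt")));
}
```
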
-*/ - -#![cfg_attr(not(feature = "std"), no_std)] - -pub use crate::bstr::BStr; -#[cfg(feature = "std")] -pub use crate::bstring::BString; -pub use crate::ext_slice::{ - ByteSlice, Bytes, Fields, FieldsWith, Find, FindReverse, Finder, - FinderReverse, Lines, LinesWithTerminator, Split, SplitN, SplitNReverse, - SplitReverse, B, -}; -#[cfg(feature = "std")] -pub use crate::ext_vec::{concat, join, ByteVec, DrainBytes, FromUtf8Error}; -#[cfg(feature = "unicode")] -pub use crate::unicode::{ - GraphemeIndices, Graphemes, SentenceIndices, Sentences, WordIndices, - Words, WordsWithBreakIndices, WordsWithBreaks, -}; -pub use crate::utf8::{ - decode as decode_utf8, decode_last as decode_last_utf8, CharIndices, - Chars, Utf8Chunk, Utf8Chunks, Utf8Error, -}; - -mod ascii; -mod bstr; -#[cfg(feature = "std")] -mod bstring; -mod byteset; -mod ext_slice; -#[cfg(feature = "std")] -mod ext_vec; -mod impls; -#[cfg(feature = "std")] -pub mod io; -#[cfg(test)] -mod tests; -#[cfg(feature = "unicode")] -mod unicode; -mod utf8; - -#[cfg(test)] -mod apitests { - use crate::bstr::BStr; - use crate::bstring::BString; - use crate::ext_slice::{Finder, FinderReverse}; - - #[test] - fn oibits() { - use std::panic::{RefUnwindSafe, UnwindSafe}; - - fn assert_send() {} - fn assert_sync() {} - fn assert_unwind_safe() {} - - assert_send::<&BStr>(); - assert_sync::<&BStr>(); - assert_unwind_safe::<&BStr>(); - assert_send::(); - assert_sync::(); - assert_unwind_safe::(); - - assert_send::>(); - assert_sync::>(); - assert_unwind_safe::>(); - assert_send::>(); - assert_sync::>(); - assert_unwind_safe::>(); - } -} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/tests.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/tests.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/tests.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/tests.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,32 +0,0 @@ -/// A sequence of tests for checking whether lossy decoding uses the maximal -/// subpart strategy correctly. Namely, if a sequence of otherwise invalid -/// UTF-8 bytes is a valid prefix of a valid UTF-8 sequence, then the entire -/// prefix is replaced by a single replacement codepoint. In all other cases, -/// each invalid byte is replaced by a single replacement codepoint. -/// -/// The first element in each tuple is the expected result of lossy decoding, -/// while the second element is the input given. 
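
A small sketch (not part of the vendored test module) of how rows from a table like this can be checked, assuming `ByteSlice::to_str_lossy` applies the maximal-subpart substitution described above; the two rows are taken from the table itself:

```
use bstr::ByteSlice;

fn main() {
    // (expected lossy decoding, raw input), as in LOSSY_TESTS below.
    let cases: &[(&str, &[u8])] = &[
        ("\u{FFFD}", b"\xCE"),
        ("a\u{FFFD}", b"a\xE2\x98"),
    ];
    for &(expected, input) in cases {
        assert_eq!(input.to_str_lossy(), expected);
    }
}
```
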
-pub const LOSSY_TESTS: &[(&str, &[u8])] = &[ - ("a", b"a"), - ("\u{FFFD}", b"\xFF"), - ("\u{FFFD}\u{FFFD}", b"\xFF\xFF"), - ("β\u{FFFD}", b"\xCE\xB2\xFF"), - ("☃\u{FFFD}", b"\xE2\x98\x83\xFF"), - ("𝝱\u{FFFD}", b"\xF0\x9D\x9D\xB1\xFF"), - ("\u{FFFD}\u{FFFD}", b"\xCE\xF0"), - ("\u{FFFD}\u{FFFD}", b"\xCE\xFF"), - ("\u{FFFD}\u{FFFD}", b"\xE2\x98\xF0"), - ("\u{FFFD}\u{FFFD}", b"\xE2\x98\xFF"), - ("\u{FFFD}", b"\xF0\x9D\x9D"), - ("\u{FFFD}\u{FFFD}", b"\xF0\x9D\x9D\xF0"), - ("\u{FFFD}\u{FFFD}", b"\xF0\x9D\x9D\xFF"), - ("\u{FFFD}", b"\xCE"), - ("a\u{FFFD}", b"a\xCE"), - ("\u{FFFD}", b"\xE2\x98"), - ("a\u{FFFD}", b"a\xE2\x98"), - ("\u{FFFD}", b"\xF0\x9D\x9C"), - ("a\u{FFFD}", b"a\xF0\x9D\x9C"), - ("a\u{FFFD}\u{FFFD}\u{FFFD}z", b"a\xED\xA0\x80z"), - ("☃βツ\u{FFFD}", b"\xe2\x98\x83\xce\xb2\xe3\x83\x84\xFF"), - ("a\u{FFFD}\u{FFFD}\u{FFFD}b", b"\x61\xF1\x80\x80\xE1\x80\xC2\x62"), -]; diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/data/GraphemeBreakTest.txt cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/data/GraphemeBreakTest.txt --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/data/GraphemeBreakTest.txt 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/data/GraphemeBreakTest.txt 1970-01-01 00:00:00.000000000 +0000 @@ -1,630 +0,0 @@ -# GraphemeBreakTest-12.1.0.txt -# Date: 2019-03-10, 10:53:12 GMT -# © 2019 Unicode®, Inc. -# Unicode and the Unicode Logo are registered trademarks of Unicode, Inc. in the U.S. and other countries. -# For terms of use, see http://www.unicode.org/terms_of_use.html -# -# Unicode Character Database -# For documentation, see http://www.unicode.org/reports/tr44/ -# -# Default Grapheme_Cluster_Break Test -# -# Format: -# (# )? -# contains hex Unicode code points, with -# ÷ wherever there is a break opportunity, and -# × wherever there is not. -# the format can change, but currently it shows: -# - the sample character name -# - (x) the Grapheme_Cluster_Break property value for the sample character -# - [x] the rule that determines whether there is a break or not, -# as listed in the Rules section of GraphemeBreakTest.html -# -# These samples may be extended or changed in the future. 
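
An illustrative parser sketch (not part of the data file) for the line format described in the header above, where `÷` marks a break opportunity and `×` marks the absence of one between hex code points:

```
// Parse the portion before '#' into (break-marker, code point) pairs.
fn parse_line(line: &str) -> Vec<(char, u32)> {
    let body = line.split('#').next().unwrap_or("");
    let mut out = Vec::new();
    let mut marker = '÷';
    for tok in body.split_whitespace() {
        match tok {
            "÷" | "×" => marker = tok.chars().next().unwrap(),
            hex => {
                let cp = u32::from_str_radix(hex, 16).expect("hex code point");
                out.push((marker, cp));
            }
        }
    }
    out
}

fn main() {
    let parsed = parse_line("÷ 0020 × 0308 ÷ 0020 ÷ # example");
    assert_eq!(parsed, vec![('÷', 0x0020), ('×', 0x0308), ('÷', 0x0020)]);
}
```
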
-# -÷ 0020 ÷ 0020 ÷ # ÷ [0.2] SPACE (Other) ÷ [999.0] SPACE (Other) ÷ [0.3] -÷ 0020 × 0308 ÷ 0020 ÷ # ÷ [0.2] SPACE (Other) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] SPACE (Other) ÷ [0.3] -÷ 0020 ÷ 000D ÷ # ÷ [0.2] SPACE (Other) ÷ [5.0] (CR) ÷ [0.3] -÷ 0020 × 0308 ÷ 000D ÷ # ÷ [0.2] SPACE (Other) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (CR) ÷ [0.3] -÷ 0020 ÷ 000A ÷ # ÷ [0.2] SPACE (Other) ÷ [5.0] (LF) ÷ [0.3] -÷ 0020 × 0308 ÷ 000A ÷ # ÷ [0.2] SPACE (Other) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (LF) ÷ [0.3] -÷ 0020 ÷ 0001 ÷ # ÷ [0.2] SPACE (Other) ÷ [5.0] (Control) ÷ [0.3] -÷ 0020 × 0308 ÷ 0001 ÷ # ÷ [0.2] SPACE (Other) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (Control) ÷ [0.3] -÷ 0020 × 034F ÷ # ÷ [0.2] SPACE (Other) × [9.0] COMBINING GRAPHEME JOINER (Extend) ÷ [0.3] -÷ 0020 × 0308 × 034F ÷ # ÷ [0.2] SPACE (Other) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] COMBINING GRAPHEME JOINER (Extend) ÷ [0.3] -÷ 0020 ÷ 1F1E6 ÷ # ÷ [0.2] SPACE (Other) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 0020 × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] SPACE (Other) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 0020 ÷ 0600 ÷ # ÷ [0.2] SPACE (Other) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] -÷ 0020 × 0308 ÷ 0600 ÷ # ÷ [0.2] SPACE (Other) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] -÷ 0020 × 0903 ÷ # ÷ [0.2] SPACE (Other) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] -÷ 0020 × 0308 × 0903 ÷ # ÷ [0.2] SPACE (Other) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] -÷ 0020 ÷ 1100 ÷ # ÷ [0.2] SPACE (Other) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] -÷ 0020 × 0308 ÷ 1100 ÷ # ÷ [0.2] SPACE (Other) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] -÷ 0020 ÷ 1160 ÷ # ÷ [0.2] SPACE (Other) ÷ [999.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] -÷ 0020 × 0308 ÷ 1160 ÷ # ÷ [0.2] SPACE (Other) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] -÷ 0020 ÷ 11A8 ÷ # ÷ [0.2] SPACE (Other) ÷ [999.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] -÷ 0020 × 0308 ÷ 11A8 ÷ # ÷ [0.2] SPACE (Other) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] -÷ 0020 ÷ AC00 ÷ # ÷ [0.2] SPACE (Other) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] -÷ 0020 × 0308 ÷ AC00 ÷ # ÷ [0.2] SPACE (Other) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] -÷ 0020 ÷ AC01 ÷ # ÷ [0.2] SPACE (Other) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] -÷ 0020 × 0308 ÷ AC01 ÷ # ÷ [0.2] SPACE (Other) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] -÷ 0020 ÷ 231A ÷ # ÷ [0.2] SPACE (Other) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] -÷ 0020 × 0308 ÷ 231A ÷ # ÷ [0.2] SPACE (Other) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] -÷ 0020 × 0300 ÷ # ÷ [0.2] SPACE (Other) × [9.0] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [0.3] -÷ 0020 × 0308 × 0300 ÷ # ÷ [0.2] SPACE (Other) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [0.3] -÷ 0020 × 200D ÷ # ÷ [0.2] SPACE (Other) × [9.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [0.3] -÷ 0020 × 0308 × 200D ÷ # ÷ [0.2] SPACE (Other) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [0.3] -÷ 0020 ÷ 0378 ÷ # ÷ 
[0.2] SPACE (Other) ÷ [999.0] (Other) ÷ [0.3] -÷ 0020 × 0308 ÷ 0378 ÷ # ÷ [0.2] SPACE (Other) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] (Other) ÷ [0.3] -÷ 000D ÷ 0020 ÷ # ÷ [0.2] (CR) ÷ [4.0] SPACE (Other) ÷ [0.3] -÷ 000D ÷ 0308 ÷ 0020 ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] SPACE (Other) ÷ [0.3] -÷ 000D ÷ 000D ÷ # ÷ [0.2] (CR) ÷ [4.0] (CR) ÷ [0.3] -÷ 000D ÷ 0308 ÷ 000D ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (CR) ÷ [0.3] -÷ 000D × 000A ÷ # ÷ [0.2] (CR) × [3.0] (LF) ÷ [0.3] -÷ 000D ÷ 0308 ÷ 000A ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (LF) ÷ [0.3] -÷ 000D ÷ 0001 ÷ # ÷ [0.2] (CR) ÷ [4.0] (Control) ÷ [0.3] -÷ 000D ÷ 0308 ÷ 0001 ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (Control) ÷ [0.3] -÷ 000D ÷ 034F ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING GRAPHEME JOINER (Extend) ÷ [0.3] -÷ 000D ÷ 0308 × 034F ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] COMBINING GRAPHEME JOINER (Extend) ÷ [0.3] -÷ 000D ÷ 1F1E6 ÷ # ÷ [0.2] (CR) ÷ [4.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 000D ÷ 0308 ÷ 1F1E6 ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 000D ÷ 0600 ÷ # ÷ [0.2] (CR) ÷ [4.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] -÷ 000D ÷ 0308 ÷ 0600 ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] -÷ 000D ÷ 0903 ÷ # ÷ [0.2] (CR) ÷ [4.0] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] -÷ 000D ÷ 0308 × 0903 ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] -÷ 000D ÷ 1100 ÷ # ÷ [0.2] (CR) ÷ [4.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] -÷ 000D ÷ 0308 ÷ 1100 ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] -÷ 000D ÷ 1160 ÷ # ÷ [0.2] (CR) ÷ [4.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] -÷ 000D ÷ 0308 ÷ 1160 ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] -÷ 000D ÷ 11A8 ÷ # ÷ [0.2] (CR) ÷ [4.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] -÷ 000D ÷ 0308 ÷ 11A8 ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] -÷ 000D ÷ AC00 ÷ # ÷ [0.2] (CR) ÷ [4.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] -÷ 000D ÷ 0308 ÷ AC00 ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] -÷ 000D ÷ AC01 ÷ # ÷ [0.2] (CR) ÷ [4.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] -÷ 000D ÷ 0308 ÷ AC01 ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] -÷ 000D ÷ 231A ÷ # ÷ [0.2] (CR) ÷ [4.0] WATCH (ExtPict) ÷ [0.3] -÷ 000D ÷ 0308 ÷ 231A ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] -÷ 000D ÷ 0300 ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [0.3] -÷ 000D ÷ 0308 × 0300 ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [0.3] -÷ 000D ÷ 200D ÷ # ÷ [0.2] (CR) ÷ [4.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [0.3] -÷ 000D ÷ 0308 × 200D ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [0.3] -÷ 000D ÷ 0378 ÷ # ÷ [0.2] (CR) ÷ [4.0] (Other) ÷ [0.3] -÷ 000D ÷ 0308 ÷ 0378 ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] (Other) ÷ 
[0.3] -÷ 000A ÷ 0020 ÷ # ÷ [0.2] (LF) ÷ [4.0] SPACE (Other) ÷ [0.3] -÷ 000A ÷ 0308 ÷ 0020 ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] SPACE (Other) ÷ [0.3] -÷ 000A ÷ 000D ÷ # ÷ [0.2] (LF) ÷ [4.0] (CR) ÷ [0.3] -÷ 000A ÷ 0308 ÷ 000D ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (CR) ÷ [0.3] -÷ 000A ÷ 000A ÷ # ÷ [0.2] (LF) ÷ [4.0] (LF) ÷ [0.3] -÷ 000A ÷ 0308 ÷ 000A ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (LF) ÷ [0.3] -÷ 000A ÷ 0001 ÷ # ÷ [0.2] (LF) ÷ [4.0] (Control) ÷ [0.3] -÷ 000A ÷ 0308 ÷ 0001 ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (Control) ÷ [0.3] -÷ 000A ÷ 034F ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING GRAPHEME JOINER (Extend) ÷ [0.3] -÷ 000A ÷ 0308 × 034F ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] COMBINING GRAPHEME JOINER (Extend) ÷ [0.3] -÷ 000A ÷ 1F1E6 ÷ # ÷ [0.2] (LF) ÷ [4.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 000A ÷ 0308 ÷ 1F1E6 ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 000A ÷ 0600 ÷ # ÷ [0.2] (LF) ÷ [4.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] -÷ 000A ÷ 0308 ÷ 0600 ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] -÷ 000A ÷ 0903 ÷ # ÷ [0.2] (LF) ÷ [4.0] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] -÷ 000A ÷ 0308 × 0903 ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] -÷ 000A ÷ 1100 ÷ # ÷ [0.2] (LF) ÷ [4.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] -÷ 000A ÷ 0308 ÷ 1100 ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] -÷ 000A ÷ 1160 ÷ # ÷ [0.2] (LF) ÷ [4.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] -÷ 000A ÷ 0308 ÷ 1160 ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] -÷ 000A ÷ 11A8 ÷ # ÷ [0.2] (LF) ÷ [4.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] -÷ 000A ÷ 0308 ÷ 11A8 ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] -÷ 000A ÷ AC00 ÷ # ÷ [0.2] (LF) ÷ [4.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] -÷ 000A ÷ 0308 ÷ AC00 ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] -÷ 000A ÷ AC01 ÷ # ÷ [0.2] (LF) ÷ [4.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] -÷ 000A ÷ 0308 ÷ AC01 ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] -÷ 000A ÷ 231A ÷ # ÷ [0.2] (LF) ÷ [4.0] WATCH (ExtPict) ÷ [0.3] -÷ 000A ÷ 0308 ÷ 231A ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] -÷ 000A ÷ 0300 ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [0.3] -÷ 000A ÷ 0308 × 0300 ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [0.3] -÷ 000A ÷ 200D ÷ # ÷ [0.2] (LF) ÷ [4.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [0.3] -÷ 000A ÷ 0308 × 200D ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [0.3] -÷ 000A ÷ 0378 ÷ # ÷ [0.2] (LF) ÷ [4.0] (Other) ÷ [0.3] -÷ 000A ÷ 0308 ÷ 0378 ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] (Other) ÷ [0.3] -÷ 0001 ÷ 0020 ÷ # ÷ [0.2] (Control) ÷ [4.0] SPACE (Other) ÷ [0.3] -÷ 0001 ÷ 0308 ÷ 0020 ÷ # ÷ [0.2] (Control) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) 
÷ [999.0] SPACE (Other) ÷ [0.3] -÷ 0001 ÷ 000D ÷ # ÷ [0.2] (Control) ÷ [4.0] (CR) ÷ [0.3] -÷ 0001 ÷ 0308 ÷ 000D ÷ # ÷ [0.2] (Control) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (CR) ÷ [0.3] -÷ 0001 ÷ 000A ÷ # ÷ [0.2] (Control) ÷ [4.0] (LF) ÷ [0.3] -÷ 0001 ÷ 0308 ÷ 000A ÷ # ÷ [0.2] (Control) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (LF) ÷ [0.3] -÷ 0001 ÷ 0001 ÷ # ÷ [0.2] (Control) ÷ [4.0] (Control) ÷ [0.3] -÷ 0001 ÷ 0308 ÷ 0001 ÷ # ÷ [0.2] (Control) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (Control) ÷ [0.3] -÷ 0001 ÷ 034F ÷ # ÷ [0.2] (Control) ÷ [4.0] COMBINING GRAPHEME JOINER (Extend) ÷ [0.3] -÷ 0001 ÷ 0308 × 034F ÷ # ÷ [0.2] (Control) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] COMBINING GRAPHEME JOINER (Extend) ÷ [0.3] -÷ 0001 ÷ 1F1E6 ÷ # ÷ [0.2] (Control) ÷ [4.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 0001 ÷ 0308 ÷ 1F1E6 ÷ # ÷ [0.2] (Control) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 0001 ÷ 0600 ÷ # ÷ [0.2] (Control) ÷ [4.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] -÷ 0001 ÷ 0308 ÷ 0600 ÷ # ÷ [0.2] (Control) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] -÷ 0001 ÷ 0903 ÷ # ÷ [0.2] (Control) ÷ [4.0] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] -÷ 0001 ÷ 0308 × 0903 ÷ # ÷ [0.2] (Control) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] -÷ 0001 ÷ 1100 ÷ # ÷ [0.2] (Control) ÷ [4.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] -÷ 0001 ÷ 0308 ÷ 1100 ÷ # ÷ [0.2] (Control) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] -÷ 0001 ÷ 1160 ÷ # ÷ [0.2] (Control) ÷ [4.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] -÷ 0001 ÷ 0308 ÷ 1160 ÷ # ÷ [0.2] (Control) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] -÷ 0001 ÷ 11A8 ÷ # ÷ [0.2] (Control) ÷ [4.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] -÷ 0001 ÷ 0308 ÷ 11A8 ÷ # ÷ [0.2] (Control) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] -÷ 0001 ÷ AC00 ÷ # ÷ [0.2] (Control) ÷ [4.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] -÷ 0001 ÷ 0308 ÷ AC00 ÷ # ÷ [0.2] (Control) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] -÷ 0001 ÷ AC01 ÷ # ÷ [0.2] (Control) ÷ [4.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] -÷ 0001 ÷ 0308 ÷ AC01 ÷ # ÷ [0.2] (Control) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] -÷ 0001 ÷ 231A ÷ # ÷ [0.2] (Control) ÷ [4.0] WATCH (ExtPict) ÷ [0.3] -÷ 0001 ÷ 0308 ÷ 231A ÷ # ÷ [0.2] (Control) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] -÷ 0001 ÷ 0300 ÷ # ÷ [0.2] (Control) ÷ [4.0] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [0.3] -÷ 0001 ÷ 0308 × 0300 ÷ # ÷ [0.2] (Control) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [0.3] -÷ 0001 ÷ 200D ÷ # ÷ [0.2] (Control) ÷ [4.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [0.3] -÷ 0001 ÷ 0308 × 200D ÷ # ÷ [0.2] (Control) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [0.3] -÷ 0001 ÷ 0378 ÷ # ÷ [0.2] (Control) ÷ [4.0] (Other) ÷ [0.3] -÷ 0001 ÷ 0308 ÷ 0378 ÷ # ÷ [0.2] (Control) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] (Other) ÷ [0.3] -÷ 034F ÷ 0020 ÷ # ÷ [0.2] COMBINING GRAPHEME JOINER (Extend) ÷ [999.0] SPACE (Other) ÷ [0.3] -÷ 034F × 0308 ÷ 0020 ÷ # ÷ [0.2] COMBINING GRAPHEME 
JOINER (Extend) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] SPACE (Other) ÷ [0.3] -÷ 034F ÷ 000D ÷ # ÷ [0.2] COMBINING GRAPHEME JOINER (Extend) ÷ [5.0] (CR) ÷ [0.3] -÷ 034F × 0308 ÷ 000D ÷ # ÷ [0.2] COMBINING GRAPHEME JOINER (Extend) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (CR) ÷ [0.3] -÷ 034F ÷ 000A ÷ # ÷ [0.2] COMBINING GRAPHEME JOINER (Extend) ÷ [5.0] (LF) ÷ [0.3] -÷ 034F × 0308 ÷ 000A ÷ # ÷ [0.2] COMBINING GRAPHEME JOINER (Extend) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (LF) ÷ [0.3] -÷ 034F ÷ 0001 ÷ # ÷ [0.2] COMBINING GRAPHEME JOINER (Extend) ÷ [5.0] (Control) ÷ [0.3] -÷ 034F × 0308 ÷ 0001 ÷ # ÷ [0.2] COMBINING GRAPHEME JOINER (Extend) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (Control) ÷ [0.3] -÷ 034F × 034F ÷ # ÷ [0.2] COMBINING GRAPHEME JOINER (Extend) × [9.0] COMBINING GRAPHEME JOINER (Extend) ÷ [0.3] -÷ 034F × 0308 × 034F ÷ # ÷ [0.2] COMBINING GRAPHEME JOINER (Extend) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] COMBINING GRAPHEME JOINER (Extend) ÷ [0.3] -÷ 034F ÷ 1F1E6 ÷ # ÷ [0.2] COMBINING GRAPHEME JOINER (Extend) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 034F × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] COMBINING GRAPHEME JOINER (Extend) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 034F ÷ 0600 ÷ # ÷ [0.2] COMBINING GRAPHEME JOINER (Extend) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] -÷ 034F × 0308 ÷ 0600 ÷ # ÷ [0.2] COMBINING GRAPHEME JOINER (Extend) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] -÷ 034F × 0903 ÷ # ÷ [0.2] COMBINING GRAPHEME JOINER (Extend) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] -÷ 034F × 0308 × 0903 ÷ # ÷ [0.2] COMBINING GRAPHEME JOINER (Extend) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] -÷ 034F ÷ 1100 ÷ # ÷ [0.2] COMBINING GRAPHEME JOINER (Extend) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] -÷ 034F × 0308 ÷ 1100 ÷ # ÷ [0.2] COMBINING GRAPHEME JOINER (Extend) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] -÷ 034F ÷ 1160 ÷ # ÷ [0.2] COMBINING GRAPHEME JOINER (Extend) ÷ [999.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] -÷ 034F × 0308 ÷ 1160 ÷ # ÷ [0.2] COMBINING GRAPHEME JOINER (Extend) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] -÷ 034F ÷ 11A8 ÷ # ÷ [0.2] COMBINING GRAPHEME JOINER (Extend) ÷ [999.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] -÷ 034F × 0308 ÷ 11A8 ÷ # ÷ [0.2] COMBINING GRAPHEME JOINER (Extend) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] -÷ 034F ÷ AC00 ÷ # ÷ [0.2] COMBINING GRAPHEME JOINER (Extend) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] -÷ 034F × 0308 ÷ AC00 ÷ # ÷ [0.2] COMBINING GRAPHEME JOINER (Extend) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] -÷ 034F ÷ AC01 ÷ # ÷ [0.2] COMBINING GRAPHEME JOINER (Extend) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] -÷ 034F × 0308 ÷ AC01 ÷ # ÷ [0.2] COMBINING GRAPHEME JOINER (Extend) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] -÷ 034F ÷ 231A ÷ # ÷ [0.2] COMBINING GRAPHEME JOINER (Extend) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] -÷ 034F × 0308 ÷ 231A ÷ # ÷ [0.2] COMBINING GRAPHEME JOINER (Extend) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] -÷ 034F × 0300 ÷ # ÷ [0.2] COMBINING GRAPHEME JOINER (Extend) × [9.0] 
COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [0.3] -÷ 034F × 0308 × 0300 ÷ # ÷ [0.2] COMBINING GRAPHEME JOINER (Extend) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [0.3] -÷ 034F × 200D ÷ # ÷ [0.2] COMBINING GRAPHEME JOINER (Extend) × [9.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [0.3] -÷ 034F × 0308 × 200D ÷ # ÷ [0.2] COMBINING GRAPHEME JOINER (Extend) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [0.3] -÷ 034F ÷ 0378 ÷ # ÷ [0.2] COMBINING GRAPHEME JOINER (Extend) ÷ [999.0] (Other) ÷ [0.3] -÷ 034F × 0308 ÷ 0378 ÷ # ÷ [0.2] COMBINING GRAPHEME JOINER (Extend) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] (Other) ÷ [0.3] -÷ 1F1E6 ÷ 0020 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [999.0] SPACE (Other) ÷ [0.3] -÷ 1F1E6 × 0308 ÷ 0020 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] SPACE (Other) ÷ [0.3] -÷ 1F1E6 ÷ 000D ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [5.0] (CR) ÷ [0.3] -÷ 1F1E6 × 0308 ÷ 000D ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (CR) ÷ [0.3] -÷ 1F1E6 ÷ 000A ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [5.0] (LF) ÷ [0.3] -÷ 1F1E6 × 0308 ÷ 000A ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (LF) ÷ [0.3] -÷ 1F1E6 ÷ 0001 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [5.0] (Control) ÷ [0.3] -÷ 1F1E6 × 0308 ÷ 0001 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (Control) ÷ [0.3] -÷ 1F1E6 × 034F ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [9.0] COMBINING GRAPHEME JOINER (Extend) ÷ [0.3] -÷ 1F1E6 × 0308 × 034F ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] COMBINING GRAPHEME JOINER (Extend) ÷ [0.3] -÷ 1F1E6 × 1F1E6 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [12.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 1F1E6 × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 1F1E6 ÷ 0600 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] -÷ 1F1E6 × 0308 ÷ 0600 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] -÷ 1F1E6 × 0903 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] -÷ 1F1E6 × 0308 × 0903 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] -÷ 1F1E6 ÷ 1100 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] -÷ 1F1E6 × 0308 ÷ 1100 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] -÷ 1F1E6 ÷ 1160 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [999.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] -÷ 1F1E6 × 0308 ÷ 1160 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] -÷ 1F1E6 ÷ 11A8 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [999.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] -÷ 
1F1E6 × 0308 ÷ 11A8 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] -÷ 1F1E6 ÷ AC00 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] -÷ 1F1E6 × 0308 ÷ AC00 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] -÷ 1F1E6 ÷ AC01 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] -÷ 1F1E6 × 0308 ÷ AC01 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] -÷ 1F1E6 ÷ 231A ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] -÷ 1F1E6 × 0308 ÷ 231A ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] -÷ 1F1E6 × 0300 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [9.0] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [0.3] -÷ 1F1E6 × 0308 × 0300 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [0.3] -÷ 1F1E6 × 200D ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [9.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [0.3] -÷ 1F1E6 × 0308 × 200D ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [0.3] -÷ 1F1E6 ÷ 0378 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [999.0] (Other) ÷ [0.3] -÷ 1F1E6 × 0308 ÷ 0378 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] (Other) ÷ [0.3] -÷ 0600 × 0020 ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.2] SPACE (Other) ÷ [0.3] -÷ 0600 × 0308 ÷ 0020 ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] SPACE (Other) ÷ [0.3] -÷ 0600 ÷ 000D ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) ÷ [5.0] (CR) ÷ [0.3] -÷ 0600 × 0308 ÷ 000D ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (CR) ÷ [0.3] -÷ 0600 ÷ 000A ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) ÷ [5.0] (LF) ÷ [0.3] -÷ 0600 × 0308 ÷ 000A ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (LF) ÷ [0.3] -÷ 0600 ÷ 0001 ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) ÷ [5.0] (Control) ÷ [0.3] -÷ 0600 × 0308 ÷ 0001 ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (Control) ÷ [0.3] -÷ 0600 × 034F ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.0] COMBINING GRAPHEME JOINER (Extend) ÷ [0.3] -÷ 0600 × 0308 × 034F ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] COMBINING GRAPHEME JOINER (Extend) ÷ [0.3] -÷ 0600 × 1F1E6 ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 0600 × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 0600 × 0600 ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.2] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] -÷ 0600 × 0308 ÷ 0600 ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] -÷ 0600 × 0903 ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.1] 
DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] -÷ 0600 × 0308 × 0903 ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] -÷ 0600 × 1100 ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.2] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] -÷ 0600 × 0308 ÷ 1100 ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] -÷ 0600 × 1160 ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.2] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] -÷ 0600 × 0308 ÷ 1160 ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] -÷ 0600 × 11A8 ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.2] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] -÷ 0600 × 0308 ÷ 11A8 ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] -÷ 0600 × AC00 ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.2] HANGUL SYLLABLE GA (LV) ÷ [0.3] -÷ 0600 × 0308 ÷ AC00 ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] -÷ 0600 × AC01 ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.2] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] -÷ 0600 × 0308 ÷ AC01 ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] -÷ 0600 × 231A ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.2] WATCH (ExtPict) ÷ [0.3] -÷ 0600 × 0308 ÷ 231A ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] -÷ 0600 × 0300 ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.0] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [0.3] -÷ 0600 × 0308 × 0300 ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [0.3] -÷ 0600 × 200D ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [0.3] -÷ 0600 × 0308 × 200D ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [0.3] -÷ 0600 × 0378 ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.2] (Other) ÷ [0.3] -÷ 0600 × 0308 ÷ 0378 ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] (Other) ÷ [0.3] -÷ 0903 ÷ 0020 ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [999.0] SPACE (Other) ÷ [0.3] -÷ 0903 × 0308 ÷ 0020 ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] SPACE (Other) ÷ [0.3] -÷ 0903 ÷ 000D ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [5.0] (CR) ÷ [0.3] -÷ 0903 × 0308 ÷ 000D ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (CR) ÷ [0.3] -÷ 0903 ÷ 000A ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [5.0] (LF) ÷ [0.3] -÷ 0903 × 0308 ÷ 000A ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (LF) ÷ [0.3] -÷ 0903 ÷ 0001 ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [5.0] (Control) ÷ [0.3] -÷ 0903 × 0308 ÷ 0001 ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (Control) ÷ [0.3] -÷ 0903 × 034F ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) × [9.0] COMBINING GRAPHEME JOINER (Extend) ÷ [0.3] -÷ 0903 × 0308 × 034F ÷ # ÷ 
[0.2] DEVANAGARI SIGN VISARGA (SpacingMark) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] COMBINING GRAPHEME JOINER (Extend) ÷ [0.3] -÷ 0903 ÷ 1F1E6 ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 0903 × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 0903 ÷ 0600 ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] -÷ 0903 × 0308 ÷ 0600 ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] -÷ 0903 × 0903 ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] -÷ 0903 × 0308 × 0903 ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] -÷ 0903 ÷ 1100 ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] -÷ 0903 × 0308 ÷ 1100 ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] -÷ 0903 ÷ 1160 ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [999.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] -÷ 0903 × 0308 ÷ 1160 ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] -÷ 0903 ÷ 11A8 ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [999.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] -÷ 0903 × 0308 ÷ 11A8 ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] -÷ 0903 ÷ AC00 ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] -÷ 0903 × 0308 ÷ AC00 ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] -÷ 0903 ÷ AC01 ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] -÷ 0903 × 0308 ÷ AC01 ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] -÷ 0903 ÷ 231A ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] -÷ 0903 × 0308 ÷ 231A ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] -÷ 0903 × 0300 ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) × [9.0] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [0.3] -÷ 0903 × 0308 × 0300 ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [0.3] -÷ 0903 × 200D ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) × [9.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [0.3] -÷ 0903 × 0308 × 200D ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [0.3] -÷ 0903 ÷ 0378 ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [999.0] (Other) ÷ [0.3] -÷ 0903 × 0308 ÷ 0378 ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] (Other) ÷ [0.3] -÷ 1100 ÷ 0020 ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) ÷ [999.0] SPACE (Other) ÷ [0.3] -÷ 1100 × 0308 ÷ 
0020 ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] SPACE (Other) ÷ [0.3] -÷ 1100 ÷ 000D ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) ÷ [5.0] (CR) ÷ [0.3] -÷ 1100 × 0308 ÷ 000D ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (CR) ÷ [0.3] -÷ 1100 ÷ 000A ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) ÷ [5.0] (LF) ÷ [0.3] -÷ 1100 × 0308 ÷ 000A ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (LF) ÷ [0.3] -÷ 1100 ÷ 0001 ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) ÷ [5.0] (Control) ÷ [0.3] -÷ 1100 × 0308 ÷ 0001 ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (Control) ÷ [0.3] -÷ 1100 × 034F ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) × [9.0] COMBINING GRAPHEME JOINER (Extend) ÷ [0.3] -÷ 1100 × 0308 × 034F ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] COMBINING GRAPHEME JOINER (Extend) ÷ [0.3] -÷ 1100 ÷ 1F1E6 ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 1100 × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 1100 ÷ 0600 ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] -÷ 1100 × 0308 ÷ 0600 ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] -÷ 1100 × 0903 ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] -÷ 1100 × 0308 × 0903 ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] -÷ 1100 × 1100 ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) × [6.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] -÷ 1100 × 0308 ÷ 1100 ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] -÷ 1100 × 1160 ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) × [6.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] -÷ 1100 × 0308 ÷ 1160 ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] -÷ 1100 ÷ 11A8 ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) ÷ [999.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] -÷ 1100 × 0308 ÷ 11A8 ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] -÷ 1100 × AC00 ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) × [6.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] -÷ 1100 × 0308 ÷ AC00 ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] -÷ 1100 × AC01 ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) × [6.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] -÷ 1100 × 0308 ÷ AC01 ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] -÷ 1100 ÷ 231A ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] -÷ 1100 × 0308 ÷ 231A ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] -÷ 1100 × 0300 ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) × [9.0] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [0.3] -÷ 1100 × 0308 × 0300 ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] COMBINING GRAVE ACCENT 
(Extend_ExtCccZwj) ÷ [0.3] -÷ 1100 × 200D ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) × [9.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [0.3] -÷ 1100 × 0308 × 200D ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [0.3] -÷ 1100 ÷ 0378 ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) ÷ [999.0] (Other) ÷ [0.3] -÷ 1100 × 0308 ÷ 0378 ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] (Other) ÷ [0.3] -÷ 1160 ÷ 0020 ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) ÷ [999.0] SPACE (Other) ÷ [0.3] -÷ 1160 × 0308 ÷ 0020 ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] SPACE (Other) ÷ [0.3] -÷ 1160 ÷ 000D ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) ÷ [5.0] (CR) ÷ [0.3] -÷ 1160 × 0308 ÷ 000D ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (CR) ÷ [0.3] -÷ 1160 ÷ 000A ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) ÷ [5.0] (LF) ÷ [0.3] -÷ 1160 × 0308 ÷ 000A ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (LF) ÷ [0.3] -÷ 1160 ÷ 0001 ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) ÷ [5.0] (Control) ÷ [0.3] -÷ 1160 × 0308 ÷ 0001 ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (Control) ÷ [0.3] -÷ 1160 × 034F ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) × [9.0] COMBINING GRAPHEME JOINER (Extend) ÷ [0.3] -÷ 1160 × 0308 × 034F ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] COMBINING GRAPHEME JOINER (Extend) ÷ [0.3] -÷ 1160 ÷ 1F1E6 ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 1160 × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 1160 ÷ 0600 ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] -÷ 1160 × 0308 ÷ 0600 ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] -÷ 1160 × 0903 ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] -÷ 1160 × 0308 × 0903 ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] -÷ 1160 ÷ 1100 ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] -÷ 1160 × 0308 ÷ 1100 ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] -÷ 1160 × 1160 ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) × [7.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] -÷ 1160 × 0308 ÷ 1160 ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] -÷ 1160 × 11A8 ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) × [7.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] -÷ 1160 × 0308 ÷ 11A8 ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] -÷ 1160 ÷ AC00 ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] -÷ 1160 × 0308 ÷ AC00 ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] -÷ 1160 ÷ AC01 ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] -÷ 1160 
× 0308 ÷ AC01 ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] -÷ 1160 ÷ 231A ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] -÷ 1160 × 0308 ÷ 231A ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] -÷ 1160 × 0300 ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) × [9.0] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [0.3] -÷ 1160 × 0308 × 0300 ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [0.3] -÷ 1160 × 200D ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) × [9.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [0.3] -÷ 1160 × 0308 × 200D ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [0.3] -÷ 1160 ÷ 0378 ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) ÷ [999.0] (Other) ÷ [0.3] -÷ 1160 × 0308 ÷ 0378 ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] (Other) ÷ [0.3] -÷ 11A8 ÷ 0020 ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) ÷ [999.0] SPACE (Other) ÷ [0.3] -÷ 11A8 × 0308 ÷ 0020 ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] SPACE (Other) ÷ [0.3] -÷ 11A8 ÷ 000D ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) ÷ [5.0] (CR) ÷ [0.3] -÷ 11A8 × 0308 ÷ 000D ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (CR) ÷ [0.3] -÷ 11A8 ÷ 000A ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) ÷ [5.0] (LF) ÷ [0.3] -÷ 11A8 × 0308 ÷ 000A ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (LF) ÷ [0.3] -÷ 11A8 ÷ 0001 ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) ÷ [5.0] (Control) ÷ [0.3] -÷ 11A8 × 0308 ÷ 0001 ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (Control) ÷ [0.3] -÷ 11A8 × 034F ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) × [9.0] COMBINING GRAPHEME JOINER (Extend) ÷ [0.3] -÷ 11A8 × 0308 × 034F ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] COMBINING GRAPHEME JOINER (Extend) ÷ [0.3] -÷ 11A8 ÷ 1F1E6 ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 11A8 × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 11A8 ÷ 0600 ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] -÷ 11A8 × 0308 ÷ 0600 ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] -÷ 11A8 × 0903 ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] -÷ 11A8 × 0308 × 0903 ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] -÷ 11A8 ÷ 1100 ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] -÷ 11A8 × 0308 ÷ 1100 ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] -÷ 11A8 ÷ 1160 ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) ÷ [999.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] -÷ 11A8 × 0308 ÷ 1160 ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL 
JUNGSEONG FILLER (V) ÷ [0.3] -÷ 11A8 × 11A8 ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) × [8.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] -÷ 11A8 × 0308 ÷ 11A8 ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] -÷ 11A8 ÷ AC00 ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] -÷ 11A8 × 0308 ÷ AC00 ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] -÷ 11A8 ÷ AC01 ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] -÷ 11A8 × 0308 ÷ AC01 ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] -÷ 11A8 ÷ 231A ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] -÷ 11A8 × 0308 ÷ 231A ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] -÷ 11A8 × 0300 ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) × [9.0] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [0.3] -÷ 11A8 × 0308 × 0300 ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [0.3] -÷ 11A8 × 200D ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) × [9.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [0.3] -÷ 11A8 × 0308 × 200D ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [0.3] -÷ 11A8 ÷ 0378 ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) ÷ [999.0] (Other) ÷ [0.3] -÷ 11A8 × 0308 ÷ 0378 ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] (Other) ÷ [0.3] -÷ AC00 ÷ 0020 ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) ÷ [999.0] SPACE (Other) ÷ [0.3] -÷ AC00 × 0308 ÷ 0020 ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] SPACE (Other) ÷ [0.3] -÷ AC00 ÷ 000D ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) ÷ [5.0] (CR) ÷ [0.3] -÷ AC00 × 0308 ÷ 000D ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (CR) ÷ [0.3] -÷ AC00 ÷ 000A ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) ÷ [5.0] (LF) ÷ [0.3] -÷ AC00 × 0308 ÷ 000A ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (LF) ÷ [0.3] -÷ AC00 ÷ 0001 ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) ÷ [5.0] (Control) ÷ [0.3] -÷ AC00 × 0308 ÷ 0001 ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (Control) ÷ [0.3] -÷ AC00 × 034F ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) × [9.0] COMBINING GRAPHEME JOINER (Extend) ÷ [0.3] -÷ AC00 × 0308 × 034F ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] COMBINING GRAPHEME JOINER (Extend) ÷ [0.3] -÷ AC00 ÷ 1F1E6 ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ AC00 × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ AC00 ÷ 0600 ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] -÷ AC00 × 0308 ÷ 0600 ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] -÷ AC00 × 0903 ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] -÷ AC00 × 0308 × 0903 ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) × [9.0] 
COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] -÷ AC00 ÷ 1100 ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] -÷ AC00 × 0308 ÷ 1100 ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] -÷ AC00 × 1160 ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) × [7.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] -÷ AC00 × 0308 ÷ 1160 ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] -÷ AC00 × 11A8 ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) × [7.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] -÷ AC00 × 0308 ÷ 11A8 ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] -÷ AC00 ÷ AC00 ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] -÷ AC00 × 0308 ÷ AC00 ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] -÷ AC00 ÷ AC01 ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] -÷ AC00 × 0308 ÷ AC01 ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] -÷ AC00 ÷ 231A ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] -÷ AC00 × 0308 ÷ 231A ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] -÷ AC00 × 0300 ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) × [9.0] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [0.3] -÷ AC00 × 0308 × 0300 ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [0.3] -÷ AC00 × 200D ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) × [9.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [0.3] -÷ AC00 × 0308 × 200D ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [0.3] -÷ AC00 ÷ 0378 ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) ÷ [999.0] (Other) ÷ [0.3] -÷ AC00 × 0308 ÷ 0378 ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] (Other) ÷ [0.3] -÷ AC01 ÷ 0020 ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) ÷ [999.0] SPACE (Other) ÷ [0.3] -÷ AC01 × 0308 ÷ 0020 ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] SPACE (Other) ÷ [0.3] -÷ AC01 ÷ 000D ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) ÷ [5.0] (CR) ÷ [0.3] -÷ AC01 × 0308 ÷ 000D ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (CR) ÷ [0.3] -÷ AC01 ÷ 000A ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) ÷ [5.0] (LF) ÷ [0.3] -÷ AC01 × 0308 ÷ 000A ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (LF) ÷ [0.3] -÷ AC01 ÷ 0001 ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) ÷ [5.0] (Control) ÷ [0.3] -÷ AC01 × 0308 ÷ 0001 ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (Control) ÷ [0.3] -÷ AC01 × 034F ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) × [9.0] COMBINING GRAPHEME JOINER (Extend) ÷ [0.3] -÷ AC01 × 0308 × 034F ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] COMBINING GRAPHEME JOINER (Extend) ÷ [0.3] -÷ AC01 ÷ 1F1E6 ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ AC01 × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) × 
[9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ AC01 ÷ 0600 ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] -÷ AC01 × 0308 ÷ 0600 ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] -÷ AC01 × 0903 ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] -÷ AC01 × 0308 × 0903 ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] -÷ AC01 ÷ 1100 ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] -÷ AC01 × 0308 ÷ 1100 ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] -÷ AC01 ÷ 1160 ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) ÷ [999.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] -÷ AC01 × 0308 ÷ 1160 ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] -÷ AC01 × 11A8 ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) × [8.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] -÷ AC01 × 0308 ÷ 11A8 ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] -÷ AC01 ÷ AC00 ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] -÷ AC01 × 0308 ÷ AC00 ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] -÷ AC01 ÷ AC01 ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] -÷ AC01 × 0308 ÷ AC01 ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] -÷ AC01 ÷ 231A ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] -÷ AC01 × 0308 ÷ 231A ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] -÷ AC01 × 0300 ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) × [9.0] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [0.3] -÷ AC01 × 0308 × 0300 ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [0.3] -÷ AC01 × 200D ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) × [9.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [0.3] -÷ AC01 × 0308 × 200D ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [0.3] -÷ AC01 ÷ 0378 ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) ÷ [999.0] (Other) ÷ [0.3] -÷ AC01 × 0308 ÷ 0378 ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] (Other) ÷ [0.3] -÷ 231A ÷ 0020 ÷ # ÷ [0.2] WATCH (ExtPict) ÷ [999.0] SPACE (Other) ÷ [0.3] -÷ 231A × 0308 ÷ 0020 ÷ # ÷ [0.2] WATCH (ExtPict) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] SPACE (Other) ÷ [0.3] -÷ 231A ÷ 000D ÷ # ÷ [0.2] WATCH (ExtPict) ÷ [5.0] (CR) ÷ [0.3] -÷ 231A × 0308 ÷ 000D ÷ # ÷ [0.2] WATCH (ExtPict) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (CR) ÷ [0.3] -÷ 231A ÷ 000A ÷ # ÷ [0.2] WATCH (ExtPict) ÷ [5.0] (LF) ÷ [0.3] -÷ 231A × 0308 ÷ 000A ÷ # ÷ [0.2] WATCH (ExtPict) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (LF) ÷ [0.3] -÷ 231A ÷ 0001 ÷ # ÷ [0.2] WATCH (ExtPict) ÷ [5.0] (Control) ÷ [0.3] -÷ 231A × 0308 ÷ 0001 ÷ # ÷ [0.2] WATCH (ExtPict) × [9.0] COMBINING 
DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (Control) ÷ [0.3] -÷ 231A × 034F ÷ # ÷ [0.2] WATCH (ExtPict) × [9.0] COMBINING GRAPHEME JOINER (Extend) ÷ [0.3] -÷ 231A × 0308 × 034F ÷ # ÷ [0.2] WATCH (ExtPict) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] COMBINING GRAPHEME JOINER (Extend) ÷ [0.3] -÷ 231A ÷ 1F1E6 ÷ # ÷ [0.2] WATCH (ExtPict) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 231A × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] WATCH (ExtPict) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 231A ÷ 0600 ÷ # ÷ [0.2] WATCH (ExtPict) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] -÷ 231A × 0308 ÷ 0600 ÷ # ÷ [0.2] WATCH (ExtPict) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] -÷ 231A × 0903 ÷ # ÷ [0.2] WATCH (ExtPict) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] -÷ 231A × 0308 × 0903 ÷ # ÷ [0.2] WATCH (ExtPict) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] -÷ 231A ÷ 1100 ÷ # ÷ [0.2] WATCH (ExtPict) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] -÷ 231A × 0308 ÷ 1100 ÷ # ÷ [0.2] WATCH (ExtPict) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] -÷ 231A ÷ 1160 ÷ # ÷ [0.2] WATCH (ExtPict) ÷ [999.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] -÷ 231A × 0308 ÷ 1160 ÷ # ÷ [0.2] WATCH (ExtPict) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] -÷ 231A ÷ 11A8 ÷ # ÷ [0.2] WATCH (ExtPict) ÷ [999.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] -÷ 231A × 0308 ÷ 11A8 ÷ # ÷ [0.2] WATCH (ExtPict) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] -÷ 231A ÷ AC00 ÷ # ÷ [0.2] WATCH (ExtPict) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] -÷ 231A × 0308 ÷ AC00 ÷ # ÷ [0.2] WATCH (ExtPict) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] -÷ 231A ÷ AC01 ÷ # ÷ [0.2] WATCH (ExtPict) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] -÷ 231A × 0308 ÷ AC01 ÷ # ÷ [0.2] WATCH (ExtPict) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] -÷ 231A ÷ 231A ÷ # ÷ [0.2] WATCH (ExtPict) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] -÷ 231A × 0308 ÷ 231A ÷ # ÷ [0.2] WATCH (ExtPict) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] -÷ 231A × 0300 ÷ # ÷ [0.2] WATCH (ExtPict) × [9.0] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [0.3] -÷ 231A × 0308 × 0300 ÷ # ÷ [0.2] WATCH (ExtPict) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [0.3] -÷ 231A × 200D ÷ # ÷ [0.2] WATCH (ExtPict) × [9.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [0.3] -÷ 231A × 0308 × 200D ÷ # ÷ [0.2] WATCH (ExtPict) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [0.3] -÷ 231A ÷ 0378 ÷ # ÷ [0.2] WATCH (ExtPict) ÷ [999.0] (Other) ÷ [0.3] -÷ 231A × 0308 ÷ 0378 ÷ # ÷ [0.2] WATCH (ExtPict) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] (Other) ÷ [0.3] -÷ 0300 ÷ 0020 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [999.0] SPACE (Other) ÷ [0.3] -÷ 0300 × 0308 ÷ 0020 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] SPACE (Other) ÷ [0.3] -÷ 0300 ÷ 000D ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [5.0] (CR) ÷ [0.3] -÷ 0300 × 0308 ÷ 000D ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ 
[5.0] (CR) ÷ [0.3] -÷ 0300 ÷ 000A ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [5.0] (LF) ÷ [0.3] -÷ 0300 × 0308 ÷ 000A ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (LF) ÷ [0.3] -÷ 0300 ÷ 0001 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [5.0] (Control) ÷ [0.3] -÷ 0300 × 0308 ÷ 0001 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (Control) ÷ [0.3] -÷ 0300 × 034F ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) × [9.0] COMBINING GRAPHEME JOINER (Extend) ÷ [0.3] -÷ 0300 × 0308 × 034F ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] COMBINING GRAPHEME JOINER (Extend) ÷ [0.3] -÷ 0300 ÷ 1F1E6 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 0300 × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 0300 ÷ 0600 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] -÷ 0300 × 0308 ÷ 0600 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] -÷ 0300 × 0903 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] -÷ 0300 × 0308 × 0903 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] -÷ 0300 ÷ 1100 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] -÷ 0300 × 0308 ÷ 1100 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] -÷ 0300 ÷ 1160 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [999.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] -÷ 0300 × 0308 ÷ 1160 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] -÷ 0300 ÷ 11A8 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [999.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] -÷ 0300 × 0308 ÷ 11A8 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] -÷ 0300 ÷ AC00 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] -÷ 0300 × 0308 ÷ AC00 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] -÷ 0300 ÷ AC01 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] -÷ 0300 × 0308 ÷ AC01 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] -÷ 0300 ÷ 231A ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] -÷ 0300 × 0308 ÷ 231A ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] -÷ 0300 × 0300 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) × [9.0] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [0.3] -÷ 0300 × 0308 × 0300 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT 
(Extend_ExtCccZwj) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [0.3] -÷ 0300 × 200D ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) × [9.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [0.3] -÷ 0300 × 0308 × 200D ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [0.3] -÷ 0300 ÷ 0378 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [999.0] (Other) ÷ [0.3] -÷ 0300 × 0308 ÷ 0378 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] (Other) ÷ [0.3] -÷ 200D ÷ 0020 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [999.0] SPACE (Other) ÷ [0.3] -÷ 200D × 0308 ÷ 0020 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] SPACE (Other) ÷ [0.3] -÷ 200D ÷ 000D ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [5.0] (CR) ÷ [0.3] -÷ 200D × 0308 ÷ 000D ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (CR) ÷ [0.3] -÷ 200D ÷ 000A ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [5.0] (LF) ÷ [0.3] -÷ 200D × 0308 ÷ 000A ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (LF) ÷ [0.3] -÷ 200D ÷ 0001 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [5.0] (Control) ÷ [0.3] -÷ 200D × 0308 ÷ 0001 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (Control) ÷ [0.3] -÷ 200D × 034F ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) × [9.0] COMBINING GRAPHEME JOINER (Extend) ÷ [0.3] -÷ 200D × 0308 × 034F ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] COMBINING GRAPHEME JOINER (Extend) ÷ [0.3] -÷ 200D ÷ 1F1E6 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 200D × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 200D ÷ 0600 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] -÷ 200D × 0308 ÷ 0600 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] -÷ 200D × 0903 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] -÷ 200D × 0308 × 0903 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] -÷ 200D ÷ 1100 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] -÷ 200D × 0308 ÷ 1100 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] -÷ 200D ÷ 1160 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [999.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] -÷ 200D × 0308 ÷ 1160 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] -÷ 200D ÷ 11A8 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [999.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] -÷ 200D × 0308 ÷ 11A8 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] -÷ 200D ÷ AC00 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ 
[999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] -÷ 200D × 0308 ÷ AC00 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] -÷ 200D ÷ AC01 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] -÷ 200D × 0308 ÷ AC01 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] -÷ 200D ÷ 231A ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] -÷ 200D × 0308 ÷ 231A ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] -÷ 200D × 0300 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) × [9.0] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [0.3] -÷ 200D × 0308 × 0300 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [0.3] -÷ 200D × 200D ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) × [9.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [0.3] -÷ 200D × 0308 × 200D ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [0.3] -÷ 200D ÷ 0378 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [999.0] (Other) ÷ [0.3] -÷ 200D × 0308 ÷ 0378 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] (Other) ÷ [0.3] -÷ 0378 ÷ 0020 ÷ # ÷ [0.2] (Other) ÷ [999.0] SPACE (Other) ÷ [0.3] -÷ 0378 × 0308 ÷ 0020 ÷ # ÷ [0.2] (Other) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] SPACE (Other) ÷ [0.3] -÷ 0378 ÷ 000D ÷ # ÷ [0.2] (Other) ÷ [5.0] (CR) ÷ [0.3] -÷ 0378 × 0308 ÷ 000D ÷ # ÷ [0.2] (Other) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (CR) ÷ [0.3] -÷ 0378 ÷ 000A ÷ # ÷ [0.2] (Other) ÷ [5.0] (LF) ÷ [0.3] -÷ 0378 × 0308 ÷ 000A ÷ # ÷ [0.2] (Other) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (LF) ÷ [0.3] -÷ 0378 ÷ 0001 ÷ # ÷ [0.2] (Other) ÷ [5.0] (Control) ÷ [0.3] -÷ 0378 × 0308 ÷ 0001 ÷ # ÷ [0.2] (Other) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (Control) ÷ [0.3] -÷ 0378 × 034F ÷ # ÷ [0.2] (Other) × [9.0] COMBINING GRAPHEME JOINER (Extend) ÷ [0.3] -÷ 0378 × 0308 × 034F ÷ # ÷ [0.2] (Other) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] COMBINING GRAPHEME JOINER (Extend) ÷ [0.3] -÷ 0378 ÷ 1F1E6 ÷ # ÷ [0.2] (Other) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 0378 × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] (Other) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 0378 ÷ 0600 ÷ # ÷ [0.2] (Other) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] -÷ 0378 × 0308 ÷ 0600 ÷ # ÷ [0.2] (Other) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] -÷ 0378 × 0903 ÷ # ÷ [0.2] (Other) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] -÷ 0378 × 0308 × 0903 ÷ # ÷ [0.2] (Other) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] -÷ 0378 ÷ 1100 ÷ # ÷ [0.2] (Other) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] -÷ 0378 × 0308 ÷ 1100 ÷ # ÷ [0.2] (Other) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] -÷ 0378 ÷ 1160 ÷ # ÷ [0.2] (Other) ÷ [999.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] -÷ 0378 × 0308 ÷ 1160 ÷ # ÷ [0.2] (Other) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] -÷ 0378 ÷ 
11A8 ÷ # ÷ [0.2] (Other) ÷ [999.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] -÷ 0378 × 0308 ÷ 11A8 ÷ # ÷ [0.2] (Other) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] -÷ 0378 ÷ AC00 ÷ # ÷ [0.2] (Other) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] -÷ 0378 × 0308 ÷ AC00 ÷ # ÷ [0.2] (Other) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] -÷ 0378 ÷ AC01 ÷ # ÷ [0.2] (Other) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] -÷ 0378 × 0308 ÷ AC01 ÷ # ÷ [0.2] (Other) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] -÷ 0378 ÷ 231A ÷ # ÷ [0.2] (Other) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] -÷ 0378 × 0308 ÷ 231A ÷ # ÷ [0.2] (Other) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] -÷ 0378 × 0300 ÷ # ÷ [0.2] (Other) × [9.0] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [0.3] -÷ 0378 × 0308 × 0300 ÷ # ÷ [0.2] (Other) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [0.3] -÷ 0378 × 200D ÷ # ÷ [0.2] (Other) × [9.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [0.3] -÷ 0378 × 0308 × 200D ÷ # ÷ [0.2] (Other) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [0.3] -÷ 0378 ÷ 0378 ÷ # ÷ [0.2] (Other) ÷ [999.0] (Other) ÷ [0.3] -÷ 0378 × 0308 ÷ 0378 ÷ # ÷ [0.2] (Other) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] (Other) ÷ [0.3] -÷ 000D × 000A ÷ 0061 ÷ 000A ÷ 0308 ÷ # ÷ [0.2] (CR) × [3.0] (LF) ÷ [4.0] LATIN SMALL LETTER A (Other) ÷ [5.0] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [0.3] -÷ 0061 × 0308 ÷ # ÷ [0.2] LATIN SMALL LETTER A (Other) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [0.3] -÷ 0020 × 200D ÷ 0646 ÷ # ÷ [0.2] SPACE (Other) × [9.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [999.0] ARABIC LETTER NOON (Other) ÷ [0.3] -÷ 0646 × 200D ÷ 0020 ÷ # ÷ [0.2] ARABIC LETTER NOON (Other) × [9.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [999.0] SPACE (Other) ÷ [0.3] -÷ 1100 × 1100 ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) × [6.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] -÷ AC00 × 11A8 ÷ 1100 ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) × [7.0] HANGUL JONGSEONG KIYEOK (T) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] -÷ AC01 × 11A8 ÷ 1100 ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) × [8.0] HANGUL JONGSEONG KIYEOK (T) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] -÷ 1F1E6 × 1F1E7 ÷ 1F1E8 ÷ 0062 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [12.0] REGIONAL INDICATOR SYMBOL LETTER B (RI) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER C (RI) ÷ [999.0] LATIN SMALL LETTER B (Other) ÷ [0.3] -÷ 0061 ÷ 1F1E6 × 1F1E7 ÷ 1F1E8 ÷ 0062 ÷ # ÷ [0.2] LATIN SMALL LETTER A (Other) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [13.0] REGIONAL INDICATOR SYMBOL LETTER B (RI) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER C (RI) ÷ [999.0] LATIN SMALL LETTER B (Other) ÷ [0.3] -÷ 0061 ÷ 1F1E6 × 1F1E7 × 200D ÷ 1F1E8 ÷ 0062 ÷ # ÷ [0.2] LATIN SMALL LETTER A (Other) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [13.0] REGIONAL INDICATOR SYMBOL LETTER B (RI) × [9.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER C (RI) ÷ [999.0] LATIN SMALL LETTER B (Other) ÷ [0.3] -÷ 0061 ÷ 1F1E6 × 200D ÷ 1F1E7 × 1F1E8 ÷ 0062 ÷ # ÷ [0.2] LATIN SMALL LETTER A (Other) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [9.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER B (RI) × [13.0] REGIONAL INDICATOR SYMBOL LETTER C (RI) ÷ [999.0] LATIN SMALL LETTER B (Other) ÷ [0.3] -÷ 0061 ÷ 
1F1E6 × 1F1E7 ÷ 1F1E8 × 1F1E9 ÷ 0062 ÷ # ÷ [0.2] LATIN SMALL LETTER A (Other) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [13.0] REGIONAL INDICATOR SYMBOL LETTER B (RI) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER C (RI) × [13.0] REGIONAL INDICATOR SYMBOL LETTER D (RI) ÷ [999.0] LATIN SMALL LETTER B (Other) ÷ [0.3] -÷ 0061 × 200D ÷ # ÷ [0.2] LATIN SMALL LETTER A (Other) × [9.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [0.3] -÷ 0061 × 0308 ÷ 0062 ÷ # ÷ [0.2] LATIN SMALL LETTER A (Other) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] LATIN SMALL LETTER B (Other) ÷ [0.3] -÷ 0061 × 0903 ÷ 0062 ÷ # ÷ [0.2] LATIN SMALL LETTER A (Other) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [999.0] LATIN SMALL LETTER B (Other) ÷ [0.3] -÷ 0061 ÷ 0600 × 0062 ÷ # ÷ [0.2] LATIN SMALL LETTER A (Other) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) × [9.2] LATIN SMALL LETTER B (Other) ÷ [0.3] -÷ 1F476 × 1F3FF ÷ 1F476 ÷ # ÷ [0.2] BABY (ExtPict) × [9.0] EMOJI MODIFIER FITZPATRICK TYPE-6 (Extend) ÷ [999.0] BABY (ExtPict) ÷ [0.3] -÷ 0061 × 1F3FF ÷ 1F476 ÷ # ÷ [0.2] LATIN SMALL LETTER A (Other) × [9.0] EMOJI MODIFIER FITZPATRICK TYPE-6 (Extend) ÷ [999.0] BABY (ExtPict) ÷ [0.3] -÷ 0061 × 1F3FF ÷ 1F476 × 200D × 1F6D1 ÷ # ÷ [0.2] LATIN SMALL LETTER A (Other) × [9.0] EMOJI MODIFIER FITZPATRICK TYPE-6 (Extend) ÷ [999.0] BABY (ExtPict) × [9.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) × [11.0] OCTAGONAL SIGN (ExtPict) ÷ [0.3] -÷ 1F476 × 1F3FF × 0308 × 200D × 1F476 × 1F3FF ÷ # ÷ [0.2] BABY (ExtPict) × [9.0] EMOJI MODIFIER FITZPATRICK TYPE-6 (Extend) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) × [11.0] BABY (ExtPict) × [9.0] EMOJI MODIFIER FITZPATRICK TYPE-6 (Extend) ÷ [0.3] -÷ 1F6D1 × 200D × 1F6D1 ÷ # ÷ [0.2] OCTAGONAL SIGN (ExtPict) × [9.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) × [11.0] OCTAGONAL SIGN (ExtPict) ÷ [0.3] -÷ 0061 × 200D ÷ 1F6D1 ÷ # ÷ [0.2] LATIN SMALL LETTER A (Other) × [9.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [999.0] OCTAGONAL SIGN (ExtPict) ÷ [0.3] -÷ 2701 × 200D × 2701 ÷ # ÷ [0.2] UPPER BLADE SCISSORS (Other) × [9.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) × [11.0] UPPER BLADE SCISSORS (Other) ÷ [0.3] -÷ 0061 × 200D ÷ 2701 ÷ # ÷ [0.2] LATIN SMALL LETTER A (Other) × [9.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [999.0] UPPER BLADE SCISSORS (Other) ÷ [0.3] -# -# Lines: 602 -# -# EOF diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/data/LICENSE-UNICODE cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/data/LICENSE-UNICODE --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/data/LICENSE-UNICODE 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/data/LICENSE-UNICODE 1970-01-01 00:00:00.000000000 +0000 @@ -1,45 +0,0 @@ -UNICODE, INC. LICENSE AGREEMENT - DATA FILES AND SOFTWARE -See Terms of Use for definitions of Unicode Inc.'s -Data Files and Software. - -NOTICE TO USER: Carefully read the following legal agreement. -BY DOWNLOADING, INSTALLING, COPYING OR OTHERWISE USING UNICODE INC.'S -DATA FILES ("DATA FILES"), AND/OR SOFTWARE ("SOFTWARE"), -YOU UNEQUIVOCALLY ACCEPT, AND AGREE TO BE BOUND BY, ALL OF THE -TERMS AND CONDITIONS OF THIS AGREEMENT. -IF YOU DO NOT AGREE, DO NOT DOWNLOAD, INSTALL, COPY, DISTRIBUTE OR USE -THE DATA FILES OR SOFTWARE. - -COPYRIGHT AND PERMISSION NOTICE - -Copyright © 1991-2019 Unicode, Inc. All rights reserved. -Distributed under the Terms of Use in https://www.unicode.org/copyright.html. 
-
-Permission is hereby granted, free of charge, to any person obtaining
-a copy of the Unicode data files and any associated documentation
-(the "Data Files") or Unicode software and any associated documentation
-(the "Software") to deal in the Data Files or Software
-without restriction, including without limitation the rights to use,
-copy, modify, merge, publish, distribute, and/or sell copies of
-the Data Files or Software, and to permit persons to whom the Data Files
-or Software are furnished to do so, provided that either
-(a) this copyright and permission notice appear with all copies
-of the Data Files or Software, or
-(b) this copyright and permission notice appear in associated
-Documentation.
-
-THE DATA FILES AND SOFTWARE ARE PROVIDED "AS IS", WITHOUT WARRANTY OF
-ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
-WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-NONINFRINGEMENT OF THIRD PARTY RIGHTS.
-IN NO EVENT SHALL THE COPYRIGHT HOLDER OR HOLDERS INCLUDED IN THIS
-NOTICE BE LIABLE FOR ANY CLAIM, OR ANY SPECIAL INDIRECT OR CONSEQUENTIAL
-DAMAGES, OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE,
-DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
-TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
-PERFORMANCE OF THE DATA FILES OR SOFTWARE.
-
-Except as contained in this notice, the name of a copyright holder
-shall not be used in advertising or otherwise to promote the sale,
-use or other dealings in these Data Files or Software without prior
-written authorization of the copyright holder.
diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/data/SentenceBreakTest.txt cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/data/SentenceBreakTest.txt
--- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/data/SentenceBreakTest.txt 2023-01-20 23:14:32.000000000 +0000
+++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/data/SentenceBreakTest.txt 1970-01-01 00:00:00.000000000 +0000
@@ -1,530 +0,0 @@
-# SentenceBreakTest-12.1.0.txt
-# Date: 2019-03-10, 10:53:28 GMT
-# © 2019 Unicode®, Inc.
-# Unicode and the Unicode Logo are registered trademarks of Unicode, Inc. in the U.S. and other countries.
-# For terms of use, see http://www.unicode.org/terms_of_use.html
-#
-# Unicode Character Database
-# For documentation, see http://www.unicode.org/reports/tr44/
-#
-# Default Sentence_Break Test
-#
-# Format:
-# <string> (# <comment>)?
-# <string> contains hex Unicode code points, with
-# ÷ wherever there is a break opportunity, and
-# × wherever there is not.
-# <comment> the format can change, but currently it shows:
-# - the sample character name
-# - (x) the Sentence_Break property value for the sample character
-# - [x] the rule that determines whether there is a break or not,
-# as listed in the Rules section of SentenceBreakTest.html
-#
-# These samples may be extended or changed in the future.
-# -÷ 0001 × 0001 ÷ # ÷ [0.2] (Other) × [998.0] (Other) ÷ [0.3] -÷ 0001 × 0308 × 0001 ÷ # ÷ [0.2] (Other) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (Other) ÷ [0.3] -÷ 0001 × 000D ÷ # ÷ [0.2] (Other) × [998.0] (CR) ÷ [0.3] -÷ 0001 × 0308 × 000D ÷ # ÷ [0.2] (Other) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (CR) ÷ [0.3] -÷ 0001 × 000A ÷ # ÷ [0.2] (Other) × [998.0] (LF) ÷ [0.3] -÷ 0001 × 0308 × 000A ÷ # ÷ [0.2] (Other) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (LF) ÷ [0.3] -÷ 0001 × 0085 ÷ # ÷ [0.2] (Other) × [998.0] (Sep) ÷ [0.3] -÷ 0001 × 0308 × 0085 ÷ # ÷ [0.2] (Other) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (Sep) ÷ [0.3] -÷ 0001 × 0009 ÷ # ÷ [0.2] (Other) × [998.0] (Sp) ÷ [0.3] -÷ 0001 × 0308 × 0009 ÷ # ÷ [0.2] (Other) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (Sp) ÷ [0.3] -÷ 0001 × 0061 ÷ # ÷ [0.2] (Other) × [998.0] LATIN SMALL LETTER A (Lower) ÷ [0.3] -÷ 0001 × 0308 × 0061 ÷ # ÷ [0.2] (Other) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] LATIN SMALL LETTER A (Lower) ÷ [0.3] -÷ 0001 × 0041 ÷ # ÷ [0.2] (Other) × [998.0] LATIN CAPITAL LETTER A (Upper) ÷ [0.3] -÷ 0001 × 0308 × 0041 ÷ # ÷ [0.2] (Other) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] LATIN CAPITAL LETTER A (Upper) ÷ [0.3] -÷ 0001 × 01BB ÷ # ÷ [0.2] (Other) × [998.0] LATIN LETTER TWO WITH STROKE (OLetter) ÷ [0.3] -÷ 0001 × 0308 × 01BB ÷ # ÷ [0.2] (Other) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] LATIN LETTER TWO WITH STROKE (OLetter) ÷ [0.3] -÷ 0001 × 0030 ÷ # ÷ [0.2] (Other) × [998.0] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 0001 × 0308 × 0030 ÷ # ÷ [0.2] (Other) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 0001 × 002E ÷ # ÷ [0.2] (Other) × [998.0] FULL STOP (ATerm) ÷ [0.3] -÷ 0001 × 0308 × 002E ÷ # ÷ [0.2] (Other) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] FULL STOP (ATerm) ÷ [0.3] -÷ 0001 × 0021 ÷ # ÷ [0.2] (Other) × [998.0] EXCLAMATION MARK (STerm) ÷ [0.3] -÷ 0001 × 0308 × 0021 ÷ # ÷ [0.2] (Other) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] EXCLAMATION MARK (STerm) ÷ [0.3] -÷ 0001 × 0022 ÷ # ÷ [0.2] (Other) × [998.0] QUOTATION MARK (Close) ÷ [0.3] -÷ 0001 × 0308 × 0022 ÷ # ÷ [0.2] (Other) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] QUOTATION MARK (Close) ÷ [0.3] -÷ 0001 × 002C ÷ # ÷ [0.2] (Other) × [998.0] COMMA (SContinue) ÷ [0.3] -÷ 0001 × 0308 × 002C ÷ # ÷ [0.2] (Other) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] COMMA (SContinue) ÷ [0.3] -÷ 0001 × 00AD ÷ # ÷ [0.2] (Other) × [5.0] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 0001 × 0308 × 00AD ÷ # ÷ [0.2] (Other) × [5.0] COMBINING DIAERESIS (Extend_FE) × [5.0] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 0001 × 0300 ÷ # ÷ [0.2] (Other) × [5.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 0001 × 0308 × 0300 ÷ # ÷ [0.2] (Other) × [5.0] COMBINING DIAERESIS (Extend_FE) × [5.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 000D ÷ 0001 ÷ # ÷ [0.2] (CR) ÷ [4.0] (Other) ÷ [0.3] -÷ 000D ÷ 0308 × 0001 ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (Other) ÷ [0.3] -÷ 000D ÷ 000D ÷ # ÷ [0.2] (CR) ÷ [4.0] (CR) ÷ [0.3] -÷ 000D ÷ 0308 × 000D ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (CR) ÷ [0.3] -÷ 000D × 000A ÷ # ÷ [0.2] (CR) × [3.0] (LF) ÷ [0.3] -÷ 000D ÷ 0308 × 000A ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (LF) ÷ [0.3] -÷ 000D ÷ 0085 ÷ # ÷ [0.2] (CR) ÷ [4.0] (Sep) ÷ [0.3] -÷ 000D ÷ 0308 × 0085 ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (Sep) ÷ [0.3] -÷ 000D ÷ 0009 ÷ # ÷ [0.2] (CR) ÷ [4.0] (Sp) ÷ [0.3] 
-÷ 000D ÷ 0308 × 0009 ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (Sp) ÷ [0.3] -÷ 000D ÷ 0061 ÷ # ÷ [0.2] (CR) ÷ [4.0] LATIN SMALL LETTER A (Lower) ÷ [0.3] -÷ 000D ÷ 0308 × 0061 ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [998.0] LATIN SMALL LETTER A (Lower) ÷ [0.3] -÷ 000D ÷ 0041 ÷ # ÷ [0.2] (CR) ÷ [4.0] LATIN CAPITAL LETTER A (Upper) ÷ [0.3] -÷ 000D ÷ 0308 × 0041 ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [998.0] LATIN CAPITAL LETTER A (Upper) ÷ [0.3] -÷ 000D ÷ 01BB ÷ # ÷ [0.2] (CR) ÷ [4.0] LATIN LETTER TWO WITH STROKE (OLetter) ÷ [0.3] -÷ 000D ÷ 0308 × 01BB ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [998.0] LATIN LETTER TWO WITH STROKE (OLetter) ÷ [0.3] -÷ 000D ÷ 0030 ÷ # ÷ [0.2] (CR) ÷ [4.0] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 000D ÷ 0308 × 0030 ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [998.0] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 000D ÷ 002E ÷ # ÷ [0.2] (CR) ÷ [4.0] FULL STOP (ATerm) ÷ [0.3] -÷ 000D ÷ 0308 × 002E ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [998.0] FULL STOP (ATerm) ÷ [0.3] -÷ 000D ÷ 0021 ÷ # ÷ [0.2] (CR) ÷ [4.0] EXCLAMATION MARK (STerm) ÷ [0.3] -÷ 000D ÷ 0308 × 0021 ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [998.0] EXCLAMATION MARK (STerm) ÷ [0.3] -÷ 000D ÷ 0022 ÷ # ÷ [0.2] (CR) ÷ [4.0] QUOTATION MARK (Close) ÷ [0.3] -÷ 000D ÷ 0308 × 0022 ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [998.0] QUOTATION MARK (Close) ÷ [0.3] -÷ 000D ÷ 002C ÷ # ÷ [0.2] (CR) ÷ [4.0] COMMA (SContinue) ÷ [0.3] -÷ 000D ÷ 0308 × 002C ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [998.0] COMMA (SContinue) ÷ [0.3] -÷ 000D ÷ 00AD ÷ # ÷ [0.2] (CR) ÷ [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 000D ÷ 0308 × 00AD ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [5.0] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 000D ÷ 0300 ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 000D ÷ 0308 × 0300 ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [5.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 000A ÷ 0001 ÷ # ÷ [0.2] (LF) ÷ [4.0] (Other) ÷ [0.3] -÷ 000A ÷ 0308 × 0001 ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (Other) ÷ [0.3] -÷ 000A ÷ 000D ÷ # ÷ [0.2] (LF) ÷ [4.0] (CR) ÷ [0.3] -÷ 000A ÷ 0308 × 000D ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (CR) ÷ [0.3] -÷ 000A ÷ 000A ÷ # ÷ [0.2] (LF) ÷ [4.0] (LF) ÷ [0.3] -÷ 000A ÷ 0308 × 000A ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (LF) ÷ [0.3] -÷ 000A ÷ 0085 ÷ # ÷ [0.2] (LF) ÷ [4.0] (Sep) ÷ [0.3] -÷ 000A ÷ 0308 × 0085 ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (Sep) ÷ [0.3] -÷ 000A ÷ 0009 ÷ # ÷ [0.2] (LF) ÷ [4.0] (Sp) ÷ [0.3] -÷ 000A ÷ 0308 × 0009 ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (Sp) ÷ [0.3] -÷ 000A ÷ 0061 ÷ # ÷ [0.2] (LF) ÷ [4.0] LATIN SMALL LETTER A (Lower) ÷ [0.3] -÷ 000A ÷ 0308 × 0061 ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [998.0] LATIN SMALL LETTER A (Lower) ÷ [0.3] -÷ 000A ÷ 0041 ÷ # ÷ [0.2] (LF) ÷ [4.0] LATIN CAPITAL LETTER A (Upper) ÷ [0.3] -÷ 000A ÷ 0308 × 0041 ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [998.0] LATIN CAPITAL LETTER A (Upper) ÷ [0.3] -÷ 000A ÷ 01BB ÷ # ÷ [0.2] (LF) ÷ [4.0] LATIN LETTER TWO WITH STROKE (OLetter) ÷ [0.3] -÷ 000A ÷ 0308 × 01BB ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [998.0] LATIN LETTER TWO WITH STROKE (OLetter) ÷ [0.3] -÷ 000A ÷ 0030 ÷ # ÷ [0.2] (LF) ÷ [4.0] DIGIT ZERO (Numeric) ÷ 
[0.3] -÷ 000A ÷ 0308 × 0030 ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [998.0] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 000A ÷ 002E ÷ # ÷ [0.2] (LF) ÷ [4.0] FULL STOP (ATerm) ÷ [0.3] -÷ 000A ÷ 0308 × 002E ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [998.0] FULL STOP (ATerm) ÷ [0.3] -÷ 000A ÷ 0021 ÷ # ÷ [0.2] (LF) ÷ [4.0] EXCLAMATION MARK (STerm) ÷ [0.3] -÷ 000A ÷ 0308 × 0021 ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [998.0] EXCLAMATION MARK (STerm) ÷ [0.3] -÷ 000A ÷ 0022 ÷ # ÷ [0.2] (LF) ÷ [4.0] QUOTATION MARK (Close) ÷ [0.3] -÷ 000A ÷ 0308 × 0022 ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [998.0] QUOTATION MARK (Close) ÷ [0.3] -÷ 000A ÷ 002C ÷ # ÷ [0.2] (LF) ÷ [4.0] COMMA (SContinue) ÷ [0.3] -÷ 000A ÷ 0308 × 002C ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [998.0] COMMA (SContinue) ÷ [0.3] -÷ 000A ÷ 00AD ÷ # ÷ [0.2] (LF) ÷ [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 000A ÷ 0308 × 00AD ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [5.0] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 000A ÷ 0300 ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 000A ÷ 0308 × 0300 ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [5.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 0085 ÷ 0001 ÷ # ÷ [0.2] (Sep) ÷ [4.0] (Other) ÷ [0.3] -÷ 0085 ÷ 0308 × 0001 ÷ # ÷ [0.2] (Sep) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (Other) ÷ [0.3] -÷ 0085 ÷ 000D ÷ # ÷ [0.2] (Sep) ÷ [4.0] (CR) ÷ [0.3] -÷ 0085 ÷ 0308 × 000D ÷ # ÷ [0.2] (Sep) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (CR) ÷ [0.3] -÷ 0085 ÷ 000A ÷ # ÷ [0.2] (Sep) ÷ [4.0] (LF) ÷ [0.3] -÷ 0085 ÷ 0308 × 000A ÷ # ÷ [0.2] (Sep) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (LF) ÷ [0.3] -÷ 0085 ÷ 0085 ÷ # ÷ [0.2] (Sep) ÷ [4.0] (Sep) ÷ [0.3] -÷ 0085 ÷ 0308 × 0085 ÷ # ÷ [0.2] (Sep) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (Sep) ÷ [0.3] -÷ 0085 ÷ 0009 ÷ # ÷ [0.2] (Sep) ÷ [4.0] (Sp) ÷ [0.3] -÷ 0085 ÷ 0308 × 0009 ÷ # ÷ [0.2] (Sep) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (Sp) ÷ [0.3] -÷ 0085 ÷ 0061 ÷ # ÷ [0.2] (Sep) ÷ [4.0] LATIN SMALL LETTER A (Lower) ÷ [0.3] -÷ 0085 ÷ 0308 × 0061 ÷ # ÷ [0.2] (Sep) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [998.0] LATIN SMALL LETTER A (Lower) ÷ [0.3] -÷ 0085 ÷ 0041 ÷ # ÷ [0.2] (Sep) ÷ [4.0] LATIN CAPITAL LETTER A (Upper) ÷ [0.3] -÷ 0085 ÷ 0308 × 0041 ÷ # ÷ [0.2] (Sep) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [998.0] LATIN CAPITAL LETTER A (Upper) ÷ [0.3] -÷ 0085 ÷ 01BB ÷ # ÷ [0.2] (Sep) ÷ [4.0] LATIN LETTER TWO WITH STROKE (OLetter) ÷ [0.3] -÷ 0085 ÷ 0308 × 01BB ÷ # ÷ [0.2] (Sep) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [998.0] LATIN LETTER TWO WITH STROKE (OLetter) ÷ [0.3] -÷ 0085 ÷ 0030 ÷ # ÷ [0.2] (Sep) ÷ [4.0] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 0085 ÷ 0308 × 0030 ÷ # ÷ [0.2] (Sep) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [998.0] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 0085 ÷ 002E ÷ # ÷ [0.2] (Sep) ÷ [4.0] FULL STOP (ATerm) ÷ [0.3] -÷ 0085 ÷ 0308 × 002E ÷ # ÷ [0.2] (Sep) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [998.0] FULL STOP (ATerm) ÷ [0.3] -÷ 0085 ÷ 0021 ÷ # ÷ [0.2] (Sep) ÷ [4.0] EXCLAMATION MARK (STerm) ÷ [0.3] -÷ 0085 ÷ 0308 × 0021 ÷ # ÷ [0.2] (Sep) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [998.0] EXCLAMATION MARK (STerm) ÷ [0.3] -÷ 0085 ÷ 0022 ÷ # ÷ [0.2] (Sep) ÷ [4.0] QUOTATION MARK (Close) ÷ [0.3] -÷ 0085 ÷ 0308 × 0022 ÷ # ÷ [0.2] (Sep) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [998.0] QUOTATION MARK (Close) ÷ [0.3] -÷ 0085 ÷ 002C ÷ # ÷ [0.2] (Sep) ÷ [4.0] COMMA (SContinue) ÷ [0.3] -÷ 0085 ÷ 0308 × 
002C ÷ # ÷ [0.2] (Sep) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [998.0] COMMA (SContinue) ÷ [0.3] -÷ 0085 ÷ 00AD ÷ # ÷ [0.2] (Sep) ÷ [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 0085 ÷ 0308 × 00AD ÷ # ÷ [0.2] (Sep) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [5.0] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 0085 ÷ 0300 ÷ # ÷ [0.2] (Sep) ÷ [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 0085 ÷ 0308 × 0300 ÷ # ÷ [0.2] (Sep) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [5.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 0009 × 0001 ÷ # ÷ [0.2] (Sp) × [998.0] (Other) ÷ [0.3] -÷ 0009 × 0308 × 0001 ÷ # ÷ [0.2] (Sp) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (Other) ÷ [0.3] -÷ 0009 × 000D ÷ # ÷ [0.2] (Sp) × [998.0] (CR) ÷ [0.3] -÷ 0009 × 0308 × 000D ÷ # ÷ [0.2] (Sp) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (CR) ÷ [0.3] -÷ 0009 × 000A ÷ # ÷ [0.2] (Sp) × [998.0] (LF) ÷ [0.3] -÷ 0009 × 0308 × 000A ÷ # ÷ [0.2] (Sp) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (LF) ÷ [0.3] -÷ 0009 × 0085 ÷ # ÷ [0.2] (Sp) × [998.0] (Sep) ÷ [0.3] -÷ 0009 × 0308 × 0085 ÷ # ÷ [0.2] (Sp) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (Sep) ÷ [0.3] -÷ 0009 × 0009 ÷ # ÷ [0.2] (Sp) × [998.0] (Sp) ÷ [0.3] -÷ 0009 × 0308 × 0009 ÷ # ÷ [0.2] (Sp) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (Sp) ÷ [0.3] -÷ 0009 × 0061 ÷ # ÷ [0.2] (Sp) × [998.0] LATIN SMALL LETTER A (Lower) ÷ [0.3] -÷ 0009 × 0308 × 0061 ÷ # ÷ [0.2] (Sp) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] LATIN SMALL LETTER A (Lower) ÷ [0.3] -÷ 0009 × 0041 ÷ # ÷ [0.2] (Sp) × [998.0] LATIN CAPITAL LETTER A (Upper) ÷ [0.3] -÷ 0009 × 0308 × 0041 ÷ # ÷ [0.2] (Sp) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] LATIN CAPITAL LETTER A (Upper) ÷ [0.3] -÷ 0009 × 01BB ÷ # ÷ [0.2] (Sp) × [998.0] LATIN LETTER TWO WITH STROKE (OLetter) ÷ [0.3] -÷ 0009 × 0308 × 01BB ÷ # ÷ [0.2] (Sp) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] LATIN LETTER TWO WITH STROKE (OLetter) ÷ [0.3] -÷ 0009 × 0030 ÷ # ÷ [0.2] (Sp) × [998.0] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 0009 × 0308 × 0030 ÷ # ÷ [0.2] (Sp) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 0009 × 002E ÷ # ÷ [0.2] (Sp) × [998.0] FULL STOP (ATerm) ÷ [0.3] -÷ 0009 × 0308 × 002E ÷ # ÷ [0.2] (Sp) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] FULL STOP (ATerm) ÷ [0.3] -÷ 0009 × 0021 ÷ # ÷ [0.2] (Sp) × [998.0] EXCLAMATION MARK (STerm) ÷ [0.3] -÷ 0009 × 0308 × 0021 ÷ # ÷ [0.2] (Sp) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] EXCLAMATION MARK (STerm) ÷ [0.3] -÷ 0009 × 0022 ÷ # ÷ [0.2] (Sp) × [998.0] QUOTATION MARK (Close) ÷ [0.3] -÷ 0009 × 0308 × 0022 ÷ # ÷ [0.2] (Sp) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] QUOTATION MARK (Close) ÷ [0.3] -÷ 0009 × 002C ÷ # ÷ [0.2] (Sp) × [998.0] COMMA (SContinue) ÷ [0.3] -÷ 0009 × 0308 × 002C ÷ # ÷ [0.2] (Sp) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] COMMA (SContinue) ÷ [0.3] -÷ 0009 × 00AD ÷ # ÷ [0.2] (Sp) × [5.0] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 0009 × 0308 × 00AD ÷ # ÷ [0.2] (Sp) × [5.0] COMBINING DIAERESIS (Extend_FE) × [5.0] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 0009 × 0300 ÷ # ÷ [0.2] (Sp) × [5.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 0009 × 0308 × 0300 ÷ # ÷ [0.2] (Sp) × [5.0] COMBINING DIAERESIS (Extend_FE) × [5.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 0061 × 0001 ÷ # ÷ [0.2] LATIN SMALL LETTER A (Lower) × [998.0] (Other) ÷ [0.3] -÷ 0061 × 0308 × 0001 ÷ # ÷ [0.2] LATIN SMALL LETTER A (Lower) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (Other) ÷ [0.3] -÷ 0061 × 000D ÷ # ÷ [0.2] LATIN SMALL LETTER A (Lower) 
× [998.0] (CR) ÷ [0.3] -÷ 0061 × 0308 × 000D ÷ # ÷ [0.2] LATIN SMALL LETTER A (Lower) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (CR) ÷ [0.3] -÷ 0061 × 000A ÷ # ÷ [0.2] LATIN SMALL LETTER A (Lower) × [998.0] (LF) ÷ [0.3] -÷ 0061 × 0308 × 000A ÷ # ÷ [0.2] LATIN SMALL LETTER A (Lower) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (LF) ÷ [0.3] -÷ 0061 × 0085 ÷ # ÷ [0.2] LATIN SMALL LETTER A (Lower) × [998.0] (Sep) ÷ [0.3] -÷ 0061 × 0308 × 0085 ÷ # ÷ [0.2] LATIN SMALL LETTER A (Lower) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (Sep) ÷ [0.3] -÷ 0061 × 0009 ÷ # ÷ [0.2] LATIN SMALL LETTER A (Lower) × [998.0] (Sp) ÷ [0.3] -÷ 0061 × 0308 × 0009 ÷ # ÷ [0.2] LATIN SMALL LETTER A (Lower) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (Sp) ÷ [0.3] -÷ 0061 × 0061 ÷ # ÷ [0.2] LATIN SMALL LETTER A (Lower) × [998.0] LATIN SMALL LETTER A (Lower) ÷ [0.3] -÷ 0061 × 0308 × 0061 ÷ # ÷ [0.2] LATIN SMALL LETTER A (Lower) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] LATIN SMALL LETTER A (Lower) ÷ [0.3] -÷ 0061 × 0041 ÷ # ÷ [0.2] LATIN SMALL LETTER A (Lower) × [998.0] LATIN CAPITAL LETTER A (Upper) ÷ [0.3] -÷ 0061 × 0308 × 0041 ÷ # ÷ [0.2] LATIN SMALL LETTER A (Lower) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] LATIN CAPITAL LETTER A (Upper) ÷ [0.3] -÷ 0061 × 01BB ÷ # ÷ [0.2] LATIN SMALL LETTER A (Lower) × [998.0] LATIN LETTER TWO WITH STROKE (OLetter) ÷ [0.3] -÷ 0061 × 0308 × 01BB ÷ # ÷ [0.2] LATIN SMALL LETTER A (Lower) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] LATIN LETTER TWO WITH STROKE (OLetter) ÷ [0.3] -÷ 0061 × 0030 ÷ # ÷ [0.2] LATIN SMALL LETTER A (Lower) × [998.0] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 0061 × 0308 × 0030 ÷ # ÷ [0.2] LATIN SMALL LETTER A (Lower) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 0061 × 002E ÷ # ÷ [0.2] LATIN SMALL LETTER A (Lower) × [998.0] FULL STOP (ATerm) ÷ [0.3] -÷ 0061 × 0308 × 002E ÷ # ÷ [0.2] LATIN SMALL LETTER A (Lower) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] FULL STOP (ATerm) ÷ [0.3] -÷ 0061 × 0021 ÷ # ÷ [0.2] LATIN SMALL LETTER A (Lower) × [998.0] EXCLAMATION MARK (STerm) ÷ [0.3] -÷ 0061 × 0308 × 0021 ÷ # ÷ [0.2] LATIN SMALL LETTER A (Lower) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] EXCLAMATION MARK (STerm) ÷ [0.3] -÷ 0061 × 0022 ÷ # ÷ [0.2] LATIN SMALL LETTER A (Lower) × [998.0] QUOTATION MARK (Close) ÷ [0.3] -÷ 0061 × 0308 × 0022 ÷ # ÷ [0.2] LATIN SMALL LETTER A (Lower) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] QUOTATION MARK (Close) ÷ [0.3] -÷ 0061 × 002C ÷ # ÷ [0.2] LATIN SMALL LETTER A (Lower) × [998.0] COMMA (SContinue) ÷ [0.3] -÷ 0061 × 0308 × 002C ÷ # ÷ [0.2] LATIN SMALL LETTER A (Lower) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] COMMA (SContinue) ÷ [0.3] -÷ 0061 × 00AD ÷ # ÷ [0.2] LATIN SMALL LETTER A (Lower) × [5.0] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 0061 × 0308 × 00AD ÷ # ÷ [0.2] LATIN SMALL LETTER A (Lower) × [5.0] COMBINING DIAERESIS (Extend_FE) × [5.0] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 0061 × 0300 ÷ # ÷ [0.2] LATIN SMALL LETTER A (Lower) × [5.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 0061 × 0308 × 0300 ÷ # ÷ [0.2] LATIN SMALL LETTER A (Lower) × [5.0] COMBINING DIAERESIS (Extend_FE) × [5.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 0041 × 0001 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (Upper) × [998.0] (Other) ÷ [0.3] -÷ 0041 × 0308 × 0001 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (Upper) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (Other) ÷ [0.3] -÷ 0041 × 000D ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (Upper) × [998.0] (CR) ÷ [0.3] -÷ 0041 × 0308 × 000D ÷ 
# ÷ [0.2] LATIN CAPITAL LETTER A (Upper) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (CR) ÷ [0.3] -÷ 0041 × 000A ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (Upper) × [998.0] (LF) ÷ [0.3] -÷ 0041 × 0308 × 000A ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (Upper) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (LF) ÷ [0.3] -÷ 0041 × 0085 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (Upper) × [998.0] (Sep) ÷ [0.3] -÷ 0041 × 0308 × 0085 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (Upper) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (Sep) ÷ [0.3] -÷ 0041 × 0009 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (Upper) × [998.0] (Sp) ÷ [0.3] -÷ 0041 × 0308 × 0009 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (Upper) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (Sp) ÷ [0.3] -÷ 0041 × 0061 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (Upper) × [998.0] LATIN SMALL LETTER A (Lower) ÷ [0.3] -÷ 0041 × 0308 × 0061 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (Upper) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] LATIN SMALL LETTER A (Lower) ÷ [0.3] -÷ 0041 × 0041 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (Upper) × [998.0] LATIN CAPITAL LETTER A (Upper) ÷ [0.3] -÷ 0041 × 0308 × 0041 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (Upper) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] LATIN CAPITAL LETTER A (Upper) ÷ [0.3] -÷ 0041 × 01BB ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (Upper) × [998.0] LATIN LETTER TWO WITH STROKE (OLetter) ÷ [0.3] -÷ 0041 × 0308 × 01BB ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (Upper) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] LATIN LETTER TWO WITH STROKE (OLetter) ÷ [0.3] -÷ 0041 × 0030 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (Upper) × [998.0] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 0041 × 0308 × 0030 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (Upper) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 0041 × 002E ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (Upper) × [998.0] FULL STOP (ATerm) ÷ [0.3] -÷ 0041 × 0308 × 002E ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (Upper) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] FULL STOP (ATerm) ÷ [0.3] -÷ 0041 × 0021 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (Upper) × [998.0] EXCLAMATION MARK (STerm) ÷ [0.3] -÷ 0041 × 0308 × 0021 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (Upper) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] EXCLAMATION MARK (STerm) ÷ [0.3] -÷ 0041 × 0022 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (Upper) × [998.0] QUOTATION MARK (Close) ÷ [0.3] -÷ 0041 × 0308 × 0022 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (Upper) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] QUOTATION MARK (Close) ÷ [0.3] -÷ 0041 × 002C ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (Upper) × [998.0] COMMA (SContinue) ÷ [0.3] -÷ 0041 × 0308 × 002C ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (Upper) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] COMMA (SContinue) ÷ [0.3] -÷ 0041 × 00AD ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (Upper) × [5.0] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 0041 × 0308 × 00AD ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (Upper) × [5.0] COMBINING DIAERESIS (Extend_FE) × [5.0] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 0041 × 0300 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (Upper) × [5.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 0041 × 0308 × 0300 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (Upper) × [5.0] COMBINING DIAERESIS (Extend_FE) × [5.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 01BB × 0001 ÷ # ÷ [0.2] LATIN LETTER TWO WITH STROKE (OLetter) × [998.0] (Other) ÷ [0.3] -÷ 01BB × 0308 × 0001 ÷ # ÷ [0.2] LATIN LETTER TWO WITH STROKE (OLetter) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (Other) ÷ [0.3] -÷ 01BB × 000D ÷ # ÷ [0.2] LATIN LETTER TWO WITH STROKE (OLetter) × [998.0] (CR) ÷ 
[0.3] -÷ 01BB × 0308 × 000D ÷ # ÷ [0.2] LATIN LETTER TWO WITH STROKE (OLetter) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (CR) ÷ [0.3] -÷ 01BB × 000A ÷ # ÷ [0.2] LATIN LETTER TWO WITH STROKE (OLetter) × [998.0] (LF) ÷ [0.3] -÷ 01BB × 0308 × 000A ÷ # ÷ [0.2] LATIN LETTER TWO WITH STROKE (OLetter) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (LF) ÷ [0.3] -÷ 01BB × 0085 ÷ # ÷ [0.2] LATIN LETTER TWO WITH STROKE (OLetter) × [998.0] (Sep) ÷ [0.3] -÷ 01BB × 0308 × 0085 ÷ # ÷ [0.2] LATIN LETTER TWO WITH STROKE (OLetter) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (Sep) ÷ [0.3] -÷ 01BB × 0009 ÷ # ÷ [0.2] LATIN LETTER TWO WITH STROKE (OLetter) × [998.0] (Sp) ÷ [0.3] -÷ 01BB × 0308 × 0009 ÷ # ÷ [0.2] LATIN LETTER TWO WITH STROKE (OLetter) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (Sp) ÷ [0.3] -÷ 01BB × 0061 ÷ # ÷ [0.2] LATIN LETTER TWO WITH STROKE (OLetter) × [998.0] LATIN SMALL LETTER A (Lower) ÷ [0.3] -÷ 01BB × 0308 × 0061 ÷ # ÷ [0.2] LATIN LETTER TWO WITH STROKE (OLetter) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] LATIN SMALL LETTER A (Lower) ÷ [0.3] -÷ 01BB × 0041 ÷ # ÷ [0.2] LATIN LETTER TWO WITH STROKE (OLetter) × [998.0] LATIN CAPITAL LETTER A (Upper) ÷ [0.3] -÷ 01BB × 0308 × 0041 ÷ # ÷ [0.2] LATIN LETTER TWO WITH STROKE (OLetter) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] LATIN CAPITAL LETTER A (Upper) ÷ [0.3] -÷ 01BB × 01BB ÷ # ÷ [0.2] LATIN LETTER TWO WITH STROKE (OLetter) × [998.0] LATIN LETTER TWO WITH STROKE (OLetter) ÷ [0.3] -÷ 01BB × 0308 × 01BB ÷ # ÷ [0.2] LATIN LETTER TWO WITH STROKE (OLetter) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] LATIN LETTER TWO WITH STROKE (OLetter) ÷ [0.3] -÷ 01BB × 0030 ÷ # ÷ [0.2] LATIN LETTER TWO WITH STROKE (OLetter) × [998.0] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 01BB × 0308 × 0030 ÷ # ÷ [0.2] LATIN LETTER TWO WITH STROKE (OLetter) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 01BB × 002E ÷ # ÷ [0.2] LATIN LETTER TWO WITH STROKE (OLetter) × [998.0] FULL STOP (ATerm) ÷ [0.3] -÷ 01BB × 0308 × 002E ÷ # ÷ [0.2] LATIN LETTER TWO WITH STROKE (OLetter) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] FULL STOP (ATerm) ÷ [0.3] -÷ 01BB × 0021 ÷ # ÷ [0.2] LATIN LETTER TWO WITH STROKE (OLetter) × [998.0] EXCLAMATION MARK (STerm) ÷ [0.3] -÷ 01BB × 0308 × 0021 ÷ # ÷ [0.2] LATIN LETTER TWO WITH STROKE (OLetter) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] EXCLAMATION MARK (STerm) ÷ [0.3] -÷ 01BB × 0022 ÷ # ÷ [0.2] LATIN LETTER TWO WITH STROKE (OLetter) × [998.0] QUOTATION MARK (Close) ÷ [0.3] -÷ 01BB × 0308 × 0022 ÷ # ÷ [0.2] LATIN LETTER TWO WITH STROKE (OLetter) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] QUOTATION MARK (Close) ÷ [0.3] -÷ 01BB × 002C ÷ # ÷ [0.2] LATIN LETTER TWO WITH STROKE (OLetter) × [998.0] COMMA (SContinue) ÷ [0.3] -÷ 01BB × 0308 × 002C ÷ # ÷ [0.2] LATIN LETTER TWO WITH STROKE (OLetter) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] COMMA (SContinue) ÷ [0.3] -÷ 01BB × 00AD ÷ # ÷ [0.2] LATIN LETTER TWO WITH STROKE (OLetter) × [5.0] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 01BB × 0308 × 00AD ÷ # ÷ [0.2] LATIN LETTER TWO WITH STROKE (OLetter) × [5.0] COMBINING DIAERESIS (Extend_FE) × [5.0] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 01BB × 0300 ÷ # ÷ [0.2] LATIN LETTER TWO WITH STROKE (OLetter) × [5.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 01BB × 0308 × 0300 ÷ # ÷ [0.2] LATIN LETTER TWO WITH STROKE (OLetter) × [5.0] COMBINING DIAERESIS (Extend_FE) × [5.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 0030 × 0001 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [998.0] (Other) ÷ 
[0.3] -÷ 0030 × 0308 × 0001 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (Other) ÷ [0.3] -÷ 0030 × 000D ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [998.0] (CR) ÷ [0.3] -÷ 0030 × 0308 × 000D ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (CR) ÷ [0.3] -÷ 0030 × 000A ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [998.0] (LF) ÷ [0.3] -÷ 0030 × 0308 × 000A ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (LF) ÷ [0.3] -÷ 0030 × 0085 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [998.0] (Sep) ÷ [0.3] -÷ 0030 × 0308 × 0085 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (Sep) ÷ [0.3] -÷ 0030 × 0009 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [998.0] (Sp) ÷ [0.3] -÷ 0030 × 0308 × 0009 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (Sp) ÷ [0.3] -÷ 0030 × 0061 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [998.0] LATIN SMALL LETTER A (Lower) ÷ [0.3] -÷ 0030 × 0308 × 0061 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] LATIN SMALL LETTER A (Lower) ÷ [0.3] -÷ 0030 × 0041 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [998.0] LATIN CAPITAL LETTER A (Upper) ÷ [0.3] -÷ 0030 × 0308 × 0041 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] LATIN CAPITAL LETTER A (Upper) ÷ [0.3] -÷ 0030 × 01BB ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [998.0] LATIN LETTER TWO WITH STROKE (OLetter) ÷ [0.3] -÷ 0030 × 0308 × 01BB ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] LATIN LETTER TWO WITH STROKE (OLetter) ÷ [0.3] -÷ 0030 × 0030 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [998.0] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 0030 × 0308 × 0030 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 0030 × 002E ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [998.0] FULL STOP (ATerm) ÷ [0.3] -÷ 0030 × 0308 × 002E ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] FULL STOP (ATerm) ÷ [0.3] -÷ 0030 × 0021 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [998.0] EXCLAMATION MARK (STerm) ÷ [0.3] -÷ 0030 × 0308 × 0021 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] EXCLAMATION MARK (STerm) ÷ [0.3] -÷ 0030 × 0022 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [998.0] QUOTATION MARK (Close) ÷ [0.3] -÷ 0030 × 0308 × 0022 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] QUOTATION MARK (Close) ÷ [0.3] -÷ 0030 × 002C ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [998.0] COMMA (SContinue) ÷ [0.3] -÷ 0030 × 0308 × 002C ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] COMMA (SContinue) ÷ [0.3] -÷ 0030 × 00AD ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [5.0] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 0030 × 0308 × 00AD ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [5.0] COMBINING DIAERESIS (Extend_FE) × [5.0] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 0030 × 0300 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [5.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 0030 × 0308 × 0300 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [5.0] COMBINING DIAERESIS (Extend_FE) × [5.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 002E ÷ 0001 ÷ # ÷ [0.2] FULL STOP (ATerm) ÷ [11.0] (Other) ÷ [0.3] -÷ 002E × 0308 ÷ 0001 ÷ # ÷ [0.2] FULL STOP (ATerm) × [5.0] COMBINING DIAERESIS (Extend_FE) ÷ [11.0] (Other) ÷ [0.3] -÷ 002E × 000D ÷ # ÷ [0.2] FULL STOP (ATerm) × [9.0] (CR) ÷ [0.3] -÷ 002E × 0308 × 000D ÷ # ÷ [0.2] FULL STOP (ATerm) × [5.0] COMBINING DIAERESIS (Extend_FE) × [9.0] (CR) ÷ 
[0.3] -÷ 002E × 000A ÷ # ÷ [0.2] FULL STOP (ATerm) × [9.0] (LF) ÷ [0.3] -÷ 002E × 0308 × 000A ÷ # ÷ [0.2] FULL STOP (ATerm) × [5.0] COMBINING DIAERESIS (Extend_FE) × [9.0] (LF) ÷ [0.3] -÷ 002E × 0085 ÷ # ÷ [0.2] FULL STOP (ATerm) × [9.0] (Sep) ÷ [0.3] -÷ 002E × 0308 × 0085 ÷ # ÷ [0.2] FULL STOP (ATerm) × [5.0] COMBINING DIAERESIS (Extend_FE) × [9.0] (Sep) ÷ [0.3] -÷ 002E × 0009 ÷ # ÷ [0.2] FULL STOP (ATerm) × [9.0] (Sp) ÷ [0.3] -÷ 002E × 0308 × 0009 ÷ # ÷ [0.2] FULL STOP (ATerm) × [5.0] COMBINING DIAERESIS (Extend_FE) × [9.0] (Sp) ÷ [0.3] -÷ 002E × 0061 ÷ # ÷ [0.2] FULL STOP (ATerm) × [8.0] LATIN SMALL LETTER A (Lower) ÷ [0.3] -÷ 002E × 0308 × 0061 ÷ # ÷ [0.2] FULL STOP (ATerm) × [5.0] COMBINING DIAERESIS (Extend_FE) × [8.0] LATIN SMALL LETTER A (Lower) ÷ [0.3] -÷ 002E ÷ 0041 ÷ # ÷ [0.2] FULL STOP (ATerm) ÷ [11.0] LATIN CAPITAL LETTER A (Upper) ÷ [0.3] -÷ 002E × 0308 ÷ 0041 ÷ # ÷ [0.2] FULL STOP (ATerm) × [5.0] COMBINING DIAERESIS (Extend_FE) ÷ [11.0] LATIN CAPITAL LETTER A (Upper) ÷ [0.3] -÷ 002E ÷ 01BB ÷ # ÷ [0.2] FULL STOP (ATerm) ÷ [11.0] LATIN LETTER TWO WITH STROKE (OLetter) ÷ [0.3] -÷ 002E × 0308 ÷ 01BB ÷ # ÷ [0.2] FULL STOP (ATerm) × [5.0] COMBINING DIAERESIS (Extend_FE) ÷ [11.0] LATIN LETTER TWO WITH STROKE (OLetter) ÷ [0.3] -÷ 002E × 0030 ÷ # ÷ [0.2] FULL STOP (ATerm) × [6.0] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 002E × 0308 × 0030 ÷ # ÷ [0.2] FULL STOP (ATerm) × [5.0] COMBINING DIAERESIS (Extend_FE) × [6.0] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 002E × 002E ÷ # ÷ [0.2] FULL STOP (ATerm) × [8.1] FULL STOP (ATerm) ÷ [0.3] -÷ 002E × 0308 × 002E ÷ # ÷ [0.2] FULL STOP (ATerm) × [5.0] COMBINING DIAERESIS (Extend_FE) × [8.1] FULL STOP (ATerm) ÷ [0.3] -÷ 002E × 0021 ÷ # ÷ [0.2] FULL STOP (ATerm) × [8.1] EXCLAMATION MARK (STerm) ÷ [0.3] -÷ 002E × 0308 × 0021 ÷ # ÷ [0.2] FULL STOP (ATerm) × [5.0] COMBINING DIAERESIS (Extend_FE) × [8.1] EXCLAMATION MARK (STerm) ÷ [0.3] -÷ 002E × 0022 ÷ # ÷ [0.2] FULL STOP (ATerm) × [9.0] QUOTATION MARK (Close) ÷ [0.3] -÷ 002E × 0308 × 0022 ÷ # ÷ [0.2] FULL STOP (ATerm) × [5.0] COMBINING DIAERESIS (Extend_FE) × [9.0] QUOTATION MARK (Close) ÷ [0.3] -÷ 002E × 002C ÷ # ÷ [0.2] FULL STOP (ATerm) × [8.1] COMMA (SContinue) ÷ [0.3] -÷ 002E × 0308 × 002C ÷ # ÷ [0.2] FULL STOP (ATerm) × [5.0] COMBINING DIAERESIS (Extend_FE) × [8.1] COMMA (SContinue) ÷ [0.3] -÷ 002E × 00AD ÷ # ÷ [0.2] FULL STOP (ATerm) × [5.0] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 002E × 0308 × 00AD ÷ # ÷ [0.2] FULL STOP (ATerm) × [5.0] COMBINING DIAERESIS (Extend_FE) × [5.0] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 002E × 0300 ÷ # ÷ [0.2] FULL STOP (ATerm) × [5.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 002E × 0308 × 0300 ÷ # ÷ [0.2] FULL STOP (ATerm) × [5.0] COMBINING DIAERESIS (Extend_FE) × [5.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 0021 ÷ 0001 ÷ # ÷ [0.2] EXCLAMATION MARK (STerm) ÷ [11.0] (Other) ÷ [0.3] -÷ 0021 × 0308 ÷ 0001 ÷ # ÷ [0.2] EXCLAMATION MARK (STerm) × [5.0] COMBINING DIAERESIS (Extend_FE) ÷ [11.0] (Other) ÷ [0.3] -÷ 0021 × 000D ÷ # ÷ [0.2] EXCLAMATION MARK (STerm) × [9.0] (CR) ÷ [0.3] -÷ 0021 × 0308 × 000D ÷ # ÷ [0.2] EXCLAMATION MARK (STerm) × [5.0] COMBINING DIAERESIS (Extend_FE) × [9.0] (CR) ÷ [0.3] -÷ 0021 × 000A ÷ # ÷ [0.2] EXCLAMATION MARK (STerm) × [9.0] (LF) ÷ [0.3] -÷ 0021 × 0308 × 000A ÷ # ÷ [0.2] EXCLAMATION MARK (STerm) × [5.0] COMBINING DIAERESIS (Extend_FE) × [9.0] (LF) ÷ [0.3] -÷ 0021 × 0085 ÷ # ÷ [0.2] EXCLAMATION MARK (STerm) × [9.0] (Sep) ÷ [0.3] -÷ 0021 × 0308 × 0085 ÷ # ÷ [0.2] EXCLAMATION MARK (STerm) × [5.0] COMBINING DIAERESIS (Extend_FE) × [9.0] (Sep) ÷ [0.3] -÷ 0021 
× 0009 ÷ # ÷ [0.2] EXCLAMATION MARK (STerm) × [9.0] (Sp) ÷ [0.3] -÷ 0021 × 0308 × 0009 ÷ # ÷ [0.2] EXCLAMATION MARK (STerm) × [5.0] COMBINING DIAERESIS (Extend_FE) × [9.0] (Sp) ÷ [0.3] -÷ 0021 ÷ 0061 ÷ # ÷ [0.2] EXCLAMATION MARK (STerm) ÷ [11.0] LATIN SMALL LETTER A (Lower) ÷ [0.3] -÷ 0021 × 0308 ÷ 0061 ÷ # ÷ [0.2] EXCLAMATION MARK (STerm) × [5.0] COMBINING DIAERESIS (Extend_FE) ÷ [11.0] LATIN SMALL LETTER A (Lower) ÷ [0.3] -÷ 0021 ÷ 0041 ÷ # ÷ [0.2] EXCLAMATION MARK (STerm) ÷ [11.0] LATIN CAPITAL LETTER A (Upper) ÷ [0.3] -÷ 0021 × 0308 ÷ 0041 ÷ # ÷ [0.2] EXCLAMATION MARK (STerm) × [5.0] COMBINING DIAERESIS (Extend_FE) ÷ [11.0] LATIN CAPITAL LETTER A (Upper) ÷ [0.3] -÷ 0021 ÷ 01BB ÷ # ÷ [0.2] EXCLAMATION MARK (STerm) ÷ [11.0] LATIN LETTER TWO WITH STROKE (OLetter) ÷ [0.3] -÷ 0021 × 0308 ÷ 01BB ÷ # ÷ [0.2] EXCLAMATION MARK (STerm) × [5.0] COMBINING DIAERESIS (Extend_FE) ÷ [11.0] LATIN LETTER TWO WITH STROKE (OLetter) ÷ [0.3] -÷ 0021 ÷ 0030 ÷ # ÷ [0.2] EXCLAMATION MARK (STerm) ÷ [11.0] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 0021 × 0308 ÷ 0030 ÷ # ÷ [0.2] EXCLAMATION MARK (STerm) × [5.0] COMBINING DIAERESIS (Extend_FE) ÷ [11.0] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 0021 × 002E ÷ # ÷ [0.2] EXCLAMATION MARK (STerm) × [8.1] FULL STOP (ATerm) ÷ [0.3] -÷ 0021 × 0308 × 002E ÷ # ÷ [0.2] EXCLAMATION MARK (STerm) × [5.0] COMBINING DIAERESIS (Extend_FE) × [8.1] FULL STOP (ATerm) ÷ [0.3] -÷ 0021 × 0021 ÷ # ÷ [0.2] EXCLAMATION MARK (STerm) × [8.1] EXCLAMATION MARK (STerm) ÷ [0.3] -÷ 0021 × 0308 × 0021 ÷ # ÷ [0.2] EXCLAMATION MARK (STerm) × [5.0] COMBINING DIAERESIS (Extend_FE) × [8.1] EXCLAMATION MARK (STerm) ÷ [0.3] -÷ 0021 × 0022 ÷ # ÷ [0.2] EXCLAMATION MARK (STerm) × [9.0] QUOTATION MARK (Close) ÷ [0.3] -÷ 0021 × 0308 × 0022 ÷ # ÷ [0.2] EXCLAMATION MARK (STerm) × [5.0] COMBINING DIAERESIS (Extend_FE) × [9.0] QUOTATION MARK (Close) ÷ [0.3] -÷ 0021 × 002C ÷ # ÷ [0.2] EXCLAMATION MARK (STerm) × [8.1] COMMA (SContinue) ÷ [0.3] -÷ 0021 × 0308 × 002C ÷ # ÷ [0.2] EXCLAMATION MARK (STerm) × [5.0] COMBINING DIAERESIS (Extend_FE) × [8.1] COMMA (SContinue) ÷ [0.3] -÷ 0021 × 00AD ÷ # ÷ [0.2] EXCLAMATION MARK (STerm) × [5.0] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 0021 × 0308 × 00AD ÷ # ÷ [0.2] EXCLAMATION MARK (STerm) × [5.0] COMBINING DIAERESIS (Extend_FE) × [5.0] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 0021 × 0300 ÷ # ÷ [0.2] EXCLAMATION MARK (STerm) × [5.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 0021 × 0308 × 0300 ÷ # ÷ [0.2] EXCLAMATION MARK (STerm) × [5.0] COMBINING DIAERESIS (Extend_FE) × [5.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 0022 × 0001 ÷ # ÷ [0.2] QUOTATION MARK (Close) × [998.0] (Other) ÷ [0.3] -÷ 0022 × 0308 × 0001 ÷ # ÷ [0.2] QUOTATION MARK (Close) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (Other) ÷ [0.3] -÷ 0022 × 000D ÷ # ÷ [0.2] QUOTATION MARK (Close) × [998.0] (CR) ÷ [0.3] -÷ 0022 × 0308 × 000D ÷ # ÷ [0.2] QUOTATION MARK (Close) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (CR) ÷ [0.3] -÷ 0022 × 000A ÷ # ÷ [0.2] QUOTATION MARK (Close) × [998.0] (LF) ÷ [0.3] -÷ 0022 × 0308 × 000A ÷ # ÷ [0.2] QUOTATION MARK (Close) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (LF) ÷ [0.3] -÷ 0022 × 0085 ÷ # ÷ [0.2] QUOTATION MARK (Close) × [998.0] (Sep) ÷ [0.3] -÷ 0022 × 0308 × 0085 ÷ # ÷ [0.2] QUOTATION MARK (Close) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (Sep) ÷ [0.3] -÷ 0022 × 0009 ÷ # ÷ [0.2] QUOTATION MARK (Close) × [998.0] (Sp) ÷ [0.3] -÷ 0022 × 0308 × 0009 ÷ # ÷ [0.2] QUOTATION MARK (Close) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (Sp) ÷ [0.3] -÷ 0022 × 0061 ÷ # ÷ [0.2] 
QUOTATION MARK (Close) × [998.0] LATIN SMALL LETTER A (Lower) ÷ [0.3] -÷ 0022 × 0308 × 0061 ÷ # ÷ [0.2] QUOTATION MARK (Close) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] LATIN SMALL LETTER A (Lower) ÷ [0.3] -÷ 0022 × 0041 ÷ # ÷ [0.2] QUOTATION MARK (Close) × [998.0] LATIN CAPITAL LETTER A (Upper) ÷ [0.3] -÷ 0022 × 0308 × 0041 ÷ # ÷ [0.2] QUOTATION MARK (Close) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] LATIN CAPITAL LETTER A (Upper) ÷ [0.3] -÷ 0022 × 01BB ÷ # ÷ [0.2] QUOTATION MARK (Close) × [998.0] LATIN LETTER TWO WITH STROKE (OLetter) ÷ [0.3] -÷ 0022 × 0308 × 01BB ÷ # ÷ [0.2] QUOTATION MARK (Close) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] LATIN LETTER TWO WITH STROKE (OLetter) ÷ [0.3] -÷ 0022 × 0030 ÷ # ÷ [0.2] QUOTATION MARK (Close) × [998.0] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 0022 × 0308 × 0030 ÷ # ÷ [0.2] QUOTATION MARK (Close) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 0022 × 002E ÷ # ÷ [0.2] QUOTATION MARK (Close) × [998.0] FULL STOP (ATerm) ÷ [0.3] -÷ 0022 × 0308 × 002E ÷ # ÷ [0.2] QUOTATION MARK (Close) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] FULL STOP (ATerm) ÷ [0.3] -÷ 0022 × 0021 ÷ # ÷ [0.2] QUOTATION MARK (Close) × [998.0] EXCLAMATION MARK (STerm) ÷ [0.3] -÷ 0022 × 0308 × 0021 ÷ # ÷ [0.2] QUOTATION MARK (Close) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] EXCLAMATION MARK (STerm) ÷ [0.3] -÷ 0022 × 0022 ÷ # ÷ [0.2] QUOTATION MARK (Close) × [998.0] QUOTATION MARK (Close) ÷ [0.3] -÷ 0022 × 0308 × 0022 ÷ # ÷ [0.2] QUOTATION MARK (Close) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] QUOTATION MARK (Close) ÷ [0.3] -÷ 0022 × 002C ÷ # ÷ [0.2] QUOTATION MARK (Close) × [998.0] COMMA (SContinue) ÷ [0.3] -÷ 0022 × 0308 × 002C ÷ # ÷ [0.2] QUOTATION MARK (Close) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] COMMA (SContinue) ÷ [0.3] -÷ 0022 × 00AD ÷ # ÷ [0.2] QUOTATION MARK (Close) × [5.0] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 0022 × 0308 × 00AD ÷ # ÷ [0.2] QUOTATION MARK (Close) × [5.0] COMBINING DIAERESIS (Extend_FE) × [5.0] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 0022 × 0300 ÷ # ÷ [0.2] QUOTATION MARK (Close) × [5.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 0022 × 0308 × 0300 ÷ # ÷ [0.2] QUOTATION MARK (Close) × [5.0] COMBINING DIAERESIS (Extend_FE) × [5.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 002C × 0001 ÷ # ÷ [0.2] COMMA (SContinue) × [998.0] (Other) ÷ [0.3] -÷ 002C × 0308 × 0001 ÷ # ÷ [0.2] COMMA (SContinue) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (Other) ÷ [0.3] -÷ 002C × 000D ÷ # ÷ [0.2] COMMA (SContinue) × [998.0] (CR) ÷ [0.3] -÷ 002C × 0308 × 000D ÷ # ÷ [0.2] COMMA (SContinue) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (CR) ÷ [0.3] -÷ 002C × 000A ÷ # ÷ [0.2] COMMA (SContinue) × [998.0] (LF) ÷ [0.3] -÷ 002C × 0308 × 000A ÷ # ÷ [0.2] COMMA (SContinue) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (LF) ÷ [0.3] -÷ 002C × 0085 ÷ # ÷ [0.2] COMMA (SContinue) × [998.0] (Sep) ÷ [0.3] -÷ 002C × 0308 × 0085 ÷ # ÷ [0.2] COMMA (SContinue) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (Sep) ÷ [0.3] -÷ 002C × 0009 ÷ # ÷ [0.2] COMMA (SContinue) × [998.0] (Sp) ÷ [0.3] -÷ 002C × 0308 × 0009 ÷ # ÷ [0.2] COMMA (SContinue) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (Sp) ÷ [0.3] -÷ 002C × 0061 ÷ # ÷ [0.2] COMMA (SContinue) × [998.0] LATIN SMALL LETTER A (Lower) ÷ [0.3] -÷ 002C × 0308 × 0061 ÷ # ÷ [0.2] COMMA (SContinue) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] LATIN SMALL LETTER A (Lower) ÷ [0.3] -÷ 002C × 0041 ÷ # ÷ [0.2] COMMA (SContinue) × [998.0] LATIN CAPITAL LETTER A 
(Upper) ÷ [0.3] -÷ 002C × 0308 × 0041 ÷ # ÷ [0.2] COMMA (SContinue) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] LATIN CAPITAL LETTER A (Upper) ÷ [0.3] -÷ 002C × 01BB ÷ # ÷ [0.2] COMMA (SContinue) × [998.0] LATIN LETTER TWO WITH STROKE (OLetter) ÷ [0.3] -÷ 002C × 0308 × 01BB ÷ # ÷ [0.2] COMMA (SContinue) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] LATIN LETTER TWO WITH STROKE (OLetter) ÷ [0.3] -÷ 002C × 0030 ÷ # ÷ [0.2] COMMA (SContinue) × [998.0] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 002C × 0308 × 0030 ÷ # ÷ [0.2] COMMA (SContinue) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 002C × 002E ÷ # ÷ [0.2] COMMA (SContinue) × [998.0] FULL STOP (ATerm) ÷ [0.3] -÷ 002C × 0308 × 002E ÷ # ÷ [0.2] COMMA (SContinue) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] FULL STOP (ATerm) ÷ [0.3] -÷ 002C × 0021 ÷ # ÷ [0.2] COMMA (SContinue) × [998.0] EXCLAMATION MARK (STerm) ÷ [0.3] -÷ 002C × 0308 × 0021 ÷ # ÷ [0.2] COMMA (SContinue) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] EXCLAMATION MARK (STerm) ÷ [0.3] -÷ 002C × 0022 ÷ # ÷ [0.2] COMMA (SContinue) × [998.0] QUOTATION MARK (Close) ÷ [0.3] -÷ 002C × 0308 × 0022 ÷ # ÷ [0.2] COMMA (SContinue) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] QUOTATION MARK (Close) ÷ [0.3] -÷ 002C × 002C ÷ # ÷ [0.2] COMMA (SContinue) × [998.0] COMMA (SContinue) ÷ [0.3] -÷ 002C × 0308 × 002C ÷ # ÷ [0.2] COMMA (SContinue) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] COMMA (SContinue) ÷ [0.3] -÷ 002C × 00AD ÷ # ÷ [0.2] COMMA (SContinue) × [5.0] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 002C × 0308 × 00AD ÷ # ÷ [0.2] COMMA (SContinue) × [5.0] COMBINING DIAERESIS (Extend_FE) × [5.0] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 002C × 0300 ÷ # ÷ [0.2] COMMA (SContinue) × [5.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 002C × 0308 × 0300 ÷ # ÷ [0.2] COMMA (SContinue) × [5.0] COMBINING DIAERESIS (Extend_FE) × [5.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 00AD × 0001 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [998.0] (Other) ÷ [0.3] -÷ 00AD × 0308 × 0001 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (Other) ÷ [0.3] -÷ 00AD × 000D ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [998.0] (CR) ÷ [0.3] -÷ 00AD × 0308 × 000D ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (CR) ÷ [0.3] -÷ 00AD × 000A ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [998.0] (LF) ÷ [0.3] -÷ 00AD × 0308 × 000A ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (LF) ÷ [0.3] -÷ 00AD × 0085 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [998.0] (Sep) ÷ [0.3] -÷ 00AD × 0308 × 0085 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (Sep) ÷ [0.3] -÷ 00AD × 0009 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [998.0] (Sp) ÷ [0.3] -÷ 00AD × 0308 × 0009 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (Sp) ÷ [0.3] -÷ 00AD × 0061 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [998.0] LATIN SMALL LETTER A (Lower) ÷ [0.3] -÷ 00AD × 0308 × 0061 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] LATIN SMALL LETTER A (Lower) ÷ [0.3] -÷ 00AD × 0041 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [998.0] LATIN CAPITAL LETTER A (Upper) ÷ [0.3] -÷ 00AD × 0308 × 0041 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] LATIN CAPITAL LETTER A (Upper) ÷ [0.3] -÷ 00AD × 01BB ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [998.0] LATIN LETTER TWO WITH STROKE (OLetter) ÷ [0.3] -÷ 00AD × 0308 × 01BB ÷ # ÷ [0.2] 
SOFT HYPHEN (Format_FE) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] LATIN LETTER TWO WITH STROKE (OLetter) ÷ [0.3] -÷ 00AD × 0030 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [998.0] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 00AD × 0308 × 0030 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 00AD × 002E ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [998.0] FULL STOP (ATerm) ÷ [0.3] -÷ 00AD × 0308 × 002E ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] FULL STOP (ATerm) ÷ [0.3] -÷ 00AD × 0021 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [998.0] EXCLAMATION MARK (STerm) ÷ [0.3] -÷ 00AD × 0308 × 0021 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] EXCLAMATION MARK (STerm) ÷ [0.3] -÷ 00AD × 0022 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [998.0] QUOTATION MARK (Close) ÷ [0.3] -÷ 00AD × 0308 × 0022 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] QUOTATION MARK (Close) ÷ [0.3] -÷ 00AD × 002C ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [998.0] COMMA (SContinue) ÷ [0.3] -÷ 00AD × 0308 × 002C ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] COMMA (SContinue) ÷ [0.3] -÷ 00AD × 00AD ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [5.0] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 00AD × 0308 × 00AD ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [5.0] COMBINING DIAERESIS (Extend_FE) × [5.0] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 00AD × 0300 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [5.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 00AD × 0308 × 0300 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [5.0] COMBINING DIAERESIS (Extend_FE) × [5.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 0300 × 0001 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [998.0] (Other) ÷ [0.3] -÷ 0300 × 0308 × 0001 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (Other) ÷ [0.3] -÷ 0300 × 000D ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [998.0] (CR) ÷ [0.3] -÷ 0300 × 0308 × 000D ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (CR) ÷ [0.3] -÷ 0300 × 000A ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [998.0] (LF) ÷ [0.3] -÷ 0300 × 0308 × 000A ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (LF) ÷ [0.3] -÷ 0300 × 0085 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [998.0] (Sep) ÷ [0.3] -÷ 0300 × 0308 × 0085 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (Sep) ÷ [0.3] -÷ 0300 × 0009 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [998.0] (Sp) ÷ [0.3] -÷ 0300 × 0308 × 0009 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (Sp) ÷ [0.3] -÷ 0300 × 0061 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [998.0] LATIN SMALL LETTER A (Lower) ÷ [0.3] -÷ 0300 × 0308 × 0061 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] LATIN SMALL LETTER A (Lower) ÷ [0.3] -÷ 0300 × 0041 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [998.0] LATIN CAPITAL LETTER A (Upper) ÷ [0.3] -÷ 0300 × 0308 × 0041 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] LATIN CAPITAL LETTER A (Upper) ÷ [0.3] -÷ 0300 × 01BB ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [998.0] LATIN LETTER TWO WITH STROKE (OLetter) ÷ [0.3] -÷ 0300 × 0308 × 01BB ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × 
[5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] LATIN LETTER TWO WITH STROKE (OLetter) ÷ [0.3] -÷ 0300 × 0030 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [998.0] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 0300 × 0308 × 0030 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 0300 × 002E ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [998.0] FULL STOP (ATerm) ÷ [0.3] -÷ 0300 × 0308 × 002E ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] FULL STOP (ATerm) ÷ [0.3] -÷ 0300 × 0021 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [998.0] EXCLAMATION MARK (STerm) ÷ [0.3] -÷ 0300 × 0308 × 0021 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] EXCLAMATION MARK (STerm) ÷ [0.3] -÷ 0300 × 0022 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [998.0] QUOTATION MARK (Close) ÷ [0.3] -÷ 0300 × 0308 × 0022 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] QUOTATION MARK (Close) ÷ [0.3] -÷ 0300 × 002C ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [998.0] COMMA (SContinue) ÷ [0.3] -÷ 0300 × 0308 × 002C ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] COMMA (SContinue) ÷ [0.3] -÷ 0300 × 00AD ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [5.0] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 0300 × 0308 × 00AD ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [5.0] COMBINING DIAERESIS (Extend_FE) × [5.0] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 0300 × 0300 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [5.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 0300 × 0308 × 0300 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [5.0] COMBINING DIAERESIS (Extend_FE) × [5.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 000D × 000A ÷ 0061 × 000A ÷ 0308 ÷ # ÷ [0.2] (CR) × [3.0] (LF) ÷ [4.0] LATIN SMALL LETTER A (Lower) × [998.0] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [0.3] -÷ 0061 × 0308 ÷ # ÷ [0.2] LATIN SMALL LETTER A (Lower) × [5.0] COMBINING DIAERESIS (Extend_FE) ÷ [0.3] -÷ 0020 × 200D × 0646 ÷ # ÷ [0.2] SPACE (Sp) × [5.0] ZERO WIDTH JOINER (Extend_FE) × [998.0] ARABIC LETTER NOON (OLetter) ÷ [0.3] -÷ 0646 × 200D × 0020 ÷ # ÷ [0.2] ARABIC LETTER NOON (OLetter) × [5.0] ZERO WIDTH JOINER (Extend_FE) × [998.0] SPACE (Sp) ÷ [0.3] -÷ 0028 × 0022 × 0047 × 006F × 002E × 0022 × 0029 × 0020 ÷ 0028 × 0048 × 0065 × 0020 × 0064 × 0069 × 0064 × 002E × 0029 ÷ # ÷ [0.2] LEFT PARENTHESIS (Close) × [998.0] QUOTATION MARK (Close) × [998.0] LATIN CAPITAL LETTER G (Upper) × [998.0] LATIN SMALL LETTER O (Lower) × [998.0] FULL STOP (ATerm) × [9.0] QUOTATION MARK (Close) × [9.0] RIGHT PARENTHESIS (Close) × [9.0] SPACE (Sp) ÷ [11.0] LEFT PARENTHESIS (Close) × [998.0] LATIN CAPITAL LETTER H (Upper) × [998.0] LATIN SMALL LETTER E (Lower) × [998.0] SPACE (Sp) × [998.0] LATIN SMALL LETTER D (Lower) × [998.0] LATIN SMALL LETTER I (Lower) × [998.0] LATIN SMALL LETTER D (Lower) × [998.0] FULL STOP (ATerm) × [9.0] RIGHT PARENTHESIS (Close) ÷ [0.3] -÷ 0028 × 201C × 0047 × 006F × 003F × 201D × 0029 × 0020 ÷ 0028 × 0048 × 0065 × 0020 × 0064 × 0069 × 0064 × 002E × 0029 ÷ # ÷ [0.2] LEFT PARENTHESIS (Close) × [998.0] LEFT DOUBLE QUOTATION MARK (Close) × [998.0] LATIN CAPITAL LETTER G (Upper) × [998.0] LATIN SMALL LETTER O (Lower) × [998.0] QUESTION MARK (STerm) × [9.0] RIGHT DOUBLE QUOTATION MARK (Close) × [9.0] RIGHT PARENTHESIS (Close) × [9.0] SPACE (Sp) ÷ [11.0] LEFT PARENTHESIS (Close) × [998.0] 
LATIN CAPITAL LETTER H (Upper) × [998.0] LATIN SMALL LETTER E (Lower) × [998.0] SPACE (Sp) × [998.0] LATIN SMALL LETTER D (Lower) × [998.0] LATIN SMALL LETTER I (Lower) × [998.0] LATIN SMALL LETTER D (Lower) × [998.0] FULL STOP (ATerm) × [9.0] RIGHT PARENTHESIS (Close) ÷ [0.3] -÷ 0055 × 002E × 0053 × 002E × 0041 × 0300 × 002E × 0020 × 0069 × 0073 ÷ # ÷ [0.2] LATIN CAPITAL LETTER U (Upper) × [998.0] FULL STOP (ATerm) × [7.0] LATIN CAPITAL LETTER S (Upper) × [998.0] FULL STOP (ATerm) × [7.0] LATIN CAPITAL LETTER A (Upper) × [5.0] COMBINING GRAVE ACCENT (Extend_FE) × [998.0] FULL STOP (ATerm) × [8.0] SPACE (Sp) × [8.0] LATIN SMALL LETTER I (Lower) × [998.0] LATIN SMALL LETTER S (Lower) ÷ [0.3] -÷ 0055 × 002E × 0053 × 002E × 0041 × 0300 × 003F × 0020 ÷ 0048 × 0065 ÷ # ÷ [0.2] LATIN CAPITAL LETTER U (Upper) × [998.0] FULL STOP (ATerm) × [7.0] LATIN CAPITAL LETTER S (Upper) × [998.0] FULL STOP (ATerm) × [7.0] LATIN CAPITAL LETTER A (Upper) × [5.0] COMBINING GRAVE ACCENT (Extend_FE) × [998.0] QUESTION MARK (STerm) × [9.0] SPACE (Sp) ÷ [11.0] LATIN CAPITAL LETTER H (Upper) × [998.0] LATIN SMALL LETTER E (Lower) ÷ [0.3] -÷ 0055 × 002E × 0053 × 002E × 0041 × 0300 × 002E ÷ # ÷ [0.2] LATIN CAPITAL LETTER U (Upper) × [998.0] FULL STOP (ATerm) × [7.0] LATIN CAPITAL LETTER S (Upper) × [998.0] FULL STOP (ATerm) × [7.0] LATIN CAPITAL LETTER A (Upper) × [5.0] COMBINING GRAVE ACCENT (Extend_FE) × [998.0] FULL STOP (ATerm) ÷ [0.3] -÷ 0033 × 002E × 0034 ÷ # ÷ [0.2] DIGIT THREE (Numeric) × [998.0] FULL STOP (ATerm) × [6.0] DIGIT FOUR (Numeric) ÷ [0.3] -÷ 0063 × 002E × 0064 ÷ # ÷ [0.2] LATIN SMALL LETTER C (Lower) × [998.0] FULL STOP (ATerm) × [8.0] LATIN SMALL LETTER D (Lower) ÷ [0.3] -÷ 0043 × 002E × 0064 ÷ # ÷ [0.2] LATIN CAPITAL LETTER C (Upper) × [998.0] FULL STOP (ATerm) × [8.0] LATIN SMALL LETTER D (Lower) ÷ [0.3] -÷ 0063 × 002E × 0044 ÷ # ÷ [0.2] LATIN SMALL LETTER C (Lower) × [998.0] FULL STOP (ATerm) × [7.0] LATIN CAPITAL LETTER D (Upper) ÷ [0.3] -÷ 0043 × 002E × 0044 ÷ # ÷ [0.2] LATIN CAPITAL LETTER C (Upper) × [998.0] FULL STOP (ATerm) × [7.0] LATIN CAPITAL LETTER D (Upper) ÷ [0.3] -÷ 0065 × 0074 × 0063 × 002E × 0029 × 2019 × 00A0 × 0074 × 0068 × 0065 ÷ # ÷ [0.2] LATIN SMALL LETTER E (Lower) × [998.0] LATIN SMALL LETTER T (Lower) × [998.0] LATIN SMALL LETTER C (Lower) × [998.0] FULL STOP (ATerm) × [8.0] RIGHT PARENTHESIS (Close) × [8.0] RIGHT SINGLE QUOTATION MARK (Close) × [8.0] NO-BREAK SPACE (Sp) × [8.0] LATIN SMALL LETTER T (Lower) × [998.0] LATIN SMALL LETTER H (Lower) × [998.0] LATIN SMALL LETTER E (Lower) ÷ [0.3] -÷ 0065 × 0074 × 0063 × 002E × 0029 × 2019 × 00A0 ÷ 0054 × 0068 × 0065 ÷ # ÷ [0.2] LATIN SMALL LETTER E (Lower) × [998.0] LATIN SMALL LETTER T (Lower) × [998.0] LATIN SMALL LETTER C (Lower) × [998.0] FULL STOP (ATerm) × [9.0] RIGHT PARENTHESIS (Close) × [9.0] RIGHT SINGLE QUOTATION MARK (Close) × [9.0] NO-BREAK SPACE (Sp) ÷ [11.0] LATIN CAPITAL LETTER T (Upper) × [998.0] LATIN SMALL LETTER H (Lower) × [998.0] LATIN SMALL LETTER E (Lower) ÷ [0.3] -÷ 0065 × 0074 × 0063 × 002E × 0029 × 2019 × 00A0 × 2018 × 0028 × 0074 × 0068 × 0065 ÷ # ÷ [0.2] LATIN SMALL LETTER E (Lower) × [998.0] LATIN SMALL LETTER T (Lower) × [998.0] LATIN SMALL LETTER C (Lower) × [998.0] FULL STOP (ATerm) × [8.0] RIGHT PARENTHESIS (Close) × [8.0] RIGHT SINGLE QUOTATION MARK (Close) × [8.0] NO-BREAK SPACE (Sp) × [8.0] LEFT SINGLE QUOTATION MARK (Close) × [998.0] LEFT PARENTHESIS (Close) × [998.0] LATIN SMALL LETTER T (Lower) × [998.0] LATIN SMALL LETTER H (Lower) × [998.0] LATIN SMALL LETTER E (Lower) ÷ [0.3] -÷ 0065 
× 0074 × 0063 × 002E × 0029 × 2019 × 00A0 ÷ 2018 × 0028 × 0054 × 0068 × 0065 ÷ # ÷ [0.2] LATIN SMALL LETTER E (Lower) × [998.0] LATIN SMALL LETTER T (Lower) × [998.0] LATIN SMALL LETTER C (Lower) × [998.0] FULL STOP (ATerm) × [9.0] RIGHT PARENTHESIS (Close) × [9.0] RIGHT SINGLE QUOTATION MARK (Close) × [9.0] NO-BREAK SPACE (Sp) ÷ [11.0] LEFT SINGLE QUOTATION MARK (Close) × [998.0] LEFT PARENTHESIS (Close) × [998.0] LATIN CAPITAL LETTER T (Upper) × [998.0] LATIN SMALL LETTER H (Lower) × [998.0] LATIN SMALL LETTER E (Lower) ÷ [0.3] -÷ 0065 × 0074 × 0063 × 002E × 0029 × 2019 × 00A0 × 0308 × 0074 × 0068 × 0065 ÷ # ÷ [0.2] LATIN SMALL LETTER E (Lower) × [998.0] LATIN SMALL LETTER T (Lower) × [998.0] LATIN SMALL LETTER C (Lower) × [998.0] FULL STOP (ATerm) × [8.0] RIGHT PARENTHESIS (Close) × [8.0] RIGHT SINGLE QUOTATION MARK (Close) × [8.0] NO-BREAK SPACE (Sp) × [5.0] COMBINING DIAERESIS (Extend_FE) × [8.0] LATIN SMALL LETTER T (Lower) × [998.0] LATIN SMALL LETTER H (Lower) × [998.0] LATIN SMALL LETTER E (Lower) ÷ [0.3] -÷ 0065 × 0074 × 0063 × 002E × 0029 × 2019 × 00A0 × 0308 ÷ 0054 × 0068 × 0065 ÷ # ÷ [0.2] LATIN SMALL LETTER E (Lower) × [998.0] LATIN SMALL LETTER T (Lower) × [998.0] LATIN SMALL LETTER C (Lower) × [998.0] FULL STOP (ATerm) × [9.0] RIGHT PARENTHESIS (Close) × [9.0] RIGHT SINGLE QUOTATION MARK (Close) × [9.0] NO-BREAK SPACE (Sp) × [5.0] COMBINING DIAERESIS (Extend_FE) ÷ [11.0] LATIN CAPITAL LETTER T (Upper) × [998.0] LATIN SMALL LETTER H (Lower) × [998.0] LATIN SMALL LETTER E (Lower) ÷ [0.3] -÷ 0065 × 0074 × 0063 × 002E × 0029 × 2019 × 0308 ÷ 0054 × 0068 × 0065 ÷ # ÷ [0.2] LATIN SMALL LETTER E (Lower) × [998.0] LATIN SMALL LETTER T (Lower) × [998.0] LATIN SMALL LETTER C (Lower) × [998.0] FULL STOP (ATerm) × [9.0] RIGHT PARENTHESIS (Close) × [9.0] RIGHT SINGLE QUOTATION MARK (Close) × [5.0] COMBINING DIAERESIS (Extend_FE) ÷ [11.0] LATIN CAPITAL LETTER T (Upper) × [998.0] LATIN SMALL LETTER H (Lower) × [998.0] LATIN SMALL LETTER E (Lower) ÷ [0.3] -÷ 0065 × 0074 × 0063 × 002E × 0029 × 000A ÷ 0308 × 0054 × 0068 × 0065 ÷ # ÷ [0.2] LATIN SMALL LETTER E (Lower) × [998.0] LATIN SMALL LETTER T (Lower) × [998.0] LATIN SMALL LETTER C (Lower) × [998.0] FULL STOP (ATerm) × [9.0] RIGHT PARENTHESIS (Close) × [9.0] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [998.0] LATIN CAPITAL LETTER T (Upper) × [998.0] LATIN SMALL LETTER H (Lower) × [998.0] LATIN SMALL LETTER E (Lower) ÷ [0.3] -÷ 0074 × 0068 × 0065 × 0020 × 0072 × 0065 × 0073 × 0070 × 002E × 0020 × 006C × 0065 × 0061 × 0064 × 0065 × 0072 × 0073 × 0020 × 0061 × 0072 × 0065 ÷ # ÷ [0.2] LATIN SMALL LETTER T (Lower) × [998.0] LATIN SMALL LETTER H (Lower) × [998.0] LATIN SMALL LETTER E (Lower) × [998.0] SPACE (Sp) × [998.0] LATIN SMALL LETTER R (Lower) × [998.0] LATIN SMALL LETTER E (Lower) × [998.0] LATIN SMALL LETTER S (Lower) × [998.0] LATIN SMALL LETTER P (Lower) × [998.0] FULL STOP (ATerm) × [8.0] SPACE (Sp) × [8.0] LATIN SMALL LETTER L (Lower) × [998.0] LATIN SMALL LETTER E (Lower) × [998.0] LATIN SMALL LETTER A (Lower) × [998.0] LATIN SMALL LETTER D (Lower) × [998.0] LATIN SMALL LETTER E (Lower) × [998.0] LATIN SMALL LETTER R (Lower) × [998.0] LATIN SMALL LETTER S (Lower) × [998.0] SPACE (Sp) × [998.0] LATIN SMALL LETTER A (Lower) × [998.0] LATIN SMALL LETTER R (Lower) × [998.0] LATIN SMALL LETTER E (Lower) ÷ [0.3] -÷ 5B57 × 002E ÷ 5B57 ÷ # ÷ [0.2] CJK UNIFIED IDEOGRAPH-5B57 (OLetter) × [998.0] FULL STOP (ATerm) ÷ [11.0] CJK UNIFIED IDEOGRAPH-5B57 (OLetter) ÷ [0.3] -÷ 0065 × 0074 × 0063 × 002E ÷ 5B83 ÷ # ÷ [0.2] LATIN SMALL LETTER E 
(Lower) × [998.0] LATIN SMALL LETTER T (Lower) × [998.0] LATIN SMALL LETTER C (Lower) × [998.0] FULL STOP (ATerm) ÷ [11.0] CJK UNIFIED IDEOGRAPH-5B83 (OLetter) ÷ [0.3] -÷ 0065 × 0074 × 0063 × 002E × 3002 ÷ # ÷ [0.2] LATIN SMALL LETTER E (Lower) × [998.0] LATIN SMALL LETTER T (Lower) × [998.0] LATIN SMALL LETTER C (Lower) × [998.0] FULL STOP (ATerm) × [8.1] IDEOGRAPHIC FULL STOP (STerm) ÷ [0.3] -÷ 5B57 × 3002 ÷ 5B83 ÷ # ÷ [0.2] CJK UNIFIED IDEOGRAPH-5B57 (OLetter) × [998.0] IDEOGRAPHIC FULL STOP (STerm) ÷ [11.0] CJK UNIFIED IDEOGRAPH-5B83 (OLetter) ÷ [0.3] -÷ 0021 × 0020 × 0020 ÷ # ÷ [0.2] EXCLAMATION MARK (STerm) × [9.0] SPACE (Sp) × [10.0] SPACE (Sp) ÷ [0.3] -÷ 2060 × 0028 × 2060 × 0022 × 2060 × 0047 × 2060 × 006F × 2060 × 002E × 2060 × 0022 × 2060 × 0029 × 2060 × 0020 × 2060 ÷ 0028 × 2060 × 0048 × 2060 × 0065 × 2060 × 0020 × 2060 × 0064 × 2060 × 0069 × 2060 × 0064 × 2060 × 002E × 2060 × 0029 × 2060 × 2060 ÷ # ÷ [0.2] WORD JOINER (Format_FE) × [998.0] LEFT PARENTHESIS (Close) × [5.0] WORD JOINER (Format_FE) × [998.0] QUOTATION MARK (Close) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN CAPITAL LETTER G (Upper) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER O (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] FULL STOP (ATerm) × [5.0] WORD JOINER (Format_FE) × [9.0] QUOTATION MARK (Close) × [5.0] WORD JOINER (Format_FE) × [9.0] RIGHT PARENTHESIS (Close) × [5.0] WORD JOINER (Format_FE) × [9.0] SPACE (Sp) × [5.0] WORD JOINER (Format_FE) ÷ [11.0] LEFT PARENTHESIS (Close) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN CAPITAL LETTER H (Upper) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER E (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] SPACE (Sp) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER D (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER I (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER D (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] FULL STOP (ATerm) × [5.0] WORD JOINER (Format_FE) × [9.0] RIGHT PARENTHESIS (Close) × [5.0] WORD JOINER (Format_FE) × [5.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 2060 × 0028 × 2060 × 201C × 2060 × 0047 × 2060 × 006F × 2060 × 003F × 2060 × 201D × 2060 × 0029 × 2060 × 0020 × 2060 ÷ 0028 × 2060 × 0048 × 2060 × 0065 × 2060 × 0020 × 2060 × 0064 × 2060 × 0069 × 2060 × 0064 × 2060 × 002E × 2060 × 0029 × 2060 × 2060 ÷ # ÷ [0.2] WORD JOINER (Format_FE) × [998.0] LEFT PARENTHESIS (Close) × [5.0] WORD JOINER (Format_FE) × [998.0] LEFT DOUBLE QUOTATION MARK (Close) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN CAPITAL LETTER G (Upper) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER O (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] QUESTION MARK (STerm) × [5.0] WORD JOINER (Format_FE) × [9.0] RIGHT DOUBLE QUOTATION MARK (Close) × [5.0] WORD JOINER (Format_FE) × [9.0] RIGHT PARENTHESIS (Close) × [5.0] WORD JOINER (Format_FE) × [9.0] SPACE (Sp) × [5.0] WORD JOINER (Format_FE) ÷ [11.0] LEFT PARENTHESIS (Close) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN CAPITAL LETTER H (Upper) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER E (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] SPACE (Sp) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER D (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER I (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER D (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] FULL STOP (ATerm) × [5.0] WORD JOINER (Format_FE) × [9.0] RIGHT PARENTHESIS (Close) × [5.0] WORD JOINER (Format_FE) × 
[5.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 2060 × 0055 × 2060 × 002E × 2060 × 0053 × 2060 × 002E × 2060 × 0041 × 2060 × 0300 × 002E × 2060 × 0020 × 2060 × 0069 × 2060 × 0073 × 2060 × 2060 ÷ # ÷ [0.2] WORD JOINER (Format_FE) × [998.0] LATIN CAPITAL LETTER U (Upper) × [5.0] WORD JOINER (Format_FE) × [998.0] FULL STOP (ATerm) × [5.0] WORD JOINER (Format_FE) × [7.0] LATIN CAPITAL LETTER S (Upper) × [5.0] WORD JOINER (Format_FE) × [998.0] FULL STOP (ATerm) × [5.0] WORD JOINER (Format_FE) × [7.0] LATIN CAPITAL LETTER A (Upper) × [5.0] WORD JOINER (Format_FE) × [5.0] COMBINING GRAVE ACCENT (Extend_FE) × [998.0] FULL STOP (ATerm) × [5.0] WORD JOINER (Format_FE) × [8.0] SPACE (Sp) × [5.0] WORD JOINER (Format_FE) × [8.0] LATIN SMALL LETTER I (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER S (Lower) × [5.0] WORD JOINER (Format_FE) × [5.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 2060 × 0055 × 2060 × 002E × 2060 × 0053 × 2060 × 002E × 2060 × 0041 × 2060 × 0300 × 003F × 2060 × 0020 × 2060 ÷ 0048 × 2060 × 0065 × 2060 × 2060 ÷ # ÷ [0.2] WORD JOINER (Format_FE) × [998.0] LATIN CAPITAL LETTER U (Upper) × [5.0] WORD JOINER (Format_FE) × [998.0] FULL STOP (ATerm) × [5.0] WORD JOINER (Format_FE) × [7.0] LATIN CAPITAL LETTER S (Upper) × [5.0] WORD JOINER (Format_FE) × [998.0] FULL STOP (ATerm) × [5.0] WORD JOINER (Format_FE) × [7.0] LATIN CAPITAL LETTER A (Upper) × [5.0] WORD JOINER (Format_FE) × [5.0] COMBINING GRAVE ACCENT (Extend_FE) × [998.0] QUESTION MARK (STerm) × [5.0] WORD JOINER (Format_FE) × [9.0] SPACE (Sp) × [5.0] WORD JOINER (Format_FE) ÷ [11.0] LATIN CAPITAL LETTER H (Upper) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER E (Lower) × [5.0] WORD JOINER (Format_FE) × [5.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 2060 × 0055 × 2060 × 002E × 2060 × 0053 × 2060 × 002E × 2060 × 0041 × 2060 × 0300 × 002E × 2060 × 2060 ÷ # ÷ [0.2] WORD JOINER (Format_FE) × [998.0] LATIN CAPITAL LETTER U (Upper) × [5.0] WORD JOINER (Format_FE) × [998.0] FULL STOP (ATerm) × [5.0] WORD JOINER (Format_FE) × [7.0] LATIN CAPITAL LETTER S (Upper) × [5.0] WORD JOINER (Format_FE) × [998.0] FULL STOP (ATerm) × [5.0] WORD JOINER (Format_FE) × [7.0] LATIN CAPITAL LETTER A (Upper) × [5.0] WORD JOINER (Format_FE) × [5.0] COMBINING GRAVE ACCENT (Extend_FE) × [998.0] FULL STOP (ATerm) × [5.0] WORD JOINER (Format_FE) × [5.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 2060 × 0033 × 2060 × 002E × 2060 × 0034 × 2060 × 2060 ÷ # ÷ [0.2] WORD JOINER (Format_FE) × [998.0] DIGIT THREE (Numeric) × [5.0] WORD JOINER (Format_FE) × [998.0] FULL STOP (ATerm) × [5.0] WORD JOINER (Format_FE) × [6.0] DIGIT FOUR (Numeric) × [5.0] WORD JOINER (Format_FE) × [5.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 2060 × 0063 × 2060 × 002E × 2060 × 0064 × 2060 × 2060 ÷ # ÷ [0.2] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER C (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] FULL STOP (ATerm) × [5.0] WORD JOINER (Format_FE) × [8.0] LATIN SMALL LETTER D (Lower) × [5.0] WORD JOINER (Format_FE) × [5.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 2060 × 0043 × 2060 × 002E × 2060 × 0064 × 2060 × 2060 ÷ # ÷ [0.2] WORD JOINER (Format_FE) × [998.0] LATIN CAPITAL LETTER C (Upper) × [5.0] WORD JOINER (Format_FE) × [998.0] FULL STOP (ATerm) × [5.0] WORD JOINER (Format_FE) × [8.0] LATIN SMALL LETTER D (Lower) × [5.0] WORD JOINER (Format_FE) × [5.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 2060 × 0063 × 2060 × 002E × 2060 × 0044 × 2060 × 2060 ÷ # ÷ [0.2] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER C (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] FULL STOP (ATerm) × 
[5.0] WORD JOINER (Format_FE) × [7.0] LATIN CAPITAL LETTER D (Upper) × [5.0] WORD JOINER (Format_FE) × [5.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 2060 × 0043 × 2060 × 002E × 2060 × 0044 × 2060 × 2060 ÷ # ÷ [0.2] WORD JOINER (Format_FE) × [998.0] LATIN CAPITAL LETTER C (Upper) × [5.0] WORD JOINER (Format_FE) × [998.0] FULL STOP (ATerm) × [5.0] WORD JOINER (Format_FE) × [7.0] LATIN CAPITAL LETTER D (Upper) × [5.0] WORD JOINER (Format_FE) × [5.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 2060 × 0065 × 2060 × 0074 × 2060 × 0063 × 2060 × 002E × 2060 × 0029 × 2060 × 2019 × 2060 × 00A0 × 2060 × 0074 × 2060 × 0068 × 2060 × 0065 × 2060 × 2060 ÷ # ÷ [0.2] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER E (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER T (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER C (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] FULL STOP (ATerm) × [5.0] WORD JOINER (Format_FE) × [8.0] RIGHT PARENTHESIS (Close) × [5.0] WORD JOINER (Format_FE) × [8.0] RIGHT SINGLE QUOTATION MARK (Close) × [5.0] WORD JOINER (Format_FE) × [8.0] NO-BREAK SPACE (Sp) × [5.0] WORD JOINER (Format_FE) × [8.0] LATIN SMALL LETTER T (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER H (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER E (Lower) × [5.0] WORD JOINER (Format_FE) × [5.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 2060 × 0065 × 2060 × 0074 × 2060 × 0063 × 2060 × 002E × 2060 × 0029 × 2060 × 2019 × 2060 × 00A0 × 2060 ÷ 0054 × 2060 × 0068 × 2060 × 0065 × 2060 × 2060 ÷ # ÷ [0.2] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER E (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER T (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER C (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] FULL STOP (ATerm) × [5.0] WORD JOINER (Format_FE) × [9.0] RIGHT PARENTHESIS (Close) × [5.0] WORD JOINER (Format_FE) × [9.0] RIGHT SINGLE QUOTATION MARK (Close) × [5.0] WORD JOINER (Format_FE) × [9.0] NO-BREAK SPACE (Sp) × [5.0] WORD JOINER (Format_FE) ÷ [11.0] LATIN CAPITAL LETTER T (Upper) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER H (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER E (Lower) × [5.0] WORD JOINER (Format_FE) × [5.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 2060 × 0065 × 2060 × 0074 × 2060 × 0063 × 2060 × 002E × 2060 × 0029 × 2060 × 2019 × 2060 × 00A0 × 2060 × 2018 × 2060 × 0028 × 2060 × 0074 × 2060 × 0068 × 2060 × 0065 × 2060 × 2060 ÷ # ÷ [0.2] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER E (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER T (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER C (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] FULL STOP (ATerm) × [5.0] WORD JOINER (Format_FE) × [8.0] RIGHT PARENTHESIS (Close) × [5.0] WORD JOINER (Format_FE) × [8.0] RIGHT SINGLE QUOTATION MARK (Close) × [5.0] WORD JOINER (Format_FE) × [8.0] NO-BREAK SPACE (Sp) × [5.0] WORD JOINER (Format_FE) × [8.0] LEFT SINGLE QUOTATION MARK (Close) × [5.0] WORD JOINER (Format_FE) × [998.0] LEFT PARENTHESIS (Close) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER T (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER H (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER E (Lower) × [5.0] WORD JOINER (Format_FE) × [5.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 2060 × 0065 × 2060 × 0074 × 2060 × 0063 × 2060 × 002E × 2060 × 0029 × 2060 × 2019 × 2060 × 00A0 × 2060 ÷ 2018 × 2060 × 0028 × 2060 × 0054 × 2060 × 
0068 × 2060 × 0065 × 2060 × 2060 ÷ # ÷ [0.2] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER E (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER T (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER C (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] FULL STOP (ATerm) × [5.0] WORD JOINER (Format_FE) × [9.0] RIGHT PARENTHESIS (Close) × [5.0] WORD JOINER (Format_FE) × [9.0] RIGHT SINGLE QUOTATION MARK (Close) × [5.0] WORD JOINER (Format_FE) × [9.0] NO-BREAK SPACE (Sp) × [5.0] WORD JOINER (Format_FE) ÷ [11.0] LEFT SINGLE QUOTATION MARK (Close) × [5.0] WORD JOINER (Format_FE) × [998.0] LEFT PARENTHESIS (Close) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN CAPITAL LETTER T (Upper) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER H (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER E (Lower) × [5.0] WORD JOINER (Format_FE) × [5.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 2060 × 0065 × 2060 × 0074 × 2060 × 0063 × 2060 × 002E × 2060 × 0029 × 2060 × 2019 × 2060 × 00A0 × 2060 × 0308 × 0074 × 2060 × 0068 × 2060 × 0065 × 2060 × 2060 ÷ # ÷ [0.2] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER E (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER T (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER C (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] FULL STOP (ATerm) × [5.0] WORD JOINER (Format_FE) × [8.0] RIGHT PARENTHESIS (Close) × [5.0] WORD JOINER (Format_FE) × [8.0] RIGHT SINGLE QUOTATION MARK (Close) × [5.0] WORD JOINER (Format_FE) × [8.0] NO-BREAK SPACE (Sp) × [5.0] WORD JOINER (Format_FE) × [5.0] COMBINING DIAERESIS (Extend_FE) × [8.0] LATIN SMALL LETTER T (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER H (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER E (Lower) × [5.0] WORD JOINER (Format_FE) × [5.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 2060 × 0065 × 2060 × 0074 × 2060 × 0063 × 2060 × 002E × 2060 × 0029 × 2060 × 2019 × 2060 × 00A0 × 2060 × 0308 ÷ 0054 × 2060 × 0068 × 2060 × 0065 × 2060 × 2060 ÷ # ÷ [0.2] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER E (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER T (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER C (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] FULL STOP (ATerm) × [5.0] WORD JOINER (Format_FE) × [9.0] RIGHT PARENTHESIS (Close) × [5.0] WORD JOINER (Format_FE) × [9.0] RIGHT SINGLE QUOTATION MARK (Close) × [5.0] WORD JOINER (Format_FE) × [9.0] NO-BREAK SPACE (Sp) × [5.0] WORD JOINER (Format_FE) × [5.0] COMBINING DIAERESIS (Extend_FE) ÷ [11.0] LATIN CAPITAL LETTER T (Upper) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER H (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER E (Lower) × [5.0] WORD JOINER (Format_FE) × [5.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 2060 × 0065 × 2060 × 0074 × 2060 × 0063 × 2060 × 002E × 2060 × 0029 × 2060 × 2019 × 2060 × 0308 ÷ 0054 × 2060 × 0068 × 2060 × 0065 × 2060 × 2060 ÷ # ÷ [0.2] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER E (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER T (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER C (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] FULL STOP (ATerm) × [5.0] WORD JOINER (Format_FE) × [9.0] RIGHT PARENTHESIS (Close) × [5.0] WORD JOINER (Format_FE) × [9.0] RIGHT SINGLE QUOTATION MARK (Close) × [5.0] WORD JOINER (Format_FE) × [5.0] COMBINING DIAERESIS (Extend_FE) ÷ [11.0] LATIN CAPITAL LETTER T (Upper) × 
[5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER H (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER E (Lower) × [5.0] WORD JOINER (Format_FE) × [5.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 2060 × 0065 × 2060 × 0074 × 2060 × 0063 × 2060 × 002E × 2060 × 0029 × 2060 × 000A ÷ 2060 × 0308 × 2060 × 0054 × 2060 × 0068 × 2060 × 0065 × 2060 × 2060 ÷ # ÷ [0.2] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER E (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER T (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER C (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] FULL STOP (ATerm) × [5.0] WORD JOINER (Format_FE) × [9.0] RIGHT PARENTHESIS (Close) × [5.0] WORD JOINER (Format_FE) × [9.0] (LF) ÷ [4.0] WORD JOINER (Format_FE) × [5.0] COMBINING DIAERESIS (Extend_FE) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN CAPITAL LETTER T (Upper) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER H (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER E (Lower) × [5.0] WORD JOINER (Format_FE) × [5.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 2060 × 0074 × 2060 × 0068 × 2060 × 0065 × 2060 × 0020 × 2060 × 0072 × 2060 × 0065 × 2060 × 0073 × 2060 × 0070 × 2060 × 002E × 2060 × 0020 × 2060 × 006C × 2060 × 0065 × 2060 × 0061 × 2060 × 0064 × 2060 × 0065 × 2060 × 0072 × 2060 × 0073 × 2060 × 0020 × 2060 × 0061 × 2060 × 0072 × 2060 × 0065 × 2060 × 2060 ÷ # ÷ [0.2] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER T (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER H (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER E (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] SPACE (Sp) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER R (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER E (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER S (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER P (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] FULL STOP (ATerm) × [5.0] WORD JOINER (Format_FE) × [8.0] SPACE (Sp) × [5.0] WORD JOINER (Format_FE) × [8.0] LATIN SMALL LETTER L (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER E (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER A (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER D (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER E (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER R (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER S (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] SPACE (Sp) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER A (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER R (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER E (Lower) × [5.0] WORD JOINER (Format_FE) × [5.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 2060 × 5B57 × 2060 × 002E × 2060 ÷ 5B57 × 2060 × 2060 ÷ # ÷ [0.2] WORD JOINER (Format_FE) × [998.0] CJK UNIFIED IDEOGRAPH-5B57 (OLetter) × [5.0] WORD JOINER (Format_FE) × [998.0] FULL STOP (ATerm) × [5.0] WORD JOINER (Format_FE) ÷ [11.0] CJK UNIFIED IDEOGRAPH-5B57 (OLetter) × [5.0] WORD JOINER (Format_FE) × [5.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 2060 × 0065 × 2060 × 0074 × 2060 × 0063 × 2060 × 002E × 2060 ÷ 5B83 × 2060 × 2060 ÷ # ÷ [0.2] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER E (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER T (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN 
SMALL LETTER C (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] FULL STOP (ATerm) × [5.0] WORD JOINER (Format_FE) ÷ [11.0] CJK UNIFIED IDEOGRAPH-5B83 (OLetter) × [5.0] WORD JOINER (Format_FE) × [5.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 2060 × 0065 × 2060 × 0074 × 2060 × 0063 × 2060 × 002E × 2060 × 3002 × 2060 × 2060 ÷ # ÷ [0.2] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER E (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER T (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER C (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] FULL STOP (ATerm) × [5.0] WORD JOINER (Format_FE) × [8.1] IDEOGRAPHIC FULL STOP (STerm) × [5.0] WORD JOINER (Format_FE) × [5.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 2060 × 5B57 × 2060 × 3002 × 2060 ÷ 5B83 × 2060 × 2060 ÷ # ÷ [0.2] WORD JOINER (Format_FE) × [998.0] CJK UNIFIED IDEOGRAPH-5B57 (OLetter) × [5.0] WORD JOINER (Format_FE) × [998.0] IDEOGRAPHIC FULL STOP (STerm) × [5.0] WORD JOINER (Format_FE) ÷ [11.0] CJK UNIFIED IDEOGRAPH-5B83 (OLetter) × [5.0] WORD JOINER (Format_FE) × [5.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 2060 × 0021 × 2060 × 0020 × 2060 × 0020 × 2060 × 2060 ÷ # ÷ [0.2] WORD JOINER (Format_FE) × [998.0] EXCLAMATION MARK (STerm) × [5.0] WORD JOINER (Format_FE) × [9.0] SPACE (Sp) × [5.0] WORD JOINER (Format_FE) × [10.0] SPACE (Sp) × [5.0] WORD JOINER (Format_FE) × [5.0] WORD JOINER (Format_FE) ÷ [0.3] -# -# Lines: 502 -# -# EOF diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/data/WordBreakTest.txt cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/data/WordBreakTest.txt --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/data/WordBreakTest.txt 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/data/WordBreakTest.txt 1970-01-01 00:00:00.000000000 +0000 @@ -1,1851 +0,0 @@ -# WordBreakTest-12.1.0.txt -# Date: 2019-03-10, 10:53:29 GMT -# © 2019 Unicode®, Inc. -# Unicode and the Unicode Logo are registered trademarks of Unicode, Inc. in the U.S. and other countries. -# For terms of use, see http://www.unicode.org/terms_of_use.html -# -# Unicode Character Database -# For documentation, see http://www.unicode.org/reports/tr44/ -# -# Default Word_Break Test -# -# Format: -# (# )? -# contains hex Unicode code points, with -# ÷ wherever there is a break opportunity, and -# × wherever there is not. -# the format can change, but currently it shows: -# - the sample character name -# - (x) the Word_Break property value for the sample character -# - [x] the rule that determines whether there is a break or not, -# as listed in the Rules section of WordBreakTest.html -# -# These samples may be extended or changed in the future. 
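
Each entry in these removed test files pairs a ÷/×-annotated sequence of hex code points with a trailing "#" comment naming the character, its break property value, and the rule number that decided each boundary. A minimal Rust sketch of how one such line could be read (a hypothetical helper for illustration only, assuming the leading "-" diff marker has been stripped; the vendored bstr crate consumes these files through its own generated tables, not this code):

/// Parse one WordBreakTest/SentenceBreakTest entry such as
/// "÷ 0041 × 0308 ÷ 000D ÷ # ..." into the code points and the
/// break/no-break decision before each of them. The trailing "÷"
/// (break at end of text) and the "#" comment are ignored here.
fn parse_break_test_line(line: &str) -> Option<(Vec<char>, Vec<bool>)> {
    // Drop the trailing "# ..." comment, if any.
    let data = line.split('#').next()?.trim();
    if data.is_empty() {
        return None; // comment-only or blank line
    }
    let mut chars = Vec::new();
    let mut breaks = Vec::new(); // breaks[i]: break allowed before chars[i]?
    let mut pending = None;
    for token in data.split_whitespace() {
        match token {
            "÷" => pending = Some(true),  // break opportunity before next char
            "×" => pending = Some(false), // no break before next char
            hex => {
                let cp = u32::from_str_radix(hex, 16).ok()?;
                chars.push(char::from_u32(cp)?);
                breaks.push(pending.take()?);
            }
        }
    }
    Some((chars, breaks))
}

fn main() {
    // An entry taken from the removed WordBreakTest.txt data above.
    let (chars, breaks) =
        parse_break_test_line("÷ 0041 × 0308 ÷ 000D ÷ # example").unwrap();
    assert_eq!(chars, vec!['A', '\u{0308}', '\r']);
    assert_eq!(breaks, vec![true, false, true]);
}
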
-# -÷ 0001 ÷ 0001 ÷ # ÷ [0.2] (Other) ÷ [999.0] (Other) ÷ [0.3] -÷ 0001 × 0308 ÷ 0001 ÷ # ÷ [0.2] (Other) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] (Other) ÷ [0.3] -÷ 0001 ÷ 000D ÷ # ÷ [0.2] (Other) ÷ [3.2] (CR) ÷ [0.3] -÷ 0001 × 0308 ÷ 000D ÷ # ÷ [0.2] (Other) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (CR) ÷ [0.3] -÷ 0001 ÷ 000A ÷ # ÷ [0.2] (Other) ÷ [3.2] (LF) ÷ [0.3] -÷ 0001 × 0308 ÷ 000A ÷ # ÷ [0.2] (Other) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (LF) ÷ [0.3] -÷ 0001 ÷ 000B ÷ # ÷ [0.2] (Other) ÷ [3.2] (Newline) ÷ [0.3] -÷ 0001 × 0308 ÷ 000B ÷ # ÷ [0.2] (Other) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (Newline) ÷ [0.3] -÷ 0001 ÷ 3031 ÷ # ÷ [0.2] (Other) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] -÷ 0001 × 0308 ÷ 3031 ÷ # ÷ [0.2] (Other) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] -÷ 0001 ÷ 0041 ÷ # ÷ [0.2] (Other) ÷ [999.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] -÷ 0001 × 0308 ÷ 0041 ÷ # ÷ [0.2] (Other) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] -÷ 0001 ÷ 003A ÷ # ÷ [0.2] (Other) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0001 × 0308 ÷ 003A ÷ # ÷ [0.2] (Other) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0001 ÷ 002C ÷ # ÷ [0.2] (Other) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0001 × 0308 ÷ 002C ÷ # ÷ [0.2] (Other) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0001 ÷ 002E ÷ # ÷ [0.2] (Other) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] -÷ 0001 × 0308 ÷ 002E ÷ # ÷ [0.2] (Other) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] -÷ 0001 ÷ 0030 ÷ # ÷ [0.2] (Other) ÷ [999.0] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 0001 × 0308 ÷ 0030 ÷ # ÷ [0.2] (Other) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 0001 ÷ 005F ÷ # ÷ [0.2] (Other) ÷ [999.0] LOW LINE (ExtendNumLet) ÷ [0.3] -÷ 0001 × 0308 ÷ 005F ÷ # ÷ [0.2] (Other) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LOW LINE (ExtendNumLet) ÷ [0.3] -÷ 0001 ÷ 1F1E6 ÷ # ÷ [0.2] (Other) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 0001 × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] (Other) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 0001 ÷ 05D0 ÷ # ÷ [0.2] (Other) ÷ [999.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] -÷ 0001 × 0308 ÷ 05D0 ÷ # ÷ [0.2] (Other) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] -÷ 0001 ÷ 0022 ÷ # ÷ [0.2] (Other) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] -÷ 0001 × 0308 ÷ 0022 ÷ # ÷ [0.2] (Other) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] -÷ 0001 ÷ 0027 ÷ # ÷ [0.2] (Other) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0001 × 0308 ÷ 0027 ÷ # ÷ [0.2] (Other) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0001 ÷ 231A ÷ # ÷ [0.2] (Other) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] -÷ 0001 × 0308 ÷ 231A ÷ # ÷ [0.2] (Other) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] -÷ 0001 ÷ 0020 ÷ # ÷ [0.2] (Other) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] -÷ 0001 × 0308 ÷ 0020 ÷ # ÷ [0.2] (Other) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] -÷ 0001 × 00AD ÷ # ÷ [0.2] (Other) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 0001 × 0308 × 00AD ÷ # ÷ [0.2] (Other) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 0001 × 0300 ÷ # ÷ [0.2] (Other) × 
[4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 0001 × 0308 × 0300 ÷ # ÷ [0.2] (Other) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 0001 × 200D ÷ # ÷ [0.2] (Other) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] -÷ 0001 × 0308 × 200D ÷ # ÷ [0.2] (Other) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] -÷ 0001 ÷ 0061 × 2060 ÷ # ÷ [0.2] (Other) ÷ [999.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0001 × 0308 ÷ 0061 × 2060 ÷ # ÷ [0.2] (Other) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0001 ÷ 0061 ÷ 003A ÷ # ÷ [0.2] (Other) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0001 × 0308 ÷ 0061 ÷ 003A ÷ # ÷ [0.2] (Other) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0001 ÷ 0061 ÷ 0027 ÷ # ÷ [0.2] (Other) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0001 × 0308 ÷ 0061 ÷ 0027 ÷ # ÷ [0.2] (Other) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0001 ÷ 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] (Other) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0001 × 0308 ÷ 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] (Other) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0001 ÷ 0061 ÷ 002C ÷ # ÷ [0.2] (Other) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0001 × 0308 ÷ 0061 ÷ 002C ÷ # ÷ [0.2] (Other) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0001 ÷ 0031 ÷ 003A ÷ # ÷ [0.2] (Other) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0001 × 0308 ÷ 0031 ÷ 003A ÷ # ÷ [0.2] (Other) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0001 ÷ 0031 ÷ 0027 ÷ # ÷ [0.2] (Other) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0001 × 0308 ÷ 0031 ÷ 0027 ÷ # ÷ [0.2] (Other) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0001 ÷ 0031 ÷ 002C ÷ # ÷ [0.2] (Other) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0001 × 0308 ÷ 0031 ÷ 002C ÷ # ÷ [0.2] (Other) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0001 ÷ 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] (Other) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0001 × 0308 ÷ 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] (Other) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 000D ÷ 0001 ÷ # ÷ [0.2] (CR) ÷ [3.1] (Other) ÷ [0.3] -÷ 000D ÷ 0308 ÷ 0001 ÷ # ÷ [0.2] (CR) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] (Other) ÷ [0.3] -÷ 000D ÷ 000D ÷ # ÷ [0.2] (CR) ÷ [3.1] (CR) ÷ [0.3] -÷ 000D ÷ 0308 ÷ 000D ÷ # ÷ [0.2] (CR) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (CR) ÷ [0.3] -÷ 000D × 000A ÷ # ÷ [0.2] (CR) × [3.0] (LF) ÷ [0.3] -÷ 000D ÷ 0308 ÷ 000A ÷ # ÷ [0.2] (CR) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (LF) ÷ [0.3] -÷ 000D ÷ 000B ÷ 
# ÷ [0.2] (CR) ÷ [3.1] (Newline) ÷ [0.3] -÷ 000D ÷ 0308 ÷ 000B ÷ # ÷ [0.2] (CR) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (Newline) ÷ [0.3] -÷ 000D ÷ 3031 ÷ # ÷ [0.2] (CR) ÷ [3.1] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] -÷ 000D ÷ 0308 ÷ 3031 ÷ # ÷ [0.2] (CR) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] -÷ 000D ÷ 0041 ÷ # ÷ [0.2] (CR) ÷ [3.1] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] -÷ 000D ÷ 0308 ÷ 0041 ÷ # ÷ [0.2] (CR) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] -÷ 000D ÷ 003A ÷ # ÷ [0.2] (CR) ÷ [3.1] COLON (MidLetter) ÷ [0.3] -÷ 000D ÷ 0308 ÷ 003A ÷ # ÷ [0.2] (CR) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 000D ÷ 002C ÷ # ÷ [0.2] (CR) ÷ [3.1] COMMA (MidNum) ÷ [0.3] -÷ 000D ÷ 0308 ÷ 002C ÷ # ÷ [0.2] (CR) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 000D ÷ 002E ÷ # ÷ [0.2] (CR) ÷ [3.1] FULL STOP (MidNumLet) ÷ [0.3] -÷ 000D ÷ 0308 ÷ 002E ÷ # ÷ [0.2] (CR) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] -÷ 000D ÷ 0030 ÷ # ÷ [0.2] (CR) ÷ [3.1] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 000D ÷ 0308 ÷ 0030 ÷ # ÷ [0.2] (CR) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 000D ÷ 005F ÷ # ÷ [0.2] (CR) ÷ [3.1] LOW LINE (ExtendNumLet) ÷ [0.3] -÷ 000D ÷ 0308 ÷ 005F ÷ # ÷ [0.2] (CR) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LOW LINE (ExtendNumLet) ÷ [0.3] -÷ 000D ÷ 1F1E6 ÷ # ÷ [0.2] (CR) ÷ [3.1] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 000D ÷ 0308 ÷ 1F1E6 ÷ # ÷ [0.2] (CR) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 000D ÷ 05D0 ÷ # ÷ [0.2] (CR) ÷ [3.1] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] -÷ 000D ÷ 0308 ÷ 05D0 ÷ # ÷ [0.2] (CR) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] -÷ 000D ÷ 0022 ÷ # ÷ [0.2] (CR) ÷ [3.1] QUOTATION MARK (Double_Quote) ÷ [0.3] -÷ 000D ÷ 0308 ÷ 0022 ÷ # ÷ [0.2] (CR) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] -÷ 000D ÷ 0027 ÷ # ÷ [0.2] (CR) ÷ [3.1] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 000D ÷ 0308 ÷ 0027 ÷ # ÷ [0.2] (CR) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 000D ÷ 231A ÷ # ÷ [0.2] (CR) ÷ [3.1] WATCH (ExtPict) ÷ [0.3] -÷ 000D ÷ 0308 ÷ 231A ÷ # ÷ [0.2] (CR) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] -÷ 000D ÷ 0020 ÷ # ÷ [0.2] (CR) ÷ [3.1] SPACE (WSegSpace) ÷ [0.3] -÷ 000D ÷ 0308 ÷ 0020 ÷ # ÷ [0.2] (CR) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] -÷ 000D ÷ 00AD ÷ # ÷ [0.2] (CR) ÷ [3.1] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 000D ÷ 0308 × 00AD ÷ # ÷ [0.2] (CR) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 000D ÷ 0300 ÷ # ÷ [0.2] (CR) ÷ [3.1] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 000D ÷ 0308 × 0300 ÷ # ÷ [0.2] (CR) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 000D ÷ 200D ÷ # ÷ [0.2] (CR) ÷ [3.1] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] -÷ 000D ÷ 0308 × 200D ÷ # ÷ [0.2] (CR) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] -÷ 000D ÷ 0061 × 2060 ÷ # ÷ [0.2] (CR) ÷ [3.1] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 000D ÷ 0308 ÷ 0061 × 2060 ÷ # ÷ [0.2] (CR) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) × 
[4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 000D ÷ 0061 ÷ 003A ÷ # ÷ [0.2] (CR) ÷ [3.1] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 000D ÷ 0308 ÷ 0061 ÷ 003A ÷ # ÷ [0.2] (CR) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 000D ÷ 0061 ÷ 0027 ÷ # ÷ [0.2] (CR) ÷ [3.1] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 000D ÷ 0308 ÷ 0061 ÷ 0027 ÷ # ÷ [0.2] (CR) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 000D ÷ 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] (CR) ÷ [3.1] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 000D ÷ 0308 ÷ 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] (CR) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 000D ÷ 0061 ÷ 002C ÷ # ÷ [0.2] (CR) ÷ [3.1] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 000D ÷ 0308 ÷ 0061 ÷ 002C ÷ # ÷ [0.2] (CR) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 000D ÷ 0031 ÷ 003A ÷ # ÷ [0.2] (CR) ÷ [3.1] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 000D ÷ 0308 ÷ 0031 ÷ 003A ÷ # ÷ [0.2] (CR) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 000D ÷ 0031 ÷ 0027 ÷ # ÷ [0.2] (CR) ÷ [3.1] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 000D ÷ 0308 ÷ 0031 ÷ 0027 ÷ # ÷ [0.2] (CR) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 000D ÷ 0031 ÷ 002C ÷ # ÷ [0.2] (CR) ÷ [3.1] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 000D ÷ 0308 ÷ 0031 ÷ 002C ÷ # ÷ [0.2] (CR) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 000D ÷ 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] (CR) ÷ [3.1] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 000D ÷ 0308 ÷ 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] (CR) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 000A ÷ 0001 ÷ # ÷ [0.2] (LF) ÷ [3.1] (Other) ÷ [0.3] -÷ 000A ÷ 0308 ÷ 0001 ÷ # ÷ [0.2] (LF) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] (Other) ÷ [0.3] -÷ 000A ÷ 000D ÷ # ÷ [0.2] (LF) ÷ [3.1] (CR) ÷ [0.3] -÷ 000A ÷ 0308 ÷ 000D ÷ # ÷ [0.2] (LF) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (CR) ÷ [0.3] -÷ 000A ÷ 000A ÷ # ÷ [0.2] (LF) ÷ [3.1] (LF) ÷ [0.3] -÷ 000A ÷ 0308 ÷ 000A ÷ # ÷ [0.2] (LF) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (LF) ÷ [0.3] -÷ 000A ÷ 000B ÷ # ÷ [0.2] (LF) ÷ [3.1] (Newline) ÷ [0.3] -÷ 000A ÷ 0308 ÷ 000B ÷ # ÷ [0.2] (LF) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (Newline) ÷ [0.3] -÷ 000A ÷ 3031 ÷ # ÷ [0.2] (LF) ÷ [3.1] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] -÷ 000A ÷ 0308 ÷ 3031 ÷ # ÷ [0.2] (LF) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] -÷ 000A ÷ 0041 ÷ # ÷ [0.2] (LF) ÷ [3.1] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] -÷ 000A ÷ 0308 ÷ 0041 ÷ # ÷ [0.2] (LF) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] -÷ 000A ÷ 003A ÷ # ÷ [0.2] (LF) ÷ [3.1] COLON (MidLetter) ÷ [0.3] -÷ 000A ÷ 0308 ÷ 003A ÷ # ÷ [0.2] (LF) ÷ [3.1] COMBINING 
DIAERESIS (Extend_FE) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 000A ÷ 002C ÷ # ÷ [0.2] (LF) ÷ [3.1] COMMA (MidNum) ÷ [0.3] -÷ 000A ÷ 0308 ÷ 002C ÷ # ÷ [0.2] (LF) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 000A ÷ 002E ÷ # ÷ [0.2] (LF) ÷ [3.1] FULL STOP (MidNumLet) ÷ [0.3] -÷ 000A ÷ 0308 ÷ 002E ÷ # ÷ [0.2] (LF) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] -÷ 000A ÷ 0030 ÷ # ÷ [0.2] (LF) ÷ [3.1] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 000A ÷ 0308 ÷ 0030 ÷ # ÷ [0.2] (LF) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 000A ÷ 005F ÷ # ÷ [0.2] (LF) ÷ [3.1] LOW LINE (ExtendNumLet) ÷ [0.3] -÷ 000A ÷ 0308 ÷ 005F ÷ # ÷ [0.2] (LF) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LOW LINE (ExtendNumLet) ÷ [0.3] -÷ 000A ÷ 1F1E6 ÷ # ÷ [0.2] (LF) ÷ [3.1] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 000A ÷ 0308 ÷ 1F1E6 ÷ # ÷ [0.2] (LF) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 000A ÷ 05D0 ÷ # ÷ [0.2] (LF) ÷ [3.1] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] -÷ 000A ÷ 0308 ÷ 05D0 ÷ # ÷ [0.2] (LF) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] -÷ 000A ÷ 0022 ÷ # ÷ [0.2] (LF) ÷ [3.1] QUOTATION MARK (Double_Quote) ÷ [0.3] -÷ 000A ÷ 0308 ÷ 0022 ÷ # ÷ [0.2] (LF) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] -÷ 000A ÷ 0027 ÷ # ÷ [0.2] (LF) ÷ [3.1] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 000A ÷ 0308 ÷ 0027 ÷ # ÷ [0.2] (LF) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 000A ÷ 231A ÷ # ÷ [0.2] (LF) ÷ [3.1] WATCH (ExtPict) ÷ [0.3] -÷ 000A ÷ 0308 ÷ 231A ÷ # ÷ [0.2] (LF) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] -÷ 000A ÷ 0020 ÷ # ÷ [0.2] (LF) ÷ [3.1] SPACE (WSegSpace) ÷ [0.3] -÷ 000A ÷ 0308 ÷ 0020 ÷ # ÷ [0.2] (LF) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] -÷ 000A ÷ 00AD ÷ # ÷ [0.2] (LF) ÷ [3.1] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 000A ÷ 0308 × 00AD ÷ # ÷ [0.2] (LF) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 000A ÷ 0300 ÷ # ÷ [0.2] (LF) ÷ [3.1] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 000A ÷ 0308 × 0300 ÷ # ÷ [0.2] (LF) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 000A ÷ 200D ÷ # ÷ [0.2] (LF) ÷ [3.1] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] -÷ 000A ÷ 0308 × 200D ÷ # ÷ [0.2] (LF) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] -÷ 000A ÷ 0061 × 2060 ÷ # ÷ [0.2] (LF) ÷ [3.1] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 000A ÷ 0308 ÷ 0061 × 2060 ÷ # ÷ [0.2] (LF) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 000A ÷ 0061 ÷ 003A ÷ # ÷ [0.2] (LF) ÷ [3.1] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 000A ÷ 0308 ÷ 0061 ÷ 003A ÷ # ÷ [0.2] (LF) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 000A ÷ 0061 ÷ 0027 ÷ # ÷ [0.2] (LF) ÷ [3.1] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 000A ÷ 0308 ÷ 0061 ÷ 0027 ÷ # ÷ [0.2] (LF) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 000A ÷ 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] (LF) ÷ [3.1] LATIN SMALL LETTER A (ALetter) ÷ [999.0] 
APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 000A ÷ 0308 ÷ 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] (LF) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 000A ÷ 0061 ÷ 002C ÷ # ÷ [0.2] (LF) ÷ [3.1] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 000A ÷ 0308 ÷ 0061 ÷ 002C ÷ # ÷ [0.2] (LF) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 000A ÷ 0031 ÷ 003A ÷ # ÷ [0.2] (LF) ÷ [3.1] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 000A ÷ 0308 ÷ 0031 ÷ 003A ÷ # ÷ [0.2] (LF) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 000A ÷ 0031 ÷ 0027 ÷ # ÷ [0.2] (LF) ÷ [3.1] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 000A ÷ 0308 ÷ 0031 ÷ 0027 ÷ # ÷ [0.2] (LF) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 000A ÷ 0031 ÷ 002C ÷ # ÷ [0.2] (LF) ÷ [3.1] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 000A ÷ 0308 ÷ 0031 ÷ 002C ÷ # ÷ [0.2] (LF) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 000A ÷ 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] (LF) ÷ [3.1] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 000A ÷ 0308 ÷ 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] (LF) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 000B ÷ 0001 ÷ # ÷ [0.2] (Newline) ÷ [3.1] (Other) ÷ [0.3] -÷ 000B ÷ 0308 ÷ 0001 ÷ # ÷ [0.2] (Newline) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] (Other) ÷ [0.3] -÷ 000B ÷ 000D ÷ # ÷ [0.2] (Newline) ÷ [3.1] (CR) ÷ [0.3] -÷ 000B ÷ 0308 ÷ 000D ÷ # ÷ [0.2] (Newline) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (CR) ÷ [0.3] -÷ 000B ÷ 000A ÷ # ÷ [0.2] (Newline) ÷ [3.1] (LF) ÷ [0.3] -÷ 000B ÷ 0308 ÷ 000A ÷ # ÷ [0.2] (Newline) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (LF) ÷ [0.3] -÷ 000B ÷ 000B ÷ # ÷ [0.2] (Newline) ÷ [3.1] (Newline) ÷ [0.3] -÷ 000B ÷ 0308 ÷ 000B ÷ # ÷ [0.2] (Newline) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (Newline) ÷ [0.3] -÷ 000B ÷ 3031 ÷ # ÷ [0.2] (Newline) ÷ [3.1] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] -÷ 000B ÷ 0308 ÷ 3031 ÷ # ÷ [0.2] (Newline) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] -÷ 000B ÷ 0041 ÷ # ÷ [0.2] (Newline) ÷ [3.1] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] -÷ 000B ÷ 0308 ÷ 0041 ÷ # ÷ [0.2] (Newline) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] -÷ 000B ÷ 003A ÷ # ÷ [0.2] (Newline) ÷ [3.1] COLON (MidLetter) ÷ [0.3] -÷ 000B ÷ 0308 ÷ 003A ÷ # ÷ [0.2] (Newline) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 000B ÷ 002C ÷ # ÷ [0.2] (Newline) ÷ [3.1] COMMA (MidNum) ÷ [0.3] -÷ 000B ÷ 0308 ÷ 002C ÷ # ÷ [0.2] (Newline) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 000B ÷ 002E ÷ # ÷ [0.2] (Newline) ÷ [3.1] FULL STOP (MidNumLet) ÷ [0.3] -÷ 000B ÷ 0308 ÷ 002E ÷ # ÷ [0.2] (Newline) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] -÷ 000B ÷ 0030 ÷ # ÷ [0.2] (Newline) ÷ [3.1] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 000B ÷ 0308 ÷ 0030 ÷ # ÷ [0.2] (Newline) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ZERO (Numeric) ÷ [0.3] 
-÷ 000B ÷ 005F ÷ # ÷ [0.2] (Newline) ÷ [3.1] LOW LINE (ExtendNumLet) ÷ [0.3] -÷ 000B ÷ 0308 ÷ 005F ÷ # ÷ [0.2] (Newline) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LOW LINE (ExtendNumLet) ÷ [0.3] -÷ 000B ÷ 1F1E6 ÷ # ÷ [0.2] (Newline) ÷ [3.1] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 000B ÷ 0308 ÷ 1F1E6 ÷ # ÷ [0.2] (Newline) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 000B ÷ 05D0 ÷ # ÷ [0.2] (Newline) ÷ [3.1] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] -÷ 000B ÷ 0308 ÷ 05D0 ÷ # ÷ [0.2] (Newline) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] -÷ 000B ÷ 0022 ÷ # ÷ [0.2] (Newline) ÷ [3.1] QUOTATION MARK (Double_Quote) ÷ [0.3] -÷ 000B ÷ 0308 ÷ 0022 ÷ # ÷ [0.2] (Newline) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] -÷ 000B ÷ 0027 ÷ # ÷ [0.2] (Newline) ÷ [3.1] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 000B ÷ 0308 ÷ 0027 ÷ # ÷ [0.2] (Newline) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 000B ÷ 231A ÷ # ÷ [0.2] (Newline) ÷ [3.1] WATCH (ExtPict) ÷ [0.3] -÷ 000B ÷ 0308 ÷ 231A ÷ # ÷ [0.2] (Newline) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] -÷ 000B ÷ 0020 ÷ # ÷ [0.2] (Newline) ÷ [3.1] SPACE (WSegSpace) ÷ [0.3] -÷ 000B ÷ 0308 ÷ 0020 ÷ # ÷ [0.2] (Newline) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] -÷ 000B ÷ 00AD ÷ # ÷ [0.2] (Newline) ÷ [3.1] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 000B ÷ 0308 × 00AD ÷ # ÷ [0.2] (Newline) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 000B ÷ 0300 ÷ # ÷ [0.2] (Newline) ÷ [3.1] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 000B ÷ 0308 × 0300 ÷ # ÷ [0.2] (Newline) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 000B ÷ 200D ÷ # ÷ [0.2] (Newline) ÷ [3.1] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] -÷ 000B ÷ 0308 × 200D ÷ # ÷ [0.2] (Newline) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] -÷ 000B ÷ 0061 × 2060 ÷ # ÷ [0.2] (Newline) ÷ [3.1] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 000B ÷ 0308 ÷ 0061 × 2060 ÷ # ÷ [0.2] (Newline) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 000B ÷ 0061 ÷ 003A ÷ # ÷ [0.2] (Newline) ÷ [3.1] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 000B ÷ 0308 ÷ 0061 ÷ 003A ÷ # ÷ [0.2] (Newline) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 000B ÷ 0061 ÷ 0027 ÷ # ÷ [0.2] (Newline) ÷ [3.1] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 000B ÷ 0308 ÷ 0061 ÷ 0027 ÷ # ÷ [0.2] (Newline) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 000B ÷ 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] (Newline) ÷ [3.1] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 000B ÷ 0308 ÷ 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] (Newline) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 000B ÷ 0061 ÷ 002C ÷ # ÷ [0.2] (Newline) ÷ [3.1] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 000B ÷ 0308 ÷ 0061 ÷ 002C ÷ # ÷ [0.2] (Newline) ÷ [3.1] COMBINING 
DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 000B ÷ 0031 ÷ 003A ÷ # ÷ [0.2] (Newline) ÷ [3.1] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 000B ÷ 0308 ÷ 0031 ÷ 003A ÷ # ÷ [0.2] (Newline) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 000B ÷ 0031 ÷ 0027 ÷ # ÷ [0.2] (Newline) ÷ [3.1] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 000B ÷ 0308 ÷ 0031 ÷ 0027 ÷ # ÷ [0.2] (Newline) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 000B ÷ 0031 ÷ 002C ÷ # ÷ [0.2] (Newline) ÷ [3.1] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 000B ÷ 0308 ÷ 0031 ÷ 002C ÷ # ÷ [0.2] (Newline) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 000B ÷ 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] (Newline) ÷ [3.1] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 000B ÷ 0308 ÷ 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] (Newline) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 3031 ÷ 0001 ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) ÷ [999.0] (Other) ÷ [0.3] -÷ 3031 × 0308 ÷ 0001 ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] (Other) ÷ [0.3] -÷ 3031 ÷ 000D ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) ÷ [3.2] (CR) ÷ [0.3] -÷ 3031 × 0308 ÷ 000D ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (CR) ÷ [0.3] -÷ 3031 ÷ 000A ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) ÷ [3.2] (LF) ÷ [0.3] -÷ 3031 × 0308 ÷ 000A ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (LF) ÷ [0.3] -÷ 3031 ÷ 000B ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) ÷ [3.2] (Newline) ÷ [0.3] -÷ 3031 × 0308 ÷ 000B ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (Newline) ÷ [0.3] -÷ 3031 × 3031 ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) × [13.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] -÷ 3031 × 0308 × 3031 ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) × [4.0] COMBINING DIAERESIS (Extend_FE) × [13.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] -÷ 3031 ÷ 0041 ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) ÷ [999.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] -÷ 3031 × 0308 ÷ 0041 ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] -÷ 3031 ÷ 003A ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 3031 × 0308 ÷ 003A ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 3031 ÷ 002C ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 3031 × 0308 ÷ 002C ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 3031 ÷ 002E ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] -÷ 3031 × 0308 ÷ 002E ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] -÷ 3031 ÷ 0030 ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) ÷ [999.0] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 3031 
× 0308 ÷ 0030 ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 3031 × 005F ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) × [13.1] LOW LINE (ExtendNumLet) ÷ [0.3] -÷ 3031 × 0308 × 005F ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) × [4.0] COMBINING DIAERESIS (Extend_FE) × [13.1] LOW LINE (ExtendNumLet) ÷ [0.3] -÷ 3031 ÷ 1F1E6 ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 3031 × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 3031 ÷ 05D0 ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) ÷ [999.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] -÷ 3031 × 0308 ÷ 05D0 ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] -÷ 3031 ÷ 0022 ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] -÷ 3031 × 0308 ÷ 0022 ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] -÷ 3031 ÷ 0027 ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 3031 × 0308 ÷ 0027 ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 3031 ÷ 231A ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] -÷ 3031 × 0308 ÷ 231A ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] -÷ 3031 ÷ 0020 ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] -÷ 3031 × 0308 ÷ 0020 ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] -÷ 3031 × 00AD ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 3031 × 0308 × 00AD ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 3031 × 0300 ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 3031 × 0308 × 0300 ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 3031 × 200D ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] -÷ 3031 × 0308 × 200D ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] -÷ 3031 ÷ 0061 × 2060 ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) ÷ [999.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 3031 × 0308 ÷ 0061 × 2060 ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 3031 ÷ 0061 ÷ 003A ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 3031 × 0308 ÷ 0061 ÷ 003A ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 3031 ÷ 0061 ÷ 0027 ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) 
÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 3031 × 0308 ÷ 0061 ÷ 0027 ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 3031 ÷ 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 3031 × 0308 ÷ 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 3031 ÷ 0061 ÷ 002C ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 3031 × 0308 ÷ 0061 ÷ 002C ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 3031 ÷ 0031 ÷ 003A ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 3031 × 0308 ÷ 0031 ÷ 003A ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 3031 ÷ 0031 ÷ 0027 ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 3031 × 0308 ÷ 0031 ÷ 0027 ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 3031 ÷ 0031 ÷ 002C ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 3031 × 0308 ÷ 0031 ÷ 002C ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 3031 ÷ 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 3031 × 0308 ÷ 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0041 ÷ 0001 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) ÷ [999.0] (Other) ÷ [0.3] -÷ 0041 × 0308 ÷ 0001 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] (Other) ÷ [0.3] -÷ 0041 ÷ 000D ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) ÷ [3.2] (CR) ÷ [0.3] -÷ 0041 × 0308 ÷ 000D ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (CR) ÷ [0.3] -÷ 0041 ÷ 000A ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) ÷ [3.2] (LF) ÷ [0.3] -÷ 0041 × 0308 ÷ 000A ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (LF) ÷ [0.3] -÷ 0041 ÷ 000B ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) ÷ [3.2] (Newline) ÷ [0.3] -÷ 0041 × 0308 ÷ 000B ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (Newline) ÷ [0.3] -÷ 0041 ÷ 3031 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] -÷ 0041 × 0308 ÷ 3031 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] VERTICAL KANA REPEAT 
MARK (Katakana) ÷ [0.3] -÷ 0041 × 0041 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [5.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] -÷ 0041 × 0308 × 0041 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [5.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] -÷ 0041 ÷ 003A ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0041 × 0308 ÷ 003A ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0041 ÷ 002C ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0041 × 0308 ÷ 002C ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0041 ÷ 002E ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] -÷ 0041 × 0308 ÷ 002E ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] -÷ 0041 × 0030 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [9.0] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 0041 × 0308 × 0030 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [9.0] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 0041 × 005F ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [13.1] LOW LINE (ExtendNumLet) ÷ [0.3] -÷ 0041 × 0308 × 005F ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [13.1] LOW LINE (ExtendNumLet) ÷ [0.3] -÷ 0041 ÷ 1F1E6 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 0041 × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 0041 × 05D0 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [5.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] -÷ 0041 × 0308 × 05D0 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [5.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] -÷ 0041 ÷ 0022 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] -÷ 0041 × 0308 ÷ 0022 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] -÷ 0041 ÷ 0027 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0041 × 0308 ÷ 0027 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0041 ÷ 231A ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] -÷ 0041 × 0308 ÷ 231A ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] -÷ 0041 ÷ 0020 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] -÷ 0041 × 0308 ÷ 0020 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] -÷ 0041 × 00AD ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 0041 × 0308 × 00AD ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 0041 × 0300 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 0041 × 0308 × 0300 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ 
[0.3] -÷ 0041 × 200D ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] -÷ 0041 × 0308 × 200D ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] -÷ 0041 × 0061 × 2060 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [5.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0041 × 0308 × 0061 × 2060 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [5.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0041 × 0061 ÷ 003A ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [5.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0041 × 0308 × 0061 ÷ 003A ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [5.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0041 × 0061 ÷ 0027 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [5.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0041 × 0308 × 0061 ÷ 0027 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [5.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0041 × 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [5.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0041 × 0308 × 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [5.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0041 × 0061 ÷ 002C ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [5.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0041 × 0308 × 0061 ÷ 002C ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [5.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0041 × 0031 ÷ 003A ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [9.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0041 × 0308 × 0031 ÷ 003A ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [9.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0041 × 0031 ÷ 0027 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [9.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0041 × 0308 × 0031 ÷ 0027 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [9.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0041 × 0031 ÷ 002C ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [9.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0041 × 0308 × 0031 ÷ 002C ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [9.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0041 × 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [9.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0041 × 0308 × 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [9.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 003A ÷ 0001 ÷ # ÷ [0.2] COLON (MidLetter) ÷ [999.0] (Other) ÷ [0.3] -÷ 003A × 0308 ÷ 0001 ÷ # ÷ [0.2] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] (Other) ÷ [0.3] -÷ 003A 
÷ 000D ÷ # ÷ [0.2] COLON (MidLetter) ÷ [3.2] (CR) ÷ [0.3] -÷ 003A × 0308 ÷ 000D ÷ # ÷ [0.2] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (CR) ÷ [0.3] -÷ 003A ÷ 000A ÷ # ÷ [0.2] COLON (MidLetter) ÷ [3.2] (LF) ÷ [0.3] -÷ 003A × 0308 ÷ 000A ÷ # ÷ [0.2] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (LF) ÷ [0.3] -÷ 003A ÷ 000B ÷ # ÷ [0.2] COLON (MidLetter) ÷ [3.2] (Newline) ÷ [0.3] -÷ 003A × 0308 ÷ 000B ÷ # ÷ [0.2] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (Newline) ÷ [0.3] -÷ 003A ÷ 3031 ÷ # ÷ [0.2] COLON (MidLetter) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] -÷ 003A × 0308 ÷ 3031 ÷ # ÷ [0.2] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] -÷ 003A ÷ 0041 ÷ # ÷ [0.2] COLON (MidLetter) ÷ [999.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] -÷ 003A × 0308 ÷ 0041 ÷ # ÷ [0.2] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] -÷ 003A ÷ 003A ÷ # ÷ [0.2] COLON (MidLetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 003A × 0308 ÷ 003A ÷ # ÷ [0.2] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 003A ÷ 002C ÷ # ÷ [0.2] COLON (MidLetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 003A × 0308 ÷ 002C ÷ # ÷ [0.2] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 003A ÷ 002E ÷ # ÷ [0.2] COLON (MidLetter) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] -÷ 003A × 0308 ÷ 002E ÷ # ÷ [0.2] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] -÷ 003A ÷ 0030 ÷ # ÷ [0.2] COLON (MidLetter) ÷ [999.0] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 003A × 0308 ÷ 0030 ÷ # ÷ [0.2] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 003A ÷ 005F ÷ # ÷ [0.2] COLON (MidLetter) ÷ [999.0] LOW LINE (ExtendNumLet) ÷ [0.3] -÷ 003A × 0308 ÷ 005F ÷ # ÷ [0.2] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LOW LINE (ExtendNumLet) ÷ [0.3] -÷ 003A ÷ 1F1E6 ÷ # ÷ [0.2] COLON (MidLetter) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 003A × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 003A ÷ 05D0 ÷ # ÷ [0.2] COLON (MidLetter) ÷ [999.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] -÷ 003A × 0308 ÷ 05D0 ÷ # ÷ [0.2] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] -÷ 003A ÷ 0022 ÷ # ÷ [0.2] COLON (MidLetter) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] -÷ 003A × 0308 ÷ 0022 ÷ # ÷ [0.2] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] -÷ 003A ÷ 0027 ÷ # ÷ [0.2] COLON (MidLetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 003A × 0308 ÷ 0027 ÷ # ÷ [0.2] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 003A ÷ 231A ÷ # ÷ [0.2] COLON (MidLetter) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] -÷ 003A × 0308 ÷ 231A ÷ # ÷ [0.2] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] -÷ 003A ÷ 0020 ÷ # ÷ [0.2] COLON (MidLetter) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] -÷ 003A × 0308 ÷ 0020 ÷ # ÷ [0.2] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] -÷ 003A × 00AD ÷ # ÷ [0.2] COLON (MidLetter) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 003A × 
0308 × 00AD ÷ # ÷ [0.2] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 003A × 0300 ÷ # ÷ [0.2] COLON (MidLetter) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 003A × 0308 × 0300 ÷ # ÷ [0.2] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 003A × 200D ÷ # ÷ [0.2] COLON (MidLetter) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] -÷ 003A × 0308 × 200D ÷ # ÷ [0.2] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] -÷ 003A ÷ 0061 × 2060 ÷ # ÷ [0.2] COLON (MidLetter) ÷ [999.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 003A × 0308 ÷ 0061 × 2060 ÷ # ÷ [0.2] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 003A ÷ 0061 ÷ 003A ÷ # ÷ [0.2] COLON (MidLetter) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 003A × 0308 ÷ 0061 ÷ 003A ÷ # ÷ [0.2] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 003A ÷ 0061 ÷ 0027 ÷ # ÷ [0.2] COLON (MidLetter) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 003A × 0308 ÷ 0061 ÷ 0027 ÷ # ÷ [0.2] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 003A ÷ 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] COLON (MidLetter) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 003A × 0308 ÷ 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 003A ÷ 0061 ÷ 002C ÷ # ÷ [0.2] COLON (MidLetter) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 003A × 0308 ÷ 0061 ÷ 002C ÷ # ÷ [0.2] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 003A ÷ 0031 ÷ 003A ÷ # ÷ [0.2] COLON (MidLetter) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 003A × 0308 ÷ 0031 ÷ 003A ÷ # ÷ [0.2] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 003A ÷ 0031 ÷ 0027 ÷ # ÷ [0.2] COLON (MidLetter) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 003A × 0308 ÷ 0031 ÷ 0027 ÷ # ÷ [0.2] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 003A ÷ 0031 ÷ 002C ÷ # ÷ [0.2] COLON (MidLetter) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 003A × 0308 ÷ 0031 ÷ 002C ÷ # ÷ [0.2] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 003A ÷ 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] COLON (MidLetter) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 003A × 0308 ÷ 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 002C ÷ 0001 ÷ # ÷ [0.2] COMMA (MidNum) ÷ [999.0] (Other) ÷ [0.3] -÷ 002C × 0308 ÷ 0001 ÷ # ÷ [0.2] 
COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] (Other) ÷ [0.3] -÷ 002C ÷ 000D ÷ # ÷ [0.2] COMMA (MidNum) ÷ [3.2] (CR) ÷ [0.3] -÷ 002C × 0308 ÷ 000D ÷ # ÷ [0.2] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (CR) ÷ [0.3] -÷ 002C ÷ 000A ÷ # ÷ [0.2] COMMA (MidNum) ÷ [3.2] (LF) ÷ [0.3] -÷ 002C × 0308 ÷ 000A ÷ # ÷ [0.2] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (LF) ÷ [0.3] -÷ 002C ÷ 000B ÷ # ÷ [0.2] COMMA (MidNum) ÷ [3.2] (Newline) ÷ [0.3] -÷ 002C × 0308 ÷ 000B ÷ # ÷ [0.2] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (Newline) ÷ [0.3] -÷ 002C ÷ 3031 ÷ # ÷ [0.2] COMMA (MidNum) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] -÷ 002C × 0308 ÷ 3031 ÷ # ÷ [0.2] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] -÷ 002C ÷ 0041 ÷ # ÷ [0.2] COMMA (MidNum) ÷ [999.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] -÷ 002C × 0308 ÷ 0041 ÷ # ÷ [0.2] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] -÷ 002C ÷ 003A ÷ # ÷ [0.2] COMMA (MidNum) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 002C × 0308 ÷ 003A ÷ # ÷ [0.2] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 002C ÷ 002C ÷ # ÷ [0.2] COMMA (MidNum) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 002C × 0308 ÷ 002C ÷ # ÷ [0.2] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 002C ÷ 002E ÷ # ÷ [0.2] COMMA (MidNum) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] -÷ 002C × 0308 ÷ 002E ÷ # ÷ [0.2] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] -÷ 002C ÷ 0030 ÷ # ÷ [0.2] COMMA (MidNum) ÷ [999.0] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 002C × 0308 ÷ 0030 ÷ # ÷ [0.2] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 002C ÷ 005F ÷ # ÷ [0.2] COMMA (MidNum) ÷ [999.0] LOW LINE (ExtendNumLet) ÷ [0.3] -÷ 002C × 0308 ÷ 005F ÷ # ÷ [0.2] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LOW LINE (ExtendNumLet) ÷ [0.3] -÷ 002C ÷ 1F1E6 ÷ # ÷ [0.2] COMMA (MidNum) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 002C × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 002C ÷ 05D0 ÷ # ÷ [0.2] COMMA (MidNum) ÷ [999.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] -÷ 002C × 0308 ÷ 05D0 ÷ # ÷ [0.2] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] -÷ 002C ÷ 0022 ÷ # ÷ [0.2] COMMA (MidNum) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] -÷ 002C × 0308 ÷ 0022 ÷ # ÷ [0.2] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] -÷ 002C ÷ 0027 ÷ # ÷ [0.2] COMMA (MidNum) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 002C × 0308 ÷ 0027 ÷ # ÷ [0.2] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 002C ÷ 231A ÷ # ÷ [0.2] COMMA (MidNum) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] -÷ 002C × 0308 ÷ 231A ÷ # ÷ [0.2] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] -÷ 002C ÷ 0020 ÷ # ÷ [0.2] COMMA (MidNum) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] -÷ 002C × 0308 ÷ 0020 ÷ # ÷ [0.2] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] -÷ 002C × 00AD ÷ # ÷ [0.2] COMMA (MidNum) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 002C × 0308 × 
00AD ÷ # ÷ [0.2] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 002C × 0300 ÷ # ÷ [0.2] COMMA (MidNum) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 002C × 0308 × 0300 ÷ # ÷ [0.2] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 002C × 200D ÷ # ÷ [0.2] COMMA (MidNum) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] -÷ 002C × 0308 × 200D ÷ # ÷ [0.2] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] -÷ 002C ÷ 0061 × 2060 ÷ # ÷ [0.2] COMMA (MidNum) ÷ [999.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 002C × 0308 ÷ 0061 × 2060 ÷ # ÷ [0.2] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 002C ÷ 0061 ÷ 003A ÷ # ÷ [0.2] COMMA (MidNum) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 002C × 0308 ÷ 0061 ÷ 003A ÷ # ÷ [0.2] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 002C ÷ 0061 ÷ 0027 ÷ # ÷ [0.2] COMMA (MidNum) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 002C × 0308 ÷ 0061 ÷ 0027 ÷ # ÷ [0.2] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 002C ÷ 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] COMMA (MidNum) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 002C × 0308 ÷ 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 002C ÷ 0061 ÷ 002C ÷ # ÷ [0.2] COMMA (MidNum) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 002C × 0308 ÷ 0061 ÷ 002C ÷ # ÷ [0.2] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 002C ÷ 0031 ÷ 003A ÷ # ÷ [0.2] COMMA (MidNum) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 002C × 0308 ÷ 0031 ÷ 003A ÷ # ÷ [0.2] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 002C ÷ 0031 ÷ 0027 ÷ # ÷ [0.2] COMMA (MidNum) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 002C × 0308 ÷ 0031 ÷ 0027 ÷ # ÷ [0.2] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 002C ÷ 0031 ÷ 002C ÷ # ÷ [0.2] COMMA (MidNum) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 002C × 0308 ÷ 0031 ÷ 002C ÷ # ÷ [0.2] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 002C ÷ 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] COMMA (MidNum) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 002C × 0308 ÷ 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 002E ÷ 0001 ÷ # ÷ [0.2] FULL STOP (MidNumLet) ÷ [999.0] (Other) ÷ [0.3] -÷ 002E × 0308 ÷ 0001 ÷ # ÷ [0.2] FULL STOP (MidNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] 
(Other) ÷ [0.3] -÷ 002E ÷ 000D ÷ # ÷ [0.2] FULL STOP (MidNumLet) ÷ [3.2] (CR) ÷ [0.3] -÷ 002E × 0308 ÷ 000D ÷ # ÷ [0.2] FULL STOP (MidNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (CR) ÷ [0.3] -÷ 002E ÷ 000A ÷ # ÷ [0.2] FULL STOP (MidNumLet) ÷ [3.2] (LF) ÷ [0.3] -÷ 002E × 0308 ÷ 000A ÷ # ÷ [0.2] FULL STOP (MidNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (LF) ÷ [0.3] -÷ 002E ÷ 000B ÷ # ÷ [0.2] FULL STOP (MidNumLet) ÷ [3.2] (Newline) ÷ [0.3] -÷ 002E × 0308 ÷ 000B ÷ # ÷ [0.2] FULL STOP (MidNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (Newline) ÷ [0.3] -÷ 002E ÷ 3031 ÷ # ÷ [0.2] FULL STOP (MidNumLet) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] -÷ 002E × 0308 ÷ 3031 ÷ # ÷ [0.2] FULL STOP (MidNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] -÷ 002E ÷ 0041 ÷ # ÷ [0.2] FULL STOP (MidNumLet) ÷ [999.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] -÷ 002E × 0308 ÷ 0041 ÷ # ÷ [0.2] FULL STOP (MidNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] -÷ 002E ÷ 003A ÷ # ÷ [0.2] FULL STOP (MidNumLet) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 002E × 0308 ÷ 003A ÷ # ÷ [0.2] FULL STOP (MidNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 002E ÷ 002C ÷ # ÷ [0.2] FULL STOP (MidNumLet) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 002E × 0308 ÷ 002C ÷ # ÷ [0.2] FULL STOP (MidNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 002E ÷ 002E ÷ # ÷ [0.2] FULL STOP (MidNumLet) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] -÷ 002E × 0308 ÷ 002E ÷ # ÷ [0.2] FULL STOP (MidNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] -÷ 002E ÷ 0030 ÷ # ÷ [0.2] FULL STOP (MidNumLet) ÷ [999.0] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 002E × 0308 ÷ 0030 ÷ # ÷ [0.2] FULL STOP (MidNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 002E ÷ 005F ÷ # ÷ [0.2] FULL STOP (MidNumLet) ÷ [999.0] LOW LINE (ExtendNumLet) ÷ [0.3] -÷ 002E × 0308 ÷ 005F ÷ # ÷ [0.2] FULL STOP (MidNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LOW LINE (ExtendNumLet) ÷ [0.3] -÷ 002E ÷ 1F1E6 ÷ # ÷ [0.2] FULL STOP (MidNumLet) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 002E × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] FULL STOP (MidNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 002E ÷ 05D0 ÷ # ÷ [0.2] FULL STOP (MidNumLet) ÷ [999.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] -÷ 002E × 0308 ÷ 05D0 ÷ # ÷ [0.2] FULL STOP (MidNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] -÷ 002E ÷ 0022 ÷ # ÷ [0.2] FULL STOP (MidNumLet) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] -÷ 002E × 0308 ÷ 0022 ÷ # ÷ [0.2] FULL STOP (MidNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] -÷ 002E ÷ 0027 ÷ # ÷ [0.2] FULL STOP (MidNumLet) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 002E × 0308 ÷ 0027 ÷ # ÷ [0.2] FULL STOP (MidNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 002E ÷ 231A ÷ # ÷ [0.2] FULL STOP (MidNumLet) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] -÷ 002E × 0308 ÷ 231A ÷ # ÷ [0.2] FULL STOP (MidNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] -÷ 002E ÷ 0020 ÷ # ÷ [0.2] FULL STOP (MidNumLet) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] -÷ 002E × 0308 ÷ 0020 ÷ # ÷ [0.2] FULL STOP (MidNumLet) × [4.0] COMBINING 
DIAERESIS (Extend_FE) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] -÷ 002E × 00AD ÷ # ÷ [0.2] FULL STOP (MidNumLet) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 002E × 0308 × 00AD ÷ # ÷ [0.2] FULL STOP (MidNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 002E × 0300 ÷ # ÷ [0.2] FULL STOP (MidNumLet) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 002E × 0308 × 0300 ÷ # ÷ [0.2] FULL STOP (MidNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 002E × 200D ÷ # ÷ [0.2] FULL STOP (MidNumLet) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] -÷ 002E × 0308 × 200D ÷ # ÷ [0.2] FULL STOP (MidNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] -÷ 002E ÷ 0061 × 2060 ÷ # ÷ [0.2] FULL STOP (MidNumLet) ÷ [999.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 002E × 0308 ÷ 0061 × 2060 ÷ # ÷ [0.2] FULL STOP (MidNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 002E ÷ 0061 ÷ 003A ÷ # ÷ [0.2] FULL STOP (MidNumLet) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 002E × 0308 ÷ 0061 ÷ 003A ÷ # ÷ [0.2] FULL STOP (MidNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 002E ÷ 0061 ÷ 0027 ÷ # ÷ [0.2] FULL STOP (MidNumLet) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 002E × 0308 ÷ 0061 ÷ 0027 ÷ # ÷ [0.2] FULL STOP (MidNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 002E ÷ 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] FULL STOP (MidNumLet) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 002E × 0308 ÷ 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] FULL STOP (MidNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 002E ÷ 0061 ÷ 002C ÷ # ÷ [0.2] FULL STOP (MidNumLet) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 002E × 0308 ÷ 0061 ÷ 002C ÷ # ÷ [0.2] FULL STOP (MidNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 002E ÷ 0031 ÷ 003A ÷ # ÷ [0.2] FULL STOP (MidNumLet) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 002E × 0308 ÷ 0031 ÷ 003A ÷ # ÷ [0.2] FULL STOP (MidNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 002E ÷ 0031 ÷ 0027 ÷ # ÷ [0.2] FULL STOP (MidNumLet) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 002E × 0308 ÷ 0031 ÷ 0027 ÷ # ÷ [0.2] FULL STOP (MidNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 002E ÷ 0031 ÷ 002C ÷ # ÷ [0.2] FULL STOP (MidNumLet) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 002E × 0308 ÷ 0031 ÷ 002C ÷ # ÷ [0.2] FULL STOP (MidNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 002E ÷ 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] FULL STOP (MidNumLet) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 002E × 0308 ÷ 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] FULL STOP 
(MidNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0030 ÷ 0001 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) ÷ [999.0] (Other) ÷ [0.3] -÷ 0030 × 0308 ÷ 0001 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] (Other) ÷ [0.3] -÷ 0030 ÷ 000D ÷ # ÷ [0.2] DIGIT ZERO (Numeric) ÷ [3.2] (CR) ÷ [0.3] -÷ 0030 × 0308 ÷ 000D ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (CR) ÷ [0.3] -÷ 0030 ÷ 000A ÷ # ÷ [0.2] DIGIT ZERO (Numeric) ÷ [3.2] (LF) ÷ [0.3] -÷ 0030 × 0308 ÷ 000A ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (LF) ÷ [0.3] -÷ 0030 ÷ 000B ÷ # ÷ [0.2] DIGIT ZERO (Numeric) ÷ [3.2] (Newline) ÷ [0.3] -÷ 0030 × 0308 ÷ 000B ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (Newline) ÷ [0.3] -÷ 0030 ÷ 3031 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] -÷ 0030 × 0308 ÷ 3031 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] -÷ 0030 × 0041 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [10.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] -÷ 0030 × 0308 × 0041 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [4.0] COMBINING DIAERESIS (Extend_FE) × [10.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] -÷ 0030 ÷ 003A ÷ # ÷ [0.2] DIGIT ZERO (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0030 × 0308 ÷ 003A ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0030 ÷ 002C ÷ # ÷ [0.2] DIGIT ZERO (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0030 × 0308 ÷ 002C ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0030 ÷ 002E ÷ # ÷ [0.2] DIGIT ZERO (Numeric) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] -÷ 0030 × 0308 ÷ 002E ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] -÷ 0030 × 0030 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [8.0] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 0030 × 0308 × 0030 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [4.0] COMBINING DIAERESIS (Extend_FE) × [8.0] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 0030 × 005F ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [13.1] LOW LINE (ExtendNumLet) ÷ [0.3] -÷ 0030 × 0308 × 005F ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [4.0] COMBINING DIAERESIS (Extend_FE) × [13.1] LOW LINE (ExtendNumLet) ÷ [0.3] -÷ 0030 ÷ 1F1E6 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 0030 × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 0030 × 05D0 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [10.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] -÷ 0030 × 0308 × 05D0 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [4.0] COMBINING DIAERESIS (Extend_FE) × [10.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] -÷ 0030 ÷ 0022 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] -÷ 0030 × 0308 ÷ 0022 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] -÷ 0030 ÷ 0027 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0030 × 0308 ÷ 0027 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0030 ÷ 231A ÷ # ÷ [0.2] DIGIT ZERO (Numeric) ÷ [999.0] WATCH (ExtPict) ÷ 
[0.3] -÷ 0030 × 0308 ÷ 231A ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] -÷ 0030 ÷ 0020 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] -÷ 0030 × 0308 ÷ 0020 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] -÷ 0030 × 00AD ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 0030 × 0308 × 00AD ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 0030 × 0300 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 0030 × 0308 × 0300 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 0030 × 200D ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] -÷ 0030 × 0308 × 200D ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] -÷ 0030 × 0061 × 2060 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [10.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0030 × 0308 × 0061 × 2060 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [4.0] COMBINING DIAERESIS (Extend_FE) × [10.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0030 × 0061 ÷ 003A ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [10.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0030 × 0308 × 0061 ÷ 003A ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [4.0] COMBINING DIAERESIS (Extend_FE) × [10.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0030 × 0061 ÷ 0027 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [10.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0030 × 0308 × 0061 ÷ 0027 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [4.0] COMBINING DIAERESIS (Extend_FE) × [10.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0030 × 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [10.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0030 × 0308 × 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [4.0] COMBINING DIAERESIS (Extend_FE) × [10.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0030 × 0061 ÷ 002C ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [10.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0030 × 0308 × 0061 ÷ 002C ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [4.0] COMBINING DIAERESIS (Extend_FE) × [10.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0030 × 0031 ÷ 003A ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [8.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0030 × 0308 × 0031 ÷ 003A ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [4.0] COMBINING DIAERESIS (Extend_FE) × [8.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0030 × 0031 ÷ 0027 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [8.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0030 × 0308 × 0031 ÷ 0027 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [4.0] COMBINING DIAERESIS (Extend_FE) × [8.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0030 × 0031 ÷ 002C ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [8.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0030 × 0308 × 0031 ÷ 002C ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [4.0] COMBINING DIAERESIS (Extend_FE) × [8.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA 
(MidNum) ÷ [0.3] -÷ 0030 × 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [8.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0030 × 0308 × 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [4.0] COMBINING DIAERESIS (Extend_FE) × [8.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 005F ÷ 0001 ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) ÷ [999.0] (Other) ÷ [0.3] -÷ 005F × 0308 ÷ 0001 ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] (Other) ÷ [0.3] -÷ 005F ÷ 000D ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) ÷ [3.2] (CR) ÷ [0.3] -÷ 005F × 0308 ÷ 000D ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (CR) ÷ [0.3] -÷ 005F ÷ 000A ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) ÷ [3.2] (LF) ÷ [0.3] -÷ 005F × 0308 ÷ 000A ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (LF) ÷ [0.3] -÷ 005F ÷ 000B ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) ÷ [3.2] (Newline) ÷ [0.3] -÷ 005F × 0308 ÷ 000B ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (Newline) ÷ [0.3] -÷ 005F × 3031 ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [13.2] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] -÷ 005F × 0308 × 3031 ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) × [13.2] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] -÷ 005F × 0041 ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [13.2] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] -÷ 005F × 0308 × 0041 ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) × [13.2] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] -÷ 005F ÷ 003A ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 005F × 0308 ÷ 003A ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 005F ÷ 002C ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 005F × 0308 ÷ 002C ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 005F ÷ 002E ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] -÷ 005F × 0308 ÷ 002E ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] -÷ 005F × 0030 ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [13.2] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 005F × 0308 × 0030 ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) × [13.2] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 005F × 005F ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [13.1] LOW LINE (ExtendNumLet) ÷ [0.3] -÷ 005F × 0308 × 005F ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) × [13.1] LOW LINE (ExtendNumLet) ÷ [0.3] -÷ 005F ÷ 1F1E6 ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 005F × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 005F × 05D0 ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [13.2] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] -÷ 005F × 0308 × 05D0 ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) × [13.2] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] -÷ 005F ÷ 0022 ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] -÷ 005F × 0308 ÷ 0022 ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] QUOTATION MARK 
(Double_Quote) ÷ [0.3] -÷ 005F ÷ 0027 ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 005F × 0308 ÷ 0027 ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 005F ÷ 231A ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] -÷ 005F × 0308 ÷ 231A ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] -÷ 005F ÷ 0020 ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] -÷ 005F × 0308 ÷ 0020 ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] -÷ 005F × 00AD ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 005F × 0308 × 00AD ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 005F × 0300 ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 005F × 0308 × 0300 ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 005F × 200D ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] -÷ 005F × 0308 × 200D ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] -÷ 005F × 0061 × 2060 ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [13.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 005F × 0308 × 0061 × 2060 ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) × [13.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 005F × 0061 ÷ 003A ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [13.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 005F × 0308 × 0061 ÷ 003A ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) × [13.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 005F × 0061 ÷ 0027 ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [13.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 005F × 0308 × 0061 ÷ 0027 ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) × [13.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 005F × 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [13.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 005F × 0308 × 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) × [13.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 005F × 0061 ÷ 002C ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [13.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 005F × 0308 × 0061 ÷ 002C ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) × [13.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 005F × 0031 ÷ 003A ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 005F × 0308 × 0031 ÷ 003A ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 005F × 0031 ÷ 0027 ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 005F × 0308 × 0031 ÷ 0027 ÷ 
# ÷ [0.2] LOW LINE (ExtendNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 005F × 0031 ÷ 002C ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 005F × 0308 × 0031 ÷ 002C ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 005F × 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 005F × 0308 × 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 1F1E6 ÷ 0001 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [999.0] (Other) ÷ [0.3] -÷ 1F1E6 × 0308 ÷ 0001 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] (Other) ÷ [0.3] -÷ 1F1E6 ÷ 000D ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [3.2] (CR) ÷ [0.3] -÷ 1F1E6 × 0308 ÷ 000D ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (CR) ÷ [0.3] -÷ 1F1E6 ÷ 000A ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [3.2] (LF) ÷ [0.3] -÷ 1F1E6 × 0308 ÷ 000A ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (LF) ÷ [0.3] -÷ 1F1E6 ÷ 000B ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [3.2] (Newline) ÷ [0.3] -÷ 1F1E6 × 0308 ÷ 000B ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (Newline) ÷ [0.3] -÷ 1F1E6 ÷ 3031 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] -÷ 1F1E6 × 0308 ÷ 3031 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] -÷ 1F1E6 ÷ 0041 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [999.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] -÷ 1F1E6 × 0308 ÷ 0041 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] -÷ 1F1E6 ÷ 003A ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 1F1E6 × 0308 ÷ 003A ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 1F1E6 ÷ 002C ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 1F1E6 × 0308 ÷ 002C ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 1F1E6 ÷ 002E ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] -÷ 1F1E6 × 0308 ÷ 002E ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] -÷ 1F1E6 ÷ 0030 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [999.0] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 1F1E6 × 0308 ÷ 0030 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 1F1E6 ÷ 005F ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [999.0] LOW LINE (ExtendNumLet) ÷ [0.3] -÷ 1F1E6 × 0308 ÷ 005F ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [4.0] COMBINING DIAERESIS 
(Extend_FE) ÷ [999.0] LOW LINE (ExtendNumLet) ÷ [0.3] -÷ 1F1E6 × 1F1E6 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [15.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 1F1E6 × 0308 × 1F1E6 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [4.0] COMBINING DIAERESIS (Extend_FE) × [15.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 1F1E6 ÷ 05D0 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [999.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] -÷ 1F1E6 × 0308 ÷ 05D0 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] -÷ 1F1E6 ÷ 0022 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] -÷ 1F1E6 × 0308 ÷ 0022 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] -÷ 1F1E6 ÷ 0027 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 1F1E6 × 0308 ÷ 0027 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 1F1E6 ÷ 231A ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] -÷ 1F1E6 × 0308 ÷ 231A ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] -÷ 1F1E6 ÷ 0020 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] -÷ 1F1E6 × 0308 ÷ 0020 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] -÷ 1F1E6 × 00AD ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 1F1E6 × 0308 × 00AD ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 1F1E6 × 0300 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 1F1E6 × 0308 × 0300 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 1F1E6 × 200D ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] -÷ 1F1E6 × 0308 × 200D ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] -÷ 1F1E6 ÷ 0061 × 2060 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [999.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 1F1E6 × 0308 ÷ 0061 × 2060 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 1F1E6 ÷ 0061 ÷ 003A ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 1F1E6 × 0308 ÷ 0061 ÷ 003A ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 1F1E6 ÷ 0061 ÷ 0027 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 1F1E6 × 0308 ÷ 0061 ÷ 0027 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ 
[999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 1F1E6 ÷ 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 1F1E6 × 0308 ÷ 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 1F1E6 ÷ 0061 ÷ 002C ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 1F1E6 × 0308 ÷ 0061 ÷ 002C ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 1F1E6 ÷ 0031 ÷ 003A ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 1F1E6 × 0308 ÷ 0031 ÷ 003A ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 1F1E6 ÷ 0031 ÷ 0027 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 1F1E6 × 0308 ÷ 0031 ÷ 0027 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 1F1E6 ÷ 0031 ÷ 002C ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 1F1E6 × 0308 ÷ 0031 ÷ 002C ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 1F1E6 ÷ 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 1F1E6 × 0308 ÷ 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 05D0 ÷ 0001 ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [999.0] (Other) ÷ [0.3] -÷ 05D0 × 0308 ÷ 0001 ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] (Other) ÷ [0.3] -÷ 05D0 ÷ 000D ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [3.2] (CR) ÷ [0.3] -÷ 05D0 × 0308 ÷ 000D ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (CR) ÷ [0.3] -÷ 05D0 ÷ 000A ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [3.2] (LF) ÷ [0.3] -÷ 05D0 × 0308 ÷ 000A ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (LF) ÷ [0.3] -÷ 05D0 ÷ 000B ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [3.2] (Newline) ÷ [0.3] -÷ 05D0 × 0308 ÷ 000B ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (Newline) ÷ [0.3] -÷ 05D0 ÷ 3031 ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] -÷ 05D0 × 0308 ÷ 3031 ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] -÷ 05D0 × 0041 ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [5.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] -÷ 05D0 × 0308 × 0041 ÷ # ÷ [0.2] HEBREW 
LETTER ALEF (Hebrew_Letter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [5.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] -÷ 05D0 ÷ 003A ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 05D0 × 0308 ÷ 003A ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 05D0 ÷ 002C ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 05D0 × 0308 ÷ 002C ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 05D0 ÷ 002E ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] -÷ 05D0 × 0308 ÷ 002E ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] -÷ 05D0 × 0030 ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [9.0] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 05D0 × 0308 × 0030 ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [9.0] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 05D0 × 005F ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [13.1] LOW LINE (ExtendNumLet) ÷ [0.3] -÷ 05D0 × 0308 × 005F ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [13.1] LOW LINE (ExtendNumLet) ÷ [0.3] -÷ 05D0 ÷ 1F1E6 ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 05D0 × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 05D0 × 05D0 ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [5.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] -÷ 05D0 × 0308 × 05D0 ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [5.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] -÷ 05D0 ÷ 0022 ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] -÷ 05D0 × 0308 ÷ 0022 ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] -÷ 05D0 × 0027 ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [7.1] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 05D0 × 0308 × 0027 ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [7.1] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 05D0 ÷ 231A ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] -÷ 05D0 × 0308 ÷ 231A ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] -÷ 05D0 ÷ 0020 ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] -÷ 05D0 × 0308 ÷ 0020 ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] -÷ 05D0 × 00AD ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 05D0 × 0308 × 00AD ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 05D0 × 0300 ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 05D0 × 0308 × 0300 ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 05D0 × 200D ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] -÷ 05D0 × 
0308 × 200D ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] -÷ 05D0 × 0061 × 2060 ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [5.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 05D0 × 0308 × 0061 × 2060 ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [5.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 05D0 × 0061 ÷ 003A ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [5.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 05D0 × 0308 × 0061 ÷ 003A ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [5.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 05D0 × 0061 ÷ 0027 ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [5.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 05D0 × 0308 × 0061 ÷ 0027 ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [5.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 05D0 × 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [5.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 05D0 × 0308 × 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [5.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 05D0 × 0061 ÷ 002C ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [5.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 05D0 × 0308 × 0061 ÷ 002C ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [5.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 05D0 × 0031 ÷ 003A ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [9.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 05D0 × 0308 × 0031 ÷ 003A ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [9.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 05D0 × 0031 ÷ 0027 ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [9.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 05D0 × 0308 × 0031 ÷ 0027 ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [9.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 05D0 × 0031 ÷ 002C ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [9.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 05D0 × 0308 × 0031 ÷ 002C ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [9.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 05D0 × 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [9.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 05D0 × 0308 × 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [9.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0022 ÷ 0001 ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) ÷ [999.0] (Other) ÷ [0.3] -÷ 0022 × 0308 ÷ 0001 ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] (Other) ÷ [0.3] -÷ 0022 ÷ 000D ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) ÷ [3.2] 
(CR) ÷ [0.3] -÷ 0022 × 0308 ÷ 000D ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (CR) ÷ [0.3] -÷ 0022 ÷ 000A ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) ÷ [3.2] (LF) ÷ [0.3] -÷ 0022 × 0308 ÷ 000A ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (LF) ÷ [0.3] -÷ 0022 ÷ 000B ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) ÷ [3.2] (Newline) ÷ [0.3] -÷ 0022 × 0308 ÷ 000B ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (Newline) ÷ [0.3] -÷ 0022 ÷ 3031 ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] -÷ 0022 × 0308 ÷ 3031 ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] -÷ 0022 ÷ 0041 ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) ÷ [999.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] -÷ 0022 × 0308 ÷ 0041 ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] -÷ 0022 ÷ 003A ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0022 × 0308 ÷ 003A ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0022 ÷ 002C ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0022 × 0308 ÷ 002C ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0022 ÷ 002E ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] -÷ 0022 × 0308 ÷ 002E ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] -÷ 0022 ÷ 0030 ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) ÷ [999.0] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 0022 × 0308 ÷ 0030 ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 0022 ÷ 005F ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) ÷ [999.0] LOW LINE (ExtendNumLet) ÷ [0.3] -÷ 0022 × 0308 ÷ 005F ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LOW LINE (ExtendNumLet) ÷ [0.3] -÷ 0022 ÷ 1F1E6 ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 0022 × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 0022 ÷ 05D0 ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) ÷ [999.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] -÷ 0022 × 0308 ÷ 05D0 ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] -÷ 0022 ÷ 0022 ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] -÷ 0022 × 0308 ÷ 0022 ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] -÷ 0022 ÷ 0027 ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0022 × 0308 ÷ 0027 ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0022 ÷ 231A ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] -÷ 0022 × 0308 ÷ 231A ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] 
-÷ 0022 ÷ 0020 ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] -÷ 0022 × 0308 ÷ 0020 ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] -÷ 0022 × 00AD ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 0022 × 0308 × 00AD ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 0022 × 0300 ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 0022 × 0308 × 0300 ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 0022 × 200D ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] -÷ 0022 × 0308 × 200D ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] -÷ 0022 ÷ 0061 × 2060 ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) ÷ [999.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0022 × 0308 ÷ 0061 × 2060 ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0022 ÷ 0061 ÷ 003A ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0022 × 0308 ÷ 0061 ÷ 003A ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0022 ÷ 0061 ÷ 0027 ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0022 × 0308 ÷ 0061 ÷ 0027 ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0022 ÷ 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0022 × 0308 ÷ 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0022 ÷ 0061 ÷ 002C ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0022 × 0308 ÷ 0061 ÷ 002C ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0022 ÷ 0031 ÷ 003A ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0022 × 0308 ÷ 0031 ÷ 003A ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0022 ÷ 0031 ÷ 0027 ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0022 × 0308 ÷ 0031 ÷ 0027 ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0022 ÷ 0031 ÷ 002C ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0022 × 0308 ÷ 0031 ÷ 002C ÷ # ÷ [0.2] QUOTATION MARK 
(Double_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0022 ÷ 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0022 × 0308 ÷ 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0027 ÷ 0001 ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) ÷ [999.0] (Other) ÷ [0.3] -÷ 0027 × 0308 ÷ 0001 ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] (Other) ÷ [0.3] -÷ 0027 ÷ 000D ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) ÷ [3.2] (CR) ÷ [0.3] -÷ 0027 × 0308 ÷ 000D ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (CR) ÷ [0.3] -÷ 0027 ÷ 000A ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) ÷ [3.2] (LF) ÷ [0.3] -÷ 0027 × 0308 ÷ 000A ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (LF) ÷ [0.3] -÷ 0027 ÷ 000B ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) ÷ [3.2] (Newline) ÷ [0.3] -÷ 0027 × 0308 ÷ 000B ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (Newline) ÷ [0.3] -÷ 0027 ÷ 3031 ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] -÷ 0027 × 0308 ÷ 3031 ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] -÷ 0027 ÷ 0041 ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) ÷ [999.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] -÷ 0027 × 0308 ÷ 0041 ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] -÷ 0027 ÷ 003A ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0027 × 0308 ÷ 003A ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0027 ÷ 002C ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0027 × 0308 ÷ 002C ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0027 ÷ 002E ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] -÷ 0027 × 0308 ÷ 002E ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] -÷ 0027 ÷ 0030 ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) ÷ [999.0] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 0027 × 0308 ÷ 0030 ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 0027 ÷ 005F ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) ÷ [999.0] LOW LINE (ExtendNumLet) ÷ [0.3] -÷ 0027 × 0308 ÷ 005F ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LOW LINE (ExtendNumLet) ÷ [0.3] -÷ 0027 ÷ 1F1E6 ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 0027 × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 0027 ÷ 05D0 ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) ÷ [999.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] -÷ 0027 × 0308 ÷ 05D0 ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] -÷ 0027 ÷ 0022 ÷ # ÷ [0.2] APOSTROPHE 
(Single_Quote) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] -÷ 0027 × 0308 ÷ 0022 ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] -÷ 0027 ÷ 0027 ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0027 × 0308 ÷ 0027 ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0027 ÷ 231A ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] -÷ 0027 × 0308 ÷ 231A ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] -÷ 0027 ÷ 0020 ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] -÷ 0027 × 0308 ÷ 0020 ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] -÷ 0027 × 00AD ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 0027 × 0308 × 00AD ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 0027 × 0300 ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 0027 × 0308 × 0300 ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 0027 × 200D ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] -÷ 0027 × 0308 × 200D ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] -÷ 0027 ÷ 0061 × 2060 ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) ÷ [999.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0027 × 0308 ÷ 0061 × 2060 ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0027 ÷ 0061 ÷ 003A ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0027 × 0308 ÷ 0061 ÷ 003A ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0027 ÷ 0061 ÷ 0027 ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0027 × 0308 ÷ 0061 ÷ 0027 ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0027 ÷ 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0027 × 0308 ÷ 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0027 ÷ 0061 ÷ 002C ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0027 × 0308 ÷ 0061 ÷ 002C ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0027 ÷ 0031 ÷ 003A ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0027 × 0308 ÷ 0031 ÷ 003A ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) × [4.0] COMBINING 
DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0027 ÷ 0031 ÷ 0027 ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0027 × 0308 ÷ 0031 ÷ 0027 ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0027 ÷ 0031 ÷ 002C ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0027 × 0308 ÷ 0031 ÷ 002C ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0027 ÷ 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0027 × 0308 ÷ 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 231A ÷ 0001 ÷ # ÷ [0.2] WATCH (ExtPict) ÷ [999.0] (Other) ÷ [0.3] -÷ 231A × 0308 ÷ 0001 ÷ # ÷ [0.2] WATCH (ExtPict) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] (Other) ÷ [0.3] -÷ 231A ÷ 000D ÷ # ÷ [0.2] WATCH (ExtPict) ÷ [3.2] (CR) ÷ [0.3] -÷ 231A × 0308 ÷ 000D ÷ # ÷ [0.2] WATCH (ExtPict) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (CR) ÷ [0.3] -÷ 231A ÷ 000A ÷ # ÷ [0.2] WATCH (ExtPict) ÷ [3.2] (LF) ÷ [0.3] -÷ 231A × 0308 ÷ 000A ÷ # ÷ [0.2] WATCH (ExtPict) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (LF) ÷ [0.3] -÷ 231A ÷ 000B ÷ # ÷ [0.2] WATCH (ExtPict) ÷ [3.2] (Newline) ÷ [0.3] -÷ 231A × 0308 ÷ 000B ÷ # ÷ [0.2] WATCH (ExtPict) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (Newline) ÷ [0.3] -÷ 231A ÷ 3031 ÷ # ÷ [0.2] WATCH (ExtPict) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] -÷ 231A × 0308 ÷ 3031 ÷ # ÷ [0.2] WATCH (ExtPict) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] -÷ 231A ÷ 0041 ÷ # ÷ [0.2] WATCH (ExtPict) ÷ [999.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] -÷ 231A × 0308 ÷ 0041 ÷ # ÷ [0.2] WATCH (ExtPict) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] -÷ 231A ÷ 003A ÷ # ÷ [0.2] WATCH (ExtPict) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 231A × 0308 ÷ 003A ÷ # ÷ [0.2] WATCH (ExtPict) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 231A ÷ 002C ÷ # ÷ [0.2] WATCH (ExtPict) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 231A × 0308 ÷ 002C ÷ # ÷ [0.2] WATCH (ExtPict) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 231A ÷ 002E ÷ # ÷ [0.2] WATCH (ExtPict) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] -÷ 231A × 0308 ÷ 002E ÷ # ÷ [0.2] WATCH (ExtPict) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] -÷ 231A ÷ 0030 ÷ # ÷ [0.2] WATCH (ExtPict) ÷ [999.0] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 231A × 0308 ÷ 0030 ÷ # ÷ [0.2] WATCH (ExtPict) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 231A ÷ 005F ÷ # ÷ [0.2] WATCH (ExtPict) ÷ [999.0] LOW LINE (ExtendNumLet) ÷ [0.3] -÷ 231A × 0308 ÷ 005F ÷ # ÷ [0.2] WATCH (ExtPict) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LOW LINE (ExtendNumLet) ÷ [0.3] -÷ 231A ÷ 1F1E6 ÷ # ÷ [0.2] WATCH (ExtPict) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 231A × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] WATCH (ExtPict) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] REGIONAL INDICATOR SYMBOL 
LETTER A (RI) ÷ [0.3] -÷ 231A ÷ 05D0 ÷ # ÷ [0.2] WATCH (ExtPict) ÷ [999.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] -÷ 231A × 0308 ÷ 05D0 ÷ # ÷ [0.2] WATCH (ExtPict) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] -÷ 231A ÷ 0022 ÷ # ÷ [0.2] WATCH (ExtPict) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] -÷ 231A × 0308 ÷ 0022 ÷ # ÷ [0.2] WATCH (ExtPict) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] -÷ 231A ÷ 0027 ÷ # ÷ [0.2] WATCH (ExtPict) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 231A × 0308 ÷ 0027 ÷ # ÷ [0.2] WATCH (ExtPict) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 231A ÷ 231A ÷ # ÷ [0.2] WATCH (ExtPict) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] -÷ 231A × 0308 ÷ 231A ÷ # ÷ [0.2] WATCH (ExtPict) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] -÷ 231A ÷ 0020 ÷ # ÷ [0.2] WATCH (ExtPict) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] -÷ 231A × 0308 ÷ 0020 ÷ # ÷ [0.2] WATCH (ExtPict) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] -÷ 231A × 00AD ÷ # ÷ [0.2] WATCH (ExtPict) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 231A × 0308 × 00AD ÷ # ÷ [0.2] WATCH (ExtPict) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 231A × 0300 ÷ # ÷ [0.2] WATCH (ExtPict) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 231A × 0308 × 0300 ÷ # ÷ [0.2] WATCH (ExtPict) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 231A × 200D ÷ # ÷ [0.2] WATCH (ExtPict) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] -÷ 231A × 0308 × 200D ÷ # ÷ [0.2] WATCH (ExtPict) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] -÷ 231A ÷ 0061 × 2060 ÷ # ÷ [0.2] WATCH (ExtPict) ÷ [999.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 231A × 0308 ÷ 0061 × 2060 ÷ # ÷ [0.2] WATCH (ExtPict) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 231A ÷ 0061 ÷ 003A ÷ # ÷ [0.2] WATCH (ExtPict) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 231A × 0308 ÷ 0061 ÷ 003A ÷ # ÷ [0.2] WATCH (ExtPict) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 231A ÷ 0061 ÷ 0027 ÷ # ÷ [0.2] WATCH (ExtPict) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 231A × 0308 ÷ 0061 ÷ 0027 ÷ # ÷ [0.2] WATCH (ExtPict) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 231A ÷ 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] WATCH (ExtPict) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 231A × 0308 ÷ 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] WATCH (ExtPict) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 231A ÷ 0061 ÷ 002C ÷ # ÷ [0.2] WATCH (ExtPict) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 231A × 0308 ÷ 0061 ÷ 002C ÷ # ÷ [0.2] WATCH (ExtPict) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 231A ÷ 0031 ÷ 003A ÷ # ÷ [0.2] WATCH (ExtPict) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 231A × 0308 ÷ 0031 ÷ 003A ÷ # ÷ 
[0.2] WATCH (ExtPict) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 231A ÷ 0031 ÷ 0027 ÷ # ÷ [0.2] WATCH (ExtPict) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 231A × 0308 ÷ 0031 ÷ 0027 ÷ # ÷ [0.2] WATCH (ExtPict) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 231A ÷ 0031 ÷ 002C ÷ # ÷ [0.2] WATCH (ExtPict) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 231A × 0308 ÷ 0031 ÷ 002C ÷ # ÷ [0.2] WATCH (ExtPict) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 231A ÷ 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] WATCH (ExtPict) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 231A × 0308 ÷ 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] WATCH (ExtPict) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0020 ÷ 0001 ÷ # ÷ [0.2] SPACE (WSegSpace) ÷ [999.0] (Other) ÷ [0.3] -÷ 0020 × 0308 ÷ 0001 ÷ # ÷ [0.2] SPACE (WSegSpace) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] (Other) ÷ [0.3] -÷ 0020 ÷ 000D ÷ # ÷ [0.2] SPACE (WSegSpace) ÷ [3.2] (CR) ÷ [0.3] -÷ 0020 × 0308 ÷ 000D ÷ # ÷ [0.2] SPACE (WSegSpace) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (CR) ÷ [0.3] -÷ 0020 ÷ 000A ÷ # ÷ [0.2] SPACE (WSegSpace) ÷ [3.2] (LF) ÷ [0.3] -÷ 0020 × 0308 ÷ 000A ÷ # ÷ [0.2] SPACE (WSegSpace) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (LF) ÷ [0.3] -÷ 0020 ÷ 000B ÷ # ÷ [0.2] SPACE (WSegSpace) ÷ [3.2] (Newline) ÷ [0.3] -÷ 0020 × 0308 ÷ 000B ÷ # ÷ [0.2] SPACE (WSegSpace) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (Newline) ÷ [0.3] -÷ 0020 ÷ 3031 ÷ # ÷ [0.2] SPACE (WSegSpace) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] -÷ 0020 × 0308 ÷ 3031 ÷ # ÷ [0.2] SPACE (WSegSpace) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] -÷ 0020 ÷ 0041 ÷ # ÷ [0.2] SPACE (WSegSpace) ÷ [999.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] -÷ 0020 × 0308 ÷ 0041 ÷ # ÷ [0.2] SPACE (WSegSpace) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] -÷ 0020 ÷ 003A ÷ # ÷ [0.2] SPACE (WSegSpace) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0020 × 0308 ÷ 003A ÷ # ÷ [0.2] SPACE (WSegSpace) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0020 ÷ 002C ÷ # ÷ [0.2] SPACE (WSegSpace) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0020 × 0308 ÷ 002C ÷ # ÷ [0.2] SPACE (WSegSpace) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0020 ÷ 002E ÷ # ÷ [0.2] SPACE (WSegSpace) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] -÷ 0020 × 0308 ÷ 002E ÷ # ÷ [0.2] SPACE (WSegSpace) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] -÷ 0020 ÷ 0030 ÷ # ÷ [0.2] SPACE (WSegSpace) ÷ [999.0] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 0020 × 0308 ÷ 0030 ÷ # ÷ [0.2] SPACE (WSegSpace) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 0020 ÷ 005F ÷ # ÷ [0.2] SPACE (WSegSpace) ÷ [999.0] LOW LINE (ExtendNumLet) ÷ [0.3] -÷ 0020 × 0308 ÷ 005F ÷ # ÷ [0.2] SPACE (WSegSpace) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LOW LINE (ExtendNumLet) ÷ [0.3] -÷ 0020 ÷ 1F1E6 ÷ # ÷ [0.2] SPACE (WSegSpace) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 0020 × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] SPACE (WSegSpace) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] 
REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 0020 ÷ 05D0 ÷ # ÷ [0.2] SPACE (WSegSpace) ÷ [999.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] -÷ 0020 × 0308 ÷ 05D0 ÷ # ÷ [0.2] SPACE (WSegSpace) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] -÷ 0020 ÷ 0022 ÷ # ÷ [0.2] SPACE (WSegSpace) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] -÷ 0020 × 0308 ÷ 0022 ÷ # ÷ [0.2] SPACE (WSegSpace) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] -÷ 0020 ÷ 0027 ÷ # ÷ [0.2] SPACE (WSegSpace) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0020 × 0308 ÷ 0027 ÷ # ÷ [0.2] SPACE (WSegSpace) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0020 ÷ 231A ÷ # ÷ [0.2] SPACE (WSegSpace) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] -÷ 0020 × 0308 ÷ 231A ÷ # ÷ [0.2] SPACE (WSegSpace) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] -÷ 0020 × 0020 ÷ # ÷ [0.2] SPACE (WSegSpace) × [3.4] SPACE (WSegSpace) ÷ [0.3] -÷ 0020 × 0308 ÷ 0020 ÷ # ÷ [0.2] SPACE (WSegSpace) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] -÷ 0020 × 00AD ÷ # ÷ [0.2] SPACE (WSegSpace) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 0020 × 0308 × 00AD ÷ # ÷ [0.2] SPACE (WSegSpace) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 0020 × 0300 ÷ # ÷ [0.2] SPACE (WSegSpace) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 0020 × 0308 × 0300 ÷ # ÷ [0.2] SPACE (WSegSpace) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 0020 × 200D ÷ # ÷ [0.2] SPACE (WSegSpace) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] -÷ 0020 × 0308 × 200D ÷ # ÷ [0.2] SPACE (WSegSpace) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] -÷ 0020 ÷ 0061 × 2060 ÷ # ÷ [0.2] SPACE (WSegSpace) ÷ [999.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0020 × 0308 ÷ 0061 × 2060 ÷ # ÷ [0.2] SPACE (WSegSpace) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0020 ÷ 0061 ÷ 003A ÷ # ÷ [0.2] SPACE (WSegSpace) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0020 × 0308 ÷ 0061 ÷ 003A ÷ # ÷ [0.2] SPACE (WSegSpace) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0020 ÷ 0061 ÷ 0027 ÷ # ÷ [0.2] SPACE (WSegSpace) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0020 × 0308 ÷ 0061 ÷ 0027 ÷ # ÷ [0.2] SPACE (WSegSpace) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0020 ÷ 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] SPACE (WSegSpace) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0020 × 0308 ÷ 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] SPACE (WSegSpace) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0020 ÷ 0061 ÷ 002C ÷ # ÷ [0.2] SPACE (WSegSpace) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0020 × 0308 ÷ 0061 ÷ 002C ÷ # ÷ [0.2] SPACE (WSegSpace) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0020 ÷ 0031 ÷ 003A ÷ # ÷ [0.2] SPACE (WSegSpace) ÷ [999.0] DIGIT ONE 
(Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0020 × 0308 ÷ 0031 ÷ 003A ÷ # ÷ [0.2] SPACE (WSegSpace) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0020 ÷ 0031 ÷ 0027 ÷ # ÷ [0.2] SPACE (WSegSpace) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0020 × 0308 ÷ 0031 ÷ 0027 ÷ # ÷ [0.2] SPACE (WSegSpace) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0020 ÷ 0031 ÷ 002C ÷ # ÷ [0.2] SPACE (WSegSpace) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0020 × 0308 ÷ 0031 ÷ 002C ÷ # ÷ [0.2] SPACE (WSegSpace) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0020 ÷ 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] SPACE (WSegSpace) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0020 × 0308 ÷ 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] SPACE (WSegSpace) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 00AD ÷ 0001 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) ÷ [999.0] (Other) ÷ [0.3] -÷ 00AD × 0308 ÷ 0001 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] (Other) ÷ [0.3] -÷ 00AD ÷ 000D ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) ÷ [3.2] (CR) ÷ [0.3] -÷ 00AD × 0308 ÷ 000D ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (CR) ÷ [0.3] -÷ 00AD ÷ 000A ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) ÷ [3.2] (LF) ÷ [0.3] -÷ 00AD × 0308 ÷ 000A ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (LF) ÷ [0.3] -÷ 00AD ÷ 000B ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) ÷ [3.2] (Newline) ÷ [0.3] -÷ 00AD × 0308 ÷ 000B ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (Newline) ÷ [0.3] -÷ 00AD ÷ 3031 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] -÷ 00AD × 0308 ÷ 3031 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] -÷ 00AD ÷ 0041 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) ÷ [999.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] -÷ 00AD × 0308 ÷ 0041 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] -÷ 00AD ÷ 003A ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 00AD × 0308 ÷ 003A ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 00AD ÷ 002C ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 00AD × 0308 ÷ 002C ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 00AD ÷ 002E ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] -÷ 00AD × 0308 ÷ 002E ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] -÷ 00AD ÷ 0030 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) ÷ [999.0] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 00AD × 0308 ÷ 0030 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 00AD ÷ 005F ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) ÷ [999.0] LOW LINE (ExtendNumLet) ÷ [0.3] -÷ 00AD × 0308 ÷ 005F ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LOW LINE 
(ExtendNumLet) ÷ [0.3] -÷ 00AD ÷ 1F1E6 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 00AD × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 00AD ÷ 05D0 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) ÷ [999.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] -÷ 00AD × 0308 ÷ 05D0 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] -÷ 00AD ÷ 0022 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] -÷ 00AD × 0308 ÷ 0022 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] -÷ 00AD ÷ 0027 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 00AD × 0308 ÷ 0027 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 00AD ÷ 231A ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] -÷ 00AD × 0308 ÷ 231A ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] -÷ 00AD ÷ 0020 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] -÷ 00AD × 0308 ÷ 0020 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] -÷ 00AD × 00AD ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 00AD × 0308 × 00AD ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 00AD × 0300 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 00AD × 0308 × 0300 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 00AD × 200D ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] -÷ 00AD × 0308 × 200D ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] -÷ 00AD ÷ 0061 × 2060 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 00AD × 0308 ÷ 0061 × 2060 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 00AD ÷ 0061 ÷ 003A ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 00AD × 0308 ÷ 0061 ÷ 003A ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 00AD ÷ 0061 ÷ 0027 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 00AD × 0308 ÷ 0061 ÷ 0027 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 00AD ÷ 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 00AD × 0308 ÷ 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER 
(Format_FE) ÷ [0.3] -÷ 00AD ÷ 0061 ÷ 002C ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 00AD × 0308 ÷ 0061 ÷ 002C ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 00AD ÷ 0031 ÷ 003A ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 00AD × 0308 ÷ 0031 ÷ 003A ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 00AD ÷ 0031 ÷ 0027 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 00AD × 0308 ÷ 0031 ÷ 0027 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 00AD ÷ 0031 ÷ 002C ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 00AD × 0308 ÷ 0031 ÷ 002C ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 00AD ÷ 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 00AD × 0308 ÷ 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0300 ÷ 0001 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) ÷ [999.0] (Other) ÷ [0.3] -÷ 0300 × 0308 ÷ 0001 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] (Other) ÷ [0.3] -÷ 0300 ÷ 000D ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) ÷ [3.2] (CR) ÷ [0.3] -÷ 0300 × 0308 ÷ 000D ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (CR) ÷ [0.3] -÷ 0300 ÷ 000A ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) ÷ [3.2] (LF) ÷ [0.3] -÷ 0300 × 0308 ÷ 000A ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (LF) ÷ [0.3] -÷ 0300 ÷ 000B ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) ÷ [3.2] (Newline) ÷ [0.3] -÷ 0300 × 0308 ÷ 000B ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (Newline) ÷ [0.3] -÷ 0300 ÷ 3031 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] -÷ 0300 × 0308 ÷ 3031 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] -÷ 0300 ÷ 0041 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) ÷ [999.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] -÷ 0300 × 0308 ÷ 0041 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] -÷ 0300 ÷ 003A ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0300 × 0308 ÷ 003A ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0300 ÷ 002C ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0300 × 0308 ÷ 002C ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0300 ÷ 002E ÷ # ÷ [0.2] COMBINING 
GRAVE ACCENT (Extend_FE) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] -÷ 0300 × 0308 ÷ 002E ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] -÷ 0300 ÷ 0030 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) ÷ [999.0] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 0300 × 0308 ÷ 0030 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 0300 ÷ 005F ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) ÷ [999.0] LOW LINE (ExtendNumLet) ÷ [0.3] -÷ 0300 × 0308 ÷ 005F ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LOW LINE (ExtendNumLet) ÷ [0.3] -÷ 0300 ÷ 1F1E6 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 0300 × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 0300 ÷ 05D0 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) ÷ [999.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] -÷ 0300 × 0308 ÷ 05D0 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] -÷ 0300 ÷ 0022 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] -÷ 0300 × 0308 ÷ 0022 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] -÷ 0300 ÷ 0027 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0300 × 0308 ÷ 0027 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0300 ÷ 231A ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] -÷ 0300 × 0308 ÷ 231A ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] -÷ 0300 ÷ 0020 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] -÷ 0300 × 0308 ÷ 0020 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] -÷ 0300 × 00AD ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 0300 × 0308 × 00AD ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 0300 × 0300 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 0300 × 0308 × 0300 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 0300 × 200D ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] -÷ 0300 × 0308 × 200D ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] -÷ 0300 ÷ 0061 × 2060 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0300 × 0308 ÷ 0061 × 2060 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0300 ÷ 0061 ÷ 003A ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON 
(MidLetter) ÷ [0.3] -÷ 0300 × 0308 ÷ 0061 ÷ 003A ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0300 ÷ 0061 ÷ 0027 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0300 × 0308 ÷ 0061 ÷ 0027 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0300 ÷ 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0300 × 0308 ÷ 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0300 ÷ 0061 ÷ 002C ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0300 × 0308 ÷ 0061 ÷ 002C ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0300 ÷ 0031 ÷ 003A ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0300 × 0308 ÷ 0031 ÷ 003A ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0300 ÷ 0031 ÷ 0027 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0300 × 0308 ÷ 0031 ÷ 0027 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0300 ÷ 0031 ÷ 002C ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0300 × 0308 ÷ 0031 ÷ 002C ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0300 ÷ 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0300 × 0308 ÷ 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 200D ÷ 0001 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) ÷ [999.0] (Other) ÷ [0.3] -÷ 200D × 0308 ÷ 0001 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] (Other) ÷ [0.3] -÷ 200D ÷ 000D ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) ÷ [3.2] (CR) ÷ [0.3] -÷ 200D × 0308 ÷ 000D ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (CR) ÷ [0.3] -÷ 200D ÷ 000A ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) ÷ [3.2] (LF) ÷ [0.3] -÷ 200D × 0308 ÷ 000A ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (LF) ÷ [0.3] -÷ 200D ÷ 000B ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) ÷ [3.2] (Newline) ÷ [0.3] -÷ 200D × 0308 ÷ 000B ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (Newline) ÷ [0.3] -÷ 200D ÷ 3031 ÷ # ÷ [0.2] ZERO WIDTH JOINER 
(ZWJ_FE) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] -÷ 200D × 0308 ÷ 3031 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] -÷ 200D ÷ 0041 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) ÷ [999.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] -÷ 200D × 0308 ÷ 0041 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] -÷ 200D ÷ 003A ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 200D × 0308 ÷ 003A ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 200D ÷ 002C ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 200D × 0308 ÷ 002C ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 200D ÷ 002E ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] -÷ 200D × 0308 ÷ 002E ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] -÷ 200D ÷ 0030 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) ÷ [999.0] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 200D × 0308 ÷ 0030 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 200D ÷ 005F ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) ÷ [999.0] LOW LINE (ExtendNumLet) ÷ [0.3] -÷ 200D × 0308 ÷ 005F ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LOW LINE (ExtendNumLet) ÷ [0.3] -÷ 200D ÷ 1F1E6 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 200D × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 200D ÷ 05D0 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) ÷ [999.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] -÷ 200D × 0308 ÷ 05D0 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] -÷ 200D ÷ 0022 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] -÷ 200D × 0308 ÷ 0022 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] -÷ 200D ÷ 0027 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 200D × 0308 ÷ 0027 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 200D × 231A ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) × [3.3] WATCH (ExtPict) ÷ [0.3] -÷ 200D × 0308 ÷ 231A ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] -÷ 200D ÷ 0020 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] -÷ 200D × 0308 ÷ 0020 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] -÷ 200D × 00AD ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 200D × 0308 × 00AD ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 200D × 0300 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 200D × 0308 × 0300 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × 
[4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 200D × 200D ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] -÷ 200D × 0308 × 200D ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] -÷ 200D ÷ 0061 × 2060 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 200D × 0308 ÷ 0061 × 2060 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 200D ÷ 0061 ÷ 003A ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 200D × 0308 ÷ 0061 ÷ 003A ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 200D ÷ 0061 ÷ 0027 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 200D × 0308 ÷ 0061 ÷ 0027 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 200D ÷ 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 200D × 0308 ÷ 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 200D ÷ 0061 ÷ 002C ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 200D × 0308 ÷ 0061 ÷ 002C ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 200D ÷ 0031 ÷ 003A ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 200D × 0308 ÷ 0031 ÷ 003A ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 200D ÷ 0031 ÷ 0027 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 200D × 0308 ÷ 0031 ÷ 0027 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 200D ÷ 0031 ÷ 002C ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 200D × 0308 ÷ 0031 ÷ 002C ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 200D ÷ 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 200D × 0308 ÷ 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0061 × 2060 ÷ 0001 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] (Other) ÷ [0.3] -÷ 0061 × 2060 × 0308 ÷ 0001 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [4.0] 
COMBINING DIAERESIS (Extend_FE) ÷ [999.0] (Other) ÷ [0.3] -÷ 0061 × 2060 ÷ 000D ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [3.2] (CR) ÷ [0.3] -÷ 0061 × 2060 × 0308 ÷ 000D ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (CR) ÷ [0.3] -÷ 0061 × 2060 ÷ 000A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [3.2] (LF) ÷ [0.3] -÷ 0061 × 2060 × 0308 ÷ 000A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (LF) ÷ [0.3] -÷ 0061 × 2060 ÷ 000B ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [3.2] (Newline) ÷ [0.3] -÷ 0061 × 2060 × 0308 ÷ 000B ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (Newline) ÷ [0.3] -÷ 0061 × 2060 ÷ 3031 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] -÷ 0061 × 2060 × 0308 ÷ 3031 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] -÷ 0061 × 2060 × 0041 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [5.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] -÷ 0061 × 2060 × 0308 × 0041 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [5.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] -÷ 0061 × 2060 ÷ 003A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0061 × 2060 × 0308 ÷ 003A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0061 × 2060 ÷ 002C ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0061 × 2060 × 0308 ÷ 002C ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0061 × 2060 ÷ 002E ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] -÷ 0061 × 2060 × 0308 ÷ 002E ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] -÷ 0061 × 2060 × 0030 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [9.0] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 0061 × 2060 × 0308 × 0030 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [9.0] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 0061 × 2060 × 005F ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [13.1] LOW LINE (ExtendNumLet) ÷ [0.3] -÷ 0061 × 2060 × 0308 × 005F ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [13.1] LOW LINE (ExtendNumLet) ÷ [0.3] -÷ 0061 × 2060 ÷ 1F1E6 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 0061 × 2060 × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 0061 × 
2060 × 05D0 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [5.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] -÷ 0061 × 2060 × 0308 × 05D0 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [5.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] -÷ 0061 × 2060 ÷ 0022 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] -÷ 0061 × 2060 × 0308 ÷ 0022 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] -÷ 0061 × 2060 ÷ 0027 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0061 × 2060 × 0308 ÷ 0027 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0061 × 2060 ÷ 231A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] -÷ 0061 × 2060 × 0308 ÷ 231A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] -÷ 0061 × 2060 ÷ 0020 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] -÷ 0061 × 2060 × 0308 ÷ 0020 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] -÷ 0061 × 2060 × 00AD ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 0061 × 2060 × 0308 × 00AD ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 0061 × 2060 × 0300 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 0061 × 2060 × 0308 × 0300 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 0061 × 2060 × 200D ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] -÷ 0061 × 2060 × 0308 × 200D ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] -÷ 0061 × 2060 × 0061 × 2060 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [5.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0061 × 2060 × 0308 × 0061 × 2060 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [5.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0061 × 2060 × 0061 ÷ 003A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [5.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0061 × 2060 × 0308 × 0061 ÷ 003A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [5.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0061 × 2060 × 0061 ÷ 0027 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [5.0] LATIN SMALL 
LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0061 × 2060 × 0308 × 0061 ÷ 0027 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [5.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0061 × 2060 × 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [5.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0061 × 2060 × 0308 × 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [5.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0061 × 2060 × 0061 ÷ 002C ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [5.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0061 × 2060 × 0308 × 0061 ÷ 002C ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [5.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0061 × 2060 × 0031 ÷ 003A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [9.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0061 × 2060 × 0308 × 0031 ÷ 003A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [9.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0061 × 2060 × 0031 ÷ 0027 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [9.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0061 × 2060 × 0308 × 0031 ÷ 0027 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [9.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0061 × 2060 × 0031 ÷ 002C ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [9.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0061 × 2060 × 0308 × 0031 ÷ 002C ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [9.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0061 × 2060 × 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [9.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0061 × 2060 × 0308 × 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [9.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0061 ÷ 003A ÷ 0001 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] (Other) ÷ [0.3] -÷ 0061 ÷ 003A × 0308 ÷ 0001 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] (Other) ÷ [0.3] -÷ 0061 ÷ 003A ÷ 000D ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [3.2] (CR) ÷ [0.3] -÷ 0061 ÷ 003A × 0308 ÷ 000D ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (CR) ÷ [0.3] -÷ 0061 ÷ 003A ÷ 000A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [3.2] (LF) ÷ [0.3] -÷ 0061 ÷ 003A × 0308 ÷ 000A ÷ # ÷ [0.2] LATIN SMALL 
LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (LF) ÷ [0.3] -÷ 0061 ÷ 003A ÷ 000B ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [3.2] (Newline) ÷ [0.3] -÷ 0061 ÷ 003A × 0308 ÷ 000B ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (Newline) ÷ [0.3] -÷ 0061 ÷ 003A ÷ 3031 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] -÷ 0061 ÷ 003A × 0308 ÷ 3031 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] -÷ 0061 × 003A × 0041 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [6.0] COLON (MidLetter) × [7.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] -÷ 0061 × 003A × 0308 × 0041 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [6.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [7.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] -÷ 0061 ÷ 003A ÷ 003A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0061 ÷ 003A × 0308 ÷ 003A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0061 ÷ 003A ÷ 002C ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0061 ÷ 003A × 0308 ÷ 002C ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0061 ÷ 003A ÷ 002E ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] -÷ 0061 ÷ 003A × 0308 ÷ 002E ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] -÷ 0061 ÷ 003A ÷ 0030 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 0061 ÷ 003A × 0308 ÷ 0030 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 0061 ÷ 003A ÷ 005F ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] LOW LINE (ExtendNumLet) ÷ [0.3] -÷ 0061 ÷ 003A × 0308 ÷ 005F ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LOW LINE (ExtendNumLet) ÷ [0.3] -÷ 0061 ÷ 003A ÷ 1F1E6 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 0061 ÷ 003A × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 0061 × 003A × 05D0 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [6.0] COLON (MidLetter) × [7.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] -÷ 0061 × 003A × 0308 × 05D0 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [6.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [7.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] -÷ 0061 ÷ 003A ÷ 0022 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] -÷ 0061 ÷ 003A × 0308 ÷ 0022 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING 
DIAERESIS (Extend_FE) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] -÷ 0061 ÷ 003A ÷ 0027 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0061 ÷ 003A × 0308 ÷ 0027 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0061 ÷ 003A ÷ 231A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] -÷ 0061 ÷ 003A × 0308 ÷ 231A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] -÷ 0061 ÷ 003A ÷ 0020 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] -÷ 0061 ÷ 003A × 0308 ÷ 0020 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] -÷ 0061 ÷ 003A × 00AD ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 0061 ÷ 003A × 0308 × 00AD ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 0061 ÷ 003A × 0300 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 0061 ÷ 003A × 0308 × 0300 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 0061 ÷ 003A × 200D ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] -÷ 0061 ÷ 003A × 0308 × 200D ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] -÷ 0061 × 003A × 0061 × 2060 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [6.0] COLON (MidLetter) × [7.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0061 × 003A × 0308 × 0061 × 2060 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [6.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [7.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0061 × 003A × 0061 ÷ 003A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [6.0] COLON (MidLetter) × [7.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0061 × 003A × 0308 × 0061 ÷ 003A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [6.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [7.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0061 × 003A × 0061 ÷ 0027 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [6.0] COLON (MidLetter) × [7.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0061 × 003A × 0308 × 0061 ÷ 0027 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [6.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [7.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0061 × 003A × 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [6.0] COLON (MidLetter) × [7.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0061 × 003A × 0308 × 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [6.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [7.0] LATIN 
SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0061 × 003A × 0061 ÷ 002C ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [6.0] COLON (MidLetter) × [7.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0061 × 003A × 0308 × 0061 ÷ 002C ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [6.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [7.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0061 ÷ 003A ÷ 0031 ÷ 003A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0061 ÷ 003A × 0308 ÷ 0031 ÷ 003A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0061 ÷ 003A ÷ 0031 ÷ 0027 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0061 ÷ 003A × 0308 ÷ 0031 ÷ 0027 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0061 ÷ 003A ÷ 0031 ÷ 002C ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0061 ÷ 003A × 0308 ÷ 0031 ÷ 002C ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0061 ÷ 003A ÷ 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0061 ÷ 003A × 0308 ÷ 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0061 ÷ 0027 ÷ 0001 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [999.0] (Other) ÷ [0.3] -÷ 0061 ÷ 0027 × 0308 ÷ 0001 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] (Other) ÷ [0.3] -÷ 0061 ÷ 0027 ÷ 000D ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [3.2] (CR) ÷ [0.3] -÷ 0061 ÷ 0027 × 0308 ÷ 000D ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (CR) ÷ [0.3] -÷ 0061 ÷ 0027 ÷ 000A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [3.2] (LF) ÷ [0.3] -÷ 0061 ÷ 0027 × 0308 ÷ 000A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (LF) ÷ [0.3] -÷ 0061 ÷ 0027 ÷ 000B ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [3.2] (Newline) ÷ [0.3] -÷ 0061 ÷ 0027 × 0308 ÷ 000B ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (Newline) ÷ [0.3] -÷ 0061 ÷ 0027 ÷ 3031 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] -÷ 0061 ÷ 0027 × 0308 ÷ 3031 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × 
[4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] -÷ 0061 × 0027 × 0041 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [6.0] APOSTROPHE (Single_Quote) × [7.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] -÷ 0061 × 0027 × 0308 × 0041 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [6.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) × [7.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] -÷ 0061 ÷ 0027 ÷ 003A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0061 ÷ 0027 × 0308 ÷ 003A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0061 ÷ 0027 ÷ 002C ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0061 ÷ 0027 × 0308 ÷ 002C ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0061 ÷ 0027 ÷ 002E ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] -÷ 0061 ÷ 0027 × 0308 ÷ 002E ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] -÷ 0061 ÷ 0027 ÷ 0030 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [999.0] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 0061 ÷ 0027 × 0308 ÷ 0030 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 0061 ÷ 0027 ÷ 005F ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [999.0] LOW LINE (ExtendNumLet) ÷ [0.3] -÷ 0061 ÷ 0027 × 0308 ÷ 005F ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LOW LINE (ExtendNumLet) ÷ [0.3] -÷ 0061 ÷ 0027 ÷ 1F1E6 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 0061 ÷ 0027 × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 0061 × 0027 × 05D0 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [6.0] APOSTROPHE (Single_Quote) × [7.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] -÷ 0061 × 0027 × 0308 × 05D0 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [6.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) × [7.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] -÷ 0061 ÷ 0027 ÷ 0022 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] -÷ 0061 ÷ 0027 × 0308 ÷ 0022 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] -÷ 0061 ÷ 0027 ÷ 0027 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0061 ÷ 0027 × 0308 ÷ 0027 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0061 ÷ 0027 ÷ 231A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE 
(Single_Quote) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] -÷ 0061 ÷ 0027 × 0308 ÷ 231A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] -÷ 0061 ÷ 0027 ÷ 0020 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] -÷ 0061 ÷ 0027 × 0308 ÷ 0020 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] -÷ 0061 ÷ 0027 × 00AD ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 0061 ÷ 0027 × 0308 × 00AD ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 0061 ÷ 0027 × 0300 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 0061 ÷ 0027 × 0308 × 0300 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 0061 ÷ 0027 × 200D ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] -÷ 0061 ÷ 0027 × 0308 × 200D ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] -÷ 0061 × 0027 × 0061 × 2060 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [6.0] APOSTROPHE (Single_Quote) × [7.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0061 × 0027 × 0308 × 0061 × 2060 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [6.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) × [7.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0061 × 0027 × 0061 ÷ 003A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [6.0] APOSTROPHE (Single_Quote) × [7.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0061 × 0027 × 0308 × 0061 ÷ 003A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [6.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) × [7.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0061 × 0027 × 0061 ÷ 0027 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [6.0] APOSTROPHE (Single_Quote) × [7.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0061 × 0027 × 0308 × 0061 ÷ 0027 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [6.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) × [7.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0061 × 0027 × 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [6.0] APOSTROPHE (Single_Quote) × [7.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0061 × 0027 × 0308 × 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [6.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) × [7.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0061 × 0027 × 0061 ÷ 002C ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [6.0] APOSTROPHE (Single_Quote) × [7.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0061 × 0027 × 0308 × 0061 ÷ 002C ÷ # ÷ [0.2] LATIN 
SMALL LETTER A (ALetter) × [6.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) × [7.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0061 ÷ 0027 ÷ 0031 ÷ 003A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0061 ÷ 0027 × 0308 ÷ 0031 ÷ 003A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0061 ÷ 0027 ÷ 0031 ÷ 0027 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0061 ÷ 0027 × 0308 ÷ 0031 ÷ 0027 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0061 ÷ 0027 ÷ 0031 ÷ 002C ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0061 ÷ 0027 × 0308 ÷ 0031 ÷ 002C ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0061 ÷ 0027 ÷ 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0061 ÷ 0027 × 0308 ÷ 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0061 ÷ 0027 × 2060 ÷ 0001 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] (Other) ÷ [0.3] -÷ 0061 ÷ 0027 × 2060 × 0308 ÷ 0001 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] (Other) ÷ [0.3] -÷ 0061 ÷ 0027 × 2060 ÷ 000D ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [3.2] (CR) ÷ [0.3] -÷ 0061 ÷ 0027 × 2060 × 0308 ÷ 000D ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (CR) ÷ [0.3] -÷ 0061 ÷ 0027 × 2060 ÷ 000A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [3.2] (LF) ÷ [0.3] -÷ 0061 ÷ 0027 × 2060 × 0308 ÷ 000A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (LF) ÷ [0.3] -÷ 0061 ÷ 0027 × 2060 ÷ 000B ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [3.2] (Newline) ÷ [0.3] -÷ 0061 ÷ 0027 × 2060 × 0308 ÷ 000B ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (Newline) ÷ [0.3] -÷ 0061 ÷ 0027 × 2060 ÷ 3031 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] -÷ 0061 ÷ 0027 × 
2060 × 0308 ÷ 3031 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] -÷ 0061 × 0027 × 2060 × 0041 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [6.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) × [7.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] -÷ 0061 × 0027 × 2060 × 0308 × 0041 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [6.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [7.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] -÷ 0061 ÷ 0027 × 2060 ÷ 003A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0061 ÷ 0027 × 2060 × 0308 ÷ 003A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0061 ÷ 0027 × 2060 ÷ 002C ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0061 ÷ 0027 × 2060 × 0308 ÷ 002C ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0061 ÷ 0027 × 2060 ÷ 002E ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] -÷ 0061 ÷ 0027 × 2060 × 0308 ÷ 002E ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] -÷ 0061 ÷ 0027 × 2060 ÷ 0030 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 0061 ÷ 0027 × 2060 × 0308 ÷ 0030 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 0061 ÷ 0027 × 2060 ÷ 005F ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] LOW LINE (ExtendNumLet) ÷ [0.3] -÷ 0061 ÷ 0027 × 2060 × 0308 ÷ 005F ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LOW LINE (ExtendNumLet) ÷ [0.3] -÷ 0061 ÷ 0027 × 2060 ÷ 1F1E6 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 0061 ÷ 0027 × 2060 × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 0061 × 0027 × 2060 × 05D0 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [6.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) × [7.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] -÷ 0061 × 0027 × 2060 × 0308 × 05D0 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [6.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [7.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] -÷ 0061 ÷ 0027 × 2060 ÷ 
0022 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] -÷ 0061 ÷ 0027 × 2060 × 0308 ÷ 0022 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] -÷ 0061 ÷ 0027 × 2060 ÷ 0027 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0061 ÷ 0027 × 2060 × 0308 ÷ 0027 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0061 ÷ 0027 × 2060 ÷ 231A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] -÷ 0061 ÷ 0027 × 2060 × 0308 ÷ 231A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] -÷ 0061 ÷ 0027 × 2060 ÷ 0020 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] -÷ 0061 ÷ 0027 × 2060 × 0308 ÷ 0020 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] -÷ 0061 ÷ 0027 × 2060 × 00AD ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 0061 ÷ 0027 × 2060 × 0308 × 00AD ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 0061 ÷ 0027 × 2060 × 0300 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 0061 ÷ 0027 × 2060 × 0308 × 0300 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 0061 ÷ 0027 × 2060 × 200D ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] -÷ 0061 ÷ 0027 × 2060 × 0308 × 200D ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] -÷ 0061 × 0027 × 2060 × 0061 × 2060 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [6.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) × [7.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0061 × 0027 × 2060 × 0308 × 0061 × 2060 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [6.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [7.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0061 × 0027 × 2060 × 0061 ÷ 003A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [6.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) × [7.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON 
(MidLetter) ÷ [0.3] -÷ 0061 × 0027 × 2060 × 0308 × 0061 ÷ 003A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [6.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [7.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0061 × 0027 × 2060 × 0061 ÷ 0027 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [6.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) × [7.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0061 × 0027 × 2060 × 0308 × 0061 ÷ 0027 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [6.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [7.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0061 × 0027 × 2060 × 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [6.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) × [7.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0061 × 0027 × 2060 × 0308 × 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [6.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [7.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0061 × 0027 × 2060 × 0061 ÷ 002C ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [6.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) × [7.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0061 × 0027 × 2060 × 0308 × 0061 ÷ 002C ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [6.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [7.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0061 ÷ 0027 × 2060 ÷ 0031 ÷ 003A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0061 ÷ 0027 × 2060 × 0308 ÷ 0031 ÷ 003A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0061 ÷ 0027 × 2060 ÷ 0031 ÷ 0027 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0061 ÷ 0027 × 2060 × 0308 ÷ 0031 ÷ 0027 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0061 ÷ 0027 × 2060 ÷ 0031 ÷ 002C ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0061 ÷ 0027 × 2060 × 0308 ÷ 0031 ÷ 002C ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0061 ÷ 0027 × 2060 ÷ 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0061 ÷ 0027 × 
2060 × 0308 ÷ 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0061 ÷ 002C ÷ 0001 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] (Other) ÷ [0.3] -÷ 0061 ÷ 002C × 0308 ÷ 0001 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] (Other) ÷ [0.3] -÷ 0061 ÷ 002C ÷ 000D ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [3.2] (CR) ÷ [0.3] -÷ 0061 ÷ 002C × 0308 ÷ 000D ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (CR) ÷ [0.3] -÷ 0061 ÷ 002C ÷ 000A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [3.2] (LF) ÷ [0.3] -÷ 0061 ÷ 002C × 0308 ÷ 000A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (LF) ÷ [0.3] -÷ 0061 ÷ 002C ÷ 000B ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [3.2] (Newline) ÷ [0.3] -÷ 0061 ÷ 002C × 0308 ÷ 000B ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (Newline) ÷ [0.3] -÷ 0061 ÷ 002C ÷ 3031 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] -÷ 0061 ÷ 002C × 0308 ÷ 3031 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] -÷ 0061 ÷ 002C ÷ 0041 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] -÷ 0061 ÷ 002C × 0308 ÷ 0041 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] -÷ 0061 ÷ 002C ÷ 003A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0061 ÷ 002C × 0308 ÷ 003A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0061 ÷ 002C ÷ 002C ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0061 ÷ 002C × 0308 ÷ 002C ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0061 ÷ 002C ÷ 002E ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] -÷ 0061 ÷ 002C × 0308 ÷ 002E ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] -÷ 0061 ÷ 002C ÷ 0030 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 0061 ÷ 002C × 0308 ÷ 0030 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 0061 ÷ 002C ÷ 005F ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] LOW LINE (ExtendNumLet) ÷ [0.3] -÷ 0061 ÷ 002C × 0308 ÷ 005F ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LOW LINE (ExtendNumLet) 
÷ [0.3] -÷ 0061 ÷ 002C ÷ 1F1E6 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 0061 ÷ 002C × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 0061 ÷ 002C ÷ 05D0 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] -÷ 0061 ÷ 002C × 0308 ÷ 05D0 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] -÷ 0061 ÷ 002C ÷ 0022 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] -÷ 0061 ÷ 002C × 0308 ÷ 0022 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] -÷ 0061 ÷ 002C ÷ 0027 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0061 ÷ 002C × 0308 ÷ 0027 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0061 ÷ 002C ÷ 231A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] -÷ 0061 ÷ 002C × 0308 ÷ 231A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] -÷ 0061 ÷ 002C ÷ 0020 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] -÷ 0061 ÷ 002C × 0308 ÷ 0020 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] -÷ 0061 ÷ 002C × 00AD ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 0061 ÷ 002C × 0308 × 00AD ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 0061 ÷ 002C × 0300 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 0061 ÷ 002C × 0308 × 0300 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 0061 ÷ 002C × 200D ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] -÷ 0061 ÷ 002C × 0308 × 200D ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] -÷ 0061 ÷ 002C ÷ 0061 × 2060 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0061 ÷ 002C × 0308 ÷ 0061 × 2060 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0061 ÷ 002C ÷ 0061 ÷ 003A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0061 ÷ 002C × 0308 ÷ 0061 ÷ 003A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) × 
[4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0061 ÷ 002C ÷ 0061 ÷ 0027 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0061 ÷ 002C × 0308 ÷ 0061 ÷ 0027 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0061 ÷ 002C ÷ 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0061 ÷ 002C × 0308 ÷ 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0061 ÷ 002C ÷ 0061 ÷ 002C ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0061 ÷ 002C × 0308 ÷ 0061 ÷ 002C ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0061 ÷ 002C ÷ 0031 ÷ 003A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0061 ÷ 002C × 0308 ÷ 0031 ÷ 003A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0061 ÷ 002C ÷ 0031 ÷ 0027 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0061 ÷ 002C × 0308 ÷ 0031 ÷ 0027 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0061 ÷ 002C ÷ 0031 ÷ 002C ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0061 ÷ 002C × 0308 ÷ 0031 ÷ 002C ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0061 ÷ 002C ÷ 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0061 ÷ 002C × 0308 ÷ 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0031 ÷ 003A ÷ 0001 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [999.0] (Other) ÷ [0.3] -÷ 0031 ÷ 003A × 0308 ÷ 0001 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] (Other) ÷ [0.3] -÷ 0031 ÷ 003A ÷ 000D ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [3.2] (CR) ÷ [0.3] -÷ 0031 ÷ 003A × 0308 ÷ 000D ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (CR) ÷ [0.3] -÷ 0031 ÷ 003A ÷ 000A ÷ # ÷ [0.2] DIGIT ONE 
(Numeric) ÷ [999.0] COLON (MidLetter) ÷ [3.2] (LF) ÷ [0.3] -÷ 0031 ÷ 003A × 0308 ÷ 000A ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (LF) ÷ [0.3] -÷ 0031 ÷ 003A ÷ 000B ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [3.2] (Newline) ÷ [0.3] -÷ 0031 ÷ 003A × 0308 ÷ 000B ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (Newline) ÷ [0.3] -÷ 0031 ÷ 003A ÷ 3031 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] -÷ 0031 ÷ 003A × 0308 ÷ 3031 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] -÷ 0031 ÷ 003A ÷ 0041 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [999.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] -÷ 0031 ÷ 003A × 0308 ÷ 0041 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] -÷ 0031 ÷ 003A ÷ 003A ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0031 ÷ 003A × 0308 ÷ 003A ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0031 ÷ 003A ÷ 002C ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0031 ÷ 003A × 0308 ÷ 002C ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0031 ÷ 003A ÷ 002E ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] -÷ 0031 ÷ 003A × 0308 ÷ 002E ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] -÷ 0031 ÷ 003A ÷ 0030 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [999.0] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 0031 ÷ 003A × 0308 ÷ 0030 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 0031 ÷ 003A ÷ 005F ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [999.0] LOW LINE (ExtendNumLet) ÷ [0.3] -÷ 0031 ÷ 003A × 0308 ÷ 005F ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LOW LINE (ExtendNumLet) ÷ [0.3] -÷ 0031 ÷ 003A ÷ 1F1E6 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 0031 ÷ 003A × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 0031 ÷ 003A ÷ 05D0 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [999.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] -÷ 0031 ÷ 003A × 0308 ÷ 05D0 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] -÷ 0031 ÷ 003A ÷ 0022 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] -÷ 0031 ÷ 003A × 0308 ÷ 0022 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] -÷ 0031 ÷ 003A ÷ 0027 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ 
[999.0] COLON (MidLetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0031 ÷ 003A × 0308 ÷ 0027 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0031 ÷ 003A ÷ 231A ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] -÷ 0031 ÷ 003A × 0308 ÷ 231A ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] -÷ 0031 ÷ 003A ÷ 0020 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] -÷ 0031 ÷ 003A × 0308 ÷ 0020 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] -÷ 0031 ÷ 003A × 00AD ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 0031 ÷ 003A × 0308 × 00AD ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 0031 ÷ 003A × 0300 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 0031 ÷ 003A × 0308 × 0300 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 0031 ÷ 003A × 200D ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] -÷ 0031 ÷ 003A × 0308 × 200D ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] -÷ 0031 ÷ 003A ÷ 0061 × 2060 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [999.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0031 ÷ 003A × 0308 ÷ 0061 × 2060 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0031 ÷ 003A ÷ 0061 ÷ 003A ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0031 ÷ 003A × 0308 ÷ 0061 ÷ 003A ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0031 ÷ 003A ÷ 0061 ÷ 0027 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0031 ÷ 003A × 0308 ÷ 0061 ÷ 0027 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0031 ÷ 003A ÷ 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0031 ÷ 003A × 0308 ÷ 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0031 ÷ 003A ÷ 0061 ÷ 002C ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0031 ÷ 003A × 0308 ÷ 0061 ÷ 002C ÷ # ÷ [0.2] 
DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0031 ÷ 003A ÷ 0031 ÷ 003A ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0031 ÷ 003A × 0308 ÷ 0031 ÷ 003A ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0031 ÷ 003A ÷ 0031 ÷ 0027 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0031 ÷ 003A × 0308 ÷ 0031 ÷ 0027 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0031 ÷ 003A ÷ 0031 ÷ 002C ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0031 ÷ 003A × 0308 ÷ 0031 ÷ 002C ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0031 ÷ 003A ÷ 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0031 ÷ 003A × 0308 ÷ 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0031 ÷ 0027 ÷ 0001 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [999.0] (Other) ÷ [0.3] -÷ 0031 ÷ 0027 × 0308 ÷ 0001 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] (Other) ÷ [0.3] -÷ 0031 ÷ 0027 ÷ 000D ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [3.2] (CR) ÷ [0.3] -÷ 0031 ÷ 0027 × 0308 ÷ 000D ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (CR) ÷ [0.3] -÷ 0031 ÷ 0027 ÷ 000A ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [3.2] (LF) ÷ [0.3] -÷ 0031 ÷ 0027 × 0308 ÷ 000A ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (LF) ÷ [0.3] -÷ 0031 ÷ 0027 ÷ 000B ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [3.2] (Newline) ÷ [0.3] -÷ 0031 ÷ 0027 × 0308 ÷ 000B ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (Newline) ÷ [0.3] -÷ 0031 ÷ 0027 ÷ 3031 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] -÷ 0031 ÷ 0027 × 0308 ÷ 3031 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] -÷ 0031 ÷ 0027 ÷ 0041 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [999.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] -÷ 0031 ÷ 0027 × 0308 ÷ 0041 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] -÷ 0031 ÷ 0027 ÷ 003A ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [999.0] COLON 
(MidLetter) ÷ [0.3] -÷ 0031 ÷ 0027 × 0308 ÷ 003A ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0031 ÷ 0027 ÷ 002C ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0031 ÷ 0027 × 0308 ÷ 002C ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0031 ÷ 0027 ÷ 002E ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] -÷ 0031 ÷ 0027 × 0308 ÷ 002E ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] -÷ 0031 × 0027 × 0030 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [12.0] APOSTROPHE (Single_Quote) × [11.0] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 0031 × 0027 × 0308 × 0030 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [12.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) × [11.0] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 0031 ÷ 0027 ÷ 005F ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [999.0] LOW LINE (ExtendNumLet) ÷ [0.3] -÷ 0031 ÷ 0027 × 0308 ÷ 005F ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LOW LINE (ExtendNumLet) ÷ [0.3] -÷ 0031 ÷ 0027 ÷ 1F1E6 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 0031 ÷ 0027 × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 0031 ÷ 0027 ÷ 05D0 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [999.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] -÷ 0031 ÷ 0027 × 0308 ÷ 05D0 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] -÷ 0031 ÷ 0027 ÷ 0022 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] -÷ 0031 ÷ 0027 × 0308 ÷ 0022 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] -÷ 0031 ÷ 0027 ÷ 0027 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0031 ÷ 0027 × 0308 ÷ 0027 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0031 ÷ 0027 ÷ 231A ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] -÷ 0031 ÷ 0027 × 0308 ÷ 231A ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] -÷ 0031 ÷ 0027 ÷ 0020 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] -÷ 0031 ÷ 0027 × 0308 ÷ 0020 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] -÷ 0031 ÷ 0027 × 00AD ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 0031 ÷ 0027 × 0308 × 00AD ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] 
COMBINING DIAERESIS (Extend_FE) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 0031 ÷ 0027 × 0300 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 0031 ÷ 0027 × 0308 × 0300 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 0031 ÷ 0027 × 200D ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] -÷ 0031 ÷ 0027 × 0308 × 200D ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] -÷ 0031 ÷ 0027 ÷ 0061 × 2060 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [999.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0031 ÷ 0027 × 0308 ÷ 0061 × 2060 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0031 ÷ 0027 ÷ 0061 ÷ 003A ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0031 ÷ 0027 × 0308 ÷ 0061 ÷ 003A ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0031 ÷ 0027 ÷ 0061 ÷ 0027 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0031 ÷ 0027 × 0308 ÷ 0061 ÷ 0027 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0031 ÷ 0027 ÷ 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0031 ÷ 0027 × 0308 ÷ 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0031 ÷ 0027 ÷ 0061 ÷ 002C ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0031 ÷ 0027 × 0308 ÷ 0061 ÷ 002C ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0031 × 0027 × 0031 ÷ 003A ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [12.0] APOSTROPHE (Single_Quote) × [11.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0031 × 0027 × 0308 × 0031 ÷ 003A ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [12.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) × [11.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0031 × 0027 × 0031 ÷ 0027 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [12.0] APOSTROPHE (Single_Quote) × [11.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0031 × 0027 × 0308 × 0031 ÷ 0027 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [12.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) × [11.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] 
-÷ 0031 × 0027 × 0031 ÷ 002C ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [12.0] APOSTROPHE (Single_Quote) × [11.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0031 × 0027 × 0308 × 0031 ÷ 002C ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [12.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) × [11.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0031 × 0027 × 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [12.0] APOSTROPHE (Single_Quote) × [11.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0031 × 0027 × 0308 × 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [12.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) × [11.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0031 ÷ 002C ÷ 0001 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [999.0] (Other) ÷ [0.3] -÷ 0031 ÷ 002C × 0308 ÷ 0001 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] (Other) ÷ [0.3] -÷ 0031 ÷ 002C ÷ 000D ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [3.2] (CR) ÷ [0.3] -÷ 0031 ÷ 002C × 0308 ÷ 000D ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (CR) ÷ [0.3] -÷ 0031 ÷ 002C ÷ 000A ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [3.2] (LF) ÷ [0.3] -÷ 0031 ÷ 002C × 0308 ÷ 000A ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (LF) ÷ [0.3] -÷ 0031 ÷ 002C ÷ 000B ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [3.2] (Newline) ÷ [0.3] -÷ 0031 ÷ 002C × 0308 ÷ 000B ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (Newline) ÷ [0.3] -÷ 0031 ÷ 002C ÷ 3031 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] -÷ 0031 ÷ 002C × 0308 ÷ 3031 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] -÷ 0031 ÷ 002C ÷ 0041 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [999.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] -÷ 0031 ÷ 002C × 0308 ÷ 0041 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] -÷ 0031 ÷ 002C ÷ 003A ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0031 ÷ 002C × 0308 ÷ 003A ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0031 ÷ 002C ÷ 002C ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0031 ÷ 002C × 0308 ÷ 002C ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0031 ÷ 002C ÷ 002E ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] -÷ 0031 ÷ 002C × 0308 ÷ 002E ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] -÷ 0031 × 002C × 0030 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [12.0] COMMA (MidNum) × [11.0] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 0031 × 002C × 0308 × 0030 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [12.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) × [11.0] DIGIT ZERO (Numeric) ÷ 
[0.3] -÷ 0031 ÷ 002C ÷ 005F ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [999.0] LOW LINE (ExtendNumLet) ÷ [0.3] -÷ 0031 ÷ 002C × 0308 ÷ 005F ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LOW LINE (ExtendNumLet) ÷ [0.3] -÷ 0031 ÷ 002C ÷ 1F1E6 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 0031 ÷ 002C × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 0031 ÷ 002C ÷ 05D0 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [999.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] -÷ 0031 ÷ 002C × 0308 ÷ 05D0 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] -÷ 0031 ÷ 002C ÷ 0022 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] -÷ 0031 ÷ 002C × 0308 ÷ 0022 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] -÷ 0031 ÷ 002C ÷ 0027 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0031 ÷ 002C × 0308 ÷ 0027 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0031 ÷ 002C ÷ 231A ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] -÷ 0031 ÷ 002C × 0308 ÷ 231A ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] -÷ 0031 ÷ 002C ÷ 0020 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] -÷ 0031 ÷ 002C × 0308 ÷ 0020 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] -÷ 0031 ÷ 002C × 00AD ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 0031 ÷ 002C × 0308 × 00AD ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 0031 ÷ 002C × 0300 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 0031 ÷ 002C × 0308 × 0300 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 0031 ÷ 002C × 200D ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] -÷ 0031 ÷ 002C × 0308 × 200D ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] -÷ 0031 ÷ 002C ÷ 0061 × 2060 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [999.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0031 ÷ 002C × 0308 ÷ 0061 × 2060 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0031 ÷ 002C ÷ 0061 ÷ 003A ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0031 ÷ 002C × 0308 ÷ 0061 ÷ 003A ÷ # ÷ [0.2] DIGIT 
ONE (Numeric) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0031 ÷ 002C ÷ 0061 ÷ 0027 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0031 ÷ 002C × 0308 ÷ 0061 ÷ 0027 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0031 ÷ 002C ÷ 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0031 ÷ 002C × 0308 ÷ 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0031 ÷ 002C ÷ 0061 ÷ 002C ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0031 ÷ 002C × 0308 ÷ 0061 ÷ 002C ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0031 × 002C × 0031 ÷ 003A ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [12.0] COMMA (MidNum) × [11.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0031 × 002C × 0308 × 0031 ÷ 003A ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [12.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) × [11.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0031 × 002C × 0031 ÷ 0027 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [12.0] COMMA (MidNum) × [11.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0031 × 002C × 0308 × 0031 ÷ 0027 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [12.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) × [11.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0031 × 002C × 0031 ÷ 002C ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [12.0] COMMA (MidNum) × [11.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0031 × 002C × 0308 × 0031 ÷ 002C ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [12.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) × [11.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0031 × 002C × 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [12.0] COMMA (MidNum) × [11.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0031 × 002C × 0308 × 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [12.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) × [11.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0031 ÷ 002E × 2060 ÷ 0001 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] (Other) ÷ [0.3] -÷ 0031 ÷ 002E × 2060 × 0308 ÷ 0001 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] (Other) ÷ [0.3] -÷ 0031 ÷ 002E × 2060 ÷ 000D ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [3.2] (CR) ÷ [0.3] -÷ 0031 ÷ 002E × 2060 × 0308 ÷ 000D ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (CR) ÷ [0.3] -÷ 
0031 ÷ 002E × 2060 ÷ 000A ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [3.2] (LF) ÷ [0.3] -÷ 0031 ÷ 002E × 2060 × 0308 ÷ 000A ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (LF) ÷ [0.3] -÷ 0031 ÷ 002E × 2060 ÷ 000B ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [3.2] (Newline) ÷ [0.3] -÷ 0031 ÷ 002E × 2060 × 0308 ÷ 000B ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (Newline) ÷ [0.3] -÷ 0031 ÷ 002E × 2060 ÷ 3031 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] -÷ 0031 ÷ 002E × 2060 × 0308 ÷ 3031 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] -÷ 0031 ÷ 002E × 2060 ÷ 0041 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] -÷ 0031 ÷ 002E × 2060 × 0308 ÷ 0041 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] -÷ 0031 ÷ 002E × 2060 ÷ 003A ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0031 ÷ 002E × 2060 × 0308 ÷ 003A ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0031 ÷ 002E × 2060 ÷ 002C ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0031 ÷ 002E × 2060 × 0308 ÷ 002C ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0031 ÷ 002E × 2060 ÷ 002E ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] -÷ 0031 ÷ 002E × 2060 × 0308 ÷ 002E ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] -÷ 0031 × 002E × 2060 × 0030 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [12.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) × [11.0] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 0031 × 002E × 2060 × 0308 × 0030 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [12.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [11.0] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 0031 ÷ 002E × 2060 ÷ 005F ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] LOW LINE (ExtendNumLet) ÷ [0.3] -÷ 0031 ÷ 002E × 2060 × 0308 ÷ 005F ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LOW LINE (ExtendNumLet) ÷ [0.3] -÷ 0031 ÷ 002E × 2060 ÷ 1F1E6 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 0031 ÷ 002E × 
2060 × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] -÷ 0031 ÷ 002E × 2060 ÷ 05D0 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] -÷ 0031 ÷ 002E × 2060 × 0308 ÷ 05D0 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] -÷ 0031 ÷ 002E × 2060 ÷ 0022 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] -÷ 0031 ÷ 002E × 2060 × 0308 ÷ 0022 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] -÷ 0031 ÷ 002E × 2060 ÷ 0027 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0031 ÷ 002E × 2060 × 0308 ÷ 0027 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0031 ÷ 002E × 2060 ÷ 231A ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] -÷ 0031 ÷ 002E × 2060 × 0308 ÷ 231A ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] -÷ 0031 ÷ 002E × 2060 ÷ 0020 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] -÷ 0031 ÷ 002E × 2060 × 0308 ÷ 0020 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] -÷ 0031 ÷ 002E × 2060 × 00AD ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 0031 ÷ 002E × 2060 × 0308 × 00AD ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] -÷ 0031 ÷ 002E × 2060 × 0300 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 0031 ÷ 002E × 2060 × 0308 × 0300 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] -÷ 0031 ÷ 002E × 2060 × 200D ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] -÷ 0031 ÷ 002E × 2060 × 0308 × 200D ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] -÷ 0031 ÷ 002E × 2060 ÷ 0061 × 2060 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0031 ÷ 002E × 2060 × 0308 ÷ 0061 × 2060 ÷ # ÷ 
[0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0031 ÷ 002E × 2060 ÷ 0061 ÷ 003A ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0031 ÷ 002E × 2060 × 0308 ÷ 0061 ÷ 003A ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0031 ÷ 002E × 2060 ÷ 0061 ÷ 0027 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0031 ÷ 002E × 2060 × 0308 ÷ 0061 ÷ 0027 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0031 ÷ 002E × 2060 ÷ 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0031 ÷ 002E × 2060 × 0308 ÷ 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0031 ÷ 002E × 2060 ÷ 0061 ÷ 002C ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0031 ÷ 002E × 2060 × 0308 ÷ 0061 ÷ 002C ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0031 × 002E × 2060 × 0031 ÷ 003A ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [12.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) × [11.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0031 × 002E × 2060 × 0308 × 0031 ÷ 003A ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [12.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [11.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] -÷ 0031 × 002E × 2060 × 0031 ÷ 0027 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [12.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) × [11.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0031 × 002E × 2060 × 0308 × 0031 ÷ 0027 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [12.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [11.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 0031 × 002E × 2060 × 0031 ÷ 002C ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [12.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) × [11.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0031 × 002E × 2060 × 0308 × 0031 ÷ 002C ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [12.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [11.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] -÷ 0031 × 002E × 2060 × 0031 ÷ 002E × 2060 ÷ # ÷ 
[0.2] DIGIT ONE (Numeric) × [12.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) × [11.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 0031 × 002E × 2060 × 0308 × 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [12.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [11.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] -÷ 000D × 000A ÷ 0061 ÷ 000A ÷ 0308 ÷ # ÷ [0.2] (CR) × [3.0] (LF) ÷ [3.1] LATIN SMALL LETTER A (ALetter) ÷ [3.2] (LF) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [0.3] -÷ 0061 × 0308 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [0.3] -÷ 0020 × 200D ÷ 0646 ÷ # ÷ [0.2] SPACE (WSegSpace) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [999.0] ARABIC LETTER NOON (ALetter) ÷ [0.3] -÷ 0646 × 200D ÷ 0020 ÷ # ÷ [0.2] ARABIC LETTER NOON (ALetter) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] -÷ 0041 × 0041 × 0041 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [5.0] LATIN CAPITAL LETTER A (ALetter) × [5.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] -÷ 0041 × 003A × 0041 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [6.0] COLON (MidLetter) × [7.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] -÷ 0041 ÷ 003A ÷ 003A ÷ 0041 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] -÷ 05D0 × 0027 ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [7.1] APOSTROPHE (Single_Quote) ÷ [0.3] -÷ 05D0 × 0022 × 05D0 ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [7.2] QUOTATION MARK (Double_Quote) × [7.3] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] -÷ 0041 × 0030 × 0030 × 0041 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [9.0] DIGIT ZERO (Numeric) × [8.0] DIGIT ZERO (Numeric) × [10.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] -÷ 0030 × 002C × 0030 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [12.0] COMMA (MidNum) × [11.0] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 0030 ÷ 002C ÷ 002C ÷ 0030 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [999.0] COMMA (MidNum) ÷ [999.0] DIGIT ZERO (Numeric) ÷ [0.3] -÷ 3031 × 3031 ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) × [13.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] -÷ 0041 × 005F × 0030 × 005F × 3031 × 005F ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [13.1] LOW LINE (ExtendNumLet) × [13.2] DIGIT ZERO (Numeric) × [13.1] LOW LINE (ExtendNumLet) × [13.2] VERTICAL KANA REPEAT MARK (Katakana) × [13.1] LOW LINE (ExtendNumLet) ÷ [0.3] -÷ 0041 × 005F × 005F × 0041 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [13.1] LOW LINE (ExtendNumLet) × [13.1] LOW LINE (ExtendNumLet) × [13.2] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] -÷ 1F1E6 × 1F1E7 ÷ 1F1E8 ÷ 0062 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [15.0] REGIONAL INDICATOR SYMBOL LETTER B (RI) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER C (RI) ÷ [999.0] LATIN SMALL LETTER B (ALetter) ÷ [0.3] -÷ 0061 ÷ 1F1E6 × 1F1E7 ÷ 1F1E8 ÷ 0062 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [16.0] REGIONAL INDICATOR SYMBOL LETTER B (RI) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER C (RI) ÷ [999.0] LATIN SMALL LETTER B (ALetter) ÷ [0.3] -÷ 0061 ÷ 1F1E6 × 1F1E7 × 200D ÷ 1F1E8 ÷ 0062 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [16.0] REGIONAL INDICATOR SYMBOL LETTER B (RI) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [999.0] REGIONAL INDICATOR 
SYMBOL LETTER C (RI) ÷ [999.0] LATIN SMALL LETTER B (ALetter) ÷ [0.3] -÷ 0061 ÷ 1F1E6 × 200D × 1F1E7 ÷ 1F1E8 ÷ 0062 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) × [16.0] REGIONAL INDICATOR SYMBOL LETTER B (RI) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER C (RI) ÷ [999.0] LATIN SMALL LETTER B (ALetter) ÷ [0.3] -÷ 0061 ÷ 1F1E6 × 1F1E7 ÷ 1F1E8 × 1F1E9 ÷ 0062 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [16.0] REGIONAL INDICATOR SYMBOL LETTER B (RI) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER C (RI) × [16.0] REGIONAL INDICATOR SYMBOL LETTER D (RI) ÷ [999.0] LATIN SMALL LETTER B (ALetter) ÷ [0.3] -÷ 1F476 × 1F3FF ÷ 1F476 ÷ # ÷ [0.2] BABY (ExtPict) × [4.0] EMOJI MODIFIER FITZPATRICK TYPE-6 (Extend_FE) ÷ [999.0] BABY (ExtPict) ÷ [0.3] -÷ 1F6D1 × 200D × 1F6D1 ÷ # ÷ [0.2] OCTAGONAL SIGN (ExtPict) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) × [3.3] OCTAGONAL SIGN (ExtPict) ÷ [0.3] -÷ 0061 × 200D × 1F6D1 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) × [3.3] OCTAGONAL SIGN (ExtPict) ÷ [0.3] -÷ 2701 × 200D × 2701 ÷ # ÷ [0.2] UPPER BLADE SCISSORS (Other) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) × [3.3] UPPER BLADE SCISSORS (Other) ÷ [0.3] -÷ 0061 × 200D × 2701 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) × [3.3] UPPER BLADE SCISSORS (Other) ÷ [0.3] -÷ 1F476 × 1F3FF × 0308 × 200D × 1F476 × 1F3FF ÷ # ÷ [0.2] BABY (ExtPict) × [4.0] EMOJI MODIFIER FITZPATRICK TYPE-6 (Extend_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) × [3.3] BABY (ExtPict) × [4.0] EMOJI MODIFIER FITZPATRICK TYPE-6 (Extend_FE) ÷ [0.3] -÷ 1F6D1 × 1F3FF ÷ # ÷ [0.2] OCTAGONAL SIGN (ExtPict) × [4.0] EMOJI MODIFIER FITZPATRICK TYPE-6 (Extend_FE) ÷ [0.3] -÷ 200D × 1F6D1 × 1F3FF ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) × [3.3] OCTAGONAL SIGN (ExtPict) × [4.0] EMOJI MODIFIER FITZPATRICK TYPE-6 (Extend_FE) ÷ [0.3] -÷ 200D × 1F6D1 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) × [3.3] OCTAGONAL SIGN (ExtPict) ÷ [0.3] -÷ 200D × 1F6D1 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) × [3.3] OCTAGONAL SIGN (ExtPict) ÷ [0.3] -÷ 1F6D1 ÷ 1F6D1 ÷ # ÷ [0.2] OCTAGONAL SIGN (ExtPict) ÷ [999.0] OCTAGONAL SIGN (ExtPict) ÷ [0.3] -÷ 0061 × 0308 × 200D × 0308 × 0062 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [5.0] LATIN SMALL LETTER B (ALetter) ÷ [0.3] -÷ 0061 ÷ 0020 × 0020 ÷ 0062 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] SPACE (WSegSpace) × [3.4] SPACE (WSegSpace) ÷ [999.0] LATIN SMALL LETTER B (ALetter) ÷ [0.3] -÷ 0031 ÷ 003A ÷ 003A ÷ 0031 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] -÷ 0031 × 005F × 0031 ÷ 003A ÷ 003A ÷ 0031 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [13.1] LOW LINE (ExtendNumLet) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] -÷ 0031 × 005F × 0061 ÷ 003A ÷ 003A ÷ 0031 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [13.1] LOW LINE (ExtendNumLet) × [13.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] -÷ 0031 ÷ 003A ÷ 003A ÷ 0061 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] -÷ 0031 × 005F × 0031 ÷ 003A ÷ 003A ÷ 0061 ÷ # ÷ [0.2] 
DIGIT ONE (Numeric) × [13.1] LOW LINE (ExtendNumLet) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] -÷ 0031 × 005F × 0061 ÷ 003A ÷ 003A ÷ 0061 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [13.1] LOW LINE (ExtendNumLet) × [13.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] -÷ 0031 ÷ 003A ÷ 002E ÷ 0031 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] -÷ 0031 × 005F × 0031 ÷ 003A ÷ 002E ÷ 0031 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [13.1] LOW LINE (ExtendNumLet) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] -÷ 0031 × 005F × 0061 ÷ 003A ÷ 002E ÷ 0031 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [13.1] LOW LINE (ExtendNumLet) × [13.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] -÷ 0031 ÷ 003A ÷ 002E ÷ 0061 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] -÷ 0031 × 005F × 0031 ÷ 003A ÷ 002E ÷ 0061 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [13.1] LOW LINE (ExtendNumLet) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] -÷ 0031 × 005F × 0061 ÷ 003A ÷ 002E ÷ 0061 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [13.1] LOW LINE (ExtendNumLet) × [13.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] -÷ 0031 ÷ 003A ÷ 002C ÷ 0031 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] -÷ 0031 × 005F × 0031 ÷ 003A ÷ 002C ÷ 0031 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [13.1] LOW LINE (ExtendNumLet) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] -÷ 0031 × 005F × 0061 ÷ 003A ÷ 002C ÷ 0031 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [13.1] LOW LINE (ExtendNumLet) × [13.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] -÷ 0031 ÷ 003A ÷ 002C ÷ 0061 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] -÷ 0031 × 005F × 0031 ÷ 003A ÷ 002C ÷ 0061 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [13.1] LOW LINE (ExtendNumLet) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] -÷ 0031 × 005F × 0061 ÷ 003A ÷ 002C ÷ 0061 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [13.1] LOW LINE (ExtendNumLet) × [13.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] -÷ 0031 ÷ 002E ÷ 003A ÷ 0031 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] COLON (MidLetter) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] -÷ 0031 × 005F × 0031 ÷ 002E ÷ 003A ÷ 0031 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [13.1] LOW LINE (ExtendNumLet) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] COLON (MidLetter) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] -÷ 0031 × 005F × 0061 ÷ 002E ÷ 003A ÷ 0031 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [13.1] LOW LINE (ExtendNumLet) × [13.2] LATIN SMALL LETTER 
A (ALetter) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] COLON (MidLetter) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] -÷ 0031 ÷ 002E ÷ 003A ÷ 0061 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] COLON (MidLetter) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] -÷ 0031 × 005F × 0031 ÷ 002E ÷ 003A ÷ 0061 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [13.1] LOW LINE (ExtendNumLet) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] COLON (MidLetter) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] -÷ 0031 × 005F × 0061 ÷ 002E ÷ 003A ÷ 0061 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [13.1] LOW LINE (ExtendNumLet) × [13.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] COLON (MidLetter) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] -÷ 0031 ÷ 002E ÷ 002E ÷ 0031 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] -÷ 0031 × 005F × 0031 ÷ 002E ÷ 002E ÷ 0031 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [13.1] LOW LINE (ExtendNumLet) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] -÷ 0031 × 005F × 0061 ÷ 002E ÷ 002E ÷ 0031 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [13.1] LOW LINE (ExtendNumLet) × [13.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] -÷ 0031 ÷ 002E ÷ 002E ÷ 0061 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] -÷ 0031 × 005F × 0031 ÷ 002E ÷ 002E ÷ 0061 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [13.1] LOW LINE (ExtendNumLet) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] -÷ 0031 × 005F × 0061 ÷ 002E ÷ 002E ÷ 0061 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [13.1] LOW LINE (ExtendNumLet) × [13.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] -÷ 0031 ÷ 002E ÷ 002C ÷ 0031 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] COMMA (MidNum) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] -÷ 0031 × 005F × 0031 ÷ 002E ÷ 002C ÷ 0031 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [13.1] LOW LINE (ExtendNumLet) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] COMMA (MidNum) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] -÷ 0031 × 005F × 0061 ÷ 002E ÷ 002C ÷ 0031 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [13.1] LOW LINE (ExtendNumLet) × [13.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] COMMA (MidNum) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] -÷ 0031 ÷ 002E ÷ 002C ÷ 0061 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] COMMA (MidNum) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] -÷ 0031 × 005F × 0031 ÷ 002E ÷ 002C ÷ 0061 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [13.1] LOW LINE (ExtendNumLet) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] COMMA (MidNum) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] -÷ 0031 × 005F × 0061 ÷ 002E ÷ 002C ÷ 0061 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [13.1] LOW LINE (ExtendNumLet) × [13.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] COMMA (MidNum) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] -÷ 0031 ÷ 002C ÷ 003A ÷ 0031 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [999.0] COLON (MidLetter) ÷ [999.0] DIGIT ONE 
(Numeric) ÷ [0.3] -÷ 0031 × 005F × 0031 ÷ 002C ÷ 003A ÷ 0031 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [13.1] LOW LINE (ExtendNumLet) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [999.0] COLON (MidLetter) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] -÷ 0031 × 005F × 0061 ÷ 002C ÷ 003A ÷ 0031 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [13.1] LOW LINE (ExtendNumLet) × [13.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] COLON (MidLetter) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] -÷ 0031 ÷ 002C ÷ 003A ÷ 0061 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [999.0] COLON (MidLetter) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] -÷ 0031 × 005F × 0031 ÷ 002C ÷ 003A ÷ 0061 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [13.1] LOW LINE (ExtendNumLet) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [999.0] COLON (MidLetter) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] -÷ 0031 × 005F × 0061 ÷ 002C ÷ 003A ÷ 0061 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [13.1] LOW LINE (ExtendNumLet) × [13.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] COLON (MidLetter) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] -÷ 0031 ÷ 002C ÷ 002E ÷ 0031 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] -÷ 0031 × 005F × 0031 ÷ 002C ÷ 002E ÷ 0031 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [13.1] LOW LINE (ExtendNumLet) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] -÷ 0031 × 005F × 0061 ÷ 002C ÷ 002E ÷ 0031 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [13.1] LOW LINE (ExtendNumLet) × [13.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] -÷ 0031 ÷ 002C ÷ 002E ÷ 0061 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] -÷ 0031 × 005F × 0031 ÷ 002C ÷ 002E ÷ 0061 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [13.1] LOW LINE (ExtendNumLet) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] -÷ 0031 × 005F × 0061 ÷ 002C ÷ 002E ÷ 0061 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [13.1] LOW LINE (ExtendNumLet) × [13.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] -÷ 0031 ÷ 002C ÷ 002C ÷ 0031 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [999.0] COMMA (MidNum) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] -÷ 0031 × 005F × 0031 ÷ 002C ÷ 002C ÷ 0031 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [13.1] LOW LINE (ExtendNumLet) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [999.0] COMMA (MidNum) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] -÷ 0031 × 005F × 0061 ÷ 002C ÷ 002C ÷ 0031 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [13.1] LOW LINE (ExtendNumLet) × [13.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] COMMA (MidNum) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] -÷ 0031 ÷ 002C ÷ 002C ÷ 0061 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [999.0] COMMA (MidNum) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] -÷ 0031 × 005F × 0031 ÷ 002C ÷ 002C ÷ 0061 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [13.1] LOW LINE (ExtendNumLet) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [999.0] COMMA (MidNum) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] -÷ 0031 × 005F × 0061 ÷ 002C ÷ 002C ÷ 0061 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [13.1] LOW LINE (ExtendNumLet) × [13.2] 
LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] COMMA (MidNum) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] -÷ 0061 ÷ 003A ÷ 003A ÷ 0031 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] -÷ 0061 × 005F × 0031 ÷ 003A ÷ 003A ÷ 0031 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [13.1] LOW LINE (ExtendNumLet) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] -÷ 0061 × 005F × 0061 ÷ 003A ÷ 003A ÷ 0031 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [13.1] LOW LINE (ExtendNumLet) × [13.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] -÷ 0061 ÷ 003A ÷ 003A ÷ 0061 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] -÷ 0061 × 005F × 0031 ÷ 003A ÷ 003A ÷ 0061 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [13.1] LOW LINE (ExtendNumLet) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] -÷ 0061 × 005F × 0061 ÷ 003A ÷ 003A ÷ 0061 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [13.1] LOW LINE (ExtendNumLet) × [13.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] -÷ 0061 ÷ 003A ÷ 002E ÷ 0031 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] -÷ 0061 × 005F × 0031 ÷ 003A ÷ 002E ÷ 0031 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [13.1] LOW LINE (ExtendNumLet) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] -÷ 0061 × 005F × 0061 ÷ 003A ÷ 002E ÷ 0031 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [13.1] LOW LINE (ExtendNumLet) × [13.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] -÷ 0061 ÷ 003A ÷ 002E ÷ 0061 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] -÷ 0061 × 005F × 0031 ÷ 003A ÷ 002E ÷ 0061 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [13.1] LOW LINE (ExtendNumLet) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] -÷ 0061 × 005F × 0061 ÷ 003A ÷ 002E ÷ 0061 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [13.1] LOW LINE (ExtendNumLet) × [13.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] -÷ 0061 ÷ 003A ÷ 002C ÷ 0031 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] -÷ 0061 × 005F × 0031 ÷ 003A ÷ 002C ÷ 0031 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [13.1] LOW LINE (ExtendNumLet) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] -÷ 0061 × 005F × 0061 ÷ 003A ÷ 002C ÷ 0031 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [13.1] LOW LINE (ExtendNumLet) × [13.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] -÷ 0061 ÷ 003A ÷ 002C ÷ 0061 ÷ # ÷ 
[0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] -÷ 0061 × 005F × 0031 ÷ 003A ÷ 002C ÷ 0061 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [13.1] LOW LINE (ExtendNumLet) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] -÷ 0061 × 005F × 0061 ÷ 003A ÷ 002C ÷ 0061 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [13.1] LOW LINE (ExtendNumLet) × [13.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] -÷ 0061 ÷ 002E ÷ 003A ÷ 0031 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] COLON (MidLetter) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] -÷ 0061 × 005F × 0031 ÷ 002E ÷ 003A ÷ 0031 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [13.1] LOW LINE (ExtendNumLet) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] COLON (MidLetter) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] -÷ 0061 × 005F × 0061 ÷ 002E ÷ 003A ÷ 0031 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [13.1] LOW LINE (ExtendNumLet) × [13.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] COLON (MidLetter) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] -÷ 0061 ÷ 002E ÷ 003A ÷ 0061 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] COLON (MidLetter) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] -÷ 0061 × 005F × 0031 ÷ 002E ÷ 003A ÷ 0061 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [13.1] LOW LINE (ExtendNumLet) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] COLON (MidLetter) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] -÷ 0061 × 005F × 0061 ÷ 002E ÷ 003A ÷ 0061 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [13.1] LOW LINE (ExtendNumLet) × [13.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] COLON (MidLetter) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] -÷ 0061 ÷ 002E ÷ 002E ÷ 0031 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] -÷ 0061 × 005F × 0031 ÷ 002E ÷ 002E ÷ 0031 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [13.1] LOW LINE (ExtendNumLet) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] -÷ 0061 × 005F × 0061 ÷ 002E ÷ 002E ÷ 0031 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [13.1] LOW LINE (ExtendNumLet) × [13.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] -÷ 0061 ÷ 002E ÷ 002E ÷ 0061 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] -÷ 0061 × 005F × 0031 ÷ 002E ÷ 002E ÷ 0061 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [13.1] LOW LINE (ExtendNumLet) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] -÷ 0061 × 005F × 0061 ÷ 002E ÷ 002E ÷ 0061 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [13.1] LOW LINE (ExtendNumLet) × [13.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] -÷ 0061 ÷ 002E ÷ 002C ÷ 0031 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] COMMA 
(MidNum) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] -÷ 0061 × 005F × 0031 ÷ 002E ÷ 002C ÷ 0031 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [13.1] LOW LINE (ExtendNumLet) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] COMMA (MidNum) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] -÷ 0061 × 005F × 0061 ÷ 002E ÷ 002C ÷ 0031 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [13.1] LOW LINE (ExtendNumLet) × [13.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] COMMA (MidNum) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] -÷ 0061 ÷ 002E ÷ 002C ÷ 0061 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] COMMA (MidNum) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] -÷ 0061 × 005F × 0031 ÷ 002E ÷ 002C ÷ 0061 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [13.1] LOW LINE (ExtendNumLet) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] COMMA (MidNum) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] -÷ 0061 × 005F × 0061 ÷ 002E ÷ 002C ÷ 0061 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [13.1] LOW LINE (ExtendNumLet) × [13.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] COMMA (MidNum) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] -÷ 0061 ÷ 002C ÷ 003A ÷ 0031 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] COLON (MidLetter) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] -÷ 0061 × 005F × 0031 ÷ 002C ÷ 003A ÷ 0031 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [13.1] LOW LINE (ExtendNumLet) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [999.0] COLON (MidLetter) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] -÷ 0061 × 005F × 0061 ÷ 002C ÷ 003A ÷ 0031 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [13.1] LOW LINE (ExtendNumLet) × [13.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] COLON (MidLetter) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] -÷ 0061 ÷ 002C ÷ 003A ÷ 0061 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] COLON (MidLetter) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] -÷ 0061 × 005F × 0031 ÷ 002C ÷ 003A ÷ 0061 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [13.1] LOW LINE (ExtendNumLet) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [999.0] COLON (MidLetter) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] -÷ 0061 × 005F × 0061 ÷ 002C ÷ 003A ÷ 0061 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [13.1] LOW LINE (ExtendNumLet) × [13.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] COLON (MidLetter) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] -÷ 0061 ÷ 002C ÷ 002E ÷ 0031 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] -÷ 0061 × 005F × 0031 ÷ 002C ÷ 002E ÷ 0031 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [13.1] LOW LINE (ExtendNumLet) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] -÷ 0061 × 005F × 0061 ÷ 002C ÷ 002E ÷ 0031 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [13.1] LOW LINE (ExtendNumLet) × [13.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] -÷ 0061 ÷ 002C ÷ 002E ÷ 0061 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] -÷ 0061 × 005F × 0031 ÷ 002C ÷ 002E ÷ 0061 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [13.1] LOW LINE (ExtendNumLet) × [13.2] DIGIT 
ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] -÷ 0061 × 005F × 0061 ÷ 002C ÷ 002E ÷ 0061 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [13.1] LOW LINE (ExtendNumLet) × [13.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] -÷ 0061 ÷ 002C ÷ 002C ÷ 0031 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] COMMA (MidNum) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] -÷ 0061 × 005F × 0031 ÷ 002C ÷ 002C ÷ 0031 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [13.1] LOW LINE (ExtendNumLet) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [999.0] COMMA (MidNum) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] -÷ 0061 × 005F × 0061 ÷ 002C ÷ 002C ÷ 0031 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [13.1] LOW LINE (ExtendNumLet) × [13.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] COMMA (MidNum) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] -÷ 0061 ÷ 002C ÷ 002C ÷ 0061 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] COMMA (MidNum) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] -÷ 0061 × 005F × 0031 ÷ 002C ÷ 002C ÷ 0061 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [13.1] LOW LINE (ExtendNumLet) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [999.0] COMMA (MidNum) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] -÷ 0061 × 005F × 0061 ÷ 002C ÷ 002C ÷ 0061 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [13.1] LOW LINE (ExtendNumLet) × [13.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] COMMA (MidNum) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] -# -# Lines: 1823 -# -# EOF Binary files /tmp/tmp96qid8wf/P2mKBDGjc4/cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/fsm/grapheme_break_fwd.bigendian.dfa and /tmp/tmp96qid8wf/3VMRcIQcnm/cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/fsm/grapheme_break_fwd.bigendian.dfa differ Binary files /tmp/tmp96qid8wf/P2mKBDGjc4/cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/fsm/grapheme_break_fwd.littleendian.dfa and /tmp/tmp96qid8wf/3VMRcIQcnm/cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/fsm/grapheme_break_fwd.littleendian.dfa differ diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/fsm/grapheme_break_fwd.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/fsm/grapheme_break_fwd.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/fsm/grapheme_break_fwd.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/fsm/grapheme_break_fwd.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,45 +0,0 @@ -// DO NOT EDIT THIS FILE. IT WAS AUTOMATICALLY GENERATED BY: -// -// ucd-generate dfa --name GRAPHEME_BREAK_FWD --sparse --minimize --anchored --state-size 2 src/unicode/fsm/ [snip (arg too long)] -// -// ucd-generate 0.2.9 is available on crates.io. - -#[cfg(target_endian = "big")] -lazy_static::lazy_static! { - pub static ref GRAPHEME_BREAK_FWD: ::regex_automata::SparseDFA<&'static [u8], u16> = { - #[repr(C)] - struct Aligned { - _align: [u8; 0], - bytes: B, - } - - static ALIGNED: &'static Aligned<[u8]> = &Aligned { - _align: [], - bytes: *include_bytes!("grapheme_break_fwd.bigendian.dfa"), - }; - - unsafe { - ::regex_automata::SparseDFA::from_bytes(&ALIGNED.bytes) - } - }; -} - -#[cfg(target_endian = "little")] -lazy_static::lazy_static! 
{ - pub static ref GRAPHEME_BREAK_FWD: ::regex_automata::SparseDFA<&'static [u8], u16> = { - #[repr(C)] - struct Aligned { - _align: [u8; 0], - bytes: B, - } - - static ALIGNED: &'static Aligned<[u8]> = &Aligned { - _align: [], - bytes: *include_bytes!("grapheme_break_fwd.littleendian.dfa"), - }; - - unsafe { - ::regex_automata::SparseDFA::from_bytes(&ALIGNED.bytes) - } - }; -} Binary files /tmp/tmp96qid8wf/P2mKBDGjc4/cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/fsm/grapheme_break_rev.bigendian.dfa and /tmp/tmp96qid8wf/3VMRcIQcnm/cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/fsm/grapheme_break_rev.bigendian.dfa differ Binary files /tmp/tmp96qid8wf/P2mKBDGjc4/cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/fsm/grapheme_break_rev.littleendian.dfa and /tmp/tmp96qid8wf/3VMRcIQcnm/cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/fsm/grapheme_break_rev.littleendian.dfa differ diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/fsm/grapheme_break_rev.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/fsm/grapheme_break_rev.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/fsm/grapheme_break_rev.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/fsm/grapheme_break_rev.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,45 +0,0 @@ -// DO NOT EDIT THIS FILE. IT WAS AUTOMATICALLY GENERATED BY: -// -// ucd-generate dfa --name GRAPHEME_BREAK_REV --reverse --longest --sparse --minimize --anchored --state-size 2 src/unicode/fsm/ [snip (arg too long)] -// -// ucd-generate 0.2.9 is available on crates.io. - -#[cfg(target_endian = "big")] -lazy_static::lazy_static! { - pub static ref GRAPHEME_BREAK_REV: ::regex_automata::SparseDFA<&'static [u8], u16> = { - #[repr(C)] - struct Aligned { - _align: [u8; 0], - bytes: B, - } - - static ALIGNED: &'static Aligned<[u8]> = &Aligned { - _align: [], - bytes: *include_bytes!("grapheme_break_rev.bigendian.dfa"), - }; - - unsafe { - ::regex_automata::SparseDFA::from_bytes(&ALIGNED.bytes) - } - }; -} - -#[cfg(target_endian = "little")] -lazy_static::lazy_static! 
{ - pub static ref GRAPHEME_BREAK_REV: ::regex_automata::SparseDFA<&'static [u8], u16> = { - #[repr(C)] - struct Aligned { - _align: [u8; 0], - bytes: B, - } - - static ALIGNED: &'static Aligned<[u8]> = &Aligned { - _align: [], - bytes: *include_bytes!("grapheme_break_rev.littleendian.dfa"), - }; - - unsafe { - ::regex_automata::SparseDFA::from_bytes(&ALIGNED.bytes) - } - }; -} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/fsm/mod.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/fsm/mod.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/fsm/mod.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/fsm/mod.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,8 +0,0 @@ -pub mod grapheme_break_fwd; -pub mod grapheme_break_rev; -pub mod regional_indicator_rev; -pub mod sentence_break_fwd; -pub mod simple_word_fwd; -pub mod whitespace_anchored_fwd; -pub mod whitespace_anchored_rev; -pub mod word_break_fwd; Binary files /tmp/tmp96qid8wf/P2mKBDGjc4/cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/fsm/regional_indicator_rev.bigendian.dfa and /tmp/tmp96qid8wf/3VMRcIQcnm/cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/fsm/regional_indicator_rev.bigendian.dfa differ Binary files /tmp/tmp96qid8wf/P2mKBDGjc4/cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/fsm/regional_indicator_rev.littleendian.dfa and /tmp/tmp96qid8wf/3VMRcIQcnm/cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/fsm/regional_indicator_rev.littleendian.dfa differ diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/fsm/regional_indicator_rev.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/fsm/regional_indicator_rev.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/fsm/regional_indicator_rev.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/fsm/regional_indicator_rev.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,45 +0,0 @@ -// DO NOT EDIT THIS FILE. IT WAS AUTOMATICALLY GENERATED BY: -// -// ucd-generate dfa --name REGIONAL_INDICATOR_REV --reverse --classes --minimize --anchored --premultiply --state-size 1 src/unicode/fsm/ \p{gcb=Regional_Indicator} -// -// ucd-generate 0.2.9 is available on crates.io. - -#[cfg(target_endian = "big")] -lazy_static::lazy_static! { - pub static ref REGIONAL_INDICATOR_REV: ::regex_automata::DenseDFA<&'static [u8], u8> = { - #[repr(C)] - struct Aligned { - _align: [u8; 0], - bytes: B, - } - - static ALIGNED: &'static Aligned<[u8]> = &Aligned { - _align: [], - bytes: *include_bytes!("regional_indicator_rev.bigendian.dfa"), - }; - - unsafe { - ::regex_automata::DenseDFA::from_bytes(&ALIGNED.bytes) - } - }; -} - -#[cfg(target_endian = "little")] -lazy_static::lazy_static! 
{ - pub static ref REGIONAL_INDICATOR_REV: ::regex_automata::DenseDFA<&'static [u8], u8> = { - #[repr(C)] - struct Aligned { - _align: [u8; 0], - bytes: B, - } - - static ALIGNED: &'static Aligned<[u8]> = &Aligned { - _align: [], - bytes: *include_bytes!("regional_indicator_rev.littleendian.dfa"), - }; - - unsafe { - ::regex_automata::DenseDFA::from_bytes(&ALIGNED.bytes) - } - }; -} Binary files /tmp/tmp96qid8wf/P2mKBDGjc4/cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/fsm/sentence_break_fwd.bigendian.dfa and /tmp/tmp96qid8wf/3VMRcIQcnm/cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/fsm/sentence_break_fwd.bigendian.dfa differ Binary files /tmp/tmp96qid8wf/P2mKBDGjc4/cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/fsm/sentence_break_fwd.littleendian.dfa and /tmp/tmp96qid8wf/3VMRcIQcnm/cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/fsm/sentence_break_fwd.littleendian.dfa differ diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/fsm/sentence_break_fwd.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/fsm/sentence_break_fwd.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/fsm/sentence_break_fwd.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/fsm/sentence_break_fwd.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,45 +0,0 @@ -// DO NOT EDIT THIS FILE. IT WAS AUTOMATICALLY GENERATED BY: -// -// ucd-generate dfa --name SENTENCE_BREAK_FWD --minimize --sparse --anchored --state-size 4 src/unicode/fsm/ [snip (arg too long)] -// -// ucd-generate 0.2.9 is available on crates.io. - -#[cfg(target_endian = "big")] -lazy_static::lazy_static! { - pub static ref SENTENCE_BREAK_FWD: ::regex_automata::SparseDFA<&'static [u8], u32> = { - #[repr(C)] - struct Aligned { - _align: [u8; 0], - bytes: B, - } - - static ALIGNED: &'static Aligned<[u8]> = &Aligned { - _align: [], - bytes: *include_bytes!("sentence_break_fwd.bigendian.dfa"), - }; - - unsafe { - ::regex_automata::SparseDFA::from_bytes(&ALIGNED.bytes) - } - }; -} - -#[cfg(target_endian = "little")] -lazy_static::lazy_static! 
{ - pub static ref SENTENCE_BREAK_FWD: ::regex_automata::SparseDFA<&'static [u8], u32> = { - #[repr(C)] - struct Aligned { - _align: [u8; 0], - bytes: B, - } - - static ALIGNED: &'static Aligned<[u8]> = &Aligned { - _align: [], - bytes: *include_bytes!("sentence_break_fwd.littleendian.dfa"), - }; - - unsafe { - ::regex_automata::SparseDFA::from_bytes(&ALIGNED.bytes) - } - }; -} Binary files /tmp/tmp96qid8wf/P2mKBDGjc4/cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/fsm/simple_word_fwd.bigendian.dfa and /tmp/tmp96qid8wf/3VMRcIQcnm/cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/fsm/simple_word_fwd.bigendian.dfa differ Binary files /tmp/tmp96qid8wf/P2mKBDGjc4/cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/fsm/simple_word_fwd.littleendian.dfa and /tmp/tmp96qid8wf/3VMRcIQcnm/cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/fsm/simple_word_fwd.littleendian.dfa differ diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/fsm/simple_word_fwd.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/fsm/simple_word_fwd.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/fsm/simple_word_fwd.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/fsm/simple_word_fwd.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,45 +0,0 @@ -// DO NOT EDIT THIS FILE. IT WAS AUTOMATICALLY GENERATED BY: -// -// ucd-generate dfa --name SIMPLE_WORD_FWD --sparse --minimize --state-size 2 src/unicode/fsm/ \w -// -// ucd-generate 0.2.9 is available on crates.io. - -#[cfg(target_endian = "big")] -lazy_static::lazy_static! { - pub static ref SIMPLE_WORD_FWD: ::regex_automata::SparseDFA<&'static [u8], u16> = { - #[repr(C)] - struct Aligned { - _align: [u8; 0], - bytes: B, - } - - static ALIGNED: &'static Aligned<[u8]> = &Aligned { - _align: [], - bytes: *include_bytes!("simple_word_fwd.bigendian.dfa"), - }; - - unsafe { - ::regex_automata::SparseDFA::from_bytes(&ALIGNED.bytes) - } - }; -} - -#[cfg(target_endian = "little")] -lazy_static::lazy_static! 
{ - pub static ref SIMPLE_WORD_FWD: ::regex_automata::SparseDFA<&'static [u8], u16> = { - #[repr(C)] - struct Aligned { - _align: [u8; 0], - bytes: B, - } - - static ALIGNED: &'static Aligned<[u8]> = &Aligned { - _align: [], - bytes: *include_bytes!("simple_word_fwd.littleendian.dfa"), - }; - - unsafe { - ::regex_automata::SparseDFA::from_bytes(&ALIGNED.bytes) - } - }; -} Binary files /tmp/tmp96qid8wf/P2mKBDGjc4/cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/fsm/whitespace_anchored_fwd.bigendian.dfa and /tmp/tmp96qid8wf/3VMRcIQcnm/cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/fsm/whitespace_anchored_fwd.bigendian.dfa differ Binary files /tmp/tmp96qid8wf/P2mKBDGjc4/cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/fsm/whitespace_anchored_fwd.littleendian.dfa and /tmp/tmp96qid8wf/3VMRcIQcnm/cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/fsm/whitespace_anchored_fwd.littleendian.dfa differ diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/fsm/whitespace_anchored_fwd.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/fsm/whitespace_anchored_fwd.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/fsm/whitespace_anchored_fwd.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/fsm/whitespace_anchored_fwd.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,45 +0,0 @@ -// DO NOT EDIT THIS FILE. IT WAS AUTOMATICALLY GENERATED BY: -// -// ucd-generate dfa --name WHITESPACE_ANCHORED_FWD --anchored --classes --premultiply --minimize --state-size 1 src/unicode/fsm/ \s+ -// -// ucd-generate 0.2.9 is available on crates.io. - -#[cfg(target_endian = "big")] -lazy_static::lazy_static! { - pub static ref WHITESPACE_ANCHORED_FWD: ::regex_automata::DenseDFA<&'static [u8], u8> = { - #[repr(C)] - struct Aligned { - _align: [u8; 0], - bytes: B, - } - - static ALIGNED: &'static Aligned<[u8]> = &Aligned { - _align: [], - bytes: *include_bytes!("whitespace_anchored_fwd.bigendian.dfa"), - }; - - unsafe { - ::regex_automata::DenseDFA::from_bytes(&ALIGNED.bytes) - } - }; -} - -#[cfg(target_endian = "little")] -lazy_static::lazy_static! 
{ - pub static ref WHITESPACE_ANCHORED_FWD: ::regex_automata::DenseDFA<&'static [u8], u8> = { - #[repr(C)] - struct Aligned { - _align: [u8; 0], - bytes: B, - } - - static ALIGNED: &'static Aligned<[u8]> = &Aligned { - _align: [], - bytes: *include_bytes!("whitespace_anchored_fwd.littleendian.dfa"), - }; - - unsafe { - ::regex_automata::DenseDFA::from_bytes(&ALIGNED.bytes) - } - }; -} Binary files /tmp/tmp96qid8wf/P2mKBDGjc4/cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/fsm/whitespace_anchored_rev.bigendian.dfa and /tmp/tmp96qid8wf/3VMRcIQcnm/cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/fsm/whitespace_anchored_rev.bigendian.dfa differ Binary files /tmp/tmp96qid8wf/P2mKBDGjc4/cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/fsm/whitespace_anchored_rev.littleendian.dfa and /tmp/tmp96qid8wf/3VMRcIQcnm/cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/fsm/whitespace_anchored_rev.littleendian.dfa differ diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/fsm/whitespace_anchored_rev.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/fsm/whitespace_anchored_rev.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/fsm/whitespace_anchored_rev.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/fsm/whitespace_anchored_rev.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,45 +0,0 @@ -// DO NOT EDIT THIS FILE. IT WAS AUTOMATICALLY GENERATED BY: -// -// ucd-generate dfa --name WHITESPACE_ANCHORED_REV --reverse --anchored --classes --premultiply --minimize --state-size 2 src/unicode/fsm/ \s+ -// -// ucd-generate 0.2.9 is available on crates.io. - -#[cfg(target_endian = "big")] -lazy_static::lazy_static! { - pub static ref WHITESPACE_ANCHORED_REV: ::regex_automata::DenseDFA<&'static [u16], u16> = { - #[repr(C)] - struct Aligned { - _align: [u16; 0], - bytes: B, - } - - static ALIGNED: &'static Aligned<[u8]> = &Aligned { - _align: [], - bytes: *include_bytes!("whitespace_anchored_rev.bigendian.dfa"), - }; - - unsafe { - ::regex_automata::DenseDFA::from_bytes(&ALIGNED.bytes) - } - }; -} - -#[cfg(target_endian = "little")] -lazy_static::lazy_static! 
{ - pub static ref WHITESPACE_ANCHORED_REV: ::regex_automata::DenseDFA<&'static [u16], u16> = { - #[repr(C)] - struct Aligned { - _align: [u16; 0], - bytes: B, - } - - static ALIGNED: &'static Aligned<[u8]> = &Aligned { - _align: [], - bytes: *include_bytes!("whitespace_anchored_rev.littleendian.dfa"), - }; - - unsafe { - ::regex_automata::DenseDFA::from_bytes(&ALIGNED.bytes) - } - }; -} Binary files /tmp/tmp96qid8wf/P2mKBDGjc4/cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/fsm/word_break_fwd.bigendian.dfa and /tmp/tmp96qid8wf/3VMRcIQcnm/cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/fsm/word_break_fwd.bigendian.dfa differ Binary files /tmp/tmp96qid8wf/P2mKBDGjc4/cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/fsm/word_break_fwd.littleendian.dfa and /tmp/tmp96qid8wf/3VMRcIQcnm/cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/fsm/word_break_fwd.littleendian.dfa differ diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/fsm/word_break_fwd.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/fsm/word_break_fwd.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/fsm/word_break_fwd.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/fsm/word_break_fwd.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,45 +0,0 @@ -// DO NOT EDIT THIS FILE. IT WAS AUTOMATICALLY GENERATED BY: -// -// ucd-generate dfa --name WORD_BREAK_FWD --sparse --minimize --anchored --state-size 4 src/unicode/fsm/ [snip (arg too long)] -// -// ucd-generate 0.2.9 is available on crates.io. - -#[cfg(target_endian = "big")] -lazy_static::lazy_static! { - pub static ref WORD_BREAK_FWD: ::regex_automata::SparseDFA<&'static [u8], u32> = { - #[repr(C)] - struct Aligned { - _align: [u8; 0], - bytes: B, - } - - static ALIGNED: &'static Aligned<[u8]> = &Aligned { - _align: [], - bytes: *include_bytes!("word_break_fwd.bigendian.dfa"), - }; - - unsafe { - ::regex_automata::SparseDFA::from_bytes(&ALIGNED.bytes) - } - }; -} - -#[cfg(target_endian = "little")] -lazy_static::lazy_static! { - pub static ref WORD_BREAK_FWD: ::regex_automata::SparseDFA<&'static [u8], u32> = { - #[repr(C)] - struct Aligned { - _align: [u8; 0], - bytes: B, - } - - static ALIGNED: &'static Aligned<[u8]> = &Aligned { - _align: [], - bytes: *include_bytes!("word_break_fwd.littleendian.dfa"), - }; - - unsafe { - ::regex_automata::SparseDFA::from_bytes(&ALIGNED.bytes) - } - }; -} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/grapheme.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/grapheme.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/grapheme.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/grapheme.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,355 +0,0 @@ -use regex_automata::DFA; - -use crate::ext_slice::ByteSlice; -use crate::unicode::fsm::grapheme_break_fwd::GRAPHEME_BREAK_FWD; -use crate::unicode::fsm::grapheme_break_rev::GRAPHEME_BREAK_REV; -use crate::unicode::fsm::regional_indicator_rev::REGIONAL_INDICATOR_REV; -use crate::utf8; - -/// An iterator over grapheme clusters in a byte string. -/// -/// This iterator is typically constructed by -/// [`ByteSlice::graphemes`](trait.ByteSlice.html#method.graphemes). -/// -/// Unicode defines a grapheme cluster as an *approximation* to a single user -/// visible character. 
A grapheme cluster, or just "grapheme," is made up of -/// one or more codepoints. For end user oriented tasks, one should generally -/// prefer using graphemes instead of [`Chars`](struct.Chars.html), which -/// always yields one codepoint at a time. -/// -/// Since graphemes are made up of one or more codepoints, this iterator yields -/// `&str` elements. When invalid UTF-8 is encountered, replacement codepoints -/// are [substituted](index.html#handling-of-invalid-utf-8). -/// -/// This iterator can be used in reverse. When reversed, exactly the same -/// set of grapheme clusters are yielded, but in reverse order. -/// -/// This iterator only yields *extended* grapheme clusters, in accordance with -/// [UAX #29](https://www.unicode.org/reports/tr29/tr29-33.html#Grapheme_Cluster_Boundaries). -#[derive(Clone, Debug)] -pub struct Graphemes<'a> { - bs: &'a [u8], -} - -impl<'a> Graphemes<'a> { - pub(crate) fn new(bs: &'a [u8]) -> Graphemes<'a> { - Graphemes { bs } - } - - /// View the underlying data as a subslice of the original data. - /// - /// The slice returned has the same lifetime as the original slice, and so - /// the iterator can continue to be used while this exists. - /// - /// # Examples - /// - /// ``` - /// use bstr::ByteSlice; - /// - /// let mut it = b"abc".graphemes(); - /// - /// assert_eq!(b"abc", it.as_bytes()); - /// it.next(); - /// assert_eq!(b"bc", it.as_bytes()); - /// it.next(); - /// it.next(); - /// assert_eq!(b"", it.as_bytes()); - /// ``` - #[inline] - pub fn as_bytes(&self) -> &'a [u8] { - self.bs - } -} - -impl<'a> Iterator for Graphemes<'a> { - type Item = &'a str; - - #[inline] - fn next(&mut self) -> Option<&'a str> { - let (grapheme, size) = decode_grapheme(self.bs); - if size == 0 { - return None; - } - self.bs = &self.bs[size..]; - Some(grapheme) - } -} - -impl<'a> DoubleEndedIterator for Graphemes<'a> { - #[inline] - fn next_back(&mut self) -> Option<&'a str> { - let (grapheme, size) = decode_last_grapheme(self.bs); - if size == 0 { - return None; - } - self.bs = &self.bs[..self.bs.len() - size]; - Some(grapheme) - } -} - -/// An iterator over grapheme clusters in a byte string and their byte index -/// positions. -/// -/// This iterator is typically constructed by -/// [`ByteSlice::grapheme_indices`](trait.ByteSlice.html#method.grapheme_indices). -/// -/// Unicode defines a grapheme cluster as an *approximation* to a single user -/// visible character. A grapheme cluster, or just "grapheme," is made up of -/// one or more codepoints. For end user oriented tasks, one should generally -/// prefer using graphemes instead of [`Chars`](struct.Chars.html), which -/// always yields one codepoint at a time. -/// -/// Since graphemes are made up of one or more codepoints, this iterator -/// yields `&str` elements (along with their start and end byte offsets). -/// When invalid UTF-8 is encountered, replacement codepoints are -/// [substituted](index.html#handling-of-invalid-utf-8). Because of this, the -/// indices yielded by this iterator may not correspond to the length of the -/// grapheme cluster yielded with those indices. For example, when this -/// iterator encounters `\xFF` in the byte string, then it will yield a pair -/// of indices ranging over a single byte, but will provide an `&str` -/// equivalent to `"\u{FFFD}"`, which is three bytes in length. However, when -/// given only valid UTF-8, then all indices are in exact correspondence with -/// their paired grapheme cluster. -/// -/// This iterator can be used in reverse. 
When reversed, exactly the same -/// set of grapheme clusters are yielded, but in reverse order. -/// -/// This iterator only yields *extended* grapheme clusters, in accordance with -/// [UAX #29](https://www.unicode.org/reports/tr29/tr29-33.html#Grapheme_Cluster_Boundaries). -#[derive(Clone, Debug)] -pub struct GraphemeIndices<'a> { - bs: &'a [u8], - forward_index: usize, - reverse_index: usize, -} - -impl<'a> GraphemeIndices<'a> { - pub(crate) fn new(bs: &'a [u8]) -> GraphemeIndices<'a> { - GraphemeIndices { bs: bs, forward_index: 0, reverse_index: bs.len() } - } - - /// View the underlying data as a subslice of the original data. - /// - /// The slice returned has the same lifetime as the original slice, and so - /// the iterator can continue to be used while this exists. - /// - /// # Examples - /// - /// ``` - /// use bstr::ByteSlice; - /// - /// let mut it = b"abc".grapheme_indices(); - /// - /// assert_eq!(b"abc", it.as_bytes()); - /// it.next(); - /// assert_eq!(b"bc", it.as_bytes()); - /// it.next(); - /// it.next(); - /// assert_eq!(b"", it.as_bytes()); - /// ``` - #[inline] - pub fn as_bytes(&self) -> &'a [u8] { - self.bs - } -} - -impl<'a> Iterator for GraphemeIndices<'a> { - type Item = (usize, usize, &'a str); - - #[inline] - fn next(&mut self) -> Option<(usize, usize, &'a str)> { - let index = self.forward_index; - let (grapheme, size) = decode_grapheme(self.bs); - if size == 0 { - return None; - } - self.bs = &self.bs[size..]; - self.forward_index += size; - Some((index, index + size, grapheme)) - } -} - -impl<'a> DoubleEndedIterator for GraphemeIndices<'a> { - #[inline] - fn next_back(&mut self) -> Option<(usize, usize, &'a str)> { - let (grapheme, size) = decode_last_grapheme(self.bs); - if size == 0 { - return None; - } - self.bs = &self.bs[..self.bs.len() - size]; - self.reverse_index -= size; - Some((self.reverse_index, self.reverse_index + size, grapheme)) - } -} - -/// Decode a grapheme from the given byte string. -/// -/// This returns the resulting grapheme (which may be a Unicode replacement -/// codepoint if invalid UTF-8 was found), along with the number of bytes -/// decoded in the byte string. The number of bytes decoded may not be the -/// same as the length of grapheme in the case where invalid UTF-8 is found. -pub fn decode_grapheme(bs: &[u8]) -> (&str, usize) { - if bs.is_empty() { - ("", 0) - } else if let Some(end) = GRAPHEME_BREAK_FWD.find(bs) { - // Safe because a match can only occur for valid UTF-8. - let grapheme = unsafe { bs[..end].to_str_unchecked() }; - (grapheme, grapheme.len()) - } else { - const INVALID: &'static str = "\u{FFFD}"; - // No match on non-empty bytes implies we found invalid UTF-8. - let (_, size) = utf8::decode_lossy(bs); - (INVALID, size) - } -} - -fn decode_last_grapheme(bs: &[u8]) -> (&str, usize) { - if bs.is_empty() { - ("", 0) - } else if let Some(mut start) = GRAPHEME_BREAK_REV.rfind(bs) { - start = adjust_rev_for_regional_indicator(bs, start); - // Safe because a match can only occur for valid UTF-8. - let grapheme = unsafe { bs[start..].to_str_unchecked() }; - (grapheme, grapheme.len()) - } else { - const INVALID: &'static str = "\u{FFFD}"; - // No match on non-empty bytes implies we found invalid UTF-8. - let (_, size) = utf8::decode_last_lossy(bs); - (INVALID, size) - } -} - -/// Return the correct offset for the next grapheme decoded at the end of the -/// given byte string, where `i` is the initial guess. In particular, -/// `&bs[i..]` represents the candidate grapheme. 
-/// -/// `i` is returned by this function in all cases except when `&bs[i..]` is -/// a pair of regional indicator codepoints. In that case, if an odd number of -/// additional regional indicator codepoints precedes `i`, then `i` is -/// adjusted such that it points to only a single regional indicator. -/// -/// This "fixing" is necessary to handle the requirement that a break cannot -/// occur between regional indicators where it would cause an odd number of -/// regional indicators to exist before the break from the *start* of the -/// string. A reverse regex cannot detect this case easily without look-around. -fn adjust_rev_for_regional_indicator(mut bs: &[u8], i: usize) -> usize { - // All regional indicators use a 4 byte encoding, and we only care about - // the case where we found a pair of regional indicators. - if bs.len() - i != 8 { - return i; - } - // Count all contiguous occurrences of regional indicators. If there's an - // even number of them, then we can accept the pair we found. Otherwise, - // we can only take one of them. - // - // FIXME: This is quadratic in the worst case, e.g., a string of just - // regional indicator codepoints. A fix probably requires refactoring this - // code a bit such that we don't rescan regional indicators. - let mut count = 0; - while let Some(start) = REGIONAL_INDICATOR_REV.rfind(bs) { - bs = &bs[..start]; - count += 1; - } - if count % 2 == 0 { - i - } else { - i + 4 - } -} - -#[cfg(test)] -mod tests { - use ucd_parse::GraphemeClusterBreakTest; - - use super::*; - use crate::ext_slice::ByteSlice; - use crate::tests::LOSSY_TESTS; - - #[test] - fn forward_ucd() { - for (i, test) in ucdtests().into_iter().enumerate() { - let given = test.grapheme_clusters.concat(); - let got: Vec = Graphemes::new(given.as_bytes()) - .map(|cluster| cluster.to_string()) - .collect(); - assert_eq!( - test.grapheme_clusters, - got, - "\ngrapheme forward break test {} failed:\n\ - given: {:?}\n\ - expected: {:?}\n\ - got: {:?}\n", - i, - uniescape(&given), - uniescape_vec(&test.grapheme_clusters), - uniescape_vec(&got), - ); - } - } - - #[test] - fn reverse_ucd() { - for (i, test) in ucdtests().into_iter().enumerate() { - let given = test.grapheme_clusters.concat(); - let mut got: Vec = Graphemes::new(given.as_bytes()) - .rev() - .map(|cluster| cluster.to_string()) - .collect(); - got.reverse(); - assert_eq!( - test.grapheme_clusters, - got, - "\n\ngrapheme reverse break test {} failed:\n\ - given: {:?}\n\ - expected: {:?}\n\ - got: {:?}\n", - i, - uniescape(&given), - uniescape_vec(&test.grapheme_clusters), - uniescape_vec(&got), - ); - } - } - - #[test] - fn forward_lossy() { - for &(expected, input) in LOSSY_TESTS { - let got = Graphemes::new(input.as_bytes()).collect::(); - assert_eq!(expected, got); - } - } - - #[test] - fn reverse_lossy() { - for &(expected, input) in LOSSY_TESTS { - let expected: String = expected.chars().rev().collect(); - let got = - Graphemes::new(input.as_bytes()).rev().collect::(); - assert_eq!(expected, got); - } - } - - fn uniescape(s: &str) -> String { - s.chars().flat_map(|c| c.escape_unicode()).collect::() - } - - fn uniescape_vec(strs: &[String]) -> Vec { - strs.iter().map(|s| uniescape(s)).collect() - } - - /// Return all of the UCD for grapheme breaks. 
- fn ucdtests() -> Vec { - const TESTDATA: &'static str = - include_str!("data/GraphemeBreakTest.txt"); - - let mut tests = vec![]; - for mut line in TESTDATA.lines() { - line = line.trim(); - if line.starts_with("#") || line.contains("surrogate") { - continue; - } - tests.push(line.parse().unwrap()); - } - tests - } -} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/mod.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/mod.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/mod.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/mod.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,12 +0,0 @@ -pub use self::grapheme::{decode_grapheme, GraphemeIndices, Graphemes}; -pub use self::sentence::{SentenceIndices, Sentences}; -pub use self::whitespace::{whitespace_len_fwd, whitespace_len_rev}; -pub use self::word::{ - WordIndices, Words, WordsWithBreakIndices, WordsWithBreaks, -}; - -mod fsm; -mod grapheme; -mod sentence; -mod whitespace; -mod word; diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/sentence.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/sentence.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/sentence.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/sentence.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,220 +0,0 @@ -use regex_automata::DFA; - -use crate::ext_slice::ByteSlice; -use crate::unicode::fsm::sentence_break_fwd::SENTENCE_BREAK_FWD; -use crate::utf8; - -/// An iterator over sentences in a byte string. -/// -/// This iterator is typically constructed by -/// [`ByteSlice::sentences`](trait.ByteSlice.html#method.sentences). -/// -/// Sentences typically include their trailing punctuation and whitespace. -/// -/// Since sentences are made up of one or more codepoints, this iterator yields -/// `&str` elements. When invalid UTF-8 is encountered, replacement codepoints -/// are [substituted](index.html#handling-of-invalid-utf-8). -/// -/// This iterator yields words in accordance with the default sentence boundary -/// rules specified in -/// [UAX #29](https://www.unicode.org/reports/tr29/tr29-33.html#Sentence_Boundaries). -#[derive(Clone, Debug)] -pub struct Sentences<'a> { - bs: &'a [u8], -} - -impl<'a> Sentences<'a> { - pub(crate) fn new(bs: &'a [u8]) -> Sentences<'a> { - Sentences { bs } - } - - /// View the underlying data as a subslice of the original data. - /// - /// The slice returned has the same lifetime as the original slice, and so - /// the iterator can continue to be used while this exists. - /// - /// # Examples - /// - /// ``` - /// use bstr::ByteSlice; - /// - /// let mut it = b"I want this. Not that. Right now.".sentences(); - /// - /// assert_eq!(&b"I want this. Not that. Right now."[..], it.as_bytes()); - /// it.next(); - /// assert_eq!(b"Not that. Right now.", it.as_bytes()); - /// it.next(); - /// it.next(); - /// assert_eq!(b"", it.as_bytes()); - /// ``` - #[inline] - pub fn as_bytes(&self) -> &'a [u8] { - self.bs - } -} - -impl<'a> Iterator for Sentences<'a> { - type Item = &'a str; - - #[inline] - fn next(&mut self) -> Option<&'a str> { - let (sentence, size) = decode_sentence(self.bs); - if size == 0 { - return None; - } - self.bs = &self.bs[size..]; - Some(sentence) - } -} - -/// An iterator over sentences in a byte string, along with their byte offsets. 
-/// -/// This iterator is typically constructed by -/// [`ByteSlice::sentence_indices`](trait.ByteSlice.html#method.sentence_indices). -/// -/// Sentences typically include their trailing punctuation and whitespace. -/// -/// Since sentences are made up of one or more codepoints, this iterator -/// yields `&str` elements (along with their start and end byte offsets). -/// When invalid UTF-8 is encountered, replacement codepoints are -/// [substituted](index.html#handling-of-invalid-utf-8). Because of this, the -/// indices yielded by this iterator may not correspond to the length of the -/// sentence yielded with those indices. For example, when this iterator -/// encounters `\xFF` in the byte string, then it will yield a pair of indices -/// ranging over a single byte, but will provide an `&str` equivalent to -/// `"\u{FFFD}"`, which is three bytes in length. However, when given only -/// valid UTF-8, then all indices are in exact correspondence with their paired -/// word. -/// -/// This iterator yields words in accordance with the default sentence boundary -/// rules specified in -/// [UAX #29](https://www.unicode.org/reports/tr29/tr29-33.html#Sentence_Boundaries). -#[derive(Clone, Debug)] -pub struct SentenceIndices<'a> { - bs: &'a [u8], - forward_index: usize, -} - -impl<'a> SentenceIndices<'a> { - pub(crate) fn new(bs: &'a [u8]) -> SentenceIndices<'a> { - SentenceIndices { bs: bs, forward_index: 0 } - } - - /// View the underlying data as a subslice of the original data. - /// - /// The slice returned has the same lifetime as the original slice, and so - /// the iterator can continue to be used while this exists. - /// - /// # Examples - /// - /// ``` - /// use bstr::ByteSlice; - /// - /// let mut it = b"I want this. Not that. Right now.".sentence_indices(); - /// - /// assert_eq!(&b"I want this. Not that. Right now."[..], it.as_bytes()); - /// it.next(); - /// assert_eq!(b"Not that. Right now.", it.as_bytes()); - /// it.next(); - /// it.next(); - /// assert_eq!(b"", it.as_bytes()); - /// ``` - #[inline] - pub fn as_bytes(&self) -> &'a [u8] { - self.bs - } -} - -impl<'a> Iterator for SentenceIndices<'a> { - type Item = (usize, usize, &'a str); - - #[inline] - fn next(&mut self) -> Option<(usize, usize, &'a str)> { - let index = self.forward_index; - let (word, size) = decode_sentence(self.bs); - if size == 0 { - return None; - } - self.bs = &self.bs[size..]; - self.forward_index += size; - Some((index, index + size, word)) - } -} - -fn decode_sentence(bs: &[u8]) -> (&str, usize) { - if bs.is_empty() { - ("", 0) - } else if let Some(end) = SENTENCE_BREAK_FWD.find(bs) { - // Safe because a match can only occur for valid UTF-8. - let sentence = unsafe { bs[..end].to_str_unchecked() }; - (sentence, sentence.len()) - } else { - const INVALID: &'static str = "\u{FFFD}"; - // No match on non-empty bytes implies we found invalid UTF-8. - let (_, size) = utf8::decode_lossy(bs); - (INVALID, size) - } -} - -#[cfg(test)] -mod tests { - use ucd_parse::SentenceBreakTest; - - use crate::ext_slice::ByteSlice; - - #[test] - fn forward_ucd() { - for (i, test) in ucdtests().into_iter().enumerate() { - let given = test.sentences.concat(); - let got = sentences(given.as_bytes()); - assert_eq!( - test.sentences, - got, - "\n\nsentence forward break test {} failed:\n\ - given: {:?}\n\ - expected: {:?}\n\ - got: {:?}\n", - i, - given, - strs_to_bstrs(&test.sentences), - strs_to_bstrs(&got), - ); - } - } - - // Some additional tests that don't seem to be covered by the UCD tests. 
- #[test] - fn forward_additional() { - assert_eq!(vec!["a.. ", "A"], sentences(b"a.. A")); - assert_eq!(vec!["a.. a"], sentences(b"a.. a")); - - assert_eq!(vec!["a... ", "A"], sentences(b"a... A")); - assert_eq!(vec!["a... a"], sentences(b"a... a")); - - assert_eq!(vec!["a...,..., a"], sentences(b"a...,..., a")); - } - - fn sentences(bytes: &[u8]) -> Vec<&str> { - bytes.sentences().collect() - } - - fn strs_to_bstrs>(strs: &[S]) -> Vec<&[u8]> { - strs.iter().map(|s| s.as_ref().as_bytes()).collect() - } - - /// Return all of the UCD for sentence breaks. - fn ucdtests() -> Vec { - const TESTDATA: &'static str = - include_str!("data/SentenceBreakTest.txt"); - - let mut tests = vec![]; - for mut line in TESTDATA.lines() { - line = line.trim(); - if line.starts_with("#") || line.contains("surrogate") { - continue; - } - tests.push(line.parse().unwrap()); - } - tests - } -} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/whitespace.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/whitespace.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/whitespace.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/whitespace.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,14 +0,0 @@ -use regex_automata::DFA; - -use crate::unicode::fsm::whitespace_anchored_fwd::WHITESPACE_ANCHORED_FWD; -use crate::unicode::fsm::whitespace_anchored_rev::WHITESPACE_ANCHORED_REV; - -/// Return the first position of a non-whitespace character. -pub fn whitespace_len_fwd(slice: &[u8]) -> usize { - WHITESPACE_ANCHORED_FWD.find(slice).unwrap_or(0) -} - -/// Return the last position of a non-whitespace character. -pub fn whitespace_len_rev(slice: &[u8]) -> usize { - WHITESPACE_ANCHORED_REV.rfind(slice).unwrap_or(slice.len()) -} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/word.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/word.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/word.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/unicode/word.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,406 +0,0 @@ -use regex_automata::DFA; - -use crate::ext_slice::ByteSlice; -use crate::unicode::fsm::simple_word_fwd::SIMPLE_WORD_FWD; -use crate::unicode::fsm::word_break_fwd::WORD_BREAK_FWD; -use crate::utf8; - -/// An iterator over words in a byte string. -/// -/// This iterator is typically constructed by -/// [`ByteSlice::words`](trait.ByteSlice.html#method.words). -/// -/// This is similar to the [`WordsWithBreaks`](struct.WordsWithBreaks.html) -/// iterator, except it only returns elements that contain a "word" character. -/// A word character is defined by UTS #18 (Annex C) to be the combination -/// of the `Alphabetic` and `Join_Control` properties, along with the -/// `Decimal_Number`, `Mark` and `Connector_Punctuation` general categories. -/// -/// Since words are made up of one or more codepoints, this iterator yields -/// `&str` elements. When invalid UTF-8 is encountered, replacement codepoints -/// are [substituted](index.html#handling-of-invalid-utf-8). -/// -/// This iterator yields words in accordance with the default word boundary -/// rules specified in -/// [UAX #29](https://www.unicode.org/reports/tr29/tr29-33.html#Word_Boundaries). -/// In particular, this may not be suitable for Japanese and Chinese scripts -/// that do not use spaces between words. 
-#[derive(Clone, Debug)] -pub struct Words<'a>(WordsWithBreaks<'a>); - -impl<'a> Words<'a> { - pub(crate) fn new(bs: &'a [u8]) -> Words<'a> { - Words(WordsWithBreaks::new(bs)) - } - - /// View the underlying data as a subslice of the original data. - /// - /// The slice returned has the same lifetime as the original slice, and so - /// the iterator can continue to be used while this exists. - /// - /// # Examples - /// - /// ``` - /// use bstr::ByteSlice; - /// - /// let mut it = b"foo bar baz".words(); - /// - /// assert_eq!(b"foo bar baz", it.as_bytes()); - /// it.next(); - /// it.next(); - /// assert_eq!(b" baz", it.as_bytes()); - /// it.next(); - /// assert_eq!(b"", it.as_bytes()); - /// ``` - #[inline] - pub fn as_bytes(&self) -> &'a [u8] { - self.0.as_bytes() - } -} - -impl<'a> Iterator for Words<'a> { - type Item = &'a str; - - #[inline] - fn next(&mut self) -> Option<&'a str> { - while let Some(word) = self.0.next() { - if SIMPLE_WORD_FWD.is_match(word.as_bytes()) { - return Some(word); - } - } - None - } -} - -/// An iterator over words in a byte string and their byte index positions. -/// -/// This iterator is typically constructed by -/// [`ByteSlice::word_indices`](trait.ByteSlice.html#method.word_indices). -/// -/// This is similar to the -/// [`WordsWithBreakIndices`](struct.WordsWithBreakIndices.html) iterator, -/// except it only returns elements that contain a "word" character. A -/// word character is defined by UTS #18 (Annex C) to be the combination -/// of the `Alphabetic` and `Join_Control` properties, along with the -/// `Decimal_Number`, `Mark` and `Connector_Punctuation` general categories. -/// -/// Since words are made up of one or more codepoints, this iterator -/// yields `&str` elements (along with their start and end byte offsets). -/// When invalid UTF-8 is encountered, replacement codepoints are -/// [substituted](index.html#handling-of-invalid-utf-8). Because of this, the -/// indices yielded by this iterator may not correspond to the length of the -/// word yielded with those indices. For example, when this iterator encounters -/// `\xFF` in the byte string, then it will yield a pair of indices ranging -/// over a single byte, but will provide an `&str` equivalent to `"\u{FFFD}"`, -/// which is three bytes in length. However, when given only valid UTF-8, then -/// all indices are in exact correspondence with their paired word. -/// -/// This iterator yields words in accordance with the default word boundary -/// rules specified in -/// [UAX #29](https://www.unicode.org/reports/tr29/tr29-33.html#Word_Boundaries). -/// In particular, this may not be suitable for Japanese and Chinese scripts -/// that do not use spaces between words. -#[derive(Clone, Debug)] -pub struct WordIndices<'a>(WordsWithBreakIndices<'a>); - -impl<'a> WordIndices<'a> { - pub(crate) fn new(bs: &'a [u8]) -> WordIndices<'a> { - WordIndices(WordsWithBreakIndices::new(bs)) - } - - /// View the underlying data as a subslice of the original data. - /// - /// The slice returned has the same lifetime as the original slice, and so - /// the iterator can continue to be used while this exists. 
- /// - /// # Examples - /// - /// ``` - /// use bstr::ByteSlice; - /// - /// let mut it = b"foo bar baz".word_indices(); - /// - /// assert_eq!(b"foo bar baz", it.as_bytes()); - /// it.next(); - /// it.next(); - /// assert_eq!(b" baz", it.as_bytes()); - /// it.next(); - /// it.next(); - /// assert_eq!(b"", it.as_bytes()); - /// ``` - #[inline] - pub fn as_bytes(&self) -> &'a [u8] { - self.0.as_bytes() - } -} - -impl<'a> Iterator for WordIndices<'a> { - type Item = (usize, usize, &'a str); - - #[inline] - fn next(&mut self) -> Option<(usize, usize, &'a str)> { - while let Some((start, end, word)) = self.0.next() { - if SIMPLE_WORD_FWD.is_match(word.as_bytes()) { - return Some((start, end, word)); - } - } - None - } -} - -/// An iterator over all word breaks in a byte string. -/// -/// This iterator is typically constructed by -/// [`ByteSlice::words_with_breaks`](trait.ByteSlice.html#method.words_with_breaks). -/// -/// This iterator yields not only all words, but the content that comes between -/// words. In particular, if all elements yielded by this iterator are -/// concatenated, then the result is the original string (subject to Unicode -/// replacement codepoint substitutions). -/// -/// Since words are made up of one or more codepoints, this iterator yields -/// `&str` elements. When invalid UTF-8 is encountered, replacement codepoints -/// are [substituted](index.html#handling-of-invalid-utf-8). -/// -/// This iterator yields words in accordance with the default word boundary -/// rules specified in -/// [UAX #29](https://www.unicode.org/reports/tr29/tr29-33.html#Word_Boundaries). -/// In particular, this may not be suitable for Japanese and Chinese scripts -/// that do not use spaces between words. -#[derive(Clone, Debug)] -pub struct WordsWithBreaks<'a> { - bs: &'a [u8], -} - -impl<'a> WordsWithBreaks<'a> { - pub(crate) fn new(bs: &'a [u8]) -> WordsWithBreaks<'a> { - WordsWithBreaks { bs } - } - - /// View the underlying data as a subslice of the original data. - /// - /// The slice returned has the same lifetime as the original slice, and so - /// the iterator can continue to be used while this exists. - /// - /// # Examples - /// - /// ``` - /// use bstr::ByteSlice; - /// - /// let mut it = b"foo bar baz".words_with_breaks(); - /// - /// assert_eq!(b"foo bar baz", it.as_bytes()); - /// it.next(); - /// assert_eq!(b" bar baz", it.as_bytes()); - /// it.next(); - /// it.next(); - /// assert_eq!(b" baz", it.as_bytes()); - /// it.next(); - /// it.next(); - /// assert_eq!(b"", it.as_bytes()); - /// ``` - #[inline] - pub fn as_bytes(&self) -> &'a [u8] { - self.bs - } -} - -impl<'a> Iterator for WordsWithBreaks<'a> { - type Item = &'a str; - - #[inline] - fn next(&mut self) -> Option<&'a str> { - let (word, size) = decode_word(self.bs); - if size == 0 { - return None; - } - self.bs = &self.bs[size..]; - Some(word) - } -} - -/// An iterator over all word breaks in a byte string, along with their byte -/// index positions. -/// -/// This iterator is typically constructed by -/// [`ByteSlice::words_with_break_indices`](trait.ByteSlice.html#method.words_with_break_indices). -/// -/// This iterator yields not only all words, but the content that comes between -/// words. In particular, if all elements yielded by this iterator are -/// concatenated, then the result is the original string (subject to Unicode -/// replacement codepoint substitutions). 
-/// -/// Since words are made up of one or more codepoints, this iterator -/// yields `&str` elements (along with their start and end byte offsets). -/// When invalid UTF-8 is encountered, replacement codepoints are -/// [substituted](index.html#handling-of-invalid-utf-8). Because of this, the -/// indices yielded by this iterator may not correspond to the length of the -/// word yielded with those indices. For example, when this iterator encounters -/// `\xFF` in the byte string, then it will yield a pair of indices ranging -/// over a single byte, but will provide an `&str` equivalent to `"\u{FFFD}"`, -/// which is three bytes in length. However, when given only valid UTF-8, then -/// all indices are in exact correspondence with their paired word. -/// -/// This iterator yields words in accordance with the default word boundary -/// rules specified in -/// [UAX #29](https://www.unicode.org/reports/tr29/tr29-33.html#Word_Boundaries). -/// In particular, this may not be suitable for Japanese and Chinese scripts -/// that do not use spaces between words. -#[derive(Clone, Debug)] -pub struct WordsWithBreakIndices<'a> { - bs: &'a [u8], - forward_index: usize, -} - -impl<'a> WordsWithBreakIndices<'a> { - pub(crate) fn new(bs: &'a [u8]) -> WordsWithBreakIndices<'a> { - WordsWithBreakIndices { bs: bs, forward_index: 0 } - } - - /// View the underlying data as a subslice of the original data. - /// - /// The slice returned has the same lifetime as the original slice, and so - /// the iterator can continue to be used while this exists. - /// - /// # Examples - /// - /// ``` - /// use bstr::ByteSlice; - /// - /// let mut it = b"foo bar baz".words_with_break_indices(); - /// - /// assert_eq!(b"foo bar baz", it.as_bytes()); - /// it.next(); - /// assert_eq!(b" bar baz", it.as_bytes()); - /// it.next(); - /// it.next(); - /// assert_eq!(b" baz", it.as_bytes()); - /// it.next(); - /// it.next(); - /// assert_eq!(b"", it.as_bytes()); - /// ``` - #[inline] - pub fn as_bytes(&self) -> &'a [u8] { - self.bs - } -} - -impl<'a> Iterator for WordsWithBreakIndices<'a> { - type Item = (usize, usize, &'a str); - - #[inline] - fn next(&mut self) -> Option<(usize, usize, &'a str)> { - let index = self.forward_index; - let (word, size) = decode_word(self.bs); - if size == 0 { - return None; - } - self.bs = &self.bs[size..]; - self.forward_index += size; - Some((index, index + size, word)) - } -} - -fn decode_word(bs: &[u8]) -> (&str, usize) { - if bs.is_empty() { - ("", 0) - } else if let Some(end) = WORD_BREAK_FWD.find(bs) { - // Safe because a match can only occur for valid UTF-8. - let word = unsafe { bs[..end].to_str_unchecked() }; - (word, word.len()) - } else { - const INVALID: &'static str = "\u{FFFD}"; - // No match on non-empty bytes implies we found invalid UTF-8. - let (_, size) = utf8::decode_lossy(bs); - (INVALID, size) - } -} - -#[cfg(test)] -mod tests { - use ucd_parse::WordBreakTest; - - use crate::ext_slice::ByteSlice; - - #[test] - fn forward_ucd() { - for (i, test) in ucdtests().into_iter().enumerate() { - let given = test.words.concat(); - let got = words(given.as_bytes()); - assert_eq!( - test.words, - got, - "\n\nword forward break test {} failed:\n\ - given: {:?}\n\ - expected: {:?}\n\ - got: {:?}\n", - i, - given, - strs_to_bstrs(&test.words), - strs_to_bstrs(&got), - ); - } - } - - // Some additional tests that don't seem to be covered by the UCD tests. - // - // It's pretty amazing that the UCD tests miss these cases. 
I only found - // them by running this crate's segmenter and ICU's segmenter on the same - // text and comparing the output. - #[test] - fn forward_additional() { - assert_eq!(vec!["a", ".", " ", "Y"], words(b"a. Y")); - assert_eq!(vec!["r", ".", " ", "Yo"], words(b"r. Yo")); - assert_eq!( - vec!["whatsoever", ".", " ", "You", " ", "may"], - words(b"whatsoever. You may") - ); - assert_eq!( - vec!["21stcentury'syesterday"], - words(b"21stcentury'syesterday") - ); - - assert_eq!(vec!["Bonta_", "'", "s"], words(b"Bonta_'s")); - assert_eq!(vec!["_vhat's"], words(b"_vhat's")); - assert_eq!(vec!["__on'anima"], words(b"__on'anima")); - assert_eq!(vec!["123_", "'", "4"], words(b"123_'4")); - assert_eq!(vec!["_123'4"], words(b"_123'4")); - assert_eq!(vec!["__12'345"], words(b"__12'345")); - - assert_eq!( - vec!["tomorrowat4", ":", "00", ","], - words(b"tomorrowat4:00,") - ); - assert_eq!(vec!["RS1", "'", "s"], words(b"RS1's")); - assert_eq!(vec!["X38"], words(b"X38")); - - assert_eq!(vec!["4abc", ":", "00", ","], words(b"4abc:00,")); - assert_eq!(vec!["12S", "'", "1"], words(b"12S'1")); - assert_eq!(vec!["1XY"], words(b"1XY")); - - assert_eq!(vec!["\u{FEFF}", "Ты"], words("\u{FEFF}Ты".as_bytes())); - } - - fn words(bytes: &[u8]) -> Vec<&str> { - bytes.words_with_breaks().collect() - } - - fn strs_to_bstrs>(strs: &[S]) -> Vec<&[u8]> { - strs.iter().map(|s| s.as_ref().as_bytes()).collect() - } - - /// Return all of the UCD for word breaks. - fn ucdtests() -> Vec { - const TESTDATA: &'static str = include_str!("data/WordBreakTest.txt"); - - let mut tests = vec![]; - for mut line in TESTDATA.lines() { - line = line.trim(); - if line.starts_with("#") || line.contains("surrogate") { - continue; - } - tests.push(line.parse().unwrap()); - } - tests - } -} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/utf8.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/utf8.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/utf8.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bstr-0.2.17/src/utf8.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,1370 +0,0 @@ -use core::char; -use core::cmp; -use core::fmt; -use core::str; -#[cfg(feature = "std")] -use std::error; - -use crate::ascii; -use crate::bstr::BStr; -use crate::ext_slice::ByteSlice; - -// The UTF-8 decoder provided here is based on the one presented here: -// https://bjoern.hoehrmann.de/utf-8/decoder/dfa/ -// -// We *could* have done UTF-8 decoding by using a DFA generated by `\p{any}` -// using regex-automata that is roughly the same size. The real benefit of -// Hoehrmann's formulation is that the byte class mapping below is manually -// tailored such that each byte's class doubles as a shift to mask out the -// bits necessary for constructing the leading bits of each codepoint value -// from the initial byte. -// -// There are some minor differences between this implementation and Hoehrmann's -// formulation. -// -// Firstly, we make REJECT have state ID 0, since it makes the state table -// itself a little easier to read and is consistent with the notion that 0 -// means "false" or "bad." -// -// Secondly, when doing bulk decoding, we add a SIMD accelerated ASCII fast -// path. -// -// Thirdly, we pre-multiply the state IDs to avoid a multiplication instruction -// in the core decoding loop. (Which is what regex-automata would do by -// default.) -// -// Fourthly, we split the byte class mapping and transition table into two -// arrays because it's clearer. 
-// -// It is unlikely that this is the fastest way to do UTF-8 decoding, however, -// it is fairly simple. - -const ACCEPT: usize = 12; -const REJECT: usize = 0; - -/// SAFETY: The decode below function relies on the correctness of these -/// equivalence classes. -#[cfg_attr(rustfmt, rustfmt::skip)] -const CLASSES: [u8; 256] = [ - 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, - 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, - 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, - 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, - 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, 9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9, - 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7, 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7, - 8,8,2,2,2,2,2,2,2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2, - 10,3,3,3,3,3,3,3,3,3,3,3,3,4,3,3, 11,6,6,6,5,8,8,8,8,8,8,8,8,8,8,8, -]; - -/// SAFETY: The decode below function relies on the correctness of this state -/// machine. -#[cfg_attr(rustfmt, rustfmt::skip)] -const STATES_FORWARD: &'static [u8] = &[ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 12, 0, 24, 36, 60, 96, 84, 0, 0, 0, 48, 72, - 0, 12, 0, 0, 0, 0, 0, 12, 0, 12, 0, 0, - 0, 24, 0, 0, 0, 0, 0, 24, 0, 24, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 24, 0, 0, 0, 0, - 0, 24, 0, 0, 0, 0, 0, 0, 0, 24, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 36, 0, 36, 0, 0, - 0, 36, 0, 0, 0, 0, 0, 36, 0, 36, 0, 0, - 0, 36, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -]; - -/// An iterator over Unicode scalar values in a byte string. -/// -/// When invalid UTF-8 byte sequences are found, they are substituted with the -/// Unicode replacement codepoint (`U+FFFD`) using the -/// ["maximal subpart" strategy](http://www.unicode.org/review/pr-121.html). -/// -/// This iterator is created by the -/// [`chars`](trait.ByteSlice.html#method.chars) method provided by the -/// [`ByteSlice`](trait.ByteSlice.html) extension trait for `&[u8]`. -#[derive(Clone, Debug)] -pub struct Chars<'a> { - bs: &'a [u8], -} - -impl<'a> Chars<'a> { - pub(crate) fn new(bs: &'a [u8]) -> Chars<'a> { - Chars { bs } - } - - /// View the underlying data as a subslice of the original data. - /// - /// The slice returned has the same lifetime as the original slice, and so - /// the iterator can continue to be used while this exists. - /// - /// # Examples - /// - /// ``` - /// use bstr::ByteSlice; - /// - /// let mut chars = b"abc".chars(); - /// - /// assert_eq!(b"abc", chars.as_bytes()); - /// chars.next(); - /// assert_eq!(b"bc", chars.as_bytes()); - /// chars.next(); - /// chars.next(); - /// assert_eq!(b"", chars.as_bytes()); - /// ``` - #[inline] - pub fn as_bytes(&self) -> &'a [u8] { - self.bs - } -} - -impl<'a> Iterator for Chars<'a> { - type Item = char; - - #[inline] - fn next(&mut self) -> Option { - let (ch, size) = decode_lossy(self.bs); - if size == 0 { - return None; - } - self.bs = &self.bs[size..]; - Some(ch) - } -} - -impl<'a> DoubleEndedIterator for Chars<'a> { - #[inline] - fn next_back(&mut self) -> Option { - let (ch, size) = decode_last_lossy(self.bs); - if size == 0 { - return None; - } - self.bs = &self.bs[..self.bs.len() - size]; - Some(ch) - } -} - -/// An iterator over Unicode scalar values in a byte string and their -/// byte index positions. -/// -/// When invalid UTF-8 byte sequences are found, they are substituted with the -/// Unicode replacement codepoint (`U+FFFD`) using the -/// ["maximal subpart" strategy](http://www.unicode.org/review/pr-121.html). 
-/// -/// Note that this is slightly different from the `CharIndices` iterator -/// provided by the standard library. Aside from working on possibly invalid -/// UTF-8, this iterator provides both the corresponding starting and ending -/// byte indices of each codepoint yielded. The ending position is necessary to -/// slice the original byte string when invalid UTF-8 bytes are converted into -/// a Unicode replacement codepoint, since a single replacement codepoint can -/// substitute anywhere from 1 to 3 invalid bytes (inclusive). -/// -/// This iterator is created by the -/// [`char_indices`](trait.ByteSlice.html#method.char_indices) method provided -/// by the [`ByteSlice`](trait.ByteSlice.html) extension trait for `&[u8]`. -#[derive(Clone, Debug)] -pub struct CharIndices<'a> { - bs: &'a [u8], - forward_index: usize, - reverse_index: usize, -} - -impl<'a> CharIndices<'a> { - pub(crate) fn new(bs: &'a [u8]) -> CharIndices<'a> { - CharIndices { bs: bs, forward_index: 0, reverse_index: bs.len() } - } - - /// View the underlying data as a subslice of the original data. - /// - /// The slice returned has the same lifetime as the original slice, and so - /// the iterator can continue to be used while this exists. - /// - /// # Examples - /// - /// ``` - /// use bstr::ByteSlice; - /// - /// let mut it = b"abc".char_indices(); - /// - /// assert_eq!(b"abc", it.as_bytes()); - /// it.next(); - /// assert_eq!(b"bc", it.as_bytes()); - /// it.next(); - /// it.next(); - /// assert_eq!(b"", it.as_bytes()); - /// ``` - #[inline] - pub fn as_bytes(&self) -> &'a [u8] { - self.bs - } -} - -impl<'a> Iterator for CharIndices<'a> { - type Item = (usize, usize, char); - - #[inline] - fn next(&mut self) -> Option<(usize, usize, char)> { - let index = self.forward_index; - let (ch, size) = decode_lossy(self.bs); - if size == 0 { - return None; - } - self.bs = &self.bs[size..]; - self.forward_index += size; - Some((index, index + size, ch)) - } -} - -impl<'a> DoubleEndedIterator for CharIndices<'a> { - #[inline] - fn next_back(&mut self) -> Option<(usize, usize, char)> { - let (ch, size) = decode_last_lossy(self.bs); - if size == 0 { - return None; - } - self.bs = &self.bs[..self.bs.len() - size]; - self.reverse_index -= size; - Some((self.reverse_index, self.reverse_index + size, ch)) - } -} - -impl<'a> ::core::iter::FusedIterator for CharIndices<'a> {} - -/// An iterator over chunks of valid UTF-8 in a byte slice. -/// -/// See [`utf8_chunks`](trait.ByteSlice.html#method.utf8_chunks). -#[derive(Clone, Debug)] -pub struct Utf8Chunks<'a> { - pub(super) bytes: &'a [u8], -} - -/// A chunk of valid UTF-8, possibly followed by invalid UTF-8 bytes. -/// -/// This is yielded by the -/// [`Utf8Chunks`](struct.Utf8Chunks.html) -/// iterator, which can be created via the -/// [`ByteSlice::utf8_chunks`](trait.ByteSlice.html#method.utf8_chunks) -/// method. -/// -/// The `'a` lifetime parameter corresponds to the lifetime of the bytes that -/// are being iterated over. -#[cfg_attr(test, derive(Debug, PartialEq))] -pub struct Utf8Chunk<'a> { - /// A valid UTF-8 piece, at the start, end, or between invalid UTF-8 bytes. - /// - /// This is empty between adjacent invalid UTF-8 byte sequences. - valid: &'a str, - /// A sequence of invalid UTF-8 bytes. - /// - /// Can only be empty in the last chunk. - /// - /// Should be replaced by a single unicode replacement character, if not - /// empty. - invalid: &'a BStr, - /// Indicates whether the invalid sequence could've been valid if there - /// were more bytes. 
- /// - /// Can only be true in the last chunk. - incomplete: bool, -} - -impl<'a> Utf8Chunk<'a> { - /// Returns the (possibly empty) valid UTF-8 bytes in this chunk. - /// - /// This may be empty if there are consecutive sequences of invalid UTF-8 - /// bytes. - #[inline] - pub fn valid(&self) -> &'a str { - self.valid - } - - /// Returns the (possibly empty) invalid UTF-8 bytes in this chunk that - /// immediately follow the valid UTF-8 bytes in this chunk. - /// - /// This is only empty when this chunk corresponds to the last chunk in - /// the original bytes. - /// - /// The maximum length of this slice is 3. That is, invalid UTF-8 byte - /// sequences greater than 1 always correspond to a valid _prefix_ of - /// a valid UTF-8 encoded codepoint. This corresponds to the "substitution - /// of maximal subparts" strategy that is described in more detail in the - /// docs for the - /// [`ByteSlice::to_str_lossy`](trait.ByteSlice.html#method.to_str_lossy) - /// method. - #[inline] - pub fn invalid(&self) -> &'a [u8] { - self.invalid.as_bytes() - } - - /// Returns whether the invalid sequence might still become valid if more - /// bytes are added. - /// - /// Returns true if the end of the input was reached unexpectedly, - /// without encountering an unexpected byte. - /// - /// This can only be the case for the last chunk. - #[inline] - pub fn incomplete(&self) -> bool { - self.incomplete - } -} - -impl<'a> Iterator for Utf8Chunks<'a> { - type Item = Utf8Chunk<'a>; - - #[inline] - fn next(&mut self) -> Option> { - if self.bytes.is_empty() { - return None; - } - match validate(self.bytes) { - Ok(()) => { - let valid = self.bytes; - self.bytes = &[]; - Some(Utf8Chunk { - // SAFETY: This is safe because of the guarantees provided - // by utf8::validate. - valid: unsafe { str::from_utf8_unchecked(valid) }, - invalid: [].as_bstr(), - incomplete: false, - }) - } - Err(e) => { - let (valid, rest) = self.bytes.split_at(e.valid_up_to()); - // SAFETY: This is safe because of the guarantees provided by - // utf8::validate. - let valid = unsafe { str::from_utf8_unchecked(valid) }; - let (invalid_len, incomplete) = match e.error_len() { - Some(n) => (n, false), - None => (rest.len(), true), - }; - let (invalid, rest) = rest.split_at(invalid_len); - self.bytes = rest; - Some(Utf8Chunk { - valid, - invalid: invalid.as_bstr(), - incomplete, - }) - } - } - } - - #[inline] - fn size_hint(&self) -> (usize, Option) { - if self.bytes.is_empty() { - (0, Some(0)) - } else { - (1, Some(self.bytes.len())) - } - } -} - -impl<'a> ::core::iter::FusedIterator for Utf8Chunks<'a> {} - -/// An error that occurs when UTF-8 decoding fails. -/// -/// This error occurs when attempting to convert a non-UTF-8 byte -/// string to a Rust string that must be valid UTF-8. For example, -/// [`to_str`](trait.ByteSlice.html#method.to_str) is one such method. -/// -/// # Example -/// -/// This example shows what happens when a given byte sequence is invalid, -/// but ends with a sequence that is a possible prefix of valid UTF-8. -/// -/// ``` -/// use bstr::{B, ByteSlice}; -/// -/// let s = B(b"foobar\xF1\x80\x80"); -/// let err = s.to_str().unwrap_err(); -/// assert_eq!(err.valid_up_to(), 6); -/// assert_eq!(err.error_len(), None); -/// ``` -/// -/// This example shows what happens when a given byte sequence contains -/// invalid UTF-8. 
-/// -/// ``` -/// use bstr::ByteSlice; -/// -/// let s = b"foobar\xF1\x80\x80quux"; -/// let err = s.to_str().unwrap_err(); -/// assert_eq!(err.valid_up_to(), 6); -/// // The error length reports the maximum number of bytes that correspond to -/// // a valid prefix of a UTF-8 encoded codepoint. -/// assert_eq!(err.error_len(), Some(3)); -/// -/// // In contrast to the above which contains a single invalid prefix, -/// // consider the case of multiple individal bytes that are never valid -/// // prefixes. Note how the value of error_len changes! -/// let s = b"foobar\xFF\xFFquux"; -/// let err = s.to_str().unwrap_err(); -/// assert_eq!(err.valid_up_to(), 6); -/// assert_eq!(err.error_len(), Some(1)); -/// -/// // The fact that it's an invalid prefix does not change error_len even -/// // when it immediately precedes the end of the string. -/// let s = b"foobar\xFF"; -/// let err = s.to_str().unwrap_err(); -/// assert_eq!(err.valid_up_to(), 6); -/// assert_eq!(err.error_len(), Some(1)); -/// ``` -#[derive(Debug, Eq, PartialEq)] -pub struct Utf8Error { - valid_up_to: usize, - error_len: Option, -} - -impl Utf8Error { - /// Returns the byte index of the position immediately following the last - /// valid UTF-8 byte. - /// - /// # Example - /// - /// This examples shows how `valid_up_to` can be used to retrieve a - /// possibly empty prefix that is guaranteed to be valid UTF-8: - /// - /// ``` - /// use bstr::ByteSlice; - /// - /// let s = b"foobar\xF1\x80\x80quux"; - /// let err = s.to_str().unwrap_err(); - /// - /// // This is guaranteed to never panic. - /// let string = s[..err.valid_up_to()].to_str().unwrap(); - /// assert_eq!(string, "foobar"); - /// ``` - #[inline] - pub fn valid_up_to(&self) -> usize { - self.valid_up_to - } - - /// Returns the total number of invalid UTF-8 bytes immediately following - /// the position returned by `valid_up_to`. This value is always at least - /// `1`, but can be up to `3` if bytes form a valid prefix of some UTF-8 - /// encoded codepoint. - /// - /// If the end of the original input was found before a valid UTF-8 encoded - /// codepoint could be completed, then this returns `None`. This is useful - /// when processing streams, where a `None` value signals that more input - /// might be needed. - #[inline] - pub fn error_len(&self) -> Option { - self.error_len - } -} - -#[cfg(feature = "std")] -impl error::Error for Utf8Error { - fn description(&self) -> &str { - "invalid UTF-8" - } -} - -impl fmt::Display for Utf8Error { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "invalid UTF-8 found at byte offset {}", self.valid_up_to) - } -} - -/// Returns OK if and only if the given slice is completely valid UTF-8. -/// -/// If the slice isn't valid UTF-8, then an error is returned that explains -/// the first location at which invalid UTF-8 was detected. -pub fn validate(slice: &[u8]) -> Result<(), Utf8Error> { - // The fast path for validating UTF-8. It steps through a UTF-8 automaton - // and uses a SIMD accelerated ASCII fast path on x86_64. If an error is - // detected, it backs up and runs the slower version of the UTF-8 automaton - // to determine correct error information. - fn fast(slice: &[u8]) -> Result<(), Utf8Error> { - let mut state = ACCEPT; - let mut i = 0; - - while i < slice.len() { - let b = slice[i]; - - // ASCII fast path. If we see two consecutive ASCII bytes, then try - // to validate as much ASCII as possible very quickly. 
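(Editorial aside, not part of the patch: the `valid_up_to`/`error_len` pair documented above is enough to drive incremental validation of streamed input, since `error_len() == None` means the tail might still become valid once more bytes arrive. A sketch against the vendored API; `take_valid` is a hypothetical helper name.)

```
use bstr::ByteSlice;

// Split a buffer into its longest valid UTF-8 prefix plus any incomplete
// trailing sequence that should be held back until more input arrives.
fn take_valid(buf: &[u8]) -> Result<(&str, &[u8]), &'static str> {
    match buf.to_str() {
        Ok(s) => Ok((s, &[])),
        Err(e) => match e.error_len() {
            // Incomplete sequence at the end: keep it for the next read.
            None => {
                let valid = &buf[..e.valid_up_to()];
                Ok((valid.to_str().unwrap(), &buf[e.valid_up_to()..]))
            }
            // Bytes that can never begin a valid UTF-8 sequence.
            Some(_) => Err("invalid UTF-8"),
        },
    }
}
```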
- if state == ACCEPT - && b <= 0x7F - && slice.get(i + 1).map_or(false, |&b| b <= 0x7F) - { - i += ascii::first_non_ascii_byte(&slice[i..]); - continue; - } - - state = step(state, b); - if state == REJECT { - return Err(find_valid_up_to(slice, i)); - } - i += 1; - } - if state != ACCEPT { - Err(find_valid_up_to(slice, slice.len())) - } else { - Ok(()) - } - } - - // Given the first position at which a UTF-8 sequence was determined to be - // invalid, return an error that correctly reports the position at which - // the last complete UTF-8 sequence ends. - #[inline(never)] - fn find_valid_up_to(slice: &[u8], rejected_at: usize) -> Utf8Error { - // In order to find the last valid byte, we need to back up an amount - // that guarantees every preceding byte is part of a valid UTF-8 - // code unit sequence. To do this, we simply locate the last leading - // byte that occurs before rejected_at. - let mut backup = rejected_at.saturating_sub(1); - while backup > 0 && !is_leading_or_invalid_utf8_byte(slice[backup]) { - backup -= 1; - } - let upto = cmp::min(slice.len(), rejected_at.saturating_add(1)); - let mut err = slow(&slice[backup..upto]).unwrap_err(); - err.valid_up_to += backup; - err - } - - // Like top-level UTF-8 decoding, except it correctly reports a UTF-8 error - // when an invalid sequence is found. This is split out from validate so - // that the fast path doesn't need to keep track of the position of the - // last valid UTF-8 byte. In particular, tracking this requires checking - // for an ACCEPT state on each byte, which degrades throughput pretty - // badly. - fn slow(slice: &[u8]) -> Result<(), Utf8Error> { - let mut state = ACCEPT; - let mut valid_up_to = 0; - for (i, &b) in slice.iter().enumerate() { - state = step(state, b); - if state == ACCEPT { - valid_up_to = i + 1; - } else if state == REJECT { - // Our error length must always be at least 1. - let error_len = Some(cmp::max(1, i - valid_up_to)); - return Err(Utf8Error { valid_up_to, error_len }); - } - } - if state != ACCEPT { - Err(Utf8Error { valid_up_to, error_len: None }) - } else { - Ok(()) - } - } - - // Advance to the next state given the current state and current byte. - fn step(state: usize, b: u8) -> usize { - let class = CLASSES[b as usize]; - // SAFETY: This is safe because 'class' is always <=11 and 'state' is - // always <=96. Therefore, the maximal index is 96+11 = 107, where - // STATES_FORWARD.len() = 108 such that every index is guaranteed to be - // valid by construction of the state machine and the byte equivalence - // classes. - unsafe { - *STATES_FORWARD.get_unchecked(state + class as usize) as usize - } - } - - fast(slice) -} - -/// UTF-8 decode a single Unicode scalar value from the beginning of a slice. -/// -/// When successful, the corresponding Unicode scalar value is returned along -/// with the number of bytes it was encoded with. The number of bytes consumed -/// for a successful decode is always between 1 and 4, inclusive. -/// -/// When unsuccessful, `None` is returned along with the number of bytes that -/// make up a maximal prefix of a valid UTF-8 code unit sequence. In this case, -/// the number of bytes consumed is always between 0 and 3, inclusive, where -/// 0 is only returned when `slice` is empty. -/// -/// # Examples -/// -/// Basic usage: -/// -/// ``` -/// use bstr::decode_utf8; -/// -/// // Decoding a valid codepoint. -/// let (ch, size) = decode_utf8(b"\xE2\x98\x83"); -/// assert_eq!(Some('☃'), ch); -/// assert_eq!(3, size); -/// -/// // Decoding an incomplete codepoint. 
-/// let (ch, size) = decode_utf8(b"\xE2\x98"); -/// assert_eq!(None, ch); -/// assert_eq!(2, size); -/// ``` -/// -/// This example shows how to iterate over all codepoints in UTF-8 encoded -/// bytes, while replacing invalid UTF-8 sequences with the replacement -/// codepoint: -/// -/// ``` -/// use bstr::{B, decode_utf8}; -/// -/// let mut bytes = B(b"\xE2\x98\x83\xFF\xF0\x9D\x9E\x83\xE2\x98\x61"); -/// let mut chars = vec![]; -/// while !bytes.is_empty() { -/// let (ch, size) = decode_utf8(bytes); -/// bytes = &bytes[size..]; -/// chars.push(ch.unwrap_or('\u{FFFD}')); -/// } -/// assert_eq!(vec!['☃', '\u{FFFD}', '𝞃', '\u{FFFD}', 'a'], chars); -/// ``` -#[inline] -pub fn decode>(slice: B) -> (Option, usize) { - let slice = slice.as_ref(); - match slice.get(0) { - None => return (None, 0), - Some(&b) if b <= 0x7F => return (Some(b as char), 1), - _ => {} - } - - let (mut state, mut cp, mut i) = (ACCEPT, 0, 0); - while i < slice.len() { - decode_step(&mut state, &mut cp, slice[i]); - i += 1; - - if state == ACCEPT { - // SAFETY: This is safe because `decode_step` guarantees that - // `cp` is a valid Unicode scalar value in an ACCEPT state. - let ch = unsafe { char::from_u32_unchecked(cp) }; - return (Some(ch), i); - } else if state == REJECT { - // At this point, we always want to advance at least one byte. - return (None, cmp::max(1, i.saturating_sub(1))); - } - } - (None, i) -} - -/// Lossily UTF-8 decode a single Unicode scalar value from the beginning of a -/// slice. -/// -/// When successful, the corresponding Unicode scalar value is returned along -/// with the number of bytes it was encoded with. The number of bytes consumed -/// for a successful decode is always between 1 and 4, inclusive. -/// -/// When unsuccessful, the Unicode replacement codepoint (`U+FFFD`) is returned -/// along with the number of bytes that make up a maximal prefix of a valid -/// UTF-8 code unit sequence. In this case, the number of bytes consumed is -/// always between 0 and 3, inclusive, where 0 is only returned when `slice` is -/// empty. -/// -/// # Examples -/// -/// Basic usage: -/// -/// ```ignore -/// use bstr::decode_utf8_lossy; -/// -/// // Decoding a valid codepoint. -/// let (ch, size) = decode_utf8_lossy(b"\xE2\x98\x83"); -/// assert_eq!('☃', ch); -/// assert_eq!(3, size); -/// -/// // Decoding an incomplete codepoint. -/// let (ch, size) = decode_utf8_lossy(b"\xE2\x98"); -/// assert_eq!('\u{FFFD}', ch); -/// assert_eq!(2, size); -/// ``` -/// -/// This example shows how to iterate over all codepoints in UTF-8 encoded -/// bytes, while replacing invalid UTF-8 sequences with the replacement -/// codepoint: -/// -/// ```ignore -/// use bstr::{B, decode_utf8_lossy}; -/// -/// let mut bytes = B(b"\xE2\x98\x83\xFF\xF0\x9D\x9E\x83\xE2\x98\x61"); -/// let mut chars = vec![]; -/// while !bytes.is_empty() { -/// let (ch, size) = decode_utf8_lossy(bytes); -/// bytes = &bytes[size..]; -/// chars.push(ch); -/// } -/// assert_eq!(vec!['☃', '\u{FFFD}', '𝞃', '\u{FFFD}', 'a'], chars); -/// ``` -#[inline] -pub fn decode_lossy>(slice: B) -> (char, usize) { - match decode(slice) { - (Some(ch), size) => (ch, size), - (None, size) => ('\u{FFFD}', size), - } -} - -/// UTF-8 decode a single Unicode scalar value from the end of a slice. -/// -/// When successful, the corresponding Unicode scalar value is returned along -/// with the number of bytes it was encoded with. The number of bytes consumed -/// for a successful decode is always between 1 and 4, inclusive. 
-/// -/// When unsuccessful, `None` is returned along with the number of bytes that -/// make up a maximal prefix of a valid UTF-8 code unit sequence. In this case, -/// the number of bytes consumed is always between 0 and 3, inclusive, where -/// 0 is only returned when `slice` is empty. -/// -/// # Examples -/// -/// Basic usage: -/// -/// ``` -/// use bstr::decode_last_utf8; -/// -/// // Decoding a valid codepoint. -/// let (ch, size) = decode_last_utf8(b"\xE2\x98\x83"); -/// assert_eq!(Some('☃'), ch); -/// assert_eq!(3, size); -/// -/// // Decoding an incomplete codepoint. -/// let (ch, size) = decode_last_utf8(b"\xE2\x98"); -/// assert_eq!(None, ch); -/// assert_eq!(2, size); -/// ``` -/// -/// This example shows how to iterate over all codepoints in UTF-8 encoded -/// bytes in reverse, while replacing invalid UTF-8 sequences with the -/// replacement codepoint: -/// -/// ``` -/// use bstr::{B, decode_last_utf8}; -/// -/// let mut bytes = B(b"\xE2\x98\x83\xFF\xF0\x9D\x9E\x83\xE2\x98\x61"); -/// let mut chars = vec![]; -/// while !bytes.is_empty() { -/// let (ch, size) = decode_last_utf8(bytes); -/// bytes = &bytes[..bytes.len()-size]; -/// chars.push(ch.unwrap_or('\u{FFFD}')); -/// } -/// assert_eq!(vec!['a', '\u{FFFD}', '𝞃', '\u{FFFD}', '☃'], chars); -/// ``` -#[inline] -pub fn decode_last>(slice: B) -> (Option, usize) { - // TODO: We could implement this by reversing the UTF-8 automaton, but for - // now, we do it the slow way by using the forward automaton. - - let slice = slice.as_ref(); - if slice.is_empty() { - return (None, 0); - } - let mut start = slice.len() - 1; - let limit = slice.len().saturating_sub(4); - while start > limit && !is_leading_or_invalid_utf8_byte(slice[start]) { - start -= 1; - } - let (ch, size) = decode(&slice[start..]); - // If we didn't consume all of the bytes, then that means there's at least - // one stray byte that never occurs in a valid code unit prefix, so we can - // advance by one byte. - if start + size != slice.len() { - (None, 1) - } else { - (ch, size) - } -} - -/// Lossily UTF-8 decode a single Unicode scalar value from the end of a slice. -/// -/// When successful, the corresponding Unicode scalar value is returned along -/// with the number of bytes it was encoded with. The number of bytes consumed -/// for a successful decode is always between 1 and 4, inclusive. -/// -/// When unsuccessful, the Unicode replacement codepoint (`U+FFFD`) is returned -/// along with the number of bytes that make up a maximal prefix of a valid -/// UTF-8 code unit sequence. In this case, the number of bytes consumed is -/// always between 0 and 3, inclusive, where 0 is only returned when `slice` is -/// empty. -/// -/// # Examples -/// -/// Basic usage: -/// -/// ```ignore -/// use bstr::decode_last_utf8_lossy; -/// -/// // Decoding a valid codepoint. -/// let (ch, size) = decode_last_utf8_lossy(b"\xE2\x98\x83"); -/// assert_eq!('☃', ch); -/// assert_eq!(3, size); -/// -/// // Decoding an incomplete codepoint. 
-/// let (ch, size) = decode_last_utf8_lossy(b"\xE2\x98"); -/// assert_eq!('\u{FFFD}', ch); -/// assert_eq!(2, size); -/// ``` -/// -/// This example shows how to iterate over all codepoints in UTF-8 encoded -/// bytes in reverse, while replacing invalid UTF-8 sequences with the -/// replacement codepoint: -/// -/// ```ignore -/// use bstr::decode_last_utf8_lossy; -/// -/// let mut bytes = B(b"\xE2\x98\x83\xFF\xF0\x9D\x9E\x83\xE2\x98\x61"); -/// let mut chars = vec![]; -/// while !bytes.is_empty() { -/// let (ch, size) = decode_last_utf8_lossy(bytes); -/// bytes = &bytes[..bytes.len()-size]; -/// chars.push(ch); -/// } -/// assert_eq!(vec!['a', '\u{FFFD}', '𝞃', '\u{FFFD}', '☃'], chars); -/// ``` -#[inline] -pub fn decode_last_lossy>(slice: B) -> (char, usize) { - match decode_last(slice) { - (Some(ch), size) => (ch, size), - (None, size) => ('\u{FFFD}', size), - } -} - -/// SAFETY: The decode function relies on state being equal to ACCEPT only if -/// cp is a valid Unicode scalar value. -#[inline] -pub fn decode_step(state: &mut usize, cp: &mut u32, b: u8) { - let class = CLASSES[b as usize]; - if *state == ACCEPT { - *cp = (0xFF >> class) & (b as u32); - } else { - *cp = (b as u32 & 0b111111) | (*cp << 6); - } - *state = STATES_FORWARD[*state + class as usize] as usize; -} - -/// Returns true if and only if the given byte is either a valid leading UTF-8 -/// byte, or is otherwise an invalid byte that can never appear anywhere in a -/// valid UTF-8 sequence. -fn is_leading_or_invalid_utf8_byte(b: u8) -> bool { - // In the ASCII case, the most significant bit is never set. The leading - // byte of a 2/3/4-byte sequence always has the top two most significant - // bits set. For bytes that can never appear anywhere in valid UTF-8, this - // also returns true, since every such byte has its two most significant - // bits set: - // - // \xC0 :: 11000000 - // \xC1 :: 11000001 - // \xF5 :: 11110101 - // \xF6 :: 11110110 - // \xF7 :: 11110111 - // \xF8 :: 11111000 - // \xF9 :: 11111001 - // \xFA :: 11111010 - // \xFB :: 11111011 - // \xFC :: 11111100 - // \xFD :: 11111101 - // \xFE :: 11111110 - // \xFF :: 11111111 - (b & 0b1100_0000) != 0b1000_0000 -} - -#[cfg(test)] -mod tests { - use std::char; - - use crate::ext_slice::{ByteSlice, B}; - use crate::tests::LOSSY_TESTS; - use crate::utf8::{self, Utf8Error}; - - fn utf8e(valid_up_to: usize) -> Utf8Error { - Utf8Error { valid_up_to, error_len: None } - } - - fn utf8e2(valid_up_to: usize, error_len: usize) -> Utf8Error { - Utf8Error { valid_up_to, error_len: Some(error_len) } - } - - #[test] - fn validate_all_codepoints() { - for i in 0..(0x10FFFF + 1) { - let cp = match char::from_u32(i) { - None => continue, - Some(cp) => cp, - }; - let mut buf = [0; 4]; - let s = cp.encode_utf8(&mut buf); - assert_eq!(Ok(()), utf8::validate(s.as_bytes())); - } - } - - #[test] - fn validate_multiple_codepoints() { - assert_eq!(Ok(()), utf8::validate(b"abc")); - assert_eq!(Ok(()), utf8::validate(b"a\xE2\x98\x83a")); - assert_eq!(Ok(()), utf8::validate(b"a\xF0\x9D\x9C\xB7a")); - assert_eq!(Ok(()), utf8::validate(b"\xE2\x98\x83\xF0\x9D\x9C\xB7",)); - assert_eq!( - Ok(()), - utf8::validate(b"a\xE2\x98\x83a\xF0\x9D\x9C\xB7a",) - ); - assert_eq!( - Ok(()), - utf8::validate(b"\xEF\xBF\xBD\xE2\x98\x83\xEF\xBF\xBD",) - ); - } - - #[test] - fn validate_errors() { - // single invalid byte - assert_eq!(Err(utf8e2(0, 1)), utf8::validate(b"\xFF")); - // single invalid byte after ASCII - assert_eq!(Err(utf8e2(1, 1)), utf8::validate(b"a\xFF")); - // single invalid byte after 2 
byte sequence - assert_eq!(Err(utf8e2(2, 1)), utf8::validate(b"\xCE\xB2\xFF")); - // single invalid byte after 3 byte sequence - assert_eq!(Err(utf8e2(3, 1)), utf8::validate(b"\xE2\x98\x83\xFF")); - // single invalid byte after 4 byte sequence - assert_eq!(Err(utf8e2(4, 1)), utf8::validate(b"\xF0\x9D\x9D\xB1\xFF")); - - // An invalid 2-byte sequence with a valid 1-byte prefix. - assert_eq!(Err(utf8e2(0, 1)), utf8::validate(b"\xCE\xF0")); - // An invalid 3-byte sequence with a valid 2-byte prefix. - assert_eq!(Err(utf8e2(0, 2)), utf8::validate(b"\xE2\x98\xF0")); - // An invalid 4-byte sequence with a valid 3-byte prefix. - assert_eq!(Err(utf8e2(0, 3)), utf8::validate(b"\xF0\x9D\x9D\xF0")); - - // An overlong sequence. Should be \xE2\x82\xAC, but we encode the - // same codepoint value in 4 bytes. This not only tests that we reject - // overlong sequences, but that we get valid_up_to correct. - assert_eq!(Err(utf8e2(0, 1)), utf8::validate(b"\xF0\x82\x82\xAC")); - assert_eq!(Err(utf8e2(1, 1)), utf8::validate(b"a\xF0\x82\x82\xAC")); - assert_eq!( - Err(utf8e2(3, 1)), - utf8::validate(b"\xE2\x98\x83\xF0\x82\x82\xAC",) - ); - - // Check that encoding a surrogate codepoint using the UTF-8 scheme - // fails validation. - assert_eq!(Err(utf8e2(0, 1)), utf8::validate(b"\xED\xA0\x80")); - assert_eq!(Err(utf8e2(1, 1)), utf8::validate(b"a\xED\xA0\x80")); - assert_eq!( - Err(utf8e2(3, 1)), - utf8::validate(b"\xE2\x98\x83\xED\xA0\x80",) - ); - - // Check that an incomplete 2-byte sequence fails. - assert_eq!(Err(utf8e2(0, 1)), utf8::validate(b"\xCEa")); - assert_eq!(Err(utf8e2(1, 1)), utf8::validate(b"a\xCEa")); - assert_eq!( - Err(utf8e2(3, 1)), - utf8::validate(b"\xE2\x98\x83\xCE\xE2\x98\x83",) - ); - // Check that an incomplete 3-byte sequence fails. - assert_eq!(Err(utf8e2(0, 2)), utf8::validate(b"\xE2\x98a")); - assert_eq!(Err(utf8e2(1, 2)), utf8::validate(b"a\xE2\x98a")); - assert_eq!( - Err(utf8e2(3, 2)), - utf8::validate(b"\xE2\x98\x83\xE2\x98\xE2\x98\x83",) - ); - // Check that an incomplete 4-byte sequence fails. - assert_eq!(Err(utf8e2(0, 3)), utf8::validate(b"\xF0\x9D\x9Ca")); - assert_eq!(Err(utf8e2(1, 3)), utf8::validate(b"a\xF0\x9D\x9Ca")); - assert_eq!( - Err(utf8e2(4, 3)), - utf8::validate(b"\xF0\x9D\x9C\xB1\xF0\x9D\x9C\xE2\x98\x83",) - ); - assert_eq!( - Err(utf8e2(6, 3)), - utf8::validate(b"foobar\xF1\x80\x80quux",) - ); - - // Check that an incomplete (EOF) 2-byte sequence fails. - assert_eq!(Err(utf8e(0)), utf8::validate(b"\xCE")); - assert_eq!(Err(utf8e(1)), utf8::validate(b"a\xCE")); - assert_eq!(Err(utf8e(3)), utf8::validate(b"\xE2\x98\x83\xCE")); - // Check that an incomplete (EOF) 3-byte sequence fails. - assert_eq!(Err(utf8e(0)), utf8::validate(b"\xE2\x98")); - assert_eq!(Err(utf8e(1)), utf8::validate(b"a\xE2\x98")); - assert_eq!(Err(utf8e(3)), utf8::validate(b"\xE2\x98\x83\xE2\x98")); - // Check that an incomplete (EOF) 4-byte sequence fails. - assert_eq!(Err(utf8e(0)), utf8::validate(b"\xF0\x9D\x9C")); - assert_eq!(Err(utf8e(1)), utf8::validate(b"a\xF0\x9D\x9C")); - assert_eq!( - Err(utf8e(4)), - utf8::validate(b"\xF0\x9D\x9C\xB1\xF0\x9D\x9C",) - ); - - // Test that we errors correct even after long valid sequences. This - // checks that our "backup" logic for detecting errors is correct. 
- assert_eq!( - Err(utf8e2(8, 1)), - utf8::validate(b"\xe2\x98\x83\xce\xb2\xe3\x83\x84\xFF",) - ); - } - - #[test] - fn decode_valid() { - fn d(mut s: &str) -> Vec { - let mut chars = vec![]; - while !s.is_empty() { - let (ch, size) = utf8::decode(s.as_bytes()); - s = &s[size..]; - chars.push(ch.unwrap()); - } - chars - } - - assert_eq!(vec!['☃'], d("☃")); - assert_eq!(vec!['☃', '☃'], d("☃☃")); - assert_eq!(vec!['α', 'β', 'γ', 'δ', 'ε'], d("αβγδε")); - assert_eq!(vec!['☃', '⛄', '⛇'], d("☃⛄⛇")); - assert_eq!(vec!['𝗮', '𝗯', '𝗰', '𝗱', '𝗲'], d("𝗮𝗯𝗰𝗱𝗲")); - } - - #[test] - fn decode_invalid() { - let (ch, size) = utf8::decode(b""); - assert_eq!(None, ch); - assert_eq!(0, size); - - let (ch, size) = utf8::decode(b"\xFF"); - assert_eq!(None, ch); - assert_eq!(1, size); - - let (ch, size) = utf8::decode(b"\xCE\xF0"); - assert_eq!(None, ch); - assert_eq!(1, size); - - let (ch, size) = utf8::decode(b"\xE2\x98\xF0"); - assert_eq!(None, ch); - assert_eq!(2, size); - - let (ch, size) = utf8::decode(b"\xF0\x9D\x9D"); - assert_eq!(None, ch); - assert_eq!(3, size); - - let (ch, size) = utf8::decode(b"\xF0\x9D\x9D\xF0"); - assert_eq!(None, ch); - assert_eq!(3, size); - - let (ch, size) = utf8::decode(b"\xF0\x82\x82\xAC"); - assert_eq!(None, ch); - assert_eq!(1, size); - - let (ch, size) = utf8::decode(b"\xED\xA0\x80"); - assert_eq!(None, ch); - assert_eq!(1, size); - - let (ch, size) = utf8::decode(b"\xCEa"); - assert_eq!(None, ch); - assert_eq!(1, size); - - let (ch, size) = utf8::decode(b"\xE2\x98a"); - assert_eq!(None, ch); - assert_eq!(2, size); - - let (ch, size) = utf8::decode(b"\xF0\x9D\x9Ca"); - assert_eq!(None, ch); - assert_eq!(3, size); - } - - #[test] - fn decode_lossy() { - let (ch, size) = utf8::decode_lossy(b""); - assert_eq!('\u{FFFD}', ch); - assert_eq!(0, size); - - let (ch, size) = utf8::decode_lossy(b"\xFF"); - assert_eq!('\u{FFFD}', ch); - assert_eq!(1, size); - - let (ch, size) = utf8::decode_lossy(b"\xCE\xF0"); - assert_eq!('\u{FFFD}', ch); - assert_eq!(1, size); - - let (ch, size) = utf8::decode_lossy(b"\xE2\x98\xF0"); - assert_eq!('\u{FFFD}', ch); - assert_eq!(2, size); - - let (ch, size) = utf8::decode_lossy(b"\xF0\x9D\x9D\xF0"); - assert_eq!('\u{FFFD}', ch); - assert_eq!(3, size); - - let (ch, size) = utf8::decode_lossy(b"\xF0\x82\x82\xAC"); - assert_eq!('\u{FFFD}', ch); - assert_eq!(1, size); - - let (ch, size) = utf8::decode_lossy(b"\xED\xA0\x80"); - assert_eq!('\u{FFFD}', ch); - assert_eq!(1, size); - - let (ch, size) = utf8::decode_lossy(b"\xCEa"); - assert_eq!('\u{FFFD}', ch); - assert_eq!(1, size); - - let (ch, size) = utf8::decode_lossy(b"\xE2\x98a"); - assert_eq!('\u{FFFD}', ch); - assert_eq!(2, size); - - let (ch, size) = utf8::decode_lossy(b"\xF0\x9D\x9Ca"); - assert_eq!('\u{FFFD}', ch); - assert_eq!(3, size); - } - - #[test] - fn decode_last_valid() { - fn d(mut s: &str) -> Vec { - let mut chars = vec![]; - while !s.is_empty() { - let (ch, size) = utf8::decode_last(s.as_bytes()); - s = &s[..s.len() - size]; - chars.push(ch.unwrap()); - } - chars - } - - assert_eq!(vec!['☃'], d("☃")); - assert_eq!(vec!['☃', '☃'], d("☃☃")); - assert_eq!(vec!['ε', 'δ', 'γ', 'β', 'α'], d("αβγδε")); - assert_eq!(vec!['⛇', '⛄', '☃'], d("☃⛄⛇")); - assert_eq!(vec!['𝗲', '𝗱', '𝗰', '𝗯', '𝗮'], d("𝗮𝗯𝗰𝗱𝗲")); - } - - #[test] - fn decode_last_invalid() { - let (ch, size) = utf8::decode_last(b""); - assert_eq!(None, ch); - assert_eq!(0, size); - - let (ch, size) = utf8::decode_last(b"\xFF"); - assert_eq!(None, ch); - assert_eq!(1, size); - - let (ch, size) = utf8::decode_last(b"\xCE\xF0"); - 
assert_eq!(None, ch); - assert_eq!(1, size); - - let (ch, size) = utf8::decode_last(b"\xCE"); - assert_eq!(None, ch); - assert_eq!(1, size); - - let (ch, size) = utf8::decode_last(b"\xE2\x98\xF0"); - assert_eq!(None, ch); - assert_eq!(1, size); - - let (ch, size) = utf8::decode_last(b"\xE2\x98"); - assert_eq!(None, ch); - assert_eq!(2, size); - - let (ch, size) = utf8::decode_last(b"\xF0\x9D\x9D\xF0"); - assert_eq!(None, ch); - assert_eq!(1, size); - - let (ch, size) = utf8::decode_last(b"\xF0\x9D\x9D"); - assert_eq!(None, ch); - assert_eq!(3, size); - - let (ch, size) = utf8::decode_last(b"\xF0\x82\x82\xAC"); - assert_eq!(None, ch); - assert_eq!(1, size); - - let (ch, size) = utf8::decode_last(b"\xED\xA0\x80"); - assert_eq!(None, ch); - assert_eq!(1, size); - - let (ch, size) = utf8::decode_last(b"\xED\xA0"); - assert_eq!(None, ch); - assert_eq!(1, size); - - let (ch, size) = utf8::decode_last(b"\xED"); - assert_eq!(None, ch); - assert_eq!(1, size); - - let (ch, size) = utf8::decode_last(b"a\xCE"); - assert_eq!(None, ch); - assert_eq!(1, size); - - let (ch, size) = utf8::decode_last(b"a\xE2\x98"); - assert_eq!(None, ch); - assert_eq!(2, size); - - let (ch, size) = utf8::decode_last(b"a\xF0\x9D\x9C"); - assert_eq!(None, ch); - assert_eq!(3, size); - } - - #[test] - fn decode_last_lossy() { - let (ch, size) = utf8::decode_last_lossy(b""); - assert_eq!('\u{FFFD}', ch); - assert_eq!(0, size); - - let (ch, size) = utf8::decode_last_lossy(b"\xFF"); - assert_eq!('\u{FFFD}', ch); - assert_eq!(1, size); - - let (ch, size) = utf8::decode_last_lossy(b"\xCE\xF0"); - assert_eq!('\u{FFFD}', ch); - assert_eq!(1, size); - - let (ch, size) = utf8::decode_last_lossy(b"\xCE"); - assert_eq!('\u{FFFD}', ch); - assert_eq!(1, size); - - let (ch, size) = utf8::decode_last_lossy(b"\xE2\x98\xF0"); - assert_eq!('\u{FFFD}', ch); - assert_eq!(1, size); - - let (ch, size) = utf8::decode_last_lossy(b"\xE2\x98"); - assert_eq!('\u{FFFD}', ch); - assert_eq!(2, size); - - let (ch, size) = utf8::decode_last_lossy(b"\xF0\x9D\x9D\xF0"); - assert_eq!('\u{FFFD}', ch); - assert_eq!(1, size); - - let (ch, size) = utf8::decode_last_lossy(b"\xF0\x9D\x9D"); - assert_eq!('\u{FFFD}', ch); - assert_eq!(3, size); - - let (ch, size) = utf8::decode_last_lossy(b"\xF0\x82\x82\xAC"); - assert_eq!('\u{FFFD}', ch); - assert_eq!(1, size); - - let (ch, size) = utf8::decode_last_lossy(b"\xED\xA0\x80"); - assert_eq!('\u{FFFD}', ch); - assert_eq!(1, size); - - let (ch, size) = utf8::decode_last_lossy(b"\xED\xA0"); - assert_eq!('\u{FFFD}', ch); - assert_eq!(1, size); - - let (ch, size) = utf8::decode_last_lossy(b"\xED"); - assert_eq!('\u{FFFD}', ch); - assert_eq!(1, size); - - let (ch, size) = utf8::decode_last_lossy(b"a\xCE"); - assert_eq!('\u{FFFD}', ch); - assert_eq!(1, size); - - let (ch, size) = utf8::decode_last_lossy(b"a\xE2\x98"); - assert_eq!('\u{FFFD}', ch); - assert_eq!(2, size); - - let (ch, size) = utf8::decode_last_lossy(b"a\xF0\x9D\x9C"); - assert_eq!('\u{FFFD}', ch); - assert_eq!(3, size); - } - - #[test] - fn chars() { - for (i, &(expected, input)) in LOSSY_TESTS.iter().enumerate() { - let got: String = B(input).chars().collect(); - assert_eq!( - expected, got, - "chars(ith: {:?}, given: {:?})", - i, input, - ); - let got: String = - B(input).char_indices().map(|(_, _, ch)| ch).collect(); - assert_eq!( - expected, got, - "char_indices(ith: {:?}, given: {:?})", - i, input, - ); - - let expected: String = expected.chars().rev().collect(); - - let got: String = B(input).chars().rev().collect(); - assert_eq!( - expected, got, - 
"chars.rev(ith: {:?}, given: {:?})", - i, input, - ); - let got: String = - B(input).char_indices().rev().map(|(_, _, ch)| ch).collect(); - assert_eq!( - expected, got, - "char_indices.rev(ith: {:?}, given: {:?})", - i, input, - ); - } - } - - #[test] - fn utf8_chunks() { - let mut c = utf8::Utf8Chunks { bytes: b"123\xC0" }; - assert_eq!( - (c.next(), c.next()), - ( - Some(utf8::Utf8Chunk { - valid: "123", - invalid: b"\xC0".as_bstr(), - incomplete: false, - }), - None, - ) - ); - - let mut c = utf8::Utf8Chunks { bytes: b"123\xFF\xFF" }; - assert_eq!( - (c.next(), c.next(), c.next()), - ( - Some(utf8::Utf8Chunk { - valid: "123", - invalid: b"\xFF".as_bstr(), - incomplete: false, - }), - Some(utf8::Utf8Chunk { - valid: "", - invalid: b"\xFF".as_bstr(), - incomplete: false, - }), - None, - ) - ); - - let mut c = utf8::Utf8Chunks { bytes: b"123\xD0" }; - assert_eq!( - (c.next(), c.next()), - ( - Some(utf8::Utf8Chunk { - valid: "123", - invalid: b"\xD0".as_bstr(), - incomplete: true, - }), - None, - ) - ); - - let mut c = utf8::Utf8Chunks { bytes: b"123\xD0456" }; - assert_eq!( - (c.next(), c.next(), c.next()), - ( - Some(utf8::Utf8Chunk { - valid: "123", - invalid: b"\xD0".as_bstr(), - incomplete: false, - }), - Some(utf8::Utf8Chunk { - valid: "456", - invalid: b"".as_bstr(), - incomplete: false, - }), - None, - ) - ); - - let mut c = utf8::Utf8Chunks { bytes: b"123\xE2\x98" }; - assert_eq!( - (c.next(), c.next()), - ( - Some(utf8::Utf8Chunk { - valid: "123", - invalid: b"\xE2\x98".as_bstr(), - incomplete: true, - }), - None, - ) - ); - - let mut c = utf8::Utf8Chunks { bytes: b"123\xF4\x8F\xBF" }; - assert_eq!( - (c.next(), c.next()), - ( - Some(utf8::Utf8Chunk { - valid: "123", - invalid: b"\xF4\x8F\xBF".as_bstr(), - incomplete: true, - }), - None, - ) - ); - } -} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bytes/.cargo-checksum.json cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bytes/.cargo-checksum.json --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bytes/.cargo-checksum.json 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bytes/.cargo-checksum.json 2023-02-01 05:24:55.000000000 +0000 @@ -1 +1 @@ -{"files":{},"package":"dfb24e866b15a1af2a1b663f10c6b6b8f397a84aadb828f12e5b289ec23a3a3c"} \ No newline at end of file +{"files":{},"package":"89b2fd2a0dcf38d7971e2194b6b6eebab45ae01067456a7fd93d5547a61b70be"} \ No newline at end of file diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bytes/Cargo.toml cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bytes/Cargo.toml --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bytes/Cargo.toml 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bytes/Cargo.toml 2023-02-01 05:24:55.000000000 +0000 @@ -12,7 +12,7 @@ [package] edition = "2018" name = "bytes" -version = "1.3.0" +version = "1.4.0" authors = [ "Carl Lerche ", "Sean McArthur ", diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bytes/CHANGELOG.md cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bytes/CHANGELOG.md --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bytes/CHANGELOG.md 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bytes/CHANGELOG.md 2023-02-01 05:24:55.000000000 +0000 @@ -1,3 +1,18 @@ +# 1.4.0 (January 31, 2023) + +### Added + +- Make `IntoIter` constructor public (#581) + +### Fixed + +- Avoid large reallocations when freezing `BytesMut` (#592) + +### Documented + +- Document which functions require `std` (#591) +- Fix duplicate "the the" typos (#585) + # 1.3.0 (November 20, 2022) ### Added diff -Nru 
cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bytes/README.md cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bytes/README.md --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bytes/README.md 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bytes/README.md 2023-02-01 05:24:55.000000000 +0000 @@ -36,6 +36,15 @@ bytes = { version = "1", features = ["serde"] } ``` +## Building documentation + +When building the `bytes` documentation the `docsrs` option should be used, otherwise +feature gates will not be shown. This requires a nightly toolchain: + +``` +RUSTDOCFLAGS="--cfg docsrs" cargo +nightly doc +``` + ## License This project is licensed under the [MIT license](LICENSE). diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bytes/src/buf/buf_impl.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bytes/src/buf/buf_impl.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bytes/src/buf/buf_impl.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bytes/src/buf/buf_impl.rs 2023-02-01 05:24:55.000000000 +0000 @@ -160,6 +160,7 @@ /// /// [`writev`]: http://man7.org/linux/man-pages/man2/readv.2.html #[cfg(feature = "std")] + #[cfg_attr(docsrs, doc(cfg(feature = "std")))] fn chunks_vectored<'a>(&'a self, dst: &mut [IoSlice<'a>]) -> usize { if dst.is_empty() { return 0; @@ -1183,6 +1184,7 @@ /// assert_eq!(&dst[..11], &b"hello world"[..]); /// ``` #[cfg(feature = "std")] + #[cfg_attr(docsrs, doc(cfg(feature = "std")))] fn reader(self) -> Reader where Self: Sized, diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bytes/src/buf/buf_mut.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bytes/src/buf/buf_mut.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bytes/src/buf/buf_mut.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bytes/src/buf/buf_mut.rs 2023-02-01 05:24:55.000000000 +0000 @@ -1239,6 +1239,7 @@ /// assert_eq!(*buf, b"hello world"[..]); /// ``` #[cfg(feature = "std")] + #[cfg_attr(docsrs, doc(cfg(feature = "std")))] fn writer(self) -> Writer where Self: Sized, diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bytes/src/buf/iter.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bytes/src/buf/iter.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bytes/src/buf/iter.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bytes/src/buf/iter.rs 2023-02-01 05:24:55.000000000 +0000 @@ -2,8 +2,6 @@ /// Iterator over the bytes contained by the buffer. /// -/// This struct is created by the [`iter`] method on [`Buf`]. -/// /// # Examples /// /// Basic usage: @@ -43,7 +41,7 @@ /// assert_eq!(iter.next(), Some(b'c')); /// assert_eq!(iter.next(), None); /// ``` - pub(crate) fn new(inner: T) -> IntoIter { + pub fn new(inner: T) -> IntoIter { IntoIter { inner } } diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bytes/src/bytes.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bytes/src/bytes.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bytes/src/bytes.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bytes/src/bytes.rs 2023-02-01 05:24:55.000000000 +0000 @@ -32,7 +32,7 @@ /// All `Bytes` implementations must fulfill the following requirements: /// - They are cheaply cloneable and thereby shareable between an unlimited amount /// of components, for example by modifying a reference count. -/// - Instances can be sliced to refer to a subset of the the original buffer. +/// - Instances can be sliced to refer to a subset of the original buffer. 
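(Editorial aside, not part of the patch: the iter.rs hunk above makes `IntoIter::new` public, which is the "Make `IntoIter` constructor public (#581)" changelog entry. A minimal usage sketch, assuming bytes 1.4.0 as vendored here.)

```
use bytes::buf::IntoIter;
use bytes::Bytes;

fn main() {
    // Constructing the iterator directly is possible now that `new` is public.
    let mut it = IntoIter::new(Bytes::from_static(b"abc"));
    assert_eq!(it.next(), Some(b'a'));
    assert_eq!(it.collect::<Vec<u8>>(), vec![b'b', b'c']);
}
```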
/// /// ``` /// use bytes::Bytes; @@ -71,7 +71,7 @@ /// /// For `Bytes` implementations which point to a reference counted shared storage /// (e.g. an `Arc<[u8]>`), sharing will be implemented by increasing the -/// the reference count. +/// reference count. /// /// Due to this mechanism, multiple `Bytes` instances may point to the same /// shared memory region. @@ -807,8 +807,36 @@ impl From> for Bytes { fn from(vec: Vec) -> Bytes { - let slice = vec.into_boxed_slice(); - slice.into() + let mut vec = vec; + let ptr = vec.as_mut_ptr(); + let len = vec.len(); + let cap = vec.capacity(); + + // Avoid an extra allocation if possible. + if len == cap { + return Bytes::from(vec.into_boxed_slice()); + } + + let shared = Box::new(Shared { + buf: ptr, + cap, + ref_cnt: AtomicUsize::new(1), + }); + mem::forget(vec); + + let shared = Box::into_raw(shared); + // The pointer should be aligned, so this assert should + // always succeed. + debug_assert!( + 0 == (shared as usize & KIND_MASK), + "internal: Box should have an aligned pointer", + ); + Bytes { + ptr, + len, + data: AtomicPtr::new(shared as _), + vtable: &SHARED_VTABLE, + } } } diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bytes/src/lib.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bytes/src/lib.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bytes/src/lib.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bytes/src/lib.rs 2023-02-01 05:24:55.000000000 +0000 @@ -4,6 +4,7 @@ attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables)) ))] #![no_std] +#![cfg_attr(docsrs, feature(doc_cfg))] //! Provides abstractions for working with bytes. //! diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bytes/tests/test_bytes.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bytes/tests/test_bytes.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/bytes/tests/test_bytes.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/bytes/tests/test_bytes.rs 2023-02-01 05:24:55.000000000 +0000 @@ -1163,3 +1163,48 @@ assert_eq!(Vec::from(b2), vec[20..]); assert_eq!(Vec::from(b1), vec[..20]); } + +#[test] +fn test_bytes_vec_conversion() { + let mut vec = Vec::with_capacity(10); + vec.extend(b"abcdefg"); + let b = Bytes::from(vec); + let v = Vec::from(b); + assert_eq!(v.len(), 7); + assert_eq!(v.capacity(), 10); + + let mut b = Bytes::from(v); + b.advance(1); + let v = Vec::from(b); + assert_eq!(v.len(), 6); + assert_eq!(v.capacity(), 10); + assert_eq!(v.as_slice(), b"bcdefg"); +} + +#[test] +fn test_bytes_mut_conversion() { + let mut b1 = BytesMut::with_capacity(10); + b1.extend(b"abcdefg"); + let b2 = Bytes::from(b1); + let v = Vec::from(b2); + assert_eq!(v.len(), 7); + assert_eq!(v.capacity(), 10); + + let mut b = Bytes::from(v); + b.advance(1); + let v = Vec::from(b); + assert_eq!(v.len(), 6); + assert_eq!(v.capacity(), 10); + assert_eq!(v.as_slice(), b"bcdefg"); +} + +#[test] +fn test_bytes_capacity_len() { + for cap in 0..100 { + for len in 0..=cap { + let mut v = Vec::with_capacity(cap); + v.resize(len, 0); + let _ = Bytes::from(v); + } + } +} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/cc/.cargo-checksum.json cargo-0.67.1+ds0ubuntu0.libgit2/vendor/cc/.cargo-checksum.json --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/cc/.cargo-checksum.json 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/cc/.cargo-checksum.json 2023-02-01 05:24:55.000000000 +0000 @@ -1 +1 @@ -{"files":{},"package":"a20104e2335ce8a659d6dd92a51a767a0c062599c73b343fd152cb401e828c3d"} \ No newline at end of file 
+{"files":{},"package":"50d30906286121d95be3d479533b458f87493b30a4b5f79a607db8f5d11aa91f"} \ No newline at end of file diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/cc/Cargo.lock cargo-0.67.1+ds0ubuntu0.libgit2/vendor/cc/Cargo.lock --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/cc/Cargo.lock 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/cc/Cargo.lock 2023-02-01 05:24:55.000000000 +0000 @@ -10,7 +10,7 @@ [[package]] name = "cc" -version = "1.0.78" +version = "1.0.79" dependencies = [ "jobserver", "tempfile", @@ -51,9 +51,9 @@ [[package]] name = "libc" -version = "0.2.138" +version = "0.2.139" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db6d7e329c562c5dfab7a46a2afabc8b987ab9a4834c9d1ca04dc54c1546cef8" +checksum = "201de327520df007757c1f0adce6e827fe8562fbc28bfd9c15571c66ca1f5f79" [[package]] name = "redox_syscall" diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/cc/Cargo.toml cargo-0.67.1+ds0ubuntu0.libgit2/vendor/cc/Cargo.toml --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/cc/Cargo.toml 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/cc/Cargo.toml 2023-02-01 05:24:55.000000000 +0000 @@ -12,7 +12,7 @@ [package] edition = "2018" name = "cc" -version = "1.0.78" +version = "1.0.79" authors = ["Alex Crichton "] exclude = ["/.github"] description = """ diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/cc/src/bin/gcc-shim.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/cc/src/bin/gcc-shim.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/cc/src/bin/gcc-shim.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/cc/src/bin/gcc-shim.rs 2023-02-01 05:24:55.000000000 +0000 @@ -2,7 +2,7 @@ use std::env; use std::fs::File; -use std::io::prelude::*; +use std::io::{self, prelude::*}; use std::path::PathBuf; fn main() { @@ -10,39 +10,61 @@ let program = args.next().expect("Unexpected empty args"); let out_dir = PathBuf::from( - env::var_os("GCCTEST_OUT_DIR").expect(&format!("{}: GCCTEST_OUT_DIR not found", program)), + env::var_os("GCCTEST_OUT_DIR") + .unwrap_or_else(|| panic!("{}: GCCTEST_OUT_DIR not found", program)), ); // Find the first nonexistent candidate file to which the program's args can be written. - for i in 0.. { - let candidate = &out_dir.join(format!("out{}", i)); + let candidate = (0..).find_map(|i| { + let candidate = out_dir.join(format!("out{}", i)); - // If the file exists, commands have already run. Try again. if candidate.exists() { - continue; + // If the file exists, commands have already run. Try again. + None + } else { + Some(candidate) } + }).unwrap_or_else(|| panic!("Cannot find the first nonexistent candidate file to which the program's args can be written under out_dir '{}'", out_dir.display())); - // Create a file and record the args passed to the command. - let mut f = File::create(candidate).expect(&format!( - "{}: can't create candidate: {}", + // Create a file and record the args passed to the command. 
+ let f = File::create(&candidate).unwrap_or_else(|e| { + panic!( + "{}: can't create candidate: {}, error: {}", program, - candidate.to_string_lossy() - )); + candidate.display(), + e + ) + }); + let mut f = io::BufWriter::new(f); + + (|| { for arg in args { - writeln!(f, "{}", arg).expect(&format!( - "{}: can't write to candidate: {}", - program, - candidate.to_string_lossy() - )); + writeln!(f, "{}", arg)?; } - break; - } + + f.flush()?; + + let mut f = f.into_inner()?; + f.flush()?; + f.sync_all() + })() + .unwrap_or_else(|e| { + panic!( + "{}: can't write to candidate: {}, error: {}", + program, + candidate.display(), + e + ) + }); // Create a file used by some tests. let path = &out_dir.join("libfoo.a"); - File::create(path).expect(&format!( - "{}: can't create libfoo.a: {}", - program, - path.to_string_lossy() - )); + File::create(path).unwrap_or_else(|e| { + panic!( + "{}: can't create libfoo.a: {}, error: {}", + program, + path.display(), + e + ) + }); } diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/cc/src/lib.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/cc/src/lib.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/cc/src/lib.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/cc/src/lib.rs 2023-02-01 05:24:55.000000000 +0000 @@ -115,6 +115,7 @@ env: Vec<(OsString, OsString)>, compiler: Option, archiver: Option, + ranlib: Option, cargo_metadata: bool, link_lib_modifiers: Vec, pic: Option, @@ -320,6 +321,7 @@ env: Vec::new(), compiler: None, archiver: None, + ranlib: None, cargo_metadata: true, link_lib_modifiers: Vec::new(), pic: None, @@ -538,7 +540,7 @@ cmd.arg(&src); let output = cmd.output()?; - let is_supported = output.stderr.is_empty(); + let is_supported = output.status.success() && output.stderr.is_empty(); known_status.insert(flag.to_owned(), is_supported); Ok(is_supported) @@ -916,6 +918,17 @@ self.archiver = Some(archiver.as_ref().to_owned()); self } + + /// Configures the tool used to index archives. + /// + /// This option is automatically determined from the target platform or a + /// number of environment variables, so it's not required to call this + /// function. + pub fn ranlib>(&mut self, ranlib: P) -> &mut Build { + self.ranlib = Some(ranlib.as_ref().to_owned()); + self + } + /// Define whether metadata should be emitted for cargo allowing it to /// automatically link the binary. Defaults to `true`. /// @@ -2094,7 +2107,11 @@ // Non-msvc targets (those using `ar`) need a separate step to add // the symbol table to archives since our construction command of // `cq` doesn't add it for us. - let (mut ar, cmd) = self.get_ar()?; + let (mut ar, cmd, _any_flags) = self.get_ar()?; + + // NOTE: We add `s` even if flags were passed using $ARFLAGS/ar_flag, because `s` + // here represents a _mode_, not an arbitrary flag. Further discussion of this choice + // can be seen in https://github.com/rust-lang/cc-rs/pull/763. run(ar.arg("s").arg(dst), &cmd)?; } @@ -2105,12 +2122,16 @@ let target = self.get_target()?; if target.contains("msvc") { - let (mut cmd, program) = self.get_ar()?; + let (mut cmd, program, any_flags) = self.get_ar()?; + // NOTE: -out: here is an I/O flag, and so must be included even if $ARFLAGS/ar_flag is + // in use. -nologo on the other hand is just a regular flag, and one that we'll skip if + // the caller has explicitly dictated the flags they want. See + // https://github.com/rust-lang/cc-rs/pull/763 for further discussion. 
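(Editorial aside, not part of the patch: the comments in this hunk distinguish archiver modes such as `cq`, `s` and `-out:` from ordinary flags; with this change, supplying any flag via `$ARFLAGS` or the existing `Build::ar_flag` setter suppresses only the optional `-nologo`, never the modes. A hypothetical build script exercising that path on an msvc target, with placeholder file names.)

```
// build.rs sketch; `ar_flag` feeds the `any_flags` bookkeeping introduced
// in this change, so cc omits its default `-nologo` for lib.exe.
fn main() {
    cc::Build::new()
        .file("src/native/shim.c")
        .ar_flag("/VERBOSE") // lib.exe option passed through as-is
        .compile("shim");
}
```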
let mut out = OsString::from("-out:"); out.push(dst); - cmd.arg(out).arg("-nologo"); - for flag in self.ar_flags.iter() { - cmd.arg(flag); + cmd.arg(out); + if !any_flags { + cmd.arg("-nologo"); } // If the library file already exists, add the library name // as an argument to let lib.exe know we are appending the objs. @@ -2120,7 +2141,7 @@ cmd.args(objs); run(&mut cmd, &program)?; } else { - let (mut ar, cmd) = self.get_ar()?; + let (mut ar, cmd, _any_flags) = self.get_ar()?; // Set an environment variable to tell the OSX archiver to ensure // that all dates listed in the archive are zero, improving @@ -2145,9 +2166,10 @@ // In any case if this doesn't end up getting read, it shouldn't // cause that many issues! ar.env("ZERO_AR_DATE", "1"); - for flag in self.ar_flags.iter() { - ar.arg(flag); - } + + // NOTE: We add cq here regardless of whether $ARFLAGS/ar_flag have been used because + // it dictates the _mode_ ar runs in, which the setter of $ARFLAGS/ar_flag can't + // dictate. See https://github.com/rust-lang/cc-rs/pull/763 for further discussion. run(ar.arg("cq").arg(dst).args(objs), &cmd)?; } @@ -2212,7 +2234,7 @@ } } else if is_sim { match arch { - "arm64" | "aarch64" => ArchSpec::Simulator("-arch arm64"), + "arm64" | "aarch64" => ArchSpec::Simulator("arm64"), "x86_64" => ArchSpec::Simulator("-m64"), _ => { return Err(Error::new( @@ -2262,7 +2284,13 @@ format!("{}os", sdk_prefix) } ArchSpec::Simulator(arch) => { - cmd.args.push(arch.into()); + if arch.starts_with('-') { + // -m32 or -m64 + cmd.args.push(arch.into()); + } else { + cmd.args.push("-arch".into()); + cmd.args.push(arch.into()); + } cmd.args .push(format!("-m{}simulator-version-min={}", sim_prefix, min_version).into()); format!("{}simulator", sdk_prefix) @@ -2279,18 +2307,8 @@ cmd.args.push("-isysroot".into()); cmd.args.push(sdk_path); + // TODO: Remove this once Apple stops accepting apps built with Xcode 13 cmd.args.push("-fembed-bitcode".into()); - /* - * TODO we probably ultimately want the -fembed-bitcode-marker flag - * but can't have it now because of an issue in LLVM: - * https://github.com/rust-lang/cc-rs/issues/301 - * https://github.com/rust-lang/rust/pull/48896#comment-372192660 - */ - /* - if self.get_opt_level()? == "0" { - cmd.args.push("-fembed-bitcode-marker".into()); - } - */ Ok(()) } @@ -2643,81 +2661,206 @@ } } - fn get_ar(&self) -> Result<(Command, String), Error> { - if let Some(ref p) = self.archiver { - let name = p.file_name().and_then(|s| s.to_str()).unwrap_or("ar"); - return Ok((self.cmd(p), name.to_string())); + fn get_ar(&self) -> Result<(Command, String, bool), Error> { + self.try_get_archiver_and_flags() + } + + /// Get the archiver (ar) that's in use for this configuration. + /// + /// You can use [`Command::get_program`] to get just the path to the command. + /// + /// This method will take into account all configuration such as debug + /// information, optimization level, include directories, defines, etc. + /// Additionally, the compiler binary in use follows the standard + /// conventions for this path, e.g. looking at the explicitly set compiler, + /// environment variables (a number of which are inspected here), and then + /// falling back to the default configuration. + /// + /// # Panics + /// + /// Panics if an error occurred while determining the architecture. + pub fn get_archiver(&self) -> Command { + match self.try_get_archiver() { + Ok(tool) => tool, + Err(e) => fail(&e.message), + } + } + + /// Get the archiver that's in use for this configuration. 
+ /// + /// This will return a result instead of panicing; + /// see [`get_archiver()`] for the complete description. + pub fn try_get_archiver(&self) -> Result { + Ok(self.try_get_archiver_and_flags()?.0) + } + + fn try_get_archiver_and_flags(&self) -> Result<(Command, String, bool), Error> { + let (mut cmd, name) = self.get_base_archiver()?; + let flags = self.envflags("ARFLAGS"); + let mut any_flags = !flags.is_empty(); + cmd.args(flags); + for flag in &self.ar_flags { + any_flags = true; + cmd.arg(flag); } - if let Ok(p) = self.get_var("AR") { - return Ok((self.cmd(&p), p)); + Ok((cmd, name, any_flags)) + } + + fn get_base_archiver(&self) -> Result<(Command, String), Error> { + if let Some(ref a) = self.archiver { + return Ok((self.cmd(a), a.to_string_lossy().into_owned())); } - let target = self.get_target()?; - let default_ar = "ar".to_string(); - let program = if target.contains("android") { - format!("{}-ar", target.replace("armv7", "arm")) - } else if target.contains("emscripten") { - // Windows use bat files so we have to be a bit more specific - if cfg!(windows) { - let mut cmd = self.cmd("cmd"); - cmd.arg("/c").arg("emar.bat"); - return Ok((cmd, "emar.bat".to_string())); - } - "emar".to_string() - } else if target.contains("msvc") { - let compiler = self.get_base_compiler()?; - let mut lib = String::new(); - if compiler.family == (ToolFamily::Msvc { clang_cl: true }) { - // See if there is 'llvm-lib' next to 'clang-cl' - // Another possibility could be to see if there is 'clang' - // next to 'clang-cl' and use 'search_programs()' to locate - // 'llvm-lib'. This is because 'clang-cl' doesn't support - // the -print-search-dirs option. - if let Some(mut cmd) = which(&compiler.path) { - cmd.pop(); - cmd.push("llvm-lib.exe"); - if let Some(llvm_lib) = which(&cmd) { - lib = llvm_lib.to_str().unwrap().to_owned(); + self.get_base_archiver_variant("AR", "ar") + } + + /// Get the ranlib that's in use for this configuration. + /// + /// You can use [`Command::get_program`] to get just the path to the command. + /// + /// This method will take into account all configuration such as debug + /// information, optimization level, include directories, defines, etc. + /// Additionally, the compiler binary in use follows the standard + /// conventions for this path, e.g. looking at the explicitly set compiler, + /// environment variables (a number of which are inspected here), and then + /// falling back to the default configuration. + /// + /// # Panics + /// + /// Panics if an error occurred while determining the architecture. + pub fn get_ranlib(&self) -> Command { + match self.try_get_ranlib() { + Ok(tool) => tool, + Err(e) => fail(&e.message), + } + } + + /// Get the ranlib that's in use for this configuration. + /// + /// This will return a result instead of panicing; + /// see [`get_ranlib()`] for the complete description. 
+ pub fn try_get_ranlib(&self) -> Result { + let mut cmd = self.get_base_ranlib()?; + cmd.args(self.envflags("RANLIBFLAGS")); + Ok(cmd) + } + + fn get_base_ranlib(&self) -> Result { + if let Some(ref r) = self.ranlib { + return Ok(self.cmd(r)); + } + + Ok(self.get_base_archiver_variant("RANLIB", "ranlib")?.0) + } + + fn get_base_archiver_variant(&self, env: &str, tool: &str) -> Result<(Command, String), Error> { + let target = self.get_target()?; + let mut name = String::new(); + let tool_opt: Option = self + .env_tool(env) + .map(|(tool, _wrapper, args)| { + let mut cmd = self.cmd(tool); + cmd.args(args); + cmd + }) + .or_else(|| { + if target.contains("emscripten") { + // Windows use bat files so we have to be a bit more specific + if cfg!(windows) { + let mut cmd = self.cmd("cmd"); + name = format!("em{}.bat", tool); + cmd.arg("/c").arg(&name); + Some(cmd) + } else { + name = format!("em{}", tool); + Some(self.cmd(&name)) } + } else { + None } - } - if lib.is_empty() { - lib = match windows_registry::find(&target, "lib.exe") { - Some(t) => return Ok((t, "lib.exe".to_string())), - None => "lib.exe".to_string(), - } - } - lib - } else if target.contains("illumos") { - // The default 'ar' on illumos uses a non-standard flags, - // but the OS comes bundled with a GNU-compatible variant. - // - // Use the GNU-variant to match other Unix systems. - "gar".to_string() - } else if self.get_host()? != target { - match self.prefix_for_target(&target) { - Some(p) => { - // GCC uses $target-gcc-ar, whereas binutils uses $target-ar -- try both. - // Prefer -ar if it exists, as builds of `-gcc-ar` have been observed to be - // outright broken (such as when targetting freebsd with `--disable-lto` - // toolchain where the archiver attempts to load the LTO plugin anyway but - // fails to find one). - let mut ar = default_ar; - for &infix in &["", "-gcc"] { - let target_ar = format!("{}{}-ar", p, infix); - if Command::new(&target_ar).output().is_ok() { - ar = target_ar; - break; + }); + + let default = tool.to_string(); + let tool = match tool_opt { + Some(t) => t, + None => { + if target.contains("android") { + name = format!("{}-{}", target.replace("armv7", "arm"), tool); + self.cmd(&name) + } else if target.contains("msvc") { + // NOTE: There isn't really a ranlib on msvc, so arguably we should return + // `None` somehow here. But in general, callers will already have to be aware + // of not running ranlib on Windows anyway, so it feels okay to return lib.exe + // here. + + let compiler = self.get_base_compiler()?; + let mut lib = String::new(); + if compiler.family == (ToolFamily::Msvc { clang_cl: true }) { + // See if there is 'llvm-lib' next to 'clang-cl' + // Another possibility could be to see if there is 'clang' + // next to 'clang-cl' and use 'search_programs()' to locate + // 'llvm-lib'. This is because 'clang-cl' doesn't support + // the -print-search-dirs option. + if let Some(mut cmd) = which(&compiler.path) { + cmd.pop(); + cmd.push("llvm-lib.exe"); + if let Some(llvm_lib) = which(&cmd) { + lib = llvm_lib.to_str().unwrap().to_owned(); + } + } + } + + if lib.is_empty() { + name = String::from("lib.exe"); + match windows_registry::find(&target, "lib.exe") { + Some(t) => t, + None => self.cmd("lib.exe"), + } + } else { + name = lib; + self.cmd(&name) + } + } else if target.contains("illumos") { + // The default 'ar' on illumos uses a non-standard flags, + // but the OS comes bundled with a GNU-compatible variant. + // + // Use the GNU-variant to match other Unix systems. 
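(Editorial aside, not part of the patch: with the `ranlib` setter and the `get_archiver`/`get_ranlib` accessors added in this hunk, a build script can pin a cross toolchain explicitly instead of relying on `$AR`/`$RANLIB` or target-based detection. The tool and file names below are placeholders, not defaults.)

```
// build.rs sketch against the cc version vendored here.
fn main() {
    let mut build = cc::Build::new();
    build
        .file("src/native/shim.c")
        .archiver("arm-none-eabi-ar")   // overrides $AR / target lookup
        .ranlib("arm-none-eabi-ranlib") // overrides $RANLIB (new setter)
        .compile("shim");

    // The resolved tool can be inspected, e.g. for diagnostics.
    println!("archiver = {:?}", build.get_archiver().get_program());
}
```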
+ name = format!("g{}", tool); + self.cmd(&name) + } else if self.get_host()? != target { + match self.prefix_for_target(&target) { + Some(p) => { + // GCC uses $target-gcc-ar, whereas binutils uses $target-ar -- try both. + // Prefer -ar if it exists, as builds of `-gcc-ar` have been observed to be + // outright broken (such as when targetting freebsd with `--disable-lto` + // toolchain where the archiver attempts to load the LTO plugin anyway but + // fails to find one). + // + // The same applies to ranlib. + let mut chosen = default; + for &infix in &["", "-gcc"] { + let target_p = format!("{}{}-{}", p, infix, tool); + if Command::new(&target_p).output().is_ok() { + chosen = target_p; + break; + } + } + name = chosen; + self.cmd(&name) + } + None => { + name = default; + self.cmd(&name) } } - ar + } else { + name = default; + self.cmd(&name) } - None => default_ar, } - } else { - default_ar }; - Ok((self.cmd(&program), program)) + + Ok((tool, name)) } fn prefix_for_target(&self, target: &str) -> Option { @@ -2804,6 +2947,7 @@ "riscv64-unknown-elf", "riscv-none-embed", ]), + "riscv32imc-esp-espidf" => Some("riscv32-esp-elf"), "riscv32imc-unknown-none-elf" => self.find_working_gnu_prefix(&[ "riscv32-unknown-elf", "riscv64-unknown-elf", diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/cc/src/windows_registry.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/cc/src/windows_registry.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/cc/src/windows_registry.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/cc/src/windows_registry.rs 2023-02-01 05:24:55.000000000 +0000 @@ -866,7 +866,9 @@ // see http://stackoverflow.com/questions/328017/path-to-msbuild pub fn find_msbuild(target: &str) -> Option { // VS 15 (2017) changed how to locate msbuild - if let Some(r) = find_msbuild_vs16(target) { + if let Some(r) = find_msbuild_vs17(target) { + return Some(r); + } else if let Some(r) = find_msbuild_vs16(target) { return Some(r); } else if let Some(r) = find_msbuild_vs15(target) { return Some(r); diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/.cargo-checksum.json cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/.cargo-checksum.json --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/.cargo-checksum.json 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/.cargo-checksum.json 2023-02-01 05:24:55.000000000 +0000 @@ -1 +1 @@ -{"files":{},"package":"71655c45cb9845d3270c9d6df84ebe72b4dad3c2ba3f7023ad47c144e4e473a5"} \ No newline at end of file +{"files":{},"package":"6bf8832993da70a4c6d13c581f4463c2bdda27b9bf1c5498dc4365543abe6d6f"} \ No newline at end of file diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/Cargo.lock cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/Cargo.lock --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/Cargo.lock 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/Cargo.lock 2023-02-01 05:24:55.000000000 +0000 @@ -18,15 +18,6 @@ checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" [[package]] -name = "aho-corasick" -version = "0.7.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e37cfd5e7657ada45f742d6e99ca5788580b5c529dc78faf11ece6dc702656f" -dependencies = [ - "memchr", -] - -[[package]] name = "atty" version = "0.2.14" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -66,9 +57,9 @@ [[package]] name = "bytes" -version = "1.2.0" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"f0b3de4a0c5e67e16066a0715723abd91edc2f9001d09c46e1dca929351e130e" +checksum = "ec8a7b6a70fde80372154c65702f00a0f56f3e1c36abbc6c440484be248856db" [[package]] name = "cc" @@ -84,7 +75,7 @@ [[package]] name = "clap" -version = "3.2.23" +version = "4.0.15" dependencies = [ "atty", "backtrace", @@ -92,9 +83,7 @@ "clap_derive", "clap_lex", "humantime", - "indexmap", "once_cell", - "regex", "rustversion", "shlex", "snapbox", @@ -102,18 +91,18 @@ "strsim", "termcolor", "terminal_size", - "textwrap", "trybuild", "trycmd", + "unic-emoji-char", "unicase", - "yaml-rust", + "unicode-width", ] [[package]] name = "clap_derive" -version = "3.2.18" +version = "4.0.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ea0c8bce528c4be4da13ea6fead8965e95b6073585a2f05204bd8f4119f82a65" +checksum = "c42f169caba89a7d512b5418b09864543eeb4d497416c917d7137863bd2076ad" dependencies = [ "heck", "proc-macro-error", @@ -124,18 +113,18 @@ [[package]] name = "clap_lex" -version = "0.2.4" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2850f2f5a82cbf437dd5af4d49848fbdfc27c157c3d010345776f952765261c5" +checksum = "0d4198f73e42b4936b35b5bb248d81d2b595ecb170da0bac7655c54eedfa8da8" dependencies = [ "os_str_bytes", ] [[package]] name = "combine" -version = "4.6.4" +version = "4.6.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a604e93b79d1808327a6fca85a6f2d69de66461e7620f5a4cbf5fb4d1d7c948" +checksum = "35ed6e9d84f0b51a7f52daf1c7d71dd136fd7a3f41a8462b8cdb8c78d920fad4" dependencies = [ "bytes", "memchr", @@ -160,9 +149,9 @@ [[package]] name = "crossbeam-channel" -version = "0.5.5" +version = "0.5.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c02a4d71819009c192cf4872265391563fd6a84c81ff2c0f2a7026ca4c1d85c" +checksum = "c2dd04ddaf88237dc3b8d8f9a3c1004b506b54b3313403944054d23c0870c521" dependencies = [ "cfg-if", "crossbeam-utils", @@ -170,9 +159,9 @@ [[package]] name = "crossbeam-deque" -version = "0.8.1" +version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6455c0ca19f0d2fbf751b908d5c55c1f5cbc65e03c4225427254b46890bdde1e" +checksum = "715e8152b692bba2d374b53d4875445368fdf21a94751410af607a5ac677d1fc" dependencies = [ "cfg-if", "crossbeam-epoch", @@ -181,9 +170,9 @@ [[package]] name = "crossbeam-epoch" -version = "0.9.9" +version = "0.9.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07db9d94cbd326813772c968ccd25999e5f8ae22f4f8d1b11effa37ef6ce281d" +checksum = "045ebe27666471bb549370b4b0b3e51b07f56325befa4284db65fc89c02511b1" dependencies = [ "autocfg", "cfg-if", @@ -195,19 +184,18 @@ [[package]] name = "crossbeam-utils" -version = "0.8.10" +version = "0.8.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7d82ee10ce34d7bc12c2122495e7593a9c41347ecdd64185af4ecf72cb1a7f83" +checksum = "edbafec5fa1f196ca66527c1b12c2ec4745ca14b50f1ad8f9f6f720b55d11fac" dependencies = [ "cfg-if", - "once_cell", ] [[package]] name = "either" -version = "1.7.0" +version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f107b87b6afc2a64fd13cac55fe06d6c8859f12d4b14cbcdd2c67d0976781be" +checksum = "90e5c1c8368803113bf0c9584fc495a58b86dc8a29edbf8fe877d21d9507e797" [[package]] name = "errno" @@ -309,30 +297,24 @@ [[package]] name = "itertools" -version = "0.10.3" +version = "0.10.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"a9a9d19fa1e79b6215ff29b9d6880b706147f16e9b1dbb1e4e5947b5b02bc5e3" +checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" dependencies = [ "either", ] [[package]] name = "itoa" -version = "1.0.2" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "112c678d4050afce233f4f2852bb2eb519230b3cf12f33585275537d7e41578d" +checksum = "6c8af84674fe1f223a982c933a0ee1086ac4d4052aa0fb8060c12c6ad838e754" [[package]] name = "libc" -version = "0.2.126" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "349d5a591cd28b49e1d1037471617a32ddcda5731b99419008085f72d5a53836" - -[[package]] -name = "linked-hash-map" -version = "0.5.6" +version = "0.2.134" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f" +checksum = "329c933548736bc49fd575ee68c89e8be4d260064184389a5b77517cddd99ffb" [[package]] name = "linux-raw-sys" @@ -400,9 +382,9 @@ [[package]] name = "once_cell" -version = "1.13.0" +version = "1.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "18a6dbe30758c9f83eb00cbea4ac95966305f5a7772f3f42ebfc7fc7eddbd8e1" +checksum = "e82dad04139b71a90c080c8463fe0dc7902db5192d939bd0950f074d014339e1" [[package]] name = "os_pipe" @@ -416,9 +398,9 @@ [[package]] name = "os_str_bytes" -version = "6.0.0" +version = "6.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e22443d1643a904602595ba1cd8f7d896afe56d26712531c5ff73a15b2fbf64" +checksum = "9ff7415e9ae3fff1225851df9e0d9e4e5479f947619774677a63572e55e80eff" [[package]] name = "proc-macro-error" @@ -446,18 +428,18 @@ [[package]] name = "proc-macro2" -version = "1.0.40" +version = "1.0.46" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd96a1e8ed2596c337f8eae5f24924ec83f5ad5ab21ea8e455d3566c69fbcaf7" +checksum = "94e2ef8dbfc347b10c094890f778ee2e36ca9bb4262e86dc99cd217e35f3470b" dependencies = [ "unicode-ident", ] [[package]] name = "quote" -version = "1.0.20" +version = "1.0.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3bcdf212e9776fbcb2d23ab029360416bb1706b1aea2d1a5ba002727cbcab804" +checksum = "bbe448f377a7d6961e30f5955f9b8d106c3f5e449d493ee1b125c1d43c2b5179" dependencies = [ "proc-macro2", ] @@ -487,23 +469,6 @@ ] [[package]] -name = "regex" -version = "1.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c4eb3267174b8c6c2f654116623910a0fef09c4753f8dd83db29c48a0df988b" -dependencies = [ - "aho-corasick", - "memchr", - "regex-syntax", -] - -[[package]] -name = "regex-syntax" -version = "0.6.27" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a3f87b73ce11b1619a3c6332f45341e0047173771e8b8b73f87bfeefb7b56244" - -[[package]] name = "rustc-demangle" version = "0.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -525,15 +490,15 @@ [[package]] name = "rustversion" -version = "1.0.8" +version = "1.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "24c8ad4f0c00e1eb5bc7614d236a7f1300e3dbd76b68cac8e06fb00b015ad8d8" +checksum = "97477e48b4cf8603ad5f7aaf897467cf42ab4218a38ef76fb14c2d6773a6d6a8" [[package]] name = "ryu" -version = "1.0.10" +version = "1.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f3f6f92acf49d1b98f7a81226834412ada05458b7364277387724a237f062695" +checksum = 
"4501abdff3ae82a1c1b477a17252eb69cee9e66eb915c1abaa4f44d873df9f09" [[package]] name = "scopeguard" @@ -543,18 +508,18 @@ [[package]] name = "serde" -version = "1.0.139" +version = "1.0.145" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0171ebb889e45aa68b44aee0859b3eede84c6f5f5c228e6f140c0b2a0a46cad6" +checksum = "728eb6351430bccb993660dfffc5a72f91ccc1295abaa8ce19b27ebe4f75568b" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.139" +version = "1.0.145" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc1d3230c1de7932af58ad8ffbe1d784bd55efd5a9d84ac24f69c72d83543dfb" +checksum = "81fa1584d3d1bcacd84c277a0dfe21f5b0f6accf4a23d04d4c6d61f1af522b4c" dependencies = [ "proc-macro2", "quote", @@ -563,9 +528,9 @@ [[package]] name = "serde_json" -version = "1.0.82" +version = "1.0.85" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "82c2c1fdcd807d1098552c5b9a36e425e42e9fbd7c6a37a8425f390f781f7fa7" +checksum = "e55a28e3aaef9d5ce0506d0a14dbba8054ddc7e499ef522dd8b26859ec9d4a44" dependencies = [ "itoa", "ryu", @@ -580,15 +545,15 @@ [[package]] name = "similar" -version = "2.1.0" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2e24979f63a11545f5f2c60141afe249d4f19f84581ea2138065e400941d83d3" +checksum = "62ac7f900db32bf3fd12e0117dd3dc4da74bc52ebaac97f39668446d89694803" [[package]] name = "snapbox" -version = "0.2.10" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "767a1d5da232b6959cd1bd5c9e8db8a7cce09c3038e89deedb49a549a2aefd93" +checksum = "f98a96656eecd1621c5830831b48eb6903a9f86aaeb61b9f358a9bd462414ddd" dependencies = [ "concolor", "normalize-line-endings", @@ -601,9 +566,9 @@ [[package]] name = "snapbox-macros" -version = "0.2.1" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c01dea7e04cbb27ef4c86e9922184608185f7cd95c1763bc30d727cda4a5e930" +checksum = "8a253e6f894cfa440cba00600a249fa90869d8e0ec45ab274a456e043a0ce8f2" [[package]] name = "static_assertions" @@ -619,9 +584,9 @@ [[package]] name = "syn" -version = "1.0.98" +version = "1.0.101" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c50aef8a904de4c23c788f104b7dddc7d6f79c647c7c8ce4cc8f73eb0ca773dd" +checksum = "e90cde112c4b9690b8cbe810cba9ddd8bc1d7472e2cae317b69e9438c1cba7d2" dependencies = [ "proc-macro2", "quote", @@ -648,16 +613,6 @@ ] [[package]] -name = "textwrap" -version = "0.16.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "222a222a5bfe1bba4a77b45ec488a741b3cb8872e5e499451fd7d0129c9c7c3d" -dependencies = [ - "terminal_size", - "unicode-width", -] - -[[package]] name = "toml" version = "0.5.9" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -680,9 +635,9 @@ [[package]] name = "trybuild" -version = "1.0.63" +version = "1.0.65" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "764b9e244b482a9b81bde596aa37aa6f1347bf8007adab25e59f901b32b4e0a0" +checksum = "9e13556ba7dba80b3c76d1331989a341290c77efcf688eca6c307ee3066383dd" dependencies = [ "glob", "once_cell", @@ -695,9 +650,9 @@ [[package]] name = "trycmd" -version = "0.13.4" +version = "0.13.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ffb4185126cc904642173a54c185083f410c86d1202ada6761aacf7c40829f13" +checksum = "a5377b33cbe8bb69d97da63e2a2266065a642a47cc9bb3d783c28279d0029fea" 
dependencies = [ "escargot", "glob", @@ -711,6 +666,47 @@ ] [[package]] +name = "unic-char-property" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8c57a407d9b6fa02b4795eb81c5b6652060a15a7903ea981f3d723e6c0be221" +dependencies = [ + "unic-char-range", +] + +[[package]] +name = "unic-char-range" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0398022d5f700414f6b899e10b8348231abf9173fa93144cbc1a43b9793c1fbc" + +[[package]] +name = "unic-common" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "80d7ff825a6a654ee85a63e80f92f054f904f21e7d12da4e22f9834a4aaa35bc" + +[[package]] +name = "unic-emoji-char" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b07221e68897210270a38bde4babb655869637af0f69407f96053a34f76494d" +dependencies = [ + "unic-char-property", + "unic-char-range", + "unic-ucd-version", +] + +[[package]] +name = "unic-ucd-version" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96bd2f2237fe450fcd0a1d2f5f4e91711124f7857ba2e964247776ebeeb7b0c4" +dependencies = [ + "unic-common", +] + +[[package]] name = "unicase" version = "2.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -721,15 +717,15 @@ [[package]] name = "unicode-ident" -version = "1.0.2" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "15c61ba63f9235225a22310255a29b806b907c9b8c964bcbd0a2c70f3f2deea7" +checksum = "dcc811dc4066ac62f84f11307873c4850cb653bfa9b1719cee2bd2204a4bc5dd" [[package]] name = "unicode-width" -version = "0.1.10" +version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0edd1e5b14653f783770bce4a4dabb4a5108a5370a5f5d8cfe8710c361f6c8b" +checksum = "3ed742d4ea2bd1176e236172c8429aaf54486e7ac098db29ffe6529e0ce50973" [[package]] name = "version_check" @@ -821,15 +817,6 @@ checksum = "c811ca4a8c853ef420abd8592ba53ddbbac90410fab6903b3e79972a631f7680" [[package]] -name = "yaml-rust" -version = "0.4.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56c1936c4cc7a1c9ab21a1ebb602eb942ba868cbd44a99cb7cdc5892335e1c85" -dependencies = [ - "linked-hash-map", -] - -[[package]] name = "yansi" version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/Cargo.toml cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/Cargo.toml --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/Cargo.toml 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/Cargo.toml 2023-02-01 05:24:55.000000000 +0000 @@ -11,9 +11,9 @@ [package] edition = "2021" -rust-version = "1.56.1" +rust-version = "1.60.0" name = "clap" -version = "3.2.23" +version = "4.0.15" include = [ "build.rs", "src/**/*", @@ -116,6 +116,10 @@ required-features = ["derive"] [[example]] +name = "find" +required-features = ["cargo"] + +[[example]] name = "git-derive" required-features = ["derive"] @@ -134,6 +138,7 @@ [[example]] name = "repl" path = "examples/repl.rs" +required-features = ["help"] [[example]] name = "01_quick" @@ -170,11 +175,21 @@ required-features = ["cargo"] [[example]] +name = "03_02_option_mult" +path = "examples/tutorial_builder/03_02_option_mult.rs" +required-features = ["cargo"] + +[[example]] name = "03_03_positional" path = "examples/tutorial_builder/03_03_positional.rs" 
required-features = ["cargo"] [[example]] +name = "03_03_positional_mult" +path = "examples/tutorial_builder/03_03_positional_mult.rs" +required-features = ["cargo"] + +[[example]] name = "03_04_subcommands" path = "examples/tutorial_builder/03_04_subcommands.rs" required-features = ["cargo"] @@ -256,11 +271,21 @@ required-features = ["derive"] [[example]] +name = "03_02_option_mult_derive" +path = "examples/tutorial_derive/03_02_option_mult.rs" +required-features = ["derive"] + +[[example]] name = "03_03_positional_derive" path = "examples/tutorial_derive/03_03_positional.rs" required-features = ["derive"] [[example]] +name = "03_03_positional_mult_derive" +path = "examples/tutorial_derive/03_03_positional_mult.rs" +required-features = ["derive"] + +[[example]] name = "03_04_subcommands_derive" path = "examples/tutorial_derive/03_04_subcommands.rs" required-features = ["derive"] @@ -307,11 +332,6 @@ required-features = ["derive"] [[example]] -name = "custom-bool" -path = "examples/derive_ref/custom-bool.rs" -required-features = ["derive"] - -[[example]] name = "interop_augment_args" path = "examples/derive_ref/augment_args.rs" required-features = ["derive"] @@ -343,22 +363,15 @@ version = "1.2" [dependencies.clap_derive] -version = "=3.2.18" +version = "=4.0.13" optional = true [dependencies.clap_lex] -version = "0.2.2" - -[dependencies.indexmap] -version = "1.0" +version = "0.3.0" [dependencies.once_cell] version = "1.12.0" -[dependencies.regex] -version = "1.0" -optional = true - [dependencies.strsim] version = "0.10" optional = true @@ -371,25 +384,17 @@ version = "0.2.1" optional = true -[dependencies.textwrap] -version = "0.16" -features = [] -default-features = false - [dependencies.unicase] version = "2.6" optional = true -[dependencies.yaml-rust] -version = "0.4.1" +[dependencies.unicode-width] +version = "0.1.9" optional = true [dev-dependencies.humantime] version = "2" -[dev-dependencies.regex] -version = "1.0" - [dev-dependencies.rustversion] version = "1" @@ -400,38 +405,50 @@ version = "1.1.0" [dev-dependencies.trybuild] -version = "1.0.18" +version = "1.0.65" + +[dev-dependencies.unic-emoji-char] +version = "0.9.0" [features] cargo = [] color = [ - "atty", - "termcolor", + "dep:atty", + "dep:termcolor", ] debug = [ - "clap_derive/debug", - "backtrace", + "clap_derive?/debug", + "dep:backtrace", ] default = [ "std", "color", + "help", + "usage", + "error-context", "suggestions", ] deprecated = [] derive = [ - "clap_derive" + "dep:clap_derive" ] env = [] -std = ["indexmap/std"] -suggestions = ["strsim"] +error-context = [] +help = [] +std = [] +string = [] +suggestions = [ + "dep:strsim", + "error-context", +] unicode = [ - "textwrap/unicode-width", - "unicase", + "dep:unicode-width", + "dep:unicase", ] unstable-grouped = [] unstable-replace = [] +usage = [] wrap_help = [ - "terminal_size", - "textwrap/terminal_size", + "help", + "dep:terminal_size", ] -yaml = ["yaml-rust"] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/debian/patches/disable-derive-deprecated.patch cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/debian/patches/disable-derive-deprecated.patch --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/debian/patches/disable-derive-deprecated.patch 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/debian/patches/disable-derive-deprecated.patch 2023-02-01 05:24:55.000000000 +0000 @@ -4,7 +4,7 @@ "color", "suggestions", ] --deprecated = ["clap_derive/deprecated"] +-deprecated = ["clap_derive?/deprecated"] +deprecated = [] derive = [ 
"clap_derive", diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/debian/patches/disable-snapbox.patch cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/debian/patches/disable-snapbox.patch --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/debian/patches/disable-snapbox.patch 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/debian/patches/disable-snapbox.patch 2023-02-01 05:24:55.000000000 +0000 @@ -5,7 +5,7 @@ version = "1.1.0" -[dev-dependencies.snapbox] --version = "0.2.9" +-version = "0.4" - [dev-dependencies.static_assertions] version = "1.1.0" diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/debian/patches/disable-trycmd.diff cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/debian/patches/disable-trycmd.diff --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/debian/patches/disable-trycmd.diff 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/debian/patches/disable-trycmd.diff 2023-02-01 05:24:55.000000000 +0000 @@ -1,9 +1,6 @@ --- a/Cargo.toml +++ b/Cargo.toml -@@ -410,15 +410,6 @@ - [dev-dependencies.trybuild] - version = "1.0.18" - +@@ -413,9 +412,0 @@ -[dev-dependencies.trycmd] -version = "0.13" -features = [ @@ -13,6 +10,3 @@ -] -default-features = false - - [features] - cargo = ["once_cell"] - color = [ diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/debian/patches/disable-unstable-doc.patch cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/debian/patches/disable-unstable-doc.patch --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/debian/patches/disable-unstable-doc.patch 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/debian/patches/disable-unstable-doc.patch 2023-02-01 05:24:55.000000000 +0000 @@ -1,38 +1,25 @@ --- a/Cargo.toml +++ b/Cargo.toml -@@ -37,7 +37,6 @@ - repository = "https://github.com/clap-rs/clap" - +@@ -39,3 +39,2 @@ [package.metadata.docs.rs] -features = ["unstable-doc"] rustdoc-args = [ - "--cfg", - "docsrs", -@@ -47,9 +46,6 @@ - "-Zrustdoc-scrape-examples=examples", - ] +@@ -49,5 +48,2 @@ -[package.metadata.playground] -features = ["unstable-doc"] - [package.metadata.release] - shared-version = true - tag-name = "v{{version}}" -@@ -437,17 +433,6 @@ - "textwrap/unicode-width", - "unicase", +@@ -439,12 +435,2 @@ ] -unstable-doc = [ - "derive", - "cargo", - "wrap_help", -- "yaml", - "env", - "unicode", -- "regex", +- "string", - "unstable-replace", - "unstable-grouped", -] unstable-grouped = [] - unstable-replace = [] - unstable-v4 = [ diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/debian/patches/disable-unstable-v4.patch cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/debian/patches/disable-unstable-v4.patch --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/debian/patches/disable-unstable-v4.patch 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/debian/patches/disable-unstable-v4.patch 1970-01-01 00:00:00.000000000 +0000 @@ -1,13 +0,0 @@ ---- a/Cargo.toml -+++ b/Cargo.toml -@@ -435,10 +435,6 @@ - ] - unstable-grouped = [] - unstable-replace = [] --unstable-v4 = [ -- "clap_derive/unstable-v4", -- "deprecated", --] - wrap_help = [ - "terminal_size", - "textwrap/terminal_size", diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/debian/patches/disable-unstable-v5.patch cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/debian/patches/disable-unstable-v5.patch --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/debian/patches/disable-unstable-v5.patch 1970-01-01 00:00:00.000000000 +0000 +++ 
cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/debian/patches/disable-unstable-v5.patch 2023-02-01 05:24:55.000000000 +0000 @@ -0,0 +1,7 @@ +--- a/Cargo.toml ++++ b/Cargo.toml +@@ -438,4 +437,0 @@ +-unstable-v5 = [ +- "clap_derive?/unstable-v5", +- "deprecated", +-] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/debian/patches/once-cell-non-optional.patch cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/debian/patches/once-cell-non-optional.patch --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/debian/patches/once-cell-non-optional.patch 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/debian/patches/once-cell-non-optional.patch 2023-02-01 05:24:55.000000000 +0000 @@ -12,7 +12,7 @@ version = "1.0.18" [features] --cargo = ["once_cell"] +-cargo = ["dep:once_cell"] +cargo = [] color = [ "atty", @@ -21,9 +21,9 @@ ] deprecated = [] derive = [ -- "clap_derive", -- "once_cell", -+ "clap_derive" +- "dep:clap_derive", +- "dep:once_cell", ++ "dep:clap_derive" ] env = [] std = ["indexmap/std"] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/debian/patches/series cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/debian/patches/series --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/debian/patches/series 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/debian/patches/series 2023-02-01 05:24:55.000000000 +0000 @@ -1,6 +1,6 @@ disable-trycmd.diff disable-unstable-doc.patch -disable-unstable-v4.patch +disable-unstable-v5.patch disable-snapbox.patch disable-derive-deprecated.patch once-cell-non-optional.patch diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/cargo-example-derive.md cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/cargo-example-derive.md --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/cargo-example-derive.md 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/cargo-example-derive.md 2023-02-01 05:24:55.000000000 +0000 @@ -6,29 +6,24 @@ The help looks like: ```console $ cargo-example-derive --help -cargo +Usage: cargo -USAGE: - cargo +Commands: + example-derive A simple to use, efficient, and full-featured Command Line Argument Parser + help Print this message or the help of the given subcommand(s) -OPTIONS: - -h, --help Print help information - -SUBCOMMANDS: - example-derive A simple to use, efficient, and full-featured Command Line Argument Parser - help Print this message or the help of the given subcommand(s) +Options: + -h, --help Print help information $ cargo-example-derive example-derive --help -cargo-example-derive [..] 
A simple to use, efficient, and full-featured Command Line Argument Parser -USAGE: - cargo example-derive [OPTIONS] +Usage: cargo example-derive [OPTIONS] -OPTIONS: - -h, --help Print help information - --manifest-path - -V, --version Print version information +Options: + --manifest-path + -h, --help Print help information + -V, --version Print version information ``` diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/cargo-example-derive.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/cargo-example-derive.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/cargo-example-derive.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/cargo-example-derive.rs 2023-02-01 05:24:55.000000000 +0000 @@ -1,16 +1,16 @@ use clap::Parser; #[derive(Parser)] // requires `derive` feature -#[clap(name = "cargo")] -#[clap(bin_name = "cargo")] +#[command(name = "cargo")] +#[command(bin_name = "cargo")] enum Cargo { ExampleDerive(ExampleDerive), } #[derive(clap::Args)] -#[clap(author, version, about, long_about = None)] +#[command(author, version, about, long_about = None)] struct ExampleDerive { - #[clap(long, value_parser)] + #[arg(long)] manifest_path: Option, } diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/cargo-example.md cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/cargo-example.md --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/cargo-example.md 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/cargo-example.md 2023-02-01 05:24:55.000000000 +0000 @@ -6,29 +6,24 @@ The help looks like: ```console $ cargo-example --help -cargo +Usage: cargo -USAGE: - cargo +Commands: + example A simple to use, efficient, and full-featured Command Line Argument Parser + help Print this message or the help of the given subcommand(s) -OPTIONS: - -h, --help Print help information - -SUBCOMMANDS: - example A simple to use, efficient, and full-featured Command Line Argument Parser - help Print this message or the help of the given subcommand(s) +Options: + -h, --help Print help information $ cargo-example example --help -cargo-example [..] A simple to use, efficient, and full-featured Command Line Argument Parser -USAGE: - cargo example [OPTIONS] +Usage: cargo example [OPTIONS] -OPTIONS: - -h, --help Print help information - --manifest-path - -V, --version Print version information +Options: + --manifest-path + -h, --help Print help information + -V, --version Print version information ``` diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/cargo-example.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/cargo-example.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/cargo-example.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/cargo-example.rs 2023-02-01 05:24:55.000000000 +0000 @@ -5,7 +5,6 @@ .subcommand( clap::command!("example").arg( clap::arg!(--"manifest-path" ) - .required(false) .value_parser(clap::value_parser!(std::path::PathBuf)), ), ); diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/demo.md cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/demo.md --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/demo.md 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/demo.md 2023-02-01 05:24:55.000000000 +0000 @@ -1,16 +1,14 @@ ```console $ demo --help -clap [..] 
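(Editorial aside on the clap 3 → 4 derive changes shown in the example hunks above: the hunks only show the renamed attributes, so here is a consolidated sketch of the migrated `cargo-example-derive.rs`. The `Option<std::path::PathBuf>` field type and the unchanged `main` body are not visible in the hunk and are assumed from the upstream example.)

```rust
use clap::Parser;

#[derive(Parser)] // requires the `derive` feature
#[command(name = "cargo")]
#[command(bin_name = "cargo")]
enum Cargo {
    ExampleDerive(ExampleDerive),
}

#[derive(clap::Args)]
#[command(author, version, about, long_about = None)]
struct ExampleDerive {
    // clap 4 renames field-level `#[clap(...)]` to `#[arg(...)]` and no longer
    // needs an explicit `value_parser`; the PathBuf type is assumed from upstream.
    #[arg(long)]
    manifest_path: Option<std::path::PathBuf>,
}

fn main() {
    // Unchanged by the diff; assumed from the upstream example.
    let Cargo::ExampleDerive(args) = Cargo::parse();
    println!("{:?}", args.manifest_path);
}
```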
A simple to use, efficient, and full-featured Command Line Argument Parser -USAGE: - demo[EXE] [OPTIONS] --name +Usage: demo[EXE] [OPTIONS] --name -OPTIONS: - -c, --count Number of times to greet [default: 1] - -h, --help Print help information - -n, --name Name of the person to greet - -V, --version Print version information +Options: + -n, --name Name of the person to greet + -c, --count Number of times to greet [default: 1] + -h, --help Print help information + -V, --version Print version information $ demo --name Me Hello Me! diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/demo.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/demo.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/demo.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/demo.rs 2023-02-01 05:24:55.000000000 +0000 @@ -2,14 +2,14 @@ /// Simple program to greet a person #[derive(Parser, Debug)] -#[clap(author, version, about, long_about = None)] +#[command(author, version, about, long_about = None)] struct Args { /// Name of the person to greet - #[clap(short, long, value_parser)] + #[arg(short, long)] name: String, /// Number of times to greet - #[clap(short, long, value_parser, default_value_t = 1)] + #[arg(short, long, default_value_t = 1)] count: u8, } diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/derive_ref/augment_args.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/derive_ref/augment_args.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/derive_ref/augment_args.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/derive_ref/augment_args.rs 2023-02-01 05:24:55.000000000 +0000 @@ -2,7 +2,7 @@ #[derive(Args, Debug)] struct DerivedArgs { - #[clap(short, long, action)] + #[arg(short, long)] derived: bool, } diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/derive_ref/augment_subcommands.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/derive_ref/augment_subcommands.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/derive_ref/augment_subcommands.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/derive_ref/augment_subcommands.rs 2023-02-01 05:24:55.000000000 +0000 @@ -3,7 +3,7 @@ #[derive(Parser, Debug)] enum Subcommands { Derived { - #[clap(short, long, action)] + #[arg(short, long)] derived_flag: bool, }, } diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/derive_ref/custom-bool.md cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/derive_ref/custom-bool.md --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/derive_ref/custom-bool.md 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/derive_ref/custom-bool.md 1970-01-01 00:00:00.000000000 +0000 @@ -1,47 +0,0 @@ -*Jump to [source](custom-bool.rs)* - -Example of overriding the magic `bool` behavior - -```console -$ custom-bool --help -clap [..] -A simple to use, efficient, and full-featured Command Line Argument Parser - -USAGE: - custom-bool[EXE] [OPTIONS] --foo - -ARGS: - [possible values: true, false] - -OPTIONS: - --bar [default: false] - --foo [possible values: true, false] - -h, --help Print help information - -V, --version Print version information - -$ custom-bool -? 
failed -error: The following required arguments were not provided: - --foo - - -USAGE: - custom-bool[EXE] [OPTIONS] --foo - -For more information try --help - -$ custom-bool --foo true false -[examples/derive_ref/custom-bool.rs:31] opt = Opt { - foo: true, - bar: false, - boom: false, -} - -$ custom-bool --foo true --bar true false -[examples/derive_ref/custom-bool.rs:31] opt = Opt { - foo: true, - bar: true, - boom: false, -} - -``` diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/derive_ref/custom-bool.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/derive_ref/custom-bool.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/derive_ref/custom-bool.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/derive_ref/custom-bool.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,32 +0,0 @@ -use clap::Parser; - -#[derive(Parser, Debug, PartialEq)] -#[clap(author, version, about, long_about = None)] -struct Opt { - // Default parser for `Set` is FromStr::from_str. - // `impl FromStr for bool` parses `true` or `false` so this - // works as expected. - #[clap(long, action = clap::ArgAction::Set)] - foo: bool, - - // Of course, this could be done with an explicit parser function. - #[clap(long, action = clap::ArgAction::Set, value_parser = true_or_false, default_value_t)] - bar: bool, - - // `bool` can be positional only with explicit `action` annotation - #[clap(action = clap::ArgAction::Set)] - boom: bool, -} - -fn true_or_false(s: &str) -> Result { - match s { - "true" => Ok(true), - "false" => Ok(false), - _ => Err("expected `true` or `false`"), - } -} - -fn main() { - let opt = Opt::parse(); - dbg!(opt); -} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/derive_ref/flatten_hand_args.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/derive_ref/flatten_hand_args.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/derive_ref/flatten_hand_args.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/derive_ref/flatten_hand_args.rs 2023-02-01 05:24:55.000000000 +0000 @@ -35,7 +35,7 @@ } impl Args for CliArgs { - fn augment_args(cmd: Command<'_>) -> Command<'_> { + fn augment_args(cmd: Command) -> Command { cmd.arg( Arg::new("foo") .short('f') @@ -48,9 +48,14 @@ .long("bar") .action(ArgAction::SetTrue), ) - .arg(Arg::new("quuz").short('q').long("quuz").takes_value(true)) + .arg( + Arg::new("quuz") + .short('q') + .long("quuz") + .action(ArgAction::Set), + ) } - fn augment_args_for_update(cmd: Command<'_>) -> Command<'_> { + fn augment_args_for_update(cmd: Command) -> Command { cmd.arg( Arg::new("foo") .short('f') @@ -63,15 +68,20 @@ .long("bar") .action(ArgAction::SetTrue), ) - .arg(Arg::new("quuz").short('q').long("quuz").takes_value(true)) + .arg( + Arg::new("quuz") + .short('q') + .long("quuz") + .action(ArgAction::Set), + ) } } #[derive(Parser, Debug)] struct Cli { - #[clap(short, long, action)] + #[arg(short, long)] top_level: bool, - #[clap(flatten)] + #[command(flatten)] more_args: CliArgs, } diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/derive_ref/hand_subcommand.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/derive_ref/hand_subcommand.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/derive_ref/hand_subcommand.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/derive_ref/hand_subcommand.rs 2023-02-01 05:24:55.000000000 +0000 @@ -3,14 +3,12 @@ #[derive(Parser, Debug)] struct AddArgs 
{ - #[clap(value_parser)] name: Vec, } #[derive(Parser, Debug)] struct RemoveArgs { - #[clap(short, long, action)] + #[arg(short, long)] force: bool, - #[clap(value_parser)] name: Vec, } @@ -26,7 +24,7 @@ Some(("add", args)) => Ok(Self::Add(AddArgs::from_arg_matches(args)?)), Some(("remove", args)) => Ok(Self::Remove(RemoveArgs::from_arg_matches(args)?)), Some((_, _)) => Err(Error::raw( - ErrorKind::UnrecognizedSubcommand, + ErrorKind::InvalidSubcommand, "Valid subcommands are `add` and `remove`", )), None => Err(Error::raw( @@ -41,7 +39,7 @@ Some(("remove", args)) => *self = Self::Remove(RemoveArgs::from_arg_matches(args)?), Some((_, _)) => { return Err(Error::raw( - ErrorKind::UnrecognizedSubcommand, + ErrorKind::InvalidSubcommand, "Valid subcommands are `add` and `remove`", )) } @@ -52,12 +50,12 @@ } impl Subcommand for CliSub { - fn augment_subcommands(cmd: Command<'_>) -> Command<'_> { + fn augment_subcommands(cmd: Command) -> Command { cmd.subcommand(AddArgs::augment_args(Command::new("add"))) .subcommand(RemoveArgs::augment_args(Command::new("remove"))) .subcommand_required(true) } - fn augment_subcommands_for_update(cmd: Command<'_>) -> Command<'_> { + fn augment_subcommands_for_update(cmd: Command) -> Command { cmd.subcommand(AddArgs::augment_args(Command::new("add"))) .subcommand(RemoveArgs::augment_args(Command::new("remove"))) .subcommand_required(true) @@ -69,7 +67,7 @@ #[derive(Parser, Debug)] struct Cli { - #[clap(short, long, action)] + #[arg(short, long)] top_level: bool, #[clap(subcommand)] subcommand: CliSub, diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/derive_ref/interop_tests.md cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/derive_ref/interop_tests.md --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/derive_ref/interop_tests.md 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/derive_ref/interop_tests.md 2023-02-01 05:24:55.000000000 +0000 @@ -37,12 +37,9 @@ ? failed error: Found argument '--unknown' which wasn't expected, or isn't valid in this context - If you tried to supply `--unknown` as a value rather than a flag, use `-- --unknown` +Usage: interop_augment_args[EXE] [OPTIONS] -USAGE: - interop_augment_args[EXE] [OPTIONS] - -For more information try --help +For more information try '--help' ``` @@ -75,24 +72,20 @@ ? failed error: Found argument '--unknown' which wasn't expected, or isn't valid in this context - If you tried to supply `--unknown` as a value rather than a flag, use `-- --unknown` - -USAGE: - interop_augment_subcommands[EXE] derived [OPTIONS] +Usage: interop_augment_subcommands[EXE] derived [OPTIONS] -For more information try --help +For more information try '--help' ``` ```console $ interop_augment_subcommands unknown ? failed -error: Found argument 'unknown' which wasn't expected, or isn't valid in this context +error: The subcommand 'unknown' wasn't recognized -USAGE: - interop_augment_subcommands[EXE] [SUBCOMMAND] +Usage: interop_augment_subcommands[EXE] [COMMAND] -For more information try --help +For more information try '--help' ``` @@ -101,12 +94,16 @@ ```console $ interop_hand_subcommand ? 
failed -error: 'interop_hand_subcommand[EXE]' requires a subcommand but one was not provided +Usage: interop_hand_subcommand[EXE] [OPTIONS] -USAGE: - interop_hand_subcommand[EXE] [OPTIONS] - -For more information try --help +Commands: + add + remove + help Print this message or the help of the given subcommand(s) + +Options: + -t, --top-level + -h, --help Print help information ``` @@ -145,12 +142,11 @@ ? failed error: Found argument '--unknown' which wasn't expected, or isn't valid in this context - If you tried to supply `--unknown` as a value rather than a flag, use `-- --unknown` + If you tried to supply '--unknown' as a value rather than a flag, use '-- --unknown' -USAGE: - interop_hand_subcommand[EXE] add [NAME]... +Usage: interop_hand_subcommand[EXE] add [NAME]... -For more information try --help +For more information try '--help' ``` @@ -189,12 +185,11 @@ ```console $ interop_hand_subcommand unknown ? failed -error: Found argument 'unknown' which wasn't expected, or isn't valid in this context +error: The subcommand 'unknown' wasn't recognized -USAGE: - interop_hand_subcommand[EXE] [OPTIONS] +Usage: interop_hand_subcommand[EXE] [OPTIONS] -For more information try --help +For more information try '--help' ``` @@ -246,11 +241,8 @@ ? failed error: Found argument '--unknown' which wasn't expected, or isn't valid in this context - If you tried to supply `--unknown` as a value rather than a flag, use `-- --unknown` - -USAGE: - interop_flatten_hand_args[EXE] [OPTIONS] +Usage: interop_flatten_hand_args[EXE] [OPTIONS] -For more information try --help +For more information try '--help' ``` diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/escaped-positional-derive.md cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/escaped-positional-derive.md --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/escaped-positional-derive.md 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/escaped-positional-derive.md 2023-02-01 05:24:55.000000000 +0000 @@ -5,20 +5,18 @@ Let's see what this looks like in the help: ```console $ escaped-positional-derive --help -clap [..] A simple to use, efficient, and full-featured Command Line Argument Parser -USAGE: - escaped-positional-derive[EXE] [OPTIONS] [-- ...] +Usage: escaped-positional-derive[EXE] [OPTIONS] [-- ...] -ARGS: - ... +Arguments: + [SLOP]... -OPTIONS: - -f - -h, --help Print help information - -p - -V, --version Print version information +Options: + -f + -p + -h, --help Print help information + -V, --version Print version information ``` @@ -37,10 +35,9 @@ ? failed error: Found argument 'foo' which wasn't expected, or isn't valid in this context -USAGE: - escaped-positional-derive[EXE] [OPTIONS] [-- ...] +Usage: escaped-positional-derive[EXE] [OPTIONS] [-- ...] 
-For more information try --help +For more information try '--help' ``` diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/escaped-positional-derive.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/escaped-positional-derive.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/escaped-positional-derive.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/escaped-positional-derive.rs 2023-02-01 05:24:55.000000000 +0000 @@ -1,15 +1,15 @@ use clap::Parser; #[derive(Parser)] // requires `derive` feature -#[clap(author, version, about, long_about = None)] +#[command(author, version, about, long_about = None)] struct Cli { - #[clap(short = 'f', action)] + #[arg(short = 'f')] eff: bool, - #[clap(short = 'p', value_name = "PEAR", value_parser)] + #[arg(short = 'p', value_name = "PEAR")] pea: Option, - #[clap(last = true, value_parser)] + #[arg(last = true)] slop: Vec, } diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/escaped-positional.md cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/escaped-positional.md --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/escaped-positional.md 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/escaped-positional.md 2023-02-01 05:24:55.000000000 +0000 @@ -5,20 +5,18 @@ Let's see what this looks like in the help: ```console $ escaped-positional --help -clap [..] A simple to use, efficient, and full-featured Command Line Argument Parser -USAGE: - escaped-positional[EXE] [OPTIONS] [-- ...] +Usage: escaped-positional[EXE] [OPTIONS] [-- ...] -ARGS: - ... +Arguments: + [SLOP]... -OPTIONS: - -f - -h, --help Print help information - -p - -V, --version Print version information +Options: + -f + -p + -h, --help Print help information + -V, --version Print version information ``` @@ -37,10 +35,9 @@ ? failed error: Found argument 'foo' which wasn't expected, or isn't valid in this context -USAGE: - escaped-positional[EXE] [OPTIONS] [-- ...] +Usage: escaped-positional[EXE] [OPTIONS] [-- ...] -For more information try --help +For more information try '--help' ``` diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/escaped-positional.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/escaped-positional.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/escaped-positional.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/escaped-positional.rs 2023-02-01 05:24:55.000000000 +0000 @@ -3,15 +3,11 @@ fn main() { let matches = command!() // requires `cargo` feature .arg(arg!(eff: -f).action(ArgAction::SetTrue)) - .arg( - arg!(pea: -p ) - .required(false) - .value_parser(value_parser!(String)), - ) + .arg(arg!(pea: -p ).value_parser(value_parser!(String))) .arg( // Indicates that `slop` is only accessible after `--`. arg!(slop: [SLOP]) - .multiple_values(true) + .num_args(1..) 
.last(true) .value_parser(value_parser!(String)), ) diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/find.md cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/find.md --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/find.md 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/find.md 2023-02-01 05:24:55.000000000 +0000 @@ -0,0 +1,45 @@ +`find` is an example of position-sensitive flags + +```console +$ find --help +A simple to use, efficient, and full-featured Command Line Argument Parser + +Usage: find[EXE] [OPTIONS] + +Options: + -h, --help Print help information + -V, --version Print version information + +TESTS: + --empty File is empty and is either a regular file or a directory + --name Base of file name (the path with the leading directories removed) matches shell + pattern pattern + +OPERATORS: + -o, --or expr2 is not evaluate if exp1 is true + -a, --and Same as `expr1 expr1` + +$ find --empty -o --name .keep +[ + ( + "empty", + Bool( + true, + ), + ), + ( + "or", + Bool( + true, + ), + ), + ( + "name", + String( + ".keep", + ), + ), +] + +``` + diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/find.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/find.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/find.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/find.rs 2023-02-01 05:24:55.000000000 +0000 @@ -0,0 +1,99 @@ +use std::collections::BTreeMap; + +use clap::{arg, command, ArgGroup, ArgMatches, Command}; + +fn main() { + let matches = cli().get_matches(); + let values = Value::from_matches(&matches); + println!("{:#?}", values); +} + +fn cli() -> Command { + command!() + .group(ArgGroup::new("tests").multiple(true)) + .next_help_heading("TESTS") + .args([ + arg!(--empty "File is empty and is either a regular file or a directory").group("tests"), + arg!(--name "Base of file name (the path with the leading directories removed) matches shell pattern pattern").group("tests"), + ]) + .group(ArgGroup::new("operators").multiple(true)) + .next_help_heading("OPERATORS") + .args([ + arg!(-o - -or "expr2 is not evaluate if exp1 is true").group("operators"), + arg!(-a - -and "Same as `expr1 expr1`").group("operators"), + ]) +} + +#[derive(Clone, PartialEq, Eq, Hash, Debug)] +pub enum Value { + Bool(bool), + String(String), +} + +impl Value { + pub fn from_matches(matches: &ArgMatches) -> Vec<(clap::Id, Self)> { + let mut values = BTreeMap::new(); + for id in matches.ids() { + if matches.try_get_many::(id.as_str()).is_ok() { + // ignore groups + continue; + } + let value_source = matches + .value_source(id.as_str()) + .expect("id came from matches"); + if value_source != clap::parser::ValueSource::CommandLine { + // Any other source just gets tacked on at the end (like default values) + continue; + } + if Self::extract::(matches, id, &mut values) { + continue; + } + if Self::extract::(matches, id, &mut values) { + continue; + } + unimplemented!("unknown type for {}: {:?}", id, matches); + } + values.into_values().collect::>() + } + + fn extract + Send + Sync + 'static>( + matches: &ArgMatches, + id: &clap::Id, + output: &mut BTreeMap, + ) -> bool { + match matches.try_get_many::(id.as_str()) { + Ok(Some(values)) => { + for (value, index) in values.zip( + matches + .indices_of(id.as_str()) + .expect("id came from matches"), + ) { + output.insert(index, (id.clone(), value.clone().into())); + } + true + } + Ok(None) => { + unreachable!("`ids` only reports 
what is present") + } + Err(clap::parser::MatchesError::UnknownArgument { .. }) => { + unreachable!("id came from matches") + } + Err(clap::parser::MatchesError::Downcast { .. }) => false, + Err(_) => { + unreachable!("id came from matches") + } + } + } +} + +impl From for Value { + fn from(other: String) -> Self { + Self::String(other) + } +} + +impl From for Value { + fn from(other: bool) -> Self { + Self::Bool(other) + } +} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/git-derive.md cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/git-derive.md --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/git-derive.md 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/git-derive.md 2023-02-01 05:24:55.000000000 +0000 @@ -6,51 +6,47 @@ ```console $ git-derive ? failed -git A fictional versioning CLI -USAGE: - git-derive[EXE] +Usage: git-derive[EXE] -OPTIONS: - -h, --help Print help information +Commands: + clone Clones repos + diff Compare two commits + push pushes things + add adds things + stash + help Print this message or the help of the given subcommand(s) -SUBCOMMANDS: - add adds things - clone Clones repos - help Print this message or the help of the given subcommand(s) - push pushes things - stash +Options: + -h, --help Print help information $ git-derive help -git A fictional versioning CLI -USAGE: - git-derive[EXE] +Usage: git-derive[EXE] -OPTIONS: - -h, --help Print help information +Commands: + clone Clones repos + diff Compare two commits + push pushes things + add adds things + stash + help Print this message or the help of the given subcommand(s) -SUBCOMMANDS: - add adds things - clone Clones repos - help Print this message or the help of the given subcommand(s) - push pushes things - stash +Options: + -h, --help Print help information $ git-derive help add -git-derive[EXE]-add adds things -USAGE: - git-derive[EXE] add ... +Usage: git-derive[EXE] add ... -ARGS: - ... Stuff to add +Arguments: + ... Stuff to add -OPTIONS: - -h, --help Print help information +Options: + -h, --help Print help information ``` @@ -58,17 +54,15 @@ ```console $ git-derive add ? failed -git-derive[EXE]-add adds things -USAGE: - git-derive[EXE] add ... +Usage: git-derive[EXE] add ... -ARGS: - ... Stuff to add +Arguments: + ... 
Stuff to add -OPTIONS: - -h, --help Print help information +Options: + -h, --help Print help information $ git-derive add Cargo.toml Cargo.lock Adding ["Cargo.toml", "Cargo.lock"] @@ -78,43 +72,34 @@ Default subcommand: ```console $ git-derive stash -h -git-derive[EXE]-stash +Usage: git-derive[EXE] stash [OPTIONS] + git-derive[EXE] stash -USAGE: - git-derive[EXE] stash [OPTIONS] - git-derive[EXE] stash - -OPTIONS: - -h, --help Print help information - -m, --message - -SUBCOMMANDS: - apply - help Print this message or the help of the given subcommand(s) - pop - push +Commands: + push + pop + apply + help Print this message or the help of the given subcommand(s) + +Options: + -m, --message + -h, --help Print help information $ git-derive stash push -h -git-derive[EXE]-stash-push +Usage: git-derive[EXE] stash push [OPTIONS] -USAGE: - git-derive[EXE] stash push [OPTIONS] - -OPTIONS: - -h, --help Print help information - -m, --message +Options: + -m, --message + -h, --help Print help information $ git-derive stash pop -h -git-derive[EXE]-stash-pop - -USAGE: - git-derive[EXE] stash pop [STASH] +Usage: git-derive[EXE] stash pop [STASH] -ARGS: - +Arguments: + [STASH] -OPTIONS: - -h, --help Print help information +Options: + -h, --help Print help information $ git-derive stash -m "Prototype" Pushing StashPush { message: Some("Prototype") } @@ -136,3 +121,39 @@ Calling out to "custom-tool" with ["arg1", "--foo", "bar"] ``` + +Last argument: +```console +$ git-derive diff --help +Compare two commits + +Usage: git-derive[EXE] diff [OPTIONS] [COMMIT] [COMMIT] [-- ] + +Arguments: + [COMMIT] + [COMMIT] + [PATH] + +Options: + --color[=] [default: auto] [possible values: always, auto, never] + -h, --help Print help information + +$ git-derive diff +Diffing stage..worktree (color=auto) + +$ git-derive diff ./src +Diffing stage..worktree ./src (color=auto) + +$ git-derive diff HEAD ./src +Diffing HEAD..worktree ./src (color=auto) + +$ git-derive diff HEAD~~ -- HEAD +Diffing HEAD~~..worktree HEAD (color=auto) + +$ git-derive diff --color +Diffing stage..worktree (color=always) + +$ git-derive diff --color=never +Diffing stage..worktree (color=never) + +``` diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/git-derive.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/git-derive.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/git-derive.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/git-derive.rs 2023-02-01 05:24:55.000000000 +0000 @@ -1,71 +1,99 @@ +use std::ffi::OsStr; use std::ffi::OsString; use std::path::PathBuf; -use clap::{Args, Parser, Subcommand}; +use clap::{Args, Parser, Subcommand, ValueEnum}; /// A fictional versioning CLI #[derive(Debug, Parser)] // requires `derive` feature -#[clap(name = "git")] -#[clap(about = "A fictional versioning CLI", long_about = None)] +#[command(name = "git")] +#[command(about = "A fictional versioning CLI", long_about = None)] struct Cli { - #[clap(subcommand)] + #[command(subcommand)] command: Commands, } #[derive(Debug, Subcommand)] enum Commands { /// Clones repos - #[clap(arg_required_else_help = true)] + #[command(arg_required_else_help = true)] Clone { /// The remote to clone - #[clap(value_parser)] remote: String, }, + /// Compare two commits + Diff { + #[arg(value_name = "COMMIT")] + base: Option, + #[arg(value_name = "COMMIT")] + head: Option, + #[arg(last = true)] + path: Option, + #[arg( + long, + require_equals = true, + value_name = "WHEN", + num_args = 0..=1, + default_value_t 
= ColorWhen::Auto, + default_missing_value = "always", + value_enum + )] + color: ColorWhen, + }, /// pushes things - #[clap(arg_required_else_help = true)] + #[command(arg_required_else_help = true)] Push { /// The remote to target - #[clap(value_parser)] remote: String, }, /// adds things - #[clap(arg_required_else_help = true)] + #[command(arg_required_else_help = true)] Add { /// Stuff to add - #[clap(required = true, value_parser)] + #[arg(required = true)] path: Vec, }, Stash(Stash), - #[clap(external_subcommand)] + #[command(external_subcommand)] External(Vec), } +#[derive(ValueEnum, Copy, Clone, Debug, PartialEq, Eq)] +enum ColorWhen { + Always, + Auto, + Never, +} + +impl std::fmt::Display for ColorWhen { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + self.to_possible_value() + .expect("no values are skipped") + .get_name() + .fmt(f) + } +} + #[derive(Debug, Args)] -#[clap(args_conflicts_with_subcommands = true)] +#[command(args_conflicts_with_subcommands = true)] struct Stash { - #[clap(subcommand)] + #[command(subcommand)] command: Option, - #[clap(flatten)] + #[command(flatten)] push: StashPush, } #[derive(Debug, Subcommand)] enum StashCommands { Push(StashPush), - Pop { - #[clap(value_parser)] - stash: Option, - }, - Apply { - #[clap(value_parser)] - stash: Option, - }, + Pop { stash: Option }, + Apply { stash: Option }, } #[derive(Debug, Args)] struct StashPush { - #[clap(short, long, value_parser)] + #[arg(short, long)] message: Option, } @@ -76,6 +104,37 @@ Commands::Clone { remote } => { println!("Cloning {}", remote); } + Commands::Diff { + mut base, + mut head, + mut path, + color, + } => { + if path.is_none() { + path = head; + head = None; + if path.is_none() { + path = base; + base = None; + } + } + let base = base + .as_deref() + .map(|s| s.to_str().unwrap()) + .unwrap_or("stage"); + let head = head + .as_deref() + .map(|s| s.to_str().unwrap()) + .unwrap_or("worktree"); + let path = path.as_deref().unwrap_or_else(|| OsStr::new("")); + println!( + "Diffing {}..{} {} (color={})", + base, + head, + path.to_string_lossy(), + color + ); + } Commands::Push { remote } => { println!("Pushing to {}", remote); } diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/git.md cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/git.md --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/git.md 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/git.md 2023-02-01 05:24:55.000000000 +0000 @@ -4,51 +4,47 @@ ```console $ git ? 
failed -git A fictional versioning CLI -USAGE: - git[EXE] +Usage: git[EXE] -OPTIONS: - -h, --help Print help information +Commands: + clone Clones repos + diff Compare two commits + push pushes things + add adds things + stash + help Print this message or the help of the given subcommand(s) -SUBCOMMANDS: - add adds things - clone Clones repos - help Print this message or the help of the given subcommand(s) - push pushes things - stash +Options: + -h, --help Print help information $ git help -git A fictional versioning CLI -USAGE: - git[EXE] +Usage: git[EXE] -OPTIONS: - -h, --help Print help information +Commands: + clone Clones repos + diff Compare two commits + push pushes things + add adds things + stash + help Print this message or the help of the given subcommand(s) -SUBCOMMANDS: - add adds things - clone Clones repos - help Print this message or the help of the given subcommand(s) - push pushes things - stash +Options: + -h, --help Print help information $ git help add -git[EXE]-add adds things -USAGE: - git[EXE] add ... +Usage: git[EXE] add ... -ARGS: - ... Stuff to add +Arguments: + ... Stuff to add -OPTIONS: - -h, --help Print help information +Options: + -h, --help Print help information ``` @@ -56,17 +52,15 @@ ```console $ git add ? failed -git[EXE]-add adds things -USAGE: - git[EXE] add ... +Usage: git[EXE] add ... -ARGS: - ... Stuff to add +Arguments: + ... Stuff to add -OPTIONS: - -h, --help Print help information +Options: + -h, --help Print help information $ git add Cargo.toml Cargo.lock Adding ["Cargo.toml", "Cargo.lock"] @@ -76,43 +70,34 @@ Default subcommand: ```console $ git stash -h -git[EXE]-stash +Usage: git[EXE] stash [OPTIONS] + git[EXE] stash -USAGE: - git[EXE] stash [OPTIONS] - git[EXE] stash - -OPTIONS: - -h, --help Print help information - -m, --message - -SUBCOMMANDS: - apply - help Print this message or the help of the given subcommand(s) - pop - push +Commands: + push + pop + apply + help Print this message or the help of the given subcommand(s) + +Options: + -m, --message + -h, --help Print help information $ git stash push -h -git[EXE]-stash-push +Usage: git[EXE] stash push [OPTIONS] -USAGE: - git[EXE] stash push [OPTIONS] - -OPTIONS: - -h, --help Print help information - -m, --message +Options: + -m, --message + -h, --help Print help information $ git stash pop -h -git[EXE]-stash-pop - -USAGE: - git[EXE] stash pop [STASH] +Usage: git[EXE] stash pop [STASH] -ARGS: - +Arguments: + [STASH] -OPTIONS: - -h, --help Print help information +Options: + -h, --help Print help information $ git stash -m "Prototype" Pushing Some("Prototype") @@ -134,3 +119,39 @@ Calling out to "custom-tool" with ["arg1", "--foo", "bar"] ``` + +Last argument: +```console +$ git diff --help +Compare two commits + +Usage: git[EXE] diff [OPTIONS] [COMMIT] [COMMIT] [-- ] + +Arguments: + [COMMIT] + [COMMIT] + [PATH] + +Options: + --color[=] [default: auto] [possible values: always, auto, never] + -h, --help Print help information + +$ git diff +Diffing stage..worktree (color=auto) + +$ git diff ./src +Diffing stage..worktree ./src (color=auto) + +$ git diff HEAD ./src +Diffing HEAD..worktree ./src (color=auto) + +$ git diff HEAD~~ -- HEAD +Diffing HEAD~~..worktree HEAD (color=auto) + +$ git diff --color +Diffing stage..worktree (color=always) + +$ git diff --color=never +Diffing stage..worktree (color=never) + +``` diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/git.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/git.rs --- 
cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/git.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/git.rs 2023-02-01 05:24:55.000000000 +0000 @@ -3,13 +3,12 @@ use clap::{arg, Command}; -fn cli() -> Command<'static> { +fn cli() -> Command { Command::new("git") .about("A fictional versioning CLI") .subcommand_required(true) .arg_required_else_help(true) .allow_external_subcommands(true) - .allow_invalid_utf8_for_external_subcommands(true) .subcommand( Command::new("clone") .about("Clones repos") @@ -17,6 +16,21 @@ .arg_required_else_help(true), ) .subcommand( + Command::new("diff") + .about("Compare two commits") + .arg(arg!(base: [COMMIT])) + .arg(arg!(head: [COMMIT])) + .arg(arg!(path: [PATH]).last(true)) + .arg( + arg!(--color ) + .value_parser(["always", "auto", "never"]) + .num_args(0..=1) + .require_equals(true) + .default_value("auto") + .default_missing_value("always"), + ), + ) + .subcommand( Command::new("push") .about("pushes things") .arg(arg!( "The remote to target")) @@ -38,8 +52,8 @@ ) } -fn push_args() -> Vec> { - vec![arg!(-m --message ).required(false)] +fn push_args() -> Vec { + vec![arg!(-m --message )] } fn main() { @@ -52,6 +66,28 @@ sub_matches.get_one::("REMOTE").expect("required") ); } + Some(("diff", sub_matches)) => { + let color = sub_matches + .get_one::("color") + .map(|s| s.as_str()) + .expect("defaulted in clap"); + + let mut base = sub_matches.get_one::("base").map(|s| s.as_str()); + let mut head = sub_matches.get_one::("head").map(|s| s.as_str()); + let mut path = sub_matches.get_one::("path").map(|s| s.as_str()); + if path.is_none() { + path = head; + head = None; + if path.is_none() { + path = base; + base = None; + } + } + let base = base.unwrap_or("stage"); + let head = head.unwrap_or("worktree"); + let path = path.unwrap_or(""); + println!("Diffing {}..{} {} (color={})", base, head, path, color); + } Some(("push", sub_matches)) => { println!( "Pushing to {}", diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/multicall-busybox.md cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/multicall-busybox.md --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/multicall-busybox.md 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/multicall-busybox.md 2023-02-01 05:24:55.000000000 +0000 @@ -1,4 +1,4 @@ -See the documentation for [`Command::multicall`][crate::App::multicall] for rationale. +See the documentation for [`Command::multicall`][crate::Command::multicall] for rationale. This example omits every command except true and false, which are the most trivial to implement, @@ -25,18 +25,15 @@ ```console $ busybox ? 
failed -busybox - -USAGE: - busybox [OPTIONS] [APPLET] - -OPTIONS: - -h, --help Print help information - --install Install hardlinks for all subcommands in path +Usage: busybox [OPTIONS] [APPLET] APPLETS: - false does nothing unsuccessfully - help Print this message or the help of the given subcommand(s) - true does nothing successfully + true does nothing successfully + false does nothing unsuccessfully + help Print this message or the help of the given subcommand(s) + +Options: + --install Install hardlinks for all subcommands in path + -h, --help Print help information ``` diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/multicall-busybox.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/multicall-busybox.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/multicall-busybox.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/multicall-busybox.rs 2023-02-01 05:24:55.000000000 +0000 @@ -1,9 +1,9 @@ use std::path::PathBuf; use std::process::exit; -use clap::{value_parser, Arg, Command}; +use clap::{value_parser, Arg, ArgAction, Command}; -fn applet_commands() -> [Command<'static>; 2] { +fn applet_commands() -> [Command; 2] { [ Command::new("true").about("does nothing successfully"), Command::new("false").about("does nothing unsuccessfully"), @@ -23,10 +23,9 @@ .long("install") .help("Install hardlinks for all subcommands in path") .exclusive(true) - .takes_value(true) + .action(ArgAction::Set) .default_missing_value("/usr/local/bin") - .value_parser(value_parser!(PathBuf)) - .use_value_delimiter(false), + .value_parser(value_parser!(PathBuf)), ) .subcommands(applet_commands()), ) diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/multicall-hostname.md cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/multicall-hostname.md --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/multicall-hostname.md 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/multicall-hostname.md 2023-02-01 05:24:55.000000000 +0000 @@ -1,4 +1,4 @@ -See the documentation for [`Command::multicall`][crate::App::multicall] for rationale. +See the documentation for [`Command::multicall`][crate::Command::multicall] for rationale. This example omits the implementation of displaying address config diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/pacman.md cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/pacman.md --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/pacman.md 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/pacman.md 2023-02-01 05:24:55.000000000 +0000 @@ -35,36 +35,31 @@ In the help, this looks like: ```console $ pacman -h -pacman 5.2.1 -Pacman Development Team package manager utility -USAGE: - pacman[EXE] +Usage: pacman[EXE] -OPTIONS: - -h, --help Print help information - -V, --version Print version information - -SUBCOMMANDS: - help Print this message or the help of the given subcommand(s) - query -Q --query Query the package database. - sync -S --sync Synchronize packages. +Commands: + query, -Q, --query Query the package database. + sync, -S, --sync Synchronize packages. + help Print this message or the help of the given subcommand(s) + +Options: + -h, --help Print help information + -V, --version Print version information $ pacman -S -h -pacman[EXE]-sync Synchronize packages. -USAGE: - pacman[EXE] {sync|--sync|-S} [OPTIONS] [--] [package]... +Usage: pacman[EXE] {sync|--sync|-S} [OPTIONS] [package]... 
-ARGS: - ... packages +Arguments: + [package]... packages -OPTIONS: - -h, --help Print help information - -i, --info view package information - -s, --search ... search remote repositories for matching strings +Options: + -s, --search ... search remote repositories for matching strings + -i, --info view package information + -h, --help Print help information ``` @@ -74,10 +69,9 @@ ? failed error: The argument '--search ...' cannot be used with '--info' -USAGE: - pacman[EXE] {sync|--sync|-S} --search ... ... +Usage: pacman[EXE] {sync|--sync|-S} --search ... ... -For more information try --help +For more information try '--help' ``` diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/pacman.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/pacman.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/pacman.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/pacman.rs 2023-02-01 05:24:55.000000000 +0000 @@ -21,8 +21,8 @@ .long("search") .help("search locally installed packages for matching strings") .conflicts_with("info") - .takes_value(true) - .multiple_values(true), + .action(ArgAction::Set) + .num_args(1..), ) .arg( Arg::new("info") @@ -30,8 +30,8 @@ .short('i') .conflicts_with("search") .help("view package information") - .takes_value(true) - .multiple_values(true), + .action(ArgAction::Set) + .num_args(1..), ), ) // Sync subcommand @@ -47,8 +47,8 @@ .short('s') .long("search") .conflicts_with("info") - .takes_value(true) - .multiple_values(true) + .action(ArgAction::Set) + .num_args(1..) .help("search remote repositories for matching strings"), ) .arg( @@ -63,8 +63,8 @@ Arg::new("package") .help("packages") .required_unless_present("search") - .takes_value(true) - .multiple_values(true), + .action(ArgAction::Set) + .num_args(1..), ), ) .get_matches(); diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/README.md cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/README.md --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/README.md 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/README.md 2023-02-01 05:24:55.000000000 +0000 @@ -0,0 +1,16 @@ +# Examples + +We try to focus our documentation on the [four types of +documentation](https://documentation.divio.com/). Examples fit into this by +providing: +- [Cookbook / How-To Guides](https://docs.rs/clap/latest/clap/_cookbook/index.html) +- Tutorials ([derive](https://docs.rs/clap/latest/clap/_derive/_tutorial/index.html), [builder](https://docs.rs/clap/latest/clap/_tutorial/index.html)) + +This directory contains the source for the above. 
+ +## Contributing + +New examples should fit within the above structure and support their narrative +- Add the example to [Cargo.toml](../Cargo.toml) for any `required-features` +- Document how the example works with a `.md` file which will be verified using [trycmd](https://docs.rs/trycmd) +- Pull the `.rs` and `.md` file into the appropriate module doc comment to be accessible on docs.rs diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/repl.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/repl.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/repl.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/repl.rs 2023-02-01 05:24:55.000000000 +0000 @@ -48,7 +48,7 @@ Ok(false) } -fn cli() -> Command<'static> { +fn cli() -> Command { // strip out usage const PARSER_TEMPLATE: &str = "\ {all-args} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/01_quick.md cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/01_quick.md --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/01_quick.md 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/01_quick.md 2023-02-01 05:24:55.000000000 +0000 @@ -1,23 +1,21 @@ ```console $ 01_quick --help -clap [..] A simple to use, efficient, and full-featured Command Line Argument Parser -USAGE: - 01_quick[EXE] [OPTIONS] [name] [SUBCOMMAND] +Usage: 01_quick[EXE] [OPTIONS] [name] [COMMAND] -ARGS: - Optional name to operate on - -OPTIONS: - -c, --config Sets a custom config file - -d, --debug Turn debugging information on - -h, --help Print help information - -V, --version Print version information - -SUBCOMMANDS: - help Print this message or the help of the given subcommand(s) - test does testing things +Commands: + test does testing things + help Print this message or the help of the given subcommand(s) + +Arguments: + [name] Optional name to operate on + +Options: + -c, --config Sets a custom config file + -d, --debug... Turn debugging information on + -h, --help Print help information + -V, --version Print version information ``` diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/01_quick.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/01_quick.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/01_quick.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/01_quick.rs 2023-02-01 05:24:55.000000000 +0000 @@ -13,12 +13,9 @@ .required(false) .value_parser(value_parser!(PathBuf)), ) - .arg( - arg!( - -d --debug "Turn debugging information on" - ) - .action(ArgAction::Count), - ) + .arg(arg!( + -d --debug ... "Turn debugging information on" + )) .subcommand( Command::new("test") .about("does testing things") diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/02_app_settings.md cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/02_app_settings.md --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/02_app_settings.md 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/02_app_settings.md 2023-02-01 05:24:55.000000000 +0000 @@ -1,19 +1,17 @@ ```console $ 02_app_settings --help -clap [..] 
A simple to use, efficient, and full-featured Command Line Argument Parser -USAGE: - 02_app_settings[EXE] --two --one +Usage: 02_app_settings[EXE] --two --one -OPTIONS: - --two - --one - -h, --help Print help information - -V, --version Print version information - -$ 02_app_settings --one -1 --one -3 --two 10 -two: "10" -one: "-3" +Options: + --two + + --one + + -h, --help + Print help information + -V, --version + Print version information ``` diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/02_app_settings.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/02_app_settings.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/02_app_settings.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/02_app_settings.rs 2023-02-01 05:24:55.000000000 +0000 @@ -1,11 +1,10 @@ -use clap::{arg, command, AppSettings, ArgAction}; +use clap::{arg, command, ArgAction}; fn main() { let matches = command!() // requires `cargo` feature - .global_setting(AppSettings::DeriveDisplayOrder) - .allow_negative_numbers(true) - .arg(arg!(--two ).action(ArgAction::Set)) - .arg(arg!(--one ).action(ArgAction::Set)) + .next_line_help(true) + .arg(arg!(--two ).required(true).action(ArgAction::Set)) + .arg(arg!(--one ).required(true).action(ArgAction::Set)) .get_matches(); println!( diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/02_apps.md cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/02_apps.md --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/02_apps.md 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/02_apps.md 2023-02-01 05:24:55.000000000 +0000 @@ -1,17 +1,14 @@ ```console $ 02_apps --help -MyApp 1.0 -Kevin K. Does awesome things -USAGE: - 02_apps[EXE] --two --one +Usage: 02_apps[EXE] --two --one -OPTIONS: - -h, --help Print help information - --one - --two - -V, --version Print version information +Options: + --two + --one + -h, --help Print help information + -V, --version Print version information $ 02_apps --version MyApp 1.0 diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/02_apps.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/02_apps.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/02_apps.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/02_apps.rs 2023-02-01 05:24:55.000000000 +0000 @@ -5,8 +5,8 @@ .version("1.0") .author("Kevin K. ") .about("Does awesome things") - .arg(arg!(--two )) - .arg(arg!(--one )) + .arg(arg!(--two ).required(true)) + .arg(arg!(--one ).required(true)) .get_matches(); println!( diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/02_crate.md cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/02_crate.md --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/02_crate.md 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/02_crate.md 2023-02-01 05:24:55.000000000 +0000 @@ -1,16 +1,14 @@ ```console $ 02_crate --help -clap [..] 
A simple to use, efficient, and full-featured Command Line Argument Parser -USAGE: - 02_crate[EXE] --two --one +Usage: 02_crate[EXE] --two --one -OPTIONS: - -h, --help Print help information - --one - --two - -V, --version Print version information +Options: + --two + --one + -h, --help Print help information + -V, --version Print version information $ 02_crate --version clap [..] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/02_crate.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/02_crate.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/02_crate.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/02_crate.rs 2023-02-01 05:24:55.000000000 +0000 @@ -3,8 +3,8 @@ fn main() { // requires `cargo` feature, reading name, version, author, and description from `Cargo.toml` let matches = command!() - .arg(arg!(--two )) - .arg(arg!(--one )) + .arg(arg!(--two ).required(true)) + .arg(arg!(--one ).required(true)) .get_matches(); println!( diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/03_01_flag_bool.md cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/03_01_flag_bool.md --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/03_01_flag_bool.md 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/03_01_flag_bool.md 2023-02-01 05:24:55.000000000 +0000 @@ -1,15 +1,13 @@ ```console $ 03_01_flag_bool --help -clap [..] A simple to use, efficient, and full-featured Command Line Argument Parser -USAGE: - 03_01_flag_bool[EXE] [OPTIONS] +Usage: 03_01_flag_bool[EXE] [OPTIONS] -OPTIONS: - -h, --help Print help information - -v, --verbose - -V, --version Print version information +Options: + -v, --verbose + -h, --help Print help information + -V, --version Print version information $ 03_01_flag_bool verbose: false @@ -18,6 +16,11 @@ verbose: true $ 03_01_flag_bool --verbose --verbose -verbose: true +? failed +error: The argument '--verbose' was provided more than once, but cannot be used multiple times + +Usage: 03_01_flag_bool[EXE] [OPTIONS] + +For more information try '--help' ``` diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/03_01_flag_count.md cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/03_01_flag_count.md --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/03_01_flag_count.md 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/03_01_flag_count.md 2023-02-01 05:24:55.000000000 +0000 @@ -1,15 +1,13 @@ ```console $ 03_01_flag_count --help -clap [..] A simple to use, efficient, and full-featured Command Line Argument Parser -USAGE: - 03_01_flag_count[EXE] [OPTIONS] +Usage: 03_01_flag_count[EXE] [OPTIONS] -OPTIONS: - -h, --help Print help information - -v, --verbose - -V, --version Print version information +Options: + -v, --verbose... 
+ -h, --help Print help information + -V, --version Print version information $ 03_01_flag_count verbose: 0 diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/03_01_flag_count.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/03_01_flag_count.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/03_01_flag_count.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/03_01_flag_count.rs 2023-02-01 05:24:55.000000000 +0000 @@ -1,8 +1,13 @@ -use clap::{arg, command, ArgAction}; +use clap::{command, Arg, ArgAction}; fn main() { let matches = command!() // requires `cargo` feature - .arg(arg!(-v - -verbose).action(ArgAction::Count)) + .arg( + Arg::new("verbose") + .short('v') + .long("verbose") + .action(ArgAction::Count), + ) .get_matches(); println!("verbose: {:?}", matches.get_count("verbose")); diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/03_02_option.md cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/03_02_option.md --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/03_02_option.md 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/03_02_option.md 2023-02-01 05:24:55.000000000 +0000 @@ -1,15 +1,13 @@ ```console $ 03_02_option --help -clap [..] A simple to use, efficient, and full-featured Command Line Argument Parser -USAGE: - 03_02_option[EXE] [OPTIONS] +Usage: 03_02_option[EXE] [OPTIONS] -OPTIONS: - -h, --help Print help information - -n, --name - -V, --version Print version information +Options: + -n, --name + -h, --help Print help information + -V, --version Print version information $ 03_02_option name: None diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/03_02_option_mult.md cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/03_02_option_mult.md --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/03_02_option_mult.md 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/03_02_option_mult.md 2023-02-01 05:24:55.000000000 +0000 @@ -0,0 +1,30 @@ +```console +$ 03_02_option_mult --help +A simple to use, efficient, and full-featured Command Line Argument Parser + +Usage: 03_02_option_mult[EXE] [OPTIONS] + +Options: + -n, --name + -h, --help Print help information + -V, --version Print version information + +$ 03_02_option_mult +name: None + +$ 03_02_option_mult --name bob +name: Some("bob") + +$ 03_02_option_mult --name=bob +name: Some("bob") + +$ 03_02_option_mult -n bob +name: Some("bob") + +$ 03_02_option_mult -n=bob +name: Some("bob") + +$ 03_02_option_mult -nbob +name: Some("bob") + +``` diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/03_02_option_mult.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/03_02_option_mult.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/03_02_option_mult.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/03_02_option_mult.rs 2023-02-01 05:24:55.000000000 +0000 @@ -0,0 +1,14 @@ +use clap::{command, Arg, ArgAction}; + +fn main() { + let matches = command!() // requires `cargo` feature + .arg( + Arg::new("name") + .short('n') + .long("name") + .action(ArgAction::Append), + ) + .get_matches(); + + println!("name: {:?}", 
matches.get_one::("name")); +} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/03_02_option.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/03_02_option.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/03_02_option.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/03_02_option.rs 2023-02-01 05:24:55.000000000 +0000 @@ -1,8 +1,8 @@ -use clap::{arg, command}; +use clap::{command, Arg}; fn main() { let matches = command!() // requires `cargo` feature - .arg(arg!(-n --name ).required(false)) + .arg(Arg::new("name").short('n').long("name")) .get_matches(); println!("name: {:?}", matches.get_one::("name")); diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/03_03_positional.md cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/03_03_positional.md --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/03_03_positional.md 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/03_03_positional.md 2023-02-01 05:24:55.000000000 +0000 @@ -1,22 +1,20 @@ ```console $ 03_03_positional --help -clap [..] A simple to use, efficient, and full-featured Command Line Argument Parser -USAGE: - 03_03_positional[EXE] [NAME] +Usage: 03_03_positional[EXE] [name] -ARGS: - +Arguments: + [name] -OPTIONS: - -h, --help Print help information - -V, --version Print version information +Options: + -h, --help Print help information + -V, --version Print version information $ 03_03_positional -NAME: None +name: None $ 03_03_positional bob -NAME: Some("bob") +name: Some("bob") ``` diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/03_03_positional_mult.md cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/03_03_positional_mult.md --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/03_03_positional_mult.md 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/03_03_positional_mult.md 2023-02-01 05:24:55.000000000 +0000 @@ -0,0 +1,20 @@ +```console +$ 03_03_positional_mult --help +A simple to use, efficient, and full-featured Command Line Argument Parser + +Usage: 03_03_positional_mult[EXE] [name]... + +Arguments: + [name]... 
+ +Options: + -h, --help Print help information + -V, --version Print version information + +$ 03_03_positional_mult +name: None + +$ 03_03_positional_mult bob +name: Some("bob") + +``` diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/03_03_positional_mult.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/03_03_positional_mult.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/03_03_positional_mult.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/03_03_positional_mult.rs 2023-02-01 05:24:55.000000000 +0000 @@ -0,0 +1,9 @@ +use clap::{command, Arg, ArgAction}; + +fn main() { + let matches = command!() // requires `cargo` feature + .arg(Arg::new("name").action(ArgAction::Append)) + .get_matches(); + + println!("name: {:?}", matches.get_one::("name")); +} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/03_03_positional.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/03_03_positional.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/03_03_positional.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/03_03_positional.rs 2023-02-01 05:24:55.000000000 +0000 @@ -1,9 +1,9 @@ -use clap::{arg, command}; +use clap::{command, Arg}; fn main() { let matches = command!() // requires `cargo` feature - .arg(arg!([NAME])) + .arg(Arg::new("name")) .get_matches(); - println!("NAME: {:?}", matches.get_one::("NAME")); + println!("name: {:?}", matches.get_one::("name")); } diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/03_04_subcommands.md cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/03_04_subcommands.md --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/03_04_subcommands.md 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/03_04_subcommands.md 2023-02-01 05:24:55.000000000 +0000 @@ -1,32 +1,28 @@ ```console $ 03_04_subcommands help -clap [..] A simple to use, efficient, and full-featured Command Line Argument Parser -USAGE: - 03_04_subcommands[EXE] +Usage: 03_04_subcommands[EXE] -OPTIONS: - -h, --help Print help information - -V, --version Print version information - -SUBCOMMANDS: - add Adds files to myapp - help Print this message or the help of the given subcommand(s) +Commands: + add Adds files to myapp + help Print this message or the help of the given subcommand(s) + +Options: + -h, --help Print help information + -V, --version Print version information $ 03_04_subcommands help add -03_04_subcommands[EXE]-add [..] Adds files to myapp -USAGE: - 03_04_subcommands[EXE] add [NAME] +Usage: 03_04_subcommands[EXE] add [NAME] -ARGS: - +Arguments: + [NAME] -OPTIONS: - -h, --help Print help information - -V, --version Print version information +Options: + -h, --help Print help information + -V, --version Print version information $ 03_04_subcommands add bob 'myapp add' was used, name is: Some("bob") @@ -37,19 +33,17 @@ ```console $ 03_04_subcommands ? failed -clap [..] 
A simple to use, efficient, and full-featured Command Line Argument Parser -USAGE: - 03_04_subcommands[EXE] +Usage: 03_04_subcommands[EXE] -OPTIONS: - -h, --help Print help information - -V, --version Print version information - -SUBCOMMANDS: - add Adds files to myapp - help Print this message or the help of the given subcommand(s) +Commands: + add Adds files to myapp + help Print this message or the help of the given subcommand(s) + +Options: + -h, --help Print help information + -V, --version Print version information ``` @@ -59,6 +53,6 @@ clap [..] $ 03_04_subcommands add --version -03_04_subcommands[EXE]-add [..] +clap-add [..] ``` diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/03_05_default_values.md cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/03_05_default_values.md --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/03_05_default_values.md 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/03_05_default_values.md 2023-02-01 05:24:55.000000000 +0000 @@ -1,22 +1,20 @@ ```console $ 03_05_default_values --help -clap [..] A simple to use, efficient, and full-featured Command Line Argument Parser -USAGE: - 03_05_default_values[EXE] [NAME] +Usage: 03_05_default_values[EXE] [PORT] -ARGS: - [default: alice] +Arguments: + [PORT] [default: 2020] -OPTIONS: - -h, --help Print help information - -V, --version Print version information +Options: + -h, --help Print help information + -V, --version Print version information $ 03_05_default_values -NAME: "alice" +port: 2020 -$ 03_05_default_values bob -NAME: "bob" +$ 03_05_default_values 22 +port: 22 ``` diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/03_05_default_values.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/03_05_default_values.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/03_05_default_values.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/03_05_default_values.rs 2023-02-01 05:24:55.000000000 +0000 @@ -1,14 +1,18 @@ -use clap::{arg, command}; +use clap::{arg, command, value_parser}; fn main() { let matches = command!() // requires `cargo` feature - .arg(arg!([NAME]).default_value("alice")) + .arg( + arg!([PORT]) + .value_parser(value_parser!(u16)) + .default_value("2020"), + ) .get_matches(); println!( - "NAME: {:?}", + "port: {:?}", matches - .get_one::("NAME") + .get_one::("PORT") .expect("default ensures there is always a value") ); } diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/04_01_enum.md cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/04_01_enum.md --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/04_01_enum.md 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/04_01_enum.md 2023-02-01 05:24:55.000000000 +0000 @@ -1,17 +1,35 @@ ```console $ 04_01_enum --help -clap [..] 
A simple to use, efficient, and full-featured Command Line Argument Parser -USAGE: - 04_01_enum[EXE] +Usage: 04_01_enum[EXE] -ARGS: - What mode to run the program in [possible values: fast, slow] +Arguments: + + What mode to run the program in -OPTIONS: - -h, --help Print help information - -V, --version Print version information + Possible values: + - fast: Run swiftly + - slow: Crawl slowly but steadily + +Options: + -h, --help + Print help information (use `-h` for a summary) + + -V, --version + Print version information + +$ 04_01_enum -h +A simple to use, efficient, and full-featured Command Line Argument Parser + +Usage: 04_01_enum[EXE] + +Arguments: + What mode to run the program in [possible values: fast, slow] + +Options: + -h, --help Print help information (use `--help` for more detail) + -V, --version Print version information $ 04_01_enum fast Hare @@ -21,9 +39,9 @@ $ 04_01_enum medium ? failed -error: "medium" isn't a valid value for '' - [possible values: fast, slow] +error: 'medium' isn't a valid value for '' + [possible values: fast, slow] -For more information try --help +For more information try '--help' ``` diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/04_01_enum.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/04_01_enum.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/04_01_enum.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/04_01_enum.rs 2023-02-01 05:24:55.000000000 +0000 @@ -6,16 +6,16 @@ Slow, } -// Can also be derived] with feature flag `derive` +// Can also be derived with feature flag `derive` impl ValueEnum for Mode { fn value_variants<'a>() -> &'a [Self] { &[Mode::Fast, Mode::Slow] } - fn to_possible_value<'a>(&self) -> Option> { + fn to_possible_value<'a>(&self) -> Option { Some(match self { - Mode::Fast => PossibleValue::new("fast"), - Mode::Slow => PossibleValue::new("slow"), + Mode::Fast => PossibleValue::new("fast").help("Run swiftly"), + Mode::Slow => PossibleValue::new("slow").help("Crawl slowly but steadily"), }) } } diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/04_01_possible.md cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/04_01_possible.md --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/04_01_possible.md 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/04_01_possible.md 2023-02-01 05:24:55.000000000 +0000 @@ -1,17 +1,15 @@ ```console $ 04_01_possible --help -clap [..] A simple to use, efficient, and full-featured Command Line Argument Parser -USAGE: - 04_01_possible[EXE] +Usage: 04_01_possible[EXE] -ARGS: - What mode to run the program in [possible values: fast, slow] +Arguments: + What mode to run the program in [possible values: fast, slow] -OPTIONS: - -h, --help Print help information - -V, --version Print version information +Options: + -h, --help Print help information + -V, --version Print version information $ 04_01_possible fast Hare @@ -21,9 +19,9 @@ $ 04_01_possible medium ? 
failed -error: "medium" isn't a valid value for '' - [possible values: fast, slow] +error: 'medium' isn't a valid value for '' + [possible values: fast, slow] -For more information try --help +For more information try '--help' ``` diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/04_02_parse.md cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/04_02_parse.md --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/04_02_parse.md 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/04_02_parse.md 2023-02-01 05:24:55.000000000 +0000 @@ -1,31 +1,29 @@ ```console $ 04_02_parse --help -clap [..] A simple to use, efficient, and full-featured Command Line Argument Parser -USAGE: - 04_02_parse[EXE] +Usage: 04_02_parse[EXE] -ARGS: - Network port to use +Arguments: + Network port to use -OPTIONS: - -h, --help Print help information - -V, --version Print version information +Options: + -h, --help Print help information + -V, --version Print version information $ 04_02_parse 22 PORT = 22 $ 04_02_parse foobar ? failed -error: Invalid value "foobar" for '': invalid digit found in string +error: Invalid value 'foobar' for '': invalid digit found in string -For more information try --help +For more information try '--help' $ 04_02_parse_derive 0 ? failed -error: Invalid value "0" for '': 0 is not in 1..=65535 +error: Invalid value '0' for '': 0 is not in 1..=65535 -For more information try --help +For more information try '--help' ``` diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/04_02_validate.md cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/04_02_validate.md --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/04_02_validate.md 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/04_02_validate.md 2023-02-01 05:24:55.000000000 +0000 @@ -1,31 +1,29 @@ ```console $ 04_02_validate --help -clap [..] A simple to use, efficient, and full-featured Command Line Argument Parser -USAGE: - 04_02_validate[EXE] +Usage: 04_02_validate[EXE] -ARGS: - Network port to use +Arguments: + Network port to use -OPTIONS: - -h, --help Print help information - -V, --version Print version information +Options: + -h, --help Print help information + -V, --version Print version information $ 04_02_validate 22 PORT = 22 $ 04_02_validate foobar ? failed -error: Invalid value "foobar" for '': `foobar` isn't a port number +error: Invalid value 'foobar' for '': `foobar` isn't a port number -For more information try --help +For more information try '--help' $ 04_02_validate 0 ? failed -error: Invalid value "0" for '': Port not in range 1-65535 +error: Invalid value '0' for '': Port not in range 1-65535 -For more information try --help +For more information try '--help' ``` diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/04_03_relations.md cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/04_03_relations.md --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/04_03_relations.md 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/04_03_relations.md 2023-02-01 05:24:55.000000000 +0000 @@ -1,33 +1,30 @@ ```console $ 04_03_relations --help -clap [..] 
A simple to use, efficient, and full-featured Command Line Argument Parser -USAGE: - 04_03_relations[EXE] [OPTIONS] <--set-ver |--major|--minor|--patch> [INPUT_FILE] +Usage: 04_03_relations[EXE] [OPTIONS] <--set-ver |--major|--minor|--patch> [INPUT_FILE] -ARGS: - some regular input +Arguments: + [INPUT_FILE] some regular input -OPTIONS: - -c - -h, --help Print help information - --major auto inc major - --minor auto inc minor - --patch auto inc patch - --set-ver set version manually - --spec-in some special input argument - -V, --version Print version information +Options: + --set-ver set version manually + --major auto inc major + --minor auto inc minor + --patch auto inc patch + --spec-in some special input argument + -c + -h, --help Print help information + -V, --version Print version information $ 04_03_relations ? failed error: The following required arguments were not provided: - <--set-ver |--major|--minor|--patch> + <--set-ver |--major|--minor|--patch> -USAGE: - 04_03_relations[EXE] [OPTIONS] <--set-ver |--major|--minor|--patch> [INPUT_FILE] +Usage: 04_03_relations[EXE] <--set-ver |--major|--minor|--patch> [INPUT_FILE] -For more information try --help +For more information try '--help' $ 04_03_relations --major Version: 2.2.3 @@ -36,20 +33,18 @@ ? failed error: The argument '--major' cannot be used with '--minor' -USAGE: - 04_03_relations[EXE] <--set-ver |--major|--minor|--patch> +Usage: 04_03_relations[EXE] <--set-ver |--major|--minor|--patch> [INPUT_FILE] -For more information try --help +For more information try '--help' $ 04_03_relations --major -c config.toml ? failed error: The following required arguments were not provided: - > + > -USAGE: - 04_03_relations[EXE] -c <--set-ver |--major|--minor|--patch> > +Usage: 04_03_relations[EXE] -c <--set-ver |--major|--minor|--patch> > -For more information try --help +For more information try '--help' $ 04_03_relations --major -c config.toml --spec-in input.txt Version: 2.2.3 diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/04_03_relations.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/04_03_relations.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/04_03_relations.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/04_03_relations.rs 2023-02-01 05:24:55.000000000 +0000 @@ -6,7 +6,7 @@ // Create application like normal let matches = command!() // requires `cargo` feature // Add the version arguments - .arg(arg!(--"set-ver" "set version manually").required(false)) + .arg(arg!(--"set-ver" "set version manually")) .arg(arg!(--major "auto inc major").action(ArgAction::SetTrue)) .arg(arg!(--minor "auto inc minor").action(ArgAction::SetTrue)) .arg(arg!(--patch "auto inc patch").action(ArgAction::SetTrue)) @@ -14,7 +14,7 @@ .group( ArgGroup::new("vers") .required(true) - .args(&["set-ver", "major", "minor", "patch"]), + .args(["set-ver", "major", "minor", "patch"]), ) // Arguments can also be added to a group individually, these two arguments // are part of the "input" group which is not required @@ -25,7 +25,6 @@ ) .arg( arg!(--"spec-in" "some special input argument") - .required(false) .value_parser(value_parser!(PathBuf)) .group("input"), ) @@ -33,7 +32,6 @@ // (but **not** both) the "input" arguments .arg( arg!(config: -c ) - .required(false) .value_parser(value_parser!(PathBuf)) .requires("input"), ) diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/04_04_custom.md 
cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/04_04_custom.md --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/04_04_custom.md 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/04_04_custom.md 2023-02-01 05:24:55.000000000 +0000 @@ -1,32 +1,29 @@ ```console $ 04_04_custom --help -clap [..] A simple to use, efficient, and full-featured Command Line Argument Parser -USAGE: - 04_04_custom[EXE] [OPTIONS] [INPUT_FILE] +Usage: 04_04_custom[EXE] [OPTIONS] [INPUT_FILE] -ARGS: - some regular input +Arguments: + [INPUT_FILE] some regular input -OPTIONS: - -c - -h, --help Print help information - --major auto inc major - --minor auto inc minor - --patch auto inc patch - --set-ver set version manually - --spec-in some special input argument - -V, --version Print version information +Options: + --set-ver set version manually + --major auto inc major + --minor auto inc minor + --patch auto inc patch + --spec-in some special input argument + -c + -h, --help Print help information + -V, --version Print version information $ 04_04_custom ? failed error: Can only modify one version field -USAGE: - 04_04_custom[EXE] [OPTIONS] [INPUT_FILE] +Usage: 04_04_custom[EXE] [OPTIONS] [INPUT_FILE] -For more information try --help +For more information try '--help' $ 04_04_custom --major Version: 2.2.3 @@ -35,20 +32,18 @@ ? failed error: Can only modify one version field -USAGE: - 04_04_custom[EXE] [OPTIONS] [INPUT_FILE] +Usage: 04_04_custom[EXE] [OPTIONS] [INPUT_FILE] -For more information try --help +For more information try '--help' $ 04_04_custom --major -c config.toml ? failed Version: 2.2.3 error: INPUT_FILE or --spec-in is required when using --config -USAGE: - 04_04_custom[EXE] [OPTIONS] [INPUT_FILE] +Usage: 04_04_custom[EXE] [OPTIONS] [INPUT_FILE] -For more information try --help +For more information try '--help' $ 04_04_custom --major -c config.toml --spec-in input.txt Version: 2.2.3 diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/04_04_custom.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/04_04_custom.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/04_04_custom.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/04_04_custom.rs 2023-02-01 05:24:55.000000000 +0000 @@ -1,12 +1,13 @@ use std::path::PathBuf; -use clap::{arg, command, value_parser, ArgAction, ErrorKind}; +use clap::error::ErrorKind; +use clap::{arg, command, value_parser, ArgAction}; fn main() { // Create application like normal let mut cmd = command!() // requires `cargo` feature // Add the version arguments - .arg(arg!(--"set-ver" "set version manually").required(false)) + .arg(arg!(--"set-ver" "set version manually")) .arg(arg!(--major "auto inc major").action(ArgAction::SetTrue)) .arg(arg!(--minor "auto inc minor").action(ArgAction::SetTrue)) .arg(arg!(--patch "auto inc patch").action(ArgAction::SetTrue)) @@ -15,16 +16,11 @@ .arg(arg!([INPUT_FILE] "some regular input").value_parser(value_parser!(PathBuf))) .arg( arg!(--"spec-in" "some special input argument") - .required(false) .value_parser(value_parser!(PathBuf)), ) // Now let's assume we have a -c [config] argument which requires one of // (but **not** both) the "input" arguments - .arg( - arg!(config: -c ) - .required(false) - .value_parser(value_parser!(PathBuf)), - ); + .arg(arg!(config: -c ).value_parser(value_parser!(PathBuf))); let 
matches = cmd.get_matches_mut(); // Let's assume the old version 1.2.3 diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/05_01_assert.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/05_01_assert.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/05_01_assert.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_builder/05_01_assert.rs 2023-02-01 05:24:55.000000000 +0000 @@ -10,7 +10,7 @@ println!("PORT = {}", port); } -fn cmd() -> clap::Command<'static> { +fn cmd() -> clap::Command { command!() // requires `cargo` feature .arg( arg!() diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/01_quick.md cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/01_quick.md --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/01_quick.md 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/01_quick.md 2023-02-01 05:24:55.000000000 +0000 @@ -1,23 +1,21 @@ ```console $ 01_quick_derive --help -clap [..] A simple to use, efficient, and full-featured Command Line Argument Parser -USAGE: - 01_quick_derive[EXE] [OPTIONS] [NAME] [SUBCOMMAND] +Usage: 01_quick_derive[EXE] [OPTIONS] [NAME] [COMMAND] -ARGS: - Optional name to operate on - -OPTIONS: - -c, --config Sets a custom config file - -d, --debug Turn debugging information on - -h, --help Print help information - -V, --version Print version information - -SUBCOMMANDS: - help Print this message or the help of the given subcommand(s) - test does testing things +Commands: + test does testing things + help Print this message or the help of the given subcommand(s) + +Arguments: + [NAME] Optional name to operate on + +Options: + -c, --config Sets a custom config file + -d, --debug... 
Turn debugging information on + -h, --help Print help information + -V, --version Print version information ``` diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/01_quick.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/01_quick.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/01_quick.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/01_quick.rs 2023-02-01 05:24:55.000000000 +0000 @@ -3,21 +3,20 @@ use clap::{Parser, Subcommand}; #[derive(Parser)] -#[clap(author, version, about, long_about = None)] +#[command(author, version, about, long_about = None)] struct Cli { /// Optional name to operate on - #[clap(value_parser)] name: Option, /// Sets a custom config file - #[clap(short, long, value_parser, value_name = "FILE")] + #[arg(short, long, value_name = "FILE")] config: Option, /// Turn debugging information on - #[clap(short, long, action = clap::ArgAction::Count)] + #[arg(short, long, action = clap::ArgAction::Count)] debug: u8, - #[clap(subcommand)] + #[command(subcommand)] command: Option, } @@ -26,7 +25,7 @@ /// does testing things Test { /// lists test values - #[clap(short, long, action)] + #[arg(short, long)] list: bool, }, } diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/02_app_settings.md cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/02_app_settings.md --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/02_app_settings.md 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/02_app_settings.md 2023-02-01 05:24:55.000000000 +0000 @@ -1,19 +1,17 @@ ```console $ 02_app_settings_derive --help -clap [..] 
A simple to use, efficient, and full-featured Command Line Argument Parser -USAGE: - 02_app_settings_derive[EXE] --two --one +Usage: 02_app_settings_derive[EXE] --two --one -OPTIONS: - --two - --one - -h, --help Print help information - -V, --version Print version information - -$ 02_app_settings_derive --one -1 --one -3 --two 10 -two: "10" -one: "-3" +Options: + --two + + --one + + -h, --help + Print help information + -V, --version + Print version information ``` diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/02_app_settings.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/02_app_settings.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/02_app_settings.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/02_app_settings.rs 2023-02-01 05:24:55.000000000 +0000 @@ -1,13 +1,12 @@ -use clap::{AppSettings, Parser}; +use clap::Parser; #[derive(Parser)] -#[clap(author, version, about, long_about = None)] -#[clap(allow_negative_numbers = true)] -#[clap(global_setting(AppSettings::DeriveDisplayOrder))] +#[command(author, version, about, long_about = None)] +#[command(next_line_help = true)] struct Cli { - #[clap(long, value_parser)] + #[arg(long)] two: String, - #[clap(long, value_parser)] + #[arg(long)] one: String, } diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/02_apps.md cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/02_apps.md --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/02_apps.md 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/02_apps.md 2023-02-01 05:24:55.000000000 +0000 @@ -1,17 +1,14 @@ ```console $ 02_apps_derive --help -MyApp 1.0 -Kevin K. Does awesome things -USAGE: - 02_apps_derive[EXE] --two --one +Usage: 02_apps_derive[EXE] --two --one -OPTIONS: - -h, --help Print help information - --one - --two - -V, --version Print version information +Options: + --two + --one + -h, --help Print help information + -V, --version Print version information $ 02_apps_derive --version MyApp 1.0 diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/02_apps.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/02_apps.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/02_apps.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/02_apps.rs 2023-02-01 05:24:55.000000000 +0000 @@ -1,14 +1,14 @@ use clap::Parser; #[derive(Parser)] -#[clap(name = "MyApp")] -#[clap(author = "Kevin K. ")] -#[clap(version = "1.0")] -#[clap(about = "Does awesome things", long_about = None)] +#[command(name = "MyApp")] +#[command(author = "Kevin K. 
")] +#[command(version = "1.0")] +#[command(about = "Does awesome things", long_about = None)] struct Cli { - #[clap(long, value_parser)] + #[arg(long)] two: String, - #[clap(long, value_parser)] + #[arg(long)] one: String, } diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/02_crate.md cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/02_crate.md --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/02_crate.md 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/02_crate.md 2023-02-01 05:24:55.000000000 +0000 @@ -1,16 +1,14 @@ ```console $ 02_crate_derive --help -clap [..] A simple to use, efficient, and full-featured Command Line Argument Parser -USAGE: - 02_crate_derive[EXE] --two --one +Usage: 02_crate_derive[EXE] --two --one -OPTIONS: - -h, --help Print help information - --one - --two - -V, --version Print version information +Options: + --two + --one + -h, --help Print help information + -V, --version Print version information $ 02_crate_derive --version clap [..] diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/02_crate.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/02_crate.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/02_crate.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/02_crate.rs 2023-02-01 05:24:55.000000000 +0000 @@ -1,11 +1,11 @@ use clap::Parser; #[derive(Parser)] -#[clap(author, version, about, long_about = None)] // Read from `Cargo.toml` +#[command(author, version, about, long_about = None)] // Read from `Cargo.toml` struct Cli { - #[clap(long, value_parser)] + #[arg(long)] two: String, - #[clap(long, value_parser)] + #[arg(long)] one: String, } diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/03_01_flag_bool.md cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/03_01_flag_bool.md --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/03_01_flag_bool.md 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/03_01_flag_bool.md 2023-02-01 05:24:55.000000000 +0000 @@ -1,15 +1,13 @@ ```console $ 03_01_flag_bool_derive --help -clap [..] A simple to use, efficient, and full-featured Command Line Argument Parser -USAGE: - 03_01_flag_bool_derive[EXE] [OPTIONS] +Usage: 03_01_flag_bool_derive[EXE] [OPTIONS] -OPTIONS: - -h, --help Print help information - -v, --verbose - -V, --version Print version information +Options: + -v, --verbose + -h, --help Print help information + -V, --version Print version information $ 03_01_flag_bool_derive verbose: false @@ -18,6 +16,11 @@ verbose: true $ 03_01_flag_bool_derive --verbose --verbose -verbose: true +? 
failed +error: The argument '--verbose' was provided more than once, but cannot be used multiple times + +Usage: 03_01_flag_bool_derive[EXE] [OPTIONS] + +For more information try '--help' ``` diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/03_01_flag_bool.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/03_01_flag_bool.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/03_01_flag_bool.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/03_01_flag_bool.rs 2023-02-01 05:24:55.000000000 +0000 @@ -1,9 +1,9 @@ use clap::Parser; #[derive(Parser)] -#[clap(author, version, about, long_about = None)] +#[command(author, version, about, long_about = None)] struct Cli { - #[clap(short, long, action)] + #[arg(short, long)] verbose: bool, } diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/03_01_flag_count.md cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/03_01_flag_count.md --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/03_01_flag_count.md 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/03_01_flag_count.md 2023-02-01 05:24:55.000000000 +0000 @@ -1,15 +1,13 @@ ```console $ 03_01_flag_count_derive --help -clap [..] A simple to use, efficient, and full-featured Command Line Argument Parser -USAGE: - 03_01_flag_count_derive[EXE] [OPTIONS] +Usage: 03_01_flag_count_derive[EXE] [OPTIONS] -OPTIONS: - -h, --help Print help information - -v, --verbose - -V, --version Print version information +Options: + -v, --verbose... + -h, --help Print help information + -V, --version Print version information $ 03_01_flag_count_derive verbose: 0 diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/03_01_flag_count.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/03_01_flag_count.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/03_01_flag_count.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/03_01_flag_count.rs 2023-02-01 05:24:55.000000000 +0000 @@ -1,9 +1,9 @@ use clap::Parser; #[derive(Parser)] -#[clap(author, version, about, long_about = None)] +#[command(author, version, about, long_about = None)] struct Cli { - #[clap(short, long, action = clap::ArgAction::Count)] + #[arg(short, long, action = clap::ArgAction::Count)] verbose: u8, } diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/03_02_option.md cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/03_02_option.md --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/03_02_option.md 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/03_02_option.md 2023-02-01 05:24:55.000000000 +0000 @@ -1,15 +1,13 @@ ```console $ 03_02_option_derive --help -clap [..] 
A simple to use, efficient, and full-featured Command Line Argument Parser -USAGE: - 03_02_option_derive[EXE] [OPTIONS] +Usage: 03_02_option_derive[EXE] [OPTIONS] -OPTIONS: - -h, --help Print help information - -n, --name - -V, --version Print version information +Options: + -n, --name + -h, --help Print help information + -V, --version Print version information $ 03_02_option_derive name: None diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/03_02_option_mult.md cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/03_02_option_mult.md --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/03_02_option_mult.md 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/03_02_option_mult.md 2023-02-01 05:24:55.000000000 +0000 @@ -0,0 +1,30 @@ +```console +$ 03_02_option_mult_derive --help +A simple to use, efficient, and full-featured Command Line Argument Parser + +Usage: 03_02_option_mult_derive[EXE] [OPTIONS] + +Options: + -n, --name + -h, --help Print help information + -V, --version Print version information + +$ 03_02_option_mult_derive +name: [] + +$ 03_02_option_mult_derive --name bob +name: ["bob"] + +$ 03_02_option_mult_derive --name=bob +name: ["bob"] + +$ 03_02_option_mult_derive -n bob +name: ["bob"] + +$ 03_02_option_mult_derive -n=bob +name: ["bob"] + +$ 03_02_option_mult_derive -nbob +name: ["bob"] + +``` diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/03_02_option_mult.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/03_02_option_mult.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/03_02_option_mult.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/03_02_option_mult.rs 2023-02-01 05:24:55.000000000 +0000 @@ -0,0 +1,14 @@ +use clap::Parser; + +#[derive(Parser)] +#[command(author, version, about, long_about = None)] +struct Cli { + #[arg(short, long)] + name: Vec, +} + +fn main() { + let cli = Cli::parse(); + + println!("name: {:?}", cli.name); +} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/03_02_option.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/03_02_option.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/03_02_option.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/03_02_option.rs 2023-02-01 05:24:55.000000000 +0000 @@ -1,9 +1,9 @@ use clap::Parser; #[derive(Parser)] -#[clap(author, version, about, long_about = None)] +#[command(author, version, about, long_about = None)] struct Cli { - #[clap(short, long, value_parser)] + #[arg(short, long)] name: Option, } diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/03_03_positional.md cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/03_03_positional.md --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/03_03_positional.md 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/03_03_positional.md 2023-02-01 05:24:55.000000000 +0000 @@ -1,17 +1,15 @@ ```console $ 03_03_positional_derive --help -clap [..] 
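The new 03_02_option_mult example relies on clap 4 inferring `ArgAction::Append` for a `Vec` field, so `--name` can be given repeatedly. A compact sketch of that inference (assuming clap 4 derive):

```rust
use clap::Parser;

#[derive(Parser, Debug)]
#[command(about = "multi-value option demo", long_about = None)]
struct Cli {
    /// A `Vec<String>` field implies `ArgAction::Append`:
    /// every `-n`/`--name` occurrence pushes another value.
    #[arg(short, long)]
    name: Vec<String>,
}

fn main() {
    let cli = Cli::parse_from(["demo", "--name", "bob", "-n", "alice"]);
    assert_eq!(cli.name, vec!["bob".to_string(), "alice".to_string()]);
    println!("name: {:?}", cli.name);
}
```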
A simple to use, efficient, and full-featured Command Line Argument Parser -USAGE: - 03_03_positional_derive[EXE] [NAME] +Usage: 03_03_positional_derive[EXE] [NAME] -ARGS: - +Arguments: + [NAME] -OPTIONS: - -h, --help Print help information - -V, --version Print version information +Options: + -h, --help Print help information + -V, --version Print version information $ 03_03_positional_derive name: None diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/03_03_positional_mult.md cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/03_03_positional_mult.md --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/03_03_positional_mult.md 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/03_03_positional_mult.md 2023-02-01 05:24:55.000000000 +0000 @@ -0,0 +1,20 @@ +```console +$ 03_03_positional_mult_derive --help +A simple to use, efficient, and full-featured Command Line Argument Parser + +Usage: 03_03_positional_mult_derive[EXE] [NAME]... + +Arguments: + [NAME]... + +Options: + -h, --help Print help information + -V, --version Print version information + +$ 03_03_positional_mult_derive +name: [] + +$ 03_03_positional_mult_derive bob +name: ["bob"] + +``` diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/03_03_positional_mult.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/03_03_positional_mult.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/03_03_positional_mult.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/03_03_positional_mult.rs 2023-02-01 05:24:55.000000000 +0000 @@ -0,0 +1,13 @@ +use clap::Parser; + +#[derive(Parser)] +#[command(author, version, about, long_about = None)] +struct Cli { + name: Vec, +} + +fn main() { + let cli = Cli::parse(); + + println!("name: {:?}", cli.name); +} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/03_03_positional.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/03_03_positional.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/03_03_positional.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/03_03_positional.rs 2023-02-01 05:24:55.000000000 +0000 @@ -1,9 +1,8 @@ use clap::Parser; #[derive(Parser)] -#[clap(author, version, about, long_about = None)] +#[command(author, version, about, long_about = None)] struct Cli { - #[clap(value_parser)] name: Option, } diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/03_04_subcommands_alt.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/03_04_subcommands_alt.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/03_04_subcommands_alt.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/03_04_subcommands_alt.rs 2023-02-01 05:24:55.000000000 +0000 @@ -1,10 +1,10 @@ use clap::{Args, Parser, Subcommand}; #[derive(Parser)] -#[clap(author, version, about, long_about = None)] -#[clap(propagate_version = true)] +#[command(author, version, about, long_about = None)] +#[command(propagate_version = true)] struct Cli { - #[clap(subcommand)] + #[command(subcommand)] command: Commands, } @@ -16,7 +16,6 @@ #[derive(Args)] struct Add { - #[clap(value_parser)] name: Option, } diff -Nru 
cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/03_04_subcommands.md cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/03_04_subcommands.md --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/03_04_subcommands.md 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/03_04_subcommands.md 2023-02-01 05:24:55.000000000 +0000 @@ -1,32 +1,28 @@ ```console $ 03_04_subcommands_derive help -clap [..] A simple to use, efficient, and full-featured Command Line Argument Parser -USAGE: - 03_04_subcommands_derive[EXE] +Usage: 03_04_subcommands_derive[EXE] -OPTIONS: - -h, --help Print help information - -V, --version Print version information - -SUBCOMMANDS: - add Adds files to myapp - help Print this message or the help of the given subcommand(s) +Commands: + add Adds files to myapp + help Print this message or the help of the given subcommand(s) + +Options: + -h, --help Print help information + -V, --version Print version information $ 03_04_subcommands_derive help add -03_04_subcommands_derive[EXE]-add [..] Adds files to myapp -USAGE: - 03_04_subcommands_derive[EXE] add [NAME] +Usage: 03_04_subcommands_derive[EXE] add [NAME] -ARGS: - +Arguments: + [NAME] -OPTIONS: - -h, --help Print help information - -V, --version Print version information +Options: + -h, --help Print help information + -V, --version Print version information $ 03_04_subcommands_derive add bob 'myapp add' was used, name is: Some("bob") @@ -37,28 +33,26 @@ ```console $ 03_04_subcommands_derive ? failed -clap [..] A simple to use, efficient, and full-featured Command Line Argument Parser -USAGE: - 03_04_subcommands_derive[EXE] +Usage: 03_04_subcommands_derive[EXE] -OPTIONS: - -h, --help Print help information - -V, --version Print version information - -SUBCOMMANDS: - add Adds files to myapp - help Print this message or the help of the given subcommand(s) +Commands: + add Adds files to myapp + help Print this message or the help of the given subcommand(s) + +Options: + -h, --help Print help information + -V, --version Print version information ``` -Because we added `#[clap(propagate_version = true)]`: +Because we added `#[command(propagate_version = true)]`: ```console $ 03_04_subcommands_derive --version clap [..] $ 03_04_subcommands_derive add --version -03_04_subcommands_derive[EXE]-add [..] +clap-add [..] 
``` diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/03_04_subcommands.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/03_04_subcommands.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/03_04_subcommands.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/03_04_subcommands.rs 2023-02-01 05:24:55.000000000 +0000 @@ -1,20 +1,17 @@ use clap::{Parser, Subcommand}; #[derive(Parser)] -#[clap(author, version, about, long_about = None)] -#[clap(propagate_version = true)] +#[command(author, version, about, long_about = None)] +#[command(propagate_version = true)] struct Cli { - #[clap(subcommand)] + #[command(subcommand)] command: Commands, } #[derive(Subcommand)] enum Commands { /// Adds files to myapp - Add { - #[clap(value_parser)] - name: Option, - }, + Add { name: Option }, } fn main() { diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/03_05_default_values.md cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/03_05_default_values.md --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/03_05_default_values.md 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/03_05_default_values.md 2023-02-01 05:24:55.000000000 +0000 @@ -1,22 +1,20 @@ ```console $ 03_05_default_values_derive --help -clap [..] A simple to use, efficient, and full-featured Command Line Argument Parser -USAGE: - 03_05_default_values_derive[EXE] [NAME] +Usage: 03_05_default_values_derive[EXE] [PORT] -ARGS: - [default: alice] +Arguments: + [PORT] [default: 2020] -OPTIONS: - -h, --help Print help information - -V, --version Print version information +Options: + -h, --help Print help information + -V, --version Print version information $ 03_05_default_values_derive -name: "alice" +port: 2020 -$ 03_05_default_values_derive bob -name: "bob" +$ 03_05_default_values_derive 22 +port: 22 ``` diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/03_05_default_values.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/03_05_default_values.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/03_05_default_values.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/03_05_default_values.rs 2023-02-01 05:24:55.000000000 +0000 @@ -1,14 +1,14 @@ use clap::Parser; #[derive(Parser)] -#[clap(author, version, about, long_about = None)] +#[command(author, version, about, long_about = None)] struct Cli { - #[clap(default_value_t = String::from("alice"), value_parser)] - name: String, + #[arg(default_value_t = 2020)] + port: u16, } fn main() { let cli = Cli::parse(); - println!("name: {:?}", cli.name); + println!("port: {:?}", cli.port); } diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/04_01_enum.md cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/04_01_enum.md --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/04_01_enum.md 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/04_01_enum.md 2023-02-01 05:24:55.000000000 +0000 @@ -1,17 +1,35 @@ ```console $ 04_01_enum_derive --help -clap [..] 
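The subcommand hunks keep `#[command(subcommand)]` with a `#[derive(Subcommand)]` enum but drop the per-field `value_parser`, and the default-values example switches to a typed `#[arg(default_value_t = ...)]`. A small sketch combining both patterns (assuming clap 4 derive; the `Add` variant mirrors the tutorial, the extra `--port` default is illustrative):

```rust
use clap::{Parser, Subcommand};

#[derive(Parser, Debug)]
#[command(version, about = "subcommand demo", long_about = None)]
#[command(propagate_version = true)]
struct Cli {
    /// Typed defaults use `default_value_t`; the type's `Display`
    /// impl renders the default in `--help`.
    #[arg(long, default_value_t = 2020)]
    port: u16,

    #[command(subcommand)]
    command: Commands,
}

#[derive(Subcommand, Debug)]
enum Commands {
    /// Adds files to myapp
    Add { name: Option<String> },
}

fn main() {
    let cli = Cli::parse_from(["demo", "add", "bob"]);
    println!("port: {}", cli.port);
    match cli.command {
        Commands::Add { name } => println!("'demo add' was used, name is: {name:?}"),
    }
}
```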
A simple to use, efficient, and full-featured Command Line Argument Parser -USAGE: - 04_01_enum_derive[EXE] +Usage: 04_01_enum_derive[EXE] -ARGS: - What mode to run the program in [possible values: fast, slow] +Arguments: + + What mode to run the program in -OPTIONS: - -h, --help Print help information - -V, --version Print version information + Possible values: + - fast: Run swiftly + - slow: Crawl slowly but steadily + +Options: + -h, --help + Print help information (use `-h` for a summary) + + -V, --version + Print version information + +$ 04_01_enum_derive -h +A simple to use, efficient, and full-featured Command Line Argument Parser + +Usage: 04_01_enum_derive[EXE] + +Arguments: + What mode to run the program in [possible values: fast, slow] + +Options: + -h, --help Print help information (use `--help` for more detail) + -V, --version Print version information $ 04_01_enum_derive fast Hare @@ -21,9 +39,9 @@ $ 04_01_enum_derive medium ? failed -error: "medium" isn't a valid value for '' - [possible values: fast, slow] +error: 'medium' isn't a valid value for '' + [possible values: fast, slow] -For more information try --help +For more information try '--help' ``` diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/04_01_enum.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/04_01_enum.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/04_01_enum.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/04_01_enum.rs 2023-02-01 05:24:55.000000000 +0000 @@ -1,16 +1,20 @@ use clap::{Parser, ValueEnum}; #[derive(Parser)] -#[clap(author, version, about, long_about = None)] +#[command(author, version, about, long_about = None)] struct Cli { /// What mode to run the program in - #[clap(arg_enum, value_parser)] + #[arg(value_enum)] mode: Mode, } #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, ValueEnum)] enum Mode { + /// Run swiftly Fast, + /// Crawl slowly but steadily + /// + /// This paragraph is ignored because there is no long help text for possible values. Slow, } diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/04_02_parse.md cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/04_02_parse.md --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/04_02_parse.md 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/04_02_parse.md 2023-02-01 05:24:55.000000000 +0000 @@ -1,31 +1,29 @@ ```console $ 04_02_parse_derive --help -clap [..] A simple to use, efficient, and full-featured Command Line Argument Parser -USAGE: - 04_02_parse_derive[EXE] +Usage: 04_02_parse_derive[EXE] -ARGS: - Network port to use +Arguments: + Network port to use -OPTIONS: - -h, --help Print help information - -V, --version Print version information +Options: + -h, --help Print help information + -V, --version Print version information $ 04_02_parse_derive 22 PORT = 22 $ 04_02_parse_derive foobar ? failed -error: Invalid value "foobar" for '': invalid digit found in string +error: Invalid value 'foobar' for '': invalid digit found in string -For more information try --help +For more information try '--help' $ 04_02_parse_derive 0 ? 
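The 04_01_enum hunks show that in clap 4 a `ValueEnum` is attached with `#[arg(value_enum)]` and that doc comments on the variants become the per-value help listed under "Possible values". A minimal sketch of the pattern:

```rust
use clap::{Parser, ValueEnum};

#[derive(Parser, Debug)]
#[command(about = "value-enum demo", long_about = None)]
struct Cli {
    /// What mode to run the program in
    #[arg(value_enum)]
    mode: Mode,
}

#[derive(Copy, Clone, Debug, PartialEq, Eq, ValueEnum)]
enum Mode {
    /// Run swiftly
    Fast,
    /// Crawl slowly but steadily
    Slow,
}

fn main() {
    // Values are matched against the kebab-case variant names (`fast`, `slow`).
    let cli = Cli::parse_from(["demo", "fast"]);
    assert_eq!(cli.mode, Mode::Fast);
}
```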
failed -error: Invalid value "0" for '': 0 is not in 1..=65535 +error: Invalid value '0' for '': 0 is not in 1..=65535 -For more information try --help +For more information try '--help' ``` diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/04_02_parse.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/04_02_parse.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/04_02_parse.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/04_02_parse.rs 2023-02-01 05:24:55.000000000 +0000 @@ -1,10 +1,10 @@ use clap::Parser; #[derive(Parser)] -#[clap(author, version, about, long_about = None)] +#[command(author, version, about, long_about = None)] struct Cli { /// Network port to use - #[clap(value_parser = clap::value_parser!(u16).range(1..))] + #[arg(value_parser = clap::value_parser!(u16).range(1..))] port: u16, } diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/04_02_validate.md cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/04_02_validate.md --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/04_02_validate.md 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/04_02_validate.md 2023-02-01 05:24:55.000000000 +0000 @@ -1,31 +1,29 @@ ```console $ 04_02_validate_derive --help -clap [..] A simple to use, efficient, and full-featured Command Line Argument Parser -USAGE: - 04_02_validate_derive[EXE] +Usage: 04_02_validate_derive[EXE] -ARGS: - Network port to use +Arguments: + Network port to use -OPTIONS: - -h, --help Print help information - -V, --version Print version information +Options: + -h, --help Print help information + -V, --version Print version information $ 04_02_validate_derive 22 PORT = 22 $ 04_02_validate_derive foobar ? failed -error: Invalid value "foobar" for '': `foobar` isn't a port number +error: Invalid value 'foobar' for '': `foobar` isn't a port number -For more information try --help +For more information try '--help' $ 04_02_validate_derive 0 ? 
failed -error: Invalid value "0" for '': Port not in range 1-65535 +error: Invalid value '0' for '': Port not in range 1-65535 -For more information try --help +For more information try '--help' ``` diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/04_02_validate.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/04_02_validate.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/04_02_validate.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/04_02_validate.rs 2023-02-01 05:24:55.000000000 +0000 @@ -3,10 +3,10 @@ use clap::Parser; #[derive(Parser)] -#[clap(author, version, about, long_about = None)] +#[command(author, version, about, long_about = None)] struct Cli { /// Network port to use - #[clap(value_parser = port_in_range)] + #[arg(value_parser = port_in_range)] port: u16, } diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/04_03_relations.md cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/04_03_relations.md --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/04_03_relations.md 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/04_03_relations.md 2023-02-01 05:24:55.000000000 +0000 @@ -1,33 +1,30 @@ ```console $ 04_03_relations_derive --help -clap [..] A simple to use, efficient, and full-featured Command Line Argument Parser -USAGE: - 04_03_relations_derive[EXE] [OPTIONS] <--set-ver |--major|--minor|--patch> [INPUT_FILE] +Usage: 04_03_relations_derive[EXE] [OPTIONS] <--set-ver |--major|--minor|--patch> [INPUT_FILE] -ARGS: - some regular input +Arguments: + [INPUT_FILE] some regular input -OPTIONS: - -c - -h, --help Print help information - --major auto inc major - --minor auto inc minor - --patch auto inc patch - --set-ver set version manually - --spec-in some special input argument - -V, --version Print version information +Options: + --set-ver set version manually + --major auto inc major + --minor auto inc minor + --patch auto inc patch + --spec-in some special input argument + -c + -h, --help Print help information + -V, --version Print version information $ 04_03_relations_derive ? failed error: The following required arguments were not provided: - <--set-ver |--major|--minor|--patch> + <--set-ver |--major|--minor|--patch> -USAGE: - 04_03_relations_derive[EXE] [OPTIONS] <--set-ver |--major|--minor|--patch> [INPUT_FILE] +Usage: 04_03_relations_derive[EXE] <--set-ver |--major|--minor|--patch> [INPUT_FILE] -For more information try --help +For more information try '--help' $ 04_03_relations_derive --major Version: 2.2.3 @@ -36,20 +33,18 @@ ? failed error: The argument '--major' cannot be used with '--minor' -USAGE: - 04_03_relations_derive[EXE] <--set-ver |--major|--minor|--patch> +Usage: 04_03_relations_derive[EXE] <--set-ver |--major|--minor|--patch> [INPUT_FILE] -For more information try --help +For more information try '--help' $ 04_03_relations_derive --major -c config.toml ? 
failed error: The following required arguments were not provided: - > + > -USAGE: - 04_03_relations_derive[EXE] -c <--set-ver |--major|--minor|--patch> > +Usage: 04_03_relations_derive[EXE] -c <--set-ver |--major|--minor|--patch> > -For more information try --help +For more information try '--help' $ 04_03_relations_derive --major -c config.toml --spec-in input.txt Version: 2.2.3 diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/04_03_relations.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/04_03_relations.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/04_03_relations.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/04_03_relations.rs 2023-02-01 05:24:55.000000000 +0000 @@ -1,38 +1,38 @@ use clap::{ArgGroup, Parser}; #[derive(Parser)] -#[clap(author, version, about, long_about = None)] -#[clap(group( +#[command(author, version, about, long_about = None)] +#[command(group( ArgGroup::new("vers") .required(true) - .args(&["set-ver", "major", "minor", "patch"]), + .args(["set_ver", "major", "minor", "patch"]), ))] struct Cli { /// set version manually - #[clap(long, value_name = "VER", value_parser)] + #[arg(long, value_name = "VER")] set_ver: Option, /// auto inc major - #[clap(long, action)] + #[arg(long)] major: bool, /// auto inc minor - #[clap(long, action)] + #[arg(long)] minor: bool, /// auto inc patch - #[clap(long, action)] + #[arg(long)] patch: bool, /// some regular input - #[clap(group = "input", value_parser)] + #[arg(group = "input")] input_file: Option, /// some special input argument - #[clap(long, group = "input", value_parser)] + #[arg(long, group = "input")] spec_in: Option, - #[clap(short, requires = "input", value_parser)] + #[arg(short, requires = "input")] config: Option, } diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/04_04_custom.md cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/04_04_custom.md --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/04_04_custom.md 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/04_04_custom.md 2023-02-01 05:24:55.000000000 +0000 @@ -1,32 +1,29 @@ ```console $ 04_04_custom_derive --help -clap [..] A simple to use, efficient, and full-featured Command Line Argument Parser -USAGE: - 04_04_custom_derive[EXE] [OPTIONS] [INPUT_FILE] +Usage: 04_04_custom_derive[EXE] [OPTIONS] [INPUT_FILE] -ARGS: - some regular input +Arguments: + [INPUT_FILE] some regular input -OPTIONS: - -c - -h, --help Print help information - --major auto inc major - --minor auto inc minor - --patch auto inc patch - --set-ver set version manually - --spec-in some special input argument - -V, --version Print version information +Options: + --set-ver set version manually + --major auto inc major + --minor auto inc minor + --patch auto inc patch + --spec-in some special input argument + -c + -h, --help Print help information + -V, --version Print version information $ 04_04_custom_derive ? failed error: Can only modify one version field -USAGE: - clap [OPTIONS] [INPUT_FILE] +Usage: clap [OPTIONS] [INPUT_FILE] -For more information try --help +For more information try '--help' $ 04_04_custom_derive --major Version: 2.2.3 @@ -35,20 +32,18 @@ ? 
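The 04_03_relations hunk highlights that in clap 4 the derive uses the field name itself as the argument `Id` (so the group refers to `"set_ver"` rather than the kebab-case `"set-ver"`), and `ArgGroup::args` takes the ids by value. A reduced sketch of a required group plus a `requires` link (a trimmed subset of the tutorial's fields):

```rust
use clap::{ArgGroup, Parser};

#[derive(Parser, Debug)]
#[command(about = "arg group demo", long_about = None)]
#[command(group(
    ArgGroup::new("vers")
        .required(true)
        // Ids are the snake_case field names; no leading `&` on the slice.
        .args(["set_ver", "major"]),
))]
struct Cli {
    /// set version manually
    #[arg(long, value_name = "VER")]
    set_ver: Option<String>,

    /// auto inc major
    #[arg(long)]
    major: bool,

    /// `-c` is only accepted when something in the `input` group is present.
    #[arg(short, requires = "input")]
    config: Option<String>,

    /// some regular input
    #[arg(group = "input")]
    input_file: Option<String>,
}

fn main() {
    let cli = Cli::parse_from(["demo", "--major", "-c", "cfg.toml", "in.txt"]);
    println!(
        "major: {}, config: {:?}, input: {:?}",
        cli.major, cli.config, cli.input_file
    );
}
```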
failed error: Can only modify one version field -USAGE: - clap [OPTIONS] [INPUT_FILE] +Usage: clap [OPTIONS] [INPUT_FILE] -For more information try --help +For more information try '--help' $ 04_04_custom_derive --major -c config.toml ? failed Version: 2.2.3 error: INPUT_FILE or --spec-in is required when using --config -USAGE: - clap [OPTIONS] [INPUT_FILE] +Usage: clap [OPTIONS] [INPUT_FILE] -For more information try --help +For more information try '--help' $ 04_04_custom_derive --major -c config.toml --spec-in input.txt Version: 2.2.3 diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/04_04_custom.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/04_04_custom.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/04_04_custom.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/04_04_custom.rs 2023-02-01 05:24:55.000000000 +0000 @@ -1,33 +1,33 @@ -use clap::{CommandFactory, ErrorKind, Parser}; +use clap::error::ErrorKind; +use clap::{CommandFactory, Parser}; #[derive(Parser)] -#[clap(author, version, about, long_about = None)] +#[command(author, version, about, long_about = None)] struct Cli { /// set version manually - #[clap(long, value_name = "VER", value_parser)] + #[arg(long, value_name = "VER")] set_ver: Option, /// auto inc major - #[clap(long, action)] + #[arg(long)] major: bool, /// auto inc minor - #[clap(long, action)] + #[arg(long)] minor: bool, /// auto inc patch - #[clap(long, action)] + #[arg(long)] patch: bool, /// some regular input - #[clap(value_parser)] input_file: Option, /// some special input argument - #[clap(long, value_parser)] + #[arg(long)] spec_in: Option, - #[clap(short, value_parser)] + #[arg(short)] config: Option, } @@ -73,8 +73,6 @@ // Check for usage of -c if let Some(config) = cli.config.as_deref() { - // todo: remove `#[allow(clippy::or_fun_call)]` lint when MSRV is bumped. 
- #[allow(clippy::or_fun_call)] let input = cli .input_file .as_deref() diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/05_01_assert.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/05_01_assert.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/05_01_assert.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/tutorial_derive/05_01_assert.rs 2023-02-01 05:24:55.000000000 +0000 @@ -1,10 +1,9 @@ use clap::Parser; #[derive(Parser)] -#[clap(author, version, about, long_about = None)] +#[command(author, version, about, long_about = None)] struct Cli { /// Network port to use - #[clap(value_parser)] port: u16, } diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/typed-derive.md cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/typed-derive.md --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/typed-derive.md 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/typed-derive.md 2023-02-01 05:24:55.000000000 +0000 @@ -3,82 +3,129 @@ Help: ```console $ typed-derive --help -clap +Usage: typed-derive[EXE] [OPTIONS] -USAGE: - typed-derive[EXE] [OPTIONS] - -OPTIONS: - --bind Handle IP addresses - -D Hand-written parser for tuples - -h, --help Print help information - -I Allow invalid UTF-8 paths - -O Implicitly using `std::str::FromStr` - --sleep Allow human-readable durations +Options: + -O Implicitly using `std::str::FromStr` + -I Allow invalid UTF-8 paths + --bind Handle IP addresses + --sleep Allow human-readable durations + -D Hand-written parser for tuples + --port Support for discrete numbers [default: 22] [possible values: 22, 80] + --log-level Support enums from a foreign crate that don't implement `ValueEnum` [default: info] [possible values: info, debug, info, warn, error] + -h, --help Print help information ``` Optimization-level (number) ```console $ typed-derive -O 1 -Args { optimization: Some(1), include: None, bind: None, sleep: None, defines: [] } +Args { optimization: Some(1), include: None, bind: None, sleep: None, defines: [], port: 22, log_level: Info } $ typed-derive -O plaid ? failed -error: Invalid value "plaid" for '-O ': invalid digit found in string +error: Invalid value 'plaid' for '-O ': invalid digit found in string -For more information try --help +For more information try '--help' ``` Include (path) ```console $ typed-derive -I../hello -Args { optimization: None, include: Some("../hello"), bind: None, sleep: None, defines: [] } +Args { optimization: None, include: Some("../hello"), bind: None, sleep: None, defines: [], port: 22, log_level: Info } ``` IP Address ```console $ typed-derive --bind 192.0.0.1 -Args { optimization: None, include: None, bind: Some(192.0.0.1), sleep: None, defines: [] } +Args { optimization: None, include: None, bind: Some(192.0.0.1), sleep: None, defines: [], port: 22, log_level: Info } $ typed-derive --bind localhost ? failed -error: Invalid value "localhost" for '--bind ': invalid IP address syntax +error: Invalid value 'localhost' for '--bind ': invalid IP address syntax -For more information try --help +For more information try '--help' ``` Time ```console $ typed-derive --sleep 10s -Args { optimization: None, include: None, bind: None, sleep: Some(Duration(10s)), defines: [] } +Args { optimization: None, include: None, bind: None, sleep: Some(Duration(10s)), defines: [], port: 22, log_level: Info } $ typed-derive --sleep forever ? 
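The 04_04_custom hunk moves `ErrorKind` to `clap::error::ErrorKind` while keeping the `CommandFactory` pattern for reporting hand-rolled validation failures with clap's usual formatting and exit code. A minimal sketch of that pattern (the cross-argument check itself is illustrative):

```rust
use clap::error::ErrorKind;
use clap::{CommandFactory, Parser};

#[derive(Parser, Debug)]
#[command(about = "custom validation demo", long_about = None)]
struct Cli {
    #[arg(long)]
    major: bool,

    #[arg(long)]
    minor: bool,
}

fn main() {
    let cli = Cli::parse();

    // Checks that are awkward to express declaratively can still produce a
    // properly formatted clap error and the conventional exit status.
    if cli.major && cli.minor {
        let mut cmd = Cli::command();
        cmd.error(
            ErrorKind::ArgumentConflict,
            "Can only modify one version field",
        )
        .exit();
    }

    println!("major: {}, minor: {}", cli.major, cli.minor);
}
```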
failed -error: Invalid value "forever" for '--sleep ': expected number at 0 +error: Invalid value 'forever' for '--sleep ': expected number at 0 -For more information try --help +For more information try '--help' ``` Defines (key-value pairs) ```console $ typed-derive -D Foo=10 -D Alice=30 -Args { optimization: None, include: None, bind: None, sleep: None, defines: [("Foo", 10), ("Alice", 30)] } +Args { optimization: None, include: None, bind: None, sleep: None, defines: [("Foo", 10), ("Alice", 30)], port: 22, log_level: Info } $ typed-derive -D Foo ? failed -error: Invalid value "Foo" for '-D ': invalid KEY=value: no `=` found in `Foo` +error: Invalid value 'Foo' for '-D ': invalid KEY=value: no `=` found in `Foo` -For more information try --help +For more information try '--help' $ typed-derive -D Foo=Bar ? failed -error: Invalid value "Foo=Bar" for '-D ': invalid digit found in string +error: Invalid value 'Foo=Bar' for '-D ': invalid digit found in string + +For more information try '--help' + +``` + +Discrete numbers +```console +$ typed-derive --port 22 +Args { optimization: None, include: None, bind: None, sleep: None, defines: [], port: 22, log_level: Info } + +$ typed-derive --port 80 +Args { optimization: None, include: None, bind: None, sleep: None, defines: [], port: 80, log_level: Info } + +$ typed-derive --port +? failed +error: The argument '--port ' requires a value but none was supplied + [possible values: 22, 80] + +For more information try '--help' + +$ typed-derive --port 3000 +? failed +error: '3000' isn't a valid value for '--port ' + [possible values: 22, 80] + +For more information try '--help' + +``` + +Enums from crates that can't implement `ValueEnum` +```console +$ typed-derive --log-level debug +Args { optimization: None, include: None, bind: None, sleep: None, defines: [], port: 22, log_level: Debug } + +$ typed-derive --log-level error +Args { optimization: None, include: None, bind: None, sleep: None, defines: [], port: 22, log_level: Error } + +$ typed-derive --log-level +? failed +error: The argument '--log-level ' requires a value but none was supplied + [possible values: info, debug, info, warn, error] + +For more information try '--help' + +$ typed-derive --log-level critical +? 
failed +error: 'critical' isn't a valid value for '--log-level ' + [possible values: info, debug, info, warn, error] -For more information try --help +For more information try '--help' ``` diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/typed-derive.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/typed-derive.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/examples/typed-derive.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/examples/typed-derive.rs 2023-02-01 05:24:55.000000000 +0000 @@ -1,27 +1,47 @@ +use clap::builder::TypedValueParser as _; use clap::Parser; use std::error::Error; #[derive(Parser, Debug)] // requires `derive` feature +#[command(term_width = 0)] // Just to make testing across clap features easier struct Args { /// Implicitly using `std::str::FromStr` - #[clap(short = 'O', value_parser)] + #[arg(short = 'O')] optimization: Option, /// Allow invalid UTF-8 paths - #[clap(short = 'I', value_parser, value_name = "DIR", value_hint = clap::ValueHint::DirPath)] + #[arg(short = 'I', value_name = "DIR", value_hint = clap::ValueHint::DirPath)] include: Option, /// Handle IP addresses - #[clap(long, value_parser)] + #[arg(long)] bind: Option, /// Allow human-readable durations - #[clap(long, value_parser)] + #[arg(long)] sleep: Option, /// Hand-written parser for tuples - #[clap(short = 'D', value_parser = parse_key_val::)] + #[arg(short = 'D', value_parser = parse_key_val::)] defines: Vec<(String, i32)>, + + /// Support for discrete numbers + #[arg( + long, + default_value_t = 22, + value_parser = clap::builder::PossibleValuesParser::new(["22", "80"]) + .map(|s| s.parse::().unwrap()), + )] + port: usize, + + /// Support enums from a foreign crate that don't implement `ValueEnum` + #[arg( + long, + default_value_t = foreign_crate::LogLevel::Info, + value_parser = clap::builder::PossibleValuesParser::new(["info", "debug", "info", "warn", "error"]) + .map(|s| s.parse::().unwrap()), + )] + log_level: foreign_crate::LogLevel, } /// Parse a single key-value pair @@ -38,6 +58,44 @@ Ok((s[..pos].parse()?, s[pos + 1..].parse()?)) } +mod foreign_crate { + #[derive(Copy, Clone, PartialEq, Eq, Debug)] + pub enum LogLevel { + Trace, + Debug, + Info, + Warn, + Error, + } + + impl std::fmt::Display for LogLevel { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + let s = match self { + Self::Trace => "trace", + Self::Debug => "debug", + Self::Info => "info", + Self::Warn => "warn", + Self::Error => "error", + }; + s.fmt(f) + } + } + impl std::str::FromStr for LogLevel { + type Err = String; + + fn from_str(s: &str) -> Result { + match s { + "trace" => Ok(Self::Trace), + "debug" => Ok(Self::Debug), + "info" => Ok(Self::Info), + "warn" => Ok(Self::Warn), + "error" => Ok(Self::Error), + _ => Err(format!("Unknown log level: {s}")), + } + } + } +} + fn main() { let args = Args::parse(); println!("{:?}", args); diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/src/bin/stdio-fixture.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/src/bin/stdio-fixture.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/src/bin/stdio-fixture.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/src/bin/stdio-fixture.rs 2023-02-01 05:24:55.000000000 +0000 @@ -8,6 +8,7 @@ clap::Arg::new("verbose") .long("verbose") .help("log") + .action(clap::ArgAction::SetTrue) .long_help("more log"), ); cmd.get_matches(); diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/src/builder/action.rs 
cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/src/builder/action.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/src/builder/action.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/src/builder/action.rs 2023-02-01 05:24:55.000000000 +0000 @@ -2,7 +2,8 @@ /// /// # Examples /// -/// ```rust +#[cfg_attr(not(feature = "help"), doc = " ```ignore")] +#[cfg_attr(feature = "help", doc = " ```")] /// # use clap::Command; /// # use clap::Arg; /// let cmd = Command::new("mycmd") @@ -26,6 +27,10 @@ pub enum ArgAction { /// When encountered, store the associated value(s) in [`ArgMatches`][crate::ArgMatches] /// + /// **NOTE:** If the argument has previously been seen, it will result in a + /// [`ArgumentConflict`][crate::error::ErrorKind::ArgumentConflict] unless + /// [`Command::args_override_self(true)`][crate::Command::args_override_self] is set. + /// /// # Examples /// /// ```rust @@ -40,7 +45,6 @@ /// /// let matches = cmd.try_get_matches_from(["mycmd", "--flag", "value"]).unwrap(); /// assert!(matches.contains_id("flag")); - /// assert_eq!(matches.occurrences_of("flag"), 0); /// assert_eq!( /// matches.get_many::("flag").unwrap_or_default().map(|v| v.as_str()).collect::>(), /// vec!["value"] @@ -63,51 +67,12 @@ /// /// let matches = cmd.try_get_matches_from(["mycmd", "--flag", "value1", "--flag", "value2"]).unwrap(); /// assert!(matches.contains_id("flag")); - /// assert_eq!(matches.occurrences_of("flag"), 0); /// assert_eq!( /// matches.get_many::("flag").unwrap_or_default().map(|v| v.as_str()).collect::>(), /// vec!["value1", "value2"] /// ); /// ``` Append, - /// Deprecated, replaced with [`ArgAction::Set`] or [`ArgAction::Append`] - /// - /// Builder: Instead of `arg.action(ArgAction::StoreValue)`, - /// - Use `arg.action(ArgAction::Set)` for single-occurrence arguments - /// - Use `arg.action(ArgAction::Append)` for multiple-occurrence arguments - /// - /// Derive: opt-in to the new behavior with `#[clap(action)]` - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.2.0", - note = "Replaced with `ArgAction::Set` or `ArgAction::Append` - -Derive: opt-in to the new behavior with `#[clap(action)]` - -Builder: Instead of `arg.action(ArgAction::StoreValue)`, -- Use `arg.action(ArgAction::Set)` for single-occurrence arguments -- Use `arg.action(ArgAction::Append)` for multiple-occurrence arguments -" - ) - )] - StoreValue, - /// Deprecated, replaced with [`ArgAction::SetTrue`] or [`ArgAction::Count`] - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.2.0", - note = "Replaced with `ArgAction::SetTrue` or `ArgAction::Count` - -Derive: opt-in to the new behavior with `#[clap(action)]` - -Builder: Instead of `arg.action(ArgAction::IncOccurrence)`, -- Use `arg.action(ArgAction::SetTrue)` if you just care if its set, then switch `matches.is_present` to `matches.get_flag` -- Use `arg.action(ArgAction::Count)` if you care how many times its set, then switch `matches.occurrences_of` to `matches.get_count` -" - ) - )] - IncOccurrence, /// When encountered, act as if `"true"` was encountered on the command-line /// /// If no [`default_value`][super::Arg::default_value] is set, it will be `false`. @@ -115,6 +80,10 @@ /// No value is allowed. 
To optionally accept a value, see /// [`Arg::default_missing_value`][super::Arg::default_missing_value] /// + /// **NOTE:** If the argument has previously been seen, it will result in a + /// [`ArgumentConflict`][crate::error::ErrorKind::ArgumentConflict] unless + /// [`Command::args_override_self(true)`][crate::Command::args_override_self] is set. + /// /// # Examples /// /// ```rust @@ -127,9 +96,8 @@ /// .action(clap::ArgAction::SetTrue) /// ); /// - /// let matches = cmd.clone().try_get_matches_from(["mycmd", "--flag", "--flag"]).unwrap(); + /// let matches = cmd.clone().try_get_matches_from(["mycmd", "--flag"]).unwrap(); /// assert!(matches.contains_id("flag")); - /// assert_eq!(matches.occurrences_of("flag"), 0); /// assert_eq!( /// matches.get_one::("flag").copied(), /// Some(true) @@ -137,12 +105,46 @@ /// /// let matches = cmd.try_get_matches_from(["mycmd"]).unwrap(); /// assert!(matches.contains_id("flag")); - /// assert_eq!(matches.occurrences_of("flag"), 0); /// assert_eq!( /// matches.get_one::("flag").copied(), /// Some(false) /// ); /// ``` + /// + /// You can use [`TypedValueParser::map`][crate::builder::TypedValueParser::map] to have the + /// flag control an application-specific type: + /// ```rust + /// # use clap::Command; + /// # use clap::Arg; + /// # use clap::builder::TypedValueParser as _; + /// # use clap::builder::BoolishValueParser; + /// let cmd = Command::new("mycmd") + /// .arg( + /// Arg::new("flag") + /// .long("flag") + /// .action(clap::ArgAction::SetTrue) + /// .value_parser( + /// BoolishValueParser::new() + /// .map(|b| -> usize { + /// if b { 10 } else { 5 } + /// }) + /// ) + /// ); + /// + /// let matches = cmd.clone().try_get_matches_from(["mycmd", "--flag"]).unwrap(); + /// assert!(matches.contains_id("flag")); + /// assert_eq!( + /// matches.get_one::("flag").copied(), + /// Some(10) + /// ); + /// + /// let matches = cmd.try_get_matches_from(["mycmd"]).unwrap(); + /// assert!(matches.contains_id("flag")); + /// assert_eq!( + /// matches.get_one::("flag").copied(), + /// Some(5) + /// ); + /// ``` SetTrue, /// When encountered, act as if `"false"` was encountered on the command-line /// @@ -151,6 +153,10 @@ /// No value is allowed. To optionally accept a value, see /// [`Arg::default_missing_value`][super::Arg::default_missing_value] /// + /// **NOTE:** If the argument has previously been seen, it will result in a + /// [`ArgumentConflict`][crate::error::ErrorKind::ArgumentConflict] unless + /// [`Command::args_override_self(true)`][crate::Command::args_override_self] is set. 
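The action.rs doc updates above remove the old `occurrences_of` assertions: with clap 4 actions, results are read back through typed accessors instead. A short builder-API sketch, assuming clap 4 (`get_flag` for `SetTrue`, `get_count` for `Count`):

```rust
use clap::{Arg, ArgAction, Command};

fn main() {
    let cmd = Command::new("mycmd")
        .arg(Arg::new("verbose").long("verbose").action(ArgAction::SetTrue))
        .arg(Arg::new("debug").short('d').action(ArgAction::Count));

    let matches = cmd
        .try_get_matches_from(["mycmd", "--verbose", "-dd"])
        .unwrap();

    // `is_present`/`occurrences_of` are gone; the action decides the
    // stored type and the typed getter reads it back.
    assert!(matches.get_flag("verbose"));
    assert_eq!(matches.get_count("debug"), 2);
}
```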
+ /// /// # Examples /// /// ```rust @@ -163,9 +169,8 @@ /// .action(clap::ArgAction::SetFalse) /// ); /// - /// let matches = cmd.clone().try_get_matches_from(["mycmd", "--flag", "--flag"]).unwrap(); + /// let matches = cmd.clone().try_get_matches_from(["mycmd", "--flag"]).unwrap(); /// assert!(matches.contains_id("flag")); - /// assert_eq!(matches.occurrences_of("flag"), 0); /// assert_eq!( /// matches.get_one::("flag").copied(), /// Some(false) @@ -173,7 +178,6 @@ /// /// let matches = cmd.try_get_matches_from(["mycmd"]).unwrap(); /// assert!(matches.contains_id("flag")); - /// assert_eq!(matches.occurrences_of("flag"), 0); /// assert_eq!( /// matches.get_one::("flag").copied(), /// Some(true) @@ -201,7 +205,6 @@ /// /// let matches = cmd.clone().try_get_matches_from(["mycmd", "--flag", "--flag"]).unwrap(); /// assert!(matches.contains_id("flag")); - /// assert_eq!(matches.occurrences_of("flag"), 0); /// assert_eq!( /// matches.get_count("flag"), /// 2 @@ -209,20 +212,20 @@ /// /// let matches = cmd.try_get_matches_from(["mycmd"]).unwrap(); /// assert!(matches.contains_id("flag")); - /// assert_eq!(matches.occurrences_of("flag"), 0); /// assert_eq!( /// matches.get_count("flag"), /// 0 /// ); /// ``` Count, - /// When encountered, display [`Command::print_help`][super::App::print_help] + /// When encountered, display [`Command::print_help`][super::Command::print_help] /// - /// Depending on the flag, [`Command::print_long_help`][super::App::print_long_help] may be shown + /// Depending on the flag, [`Command::print_long_help`][super::Command::print_long_help] may be shown /// /// # Examples /// - /// ```rust + #[cfg_attr(not(feature = "help"), doc = " ```ignore")] + #[cfg_attr(feature = "help", doc = " ```")] /// # use clap::Command; /// # use clap::Arg; /// let cmd = Command::new("mycmd") @@ -241,9 +244,9 @@ /// assert_eq!(err.kind(), clap::error::ErrorKind::DisplayHelp); /// ``` Help, - /// When encountered, display [`Command::version`][super::App::version] + /// When encountered, display [`Command::version`][super::Command::version] /// - /// Depending on the flag, [`Command::long_version`][super::App::long_version] may be shown + /// Depending on the flag, [`Command::long_version`][super::Command::long_version] may be shown /// /// # Examples /// @@ -278,10 +281,6 @@ match self { Self::Set => true, Self::Append => true, - #[allow(deprecated)] - Self::StoreValue => true, - #[allow(deprecated)] - Self::IncOccurrence => false, Self::SetTrue => false, Self::SetFalse => false, Self::Count => false, @@ -294,10 +293,6 @@ match self { Self::Set => None, Self::Append => None, - #[allow(deprecated)] - Self::StoreValue => None, - #[allow(deprecated)] - Self::IncOccurrence => None, Self::SetTrue => Some(std::ffi::OsStr::new("false")), Self::SetFalse => Some(std::ffi::OsStr::new("true")), Self::Count => Some(std::ffi::OsStr::new("0")), @@ -306,14 +301,22 @@ } } + pub(crate) fn default_missing_value(&self) -> Option<&'static std::ffi::OsStr> { + match self { + Self::Set => None, + Self::Append => None, + Self::SetTrue => Some(std::ffi::OsStr::new("true")), + Self::SetFalse => Some(std::ffi::OsStr::new("false")), + Self::Count => None, + Self::Help => None, + Self::Version => None, + } + } + pub(crate) fn default_value_parser(&self) -> Option { match self { Self::Set => None, Self::Append => None, - #[allow(deprecated)] - Self::StoreValue => None, - #[allow(deprecated)] - Self::IncOccurrence => None, Self::SetTrue => Some(super::ValueParser::bool()), Self::SetFalse => 
Some(super::ValueParser::bool()), Self::Count => Some(crate::value_parser!(u8).into()), @@ -329,12 +332,8 @@ match self { Self::Set => None, Self::Append => None, - #[allow(deprecated)] - Self::StoreValue => None, - #[allow(deprecated)] - Self::IncOccurrence => None, - Self::SetTrue => Some(AnyValueId::of::()), - Self::SetFalse => Some(AnyValueId::of::()), + Self::SetTrue => None, + Self::SetFalse => None, Self::Count => Some(AnyValueId::of::()), Self::Help => None, Self::Version => None, diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/src/builder/app_settings.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/src/builder/app_settings.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/src/builder/app_settings.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/src/builder/app_settings.rs 2023-02-01 05:24:55.000000000 +0000 @@ -1,9 +1,5 @@ -#![allow(deprecated)] - // Std use std::ops::BitOr; -#[cfg(feature = "yaml")] -use std::str::FromStr; #[allow(unused)] use crate::Arg; @@ -15,7 +11,7 @@ #[doc(hidden)] #[derive(Debug, Copy, Clone, PartialEq, Eq)] -pub struct AppFlags(Flags); +pub(crate) struct AppFlags(Flags); impl Default for AppFlags { fn default() -> Self { @@ -31,938 +27,38 @@ /// [`Command`]: crate::Command #[derive(Debug, PartialEq, Copy, Clone)] #[non_exhaustive] -pub enum AppSettings { - /// Deprecated, replaced with [`Command::ignore_errors`] - /// - /// Derive: replace `#[clap(setting = IgnoreErrors)]` with `#[clap(ignore_errors = true)]` - /// - /// Builder: replace `cmd.setting(IgnoreErrors)` with `cmd.ignore_errors = true` - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.1.0", - note = "Replaced with `Command::ignore_errors` - -Derive: replace `#[clap(setting = IgnoreErrors)]` with `#[clap(ignore_errors = true)]` - -Builder: replace `cmd.setting(IgnoreErrors)` with `cmd.ignore_errors(true)` -" - ) - )] +pub(crate) enum AppSettings { IgnoreErrors, - - /// Deprecated, replace - /// ```rust,no_run - /// let cmd = clap::Command::new("cmd") - /// .global_setting(clap::AppSettings::WaitOnError) - /// .arg(clap::arg!(--flag)); - /// let m = cmd.get_matches(); - /// ``` - /// with - /// ```rust - /// let cmd = clap::Command::new("cmd") - /// .arg(clap::arg!(--flag)); - /// let m = match cmd.try_get_matches() { - /// Ok(m) => m, - /// Err(err) => { - /// if err.use_stderr() { - /// let _ = err.print(); - /// - /// eprintln!("\nPress [ENTER] / [RETURN] to continue..."); - /// use std::io::BufRead; - /// let mut s = String::new(); - /// let i = std::io::stdin(); - /// i.lock().read_line(&mut s).unwrap(); - /// - /// std::process::exit(2); - /// } else { - /// let _ = err.print(); - /// std::process::exit(0); - /// } - /// } - /// }; - /// ``` - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.1.0", - note = "See documentation for how to hand-implement this" - ) - )] - WaitOnError, - - /// Deprecated, replaced with [`Command::allow_hyphen_values`] and - /// [`Arg::is_allow_hyphen_values_set`] - /// - /// Derive: replace `#[clap(setting = AllowHyphenValues)]` with `#[clap(allow_hyphen_values = true)]` - /// - /// Builder: replace `cmd.setting(AllowHyphenValues)` with `cmd.allow_hyphen_values(true)` - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.1.0", - note = "Replaced with `Command::allow_hyphen_values` and `Arg::is_allow_hyphen_values_set` - -Derive: replace `#[clap(setting = AllowHyphenValues)]` with `#[clap(allow_hyphen_values = true)]` - -Builder: replace `cmd.setting(AllowHyphenValues)` with 
`cmd.allow_hyphen_values(true)` -" - ) - )] AllowHyphenValues, - - /// Deprecated, replaced with [`Command::allow_negative_numbers`] and - /// [`Command::is_allow_negative_numbers_set`] - /// - /// Derive: replace `#[clap(setting = AllowNegativeNumbers)]` with `#[clap(allow_negative_numbers = true)]` - /// - /// Builder: replace `cmd.setting(AllowNegativeNumbers)` with `cmd.allow_negative_numbers(true)` - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.1.0", - note = "Replaced with `Command::allow_negative_numbers` and `Command::is_allow_negative_numbers_set` - -Derive: replace `#[clap(setting = AllowNegativeNumbers)]` with `#[clap(allow_negative_numbers = true)]` - -Builder: replace `cmd.setting(AllowNegativeNumbers)` with `cmd.allow_negative_numbers(true)` -" - ) - )] AllowNegativeNumbers, - - /// Deprecated, replaced with [`ArgAction::Set`][super::ArgAction::Set] - /// - /// The new actions (`ArgAction::Set`, `ArgAction::SetTrue`) do this by default. - /// - /// See `ArgAction::StoreValue` and `ArgAction::IncOccurrence` for how to migrate - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.2.0", - note = "Replaced with `Arg::action(ArgAction::...)` - -The new actions (`ArgAction::Set`, `ArgAction::SetTrue`) do this by default. - -See `ArgAction::StoreValue` and `ArgAction::IncOccurrence` for how to migrate -" - ) - )] AllArgsOverrideSelf, - - /// Deprecated, replaced with [`Command::allow_missing_positional`] and - /// [`Command::is_allow_missing_positional_set`] - /// - /// Derive: replace `#[clap(setting = AllowMissingPositional)]` with `#[clap(allow_missing_positional = true)]` - /// - /// Builder: replace `cmd.setting(AllowMissingPositional)` with `cmd.allow_missing_positional(true)` - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.1.0", - note = "Replaced with `Command::allow_missing_positional` and `Command::is_allow_missing_positional_set` - -Derive: replace `#[clap(setting = AllowMissingPositional)]` with `#[clap(allow_missing_positional = true)]` - -Builder: replace `cmd.setting(AllowMissingPositional)` with `cmd.allow_missing_positional(true)` -" - ) - )] AllowMissingPositional, - - /// Deprecated, replaced with [`Command::trailing_var_arg`] and [`Command::is_trailing_var_arg_set`] - /// - /// Derive: replace `#[clap(setting = TrailingVarArg)]` with `#[clap(trailing_var_arg = true)]` - /// - /// Builder: replace `cmd.setting(TrailingVarArg)` with `cmd.trailing_var_arg(true)` - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.1.0", - note = "Replaced with `Command::trailing_var_arg` and `Command::is_trailing_var_arg_set` - -Derive: replace `#[clap(setting = TrailingVarArg)]` with `#[clap(trailing_var_arg = true)]` - -Builder: replace `cmd.setting(TrailingVarArg)` with `cmd.trailing_var_arg(true)` -" - ) - )] TrailingVarArg, - - /// Deprecated, replaced with [`Command::dont_delimit_trailing_values`] and - /// [`Command::is_dont_delimit_trailing_values_set`] - /// - /// Derive: replace `#[clap(setting = DontDelimitTrailingValues)]` with `#[clap(dont_delimit_trailing_values = true)]` - /// - /// Builder: replace `cmd.setting(DontDelimitTrailingValues)` with `cmd.dont_delimit_trailing_values(true)` - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.1.0", - note = "Replaced with `Command::dont_delimit_trailing_values` and `Command::is_dont_delimit_trailing_values_set` - -Derive: replace `#[clap(setting = DontDelimitTrailingValues)]` with `#[clap(dont_delimit_trailing_values = true)]` - -Builder: 
replace `cmd.setting(DontDelimitTrailingValues)` with `cmd.dont_delimit_trailing_values(true)` -" - ) - )] DontDelimitTrailingValues, - - /// Deprecated, replaced with [`Command::infer_long_args`] - /// - /// Derive: replace `#[clap(setting = InferLongArgs)]` with `#[clap(infer_long_args = true)]` - /// - /// Builder: replace `cmd.setting(InferLongArgs)` with `cmd.infer_long_args(true)` - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.1.0", - note = "Replaced with `Command::infer_long_args` - -Derive: replace `#[clap(setting = InferLongArgs)]` with `#[clap(infer_long_args = true)]` - -Builder: replace `cmd.setting(InferLongArgs)` with `cmd.infer_long_args(true)` -" - ) - )] InferLongArgs, - - /// Deprecated, replaced with [`Command::infer_subcommands`] - /// - /// Derive: replace `#[clap(setting = InferSubcommands)]` with `#[clap(infer_subcommands = true)]` - /// - /// Builder: replace `cmd.setting(InferSubcommands)` with `cmd.infer_subcommands(true)` - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.1.0", - note = "Replaced with `Command::infer_subcommands` - -Derive: replace `#[clap(setting = InferSubcommands)]` with `#[clap(infer_subcommands = true)]` - -Builder: replace `cmd.setting(InferSubcommands)` with `cmd.infer_subcommands(true)` -" - ) - )] InferSubcommands, - - /// Deprecated, replaced with [`Command::subcommand_required`] and - /// [`Command::is_subcommand_required_set`] - /// - /// Derive: replace `#[clap(setting = SubcommandRequired)]` with `#[clap(subcommand_required = true)]` - /// - /// Builder: replace `cmd.setting(SubcommandRequired)` with `cmd.subcommand_required(true)` - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.1.0", - note = "Replaced with `Command::subcommand_required` and `Command::is_subcommand_required_set` - -Derive: replace `#[clap(setting = SubcommandRequired)]` with `#[clap(subcommand_required = true)]` - -Builder: replace `cmd.setting(SubcommandRequired)` with `cmd.subcommand_required(true)` -" - ) - )] SubcommandRequired, - - /// Deprecated, replaced with [`Command::subcommand_required`] combined with - /// [`Command::arg_required_else_help`]. 
- /// - /// Derive: replace `#[clap(setting = SubcommandRequiredElseHelp)]` with `#[clap(subcommand_required = true, arg_required_else_help = true)]` - /// - /// Builder: replace `cmd.setting(SubcommandRequiredElseHelp)` with `cmd.subcommand_required(true).arg_required_else_help(true)` - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.1.0", - note = "Replaced with `Command::subcommand_required` combined with `Command::arg_required_else_help` - -Derive: replace `#[clap(setting = SubcommandRequiredElseHelp)]` with `#[clap(subcommand_required = true, arg_required_else_help = true)]` - -Builder: replace `cmd.setting(SubcommandRequiredElseHelp)` with `cmd.subcommand_required(true).arg_required_else_help(true)` -" - ) - )] - SubcommandRequiredElseHelp, - - /// Deprecated, replaced with [`Command::allow_external_subcommands`] and - /// [`Command::is_allow_external_subcommands_set`] - /// - /// Derive: replace `#[clap(setting = AllowExternalSubcommands)]` with `#[clap(allow_external_subcommands = true)]` - /// - /// Builder: replace `cmd.setting(AllowExternalSubcommands)` with `cmd.allow_external_subcommands(true)` - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.1.0", - note = "Replaced with `Command::allow_external_subcommands` and `Command::is_allow_external_subcommands_set` - -Derive: replace `#[clap(setting = AllowExternalSubcommands)]` with `#[clap(allow_external_subcommands = true)]` - -Builder: replace `cmd.setting(AllowExternalSubcommands)` with `cmd.allow_external_subcommands(true)` -" - ) - )] AllowExternalSubcommands, - - /// Deprecated, replaced with [`Command::multicall`] and [`Command::is_multicall_set`] - /// - /// Derive: replace `#[clap(setting = Multicall)]` with `#[clap(multicall = true)]` - /// - /// Builder: replace `cmd.setting(Multicall)` with `cmd.multicall(true)` - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.1.0", - note = "Replaced with `Command::multicall` and `Command::is_multicall_set` - -Derive: replace `#[clap(setting = Multicall)]` with `#[clap(multicall = true)]` - -Builder: replace `cmd.setting(Multicall)` with `cmd.multicall(true)` -" - ) - )] Multicall, - - /// Deprecated, replaced with [`Command::allow_invalid_utf8_for_external_subcommands`] and [`Command::is_allow_invalid_utf8_for_external_subcommands_set`] - /// - /// Derive: replace `#[clap(setting = AllowInvalidUtf8ForExternalSubcommands)]` with `#[clap(allow_invalid_utf8_for_external_subcommands = true)]` - /// - /// Builder: replace `cmd.setting(AllowInvalidUtf8ForExternalSubcommands)` with `cmd.allow_invalid_utf8_for_external_subcommands(true)` - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.1.0", - note = "Replaced with `Command::allow_invalid_utf8_for_external_subcommands` and `Command::is_allow_invalid_utf8_for_external_subcommands_set` - -Derive: replace `#[clap(setting = AllowInvalidUtf8ForExternalSubcommands)]` with `#[clap(allow_invalid_utf8_for_external_subcommands = true)]` - -Builder: replace `cmd.setting(AllowInvalidUtf8ForExternalSubcommands)` with `cmd.allow_invalid_utf8_for_external_subcommands(true)` -" - ) - )] - AllowInvalidUtf8ForExternalSubcommands, - - /// Deprecated, this is now the default - /// - /// Derive: remove `#[clap(setting = UseLongFormatForHelpSubcommand)]` - /// - /// Builder: remove `cmd.setting(UseLongFormatForHelpSubcommand)` - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.1.0", - note = "This is now the default - -Derive: remove `#[clap(setting = 
UseLongFormatForHelpSubcommand)]` - -Builder: remove `cmd.setting(UseLongFormatForHelpSubcommand)` -" - ) - )] - UseLongFormatForHelpSubcommand, - - /// Deprecated, replaced with [`Command::subcommand_negates_reqs`] and - /// [`Command::is_subcommand_negates_reqs_set`] - /// - /// Derive: replace `#[clap(setting = SubcommandsNegateReqs)]` with `#[clap(subcommand_negates_reqs = true)]` - /// - /// Builder: replace `cmd.setting(SubcommandsNegateReqs)` with `cmd.subcommand_negates_reqs(true)` - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.1.0", - note = "Replaced with `Command::subcommand_negates_reqs` and `Command::is_subcommand_negates_reqs_set` - -Derive: replace `#[clap(setting = SubcommandsNegateReqs)]` with `#[clap(subcommand_negates_reqs = true)]` - -Builder: replace `cmd.setting(SubcommandsNegateReqs)` with `cmd.subcommand_negates_reqs(true)` -" - ) - )] SubcommandsNegateReqs, - - /// Deprecated, replaced with [`Command::args_conflicts_with_subcommands`] and - /// [`Command::is_args_conflicts_with_subcommands_set`] - /// - /// Derive: replace `#[clap(setting = ArgsNegateSubcommands)]` with `#[clap(args_conflicts_with_subcommands = true)]` - /// - /// Builder: replace `cmd.setting(ArgsNegateSubcommands)` with `cmd.args_conflicts_with_subcommands(true)` - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.1.0", - note = "Replaced with `Command::args_conflicts_with_subcommands` and `Command::is_args_conflicts_with_subcommands_set` - -Derive: replace `#[clap(setting = ArgsNegateSubcommands)]` with `#[clap(args_conflicts_with_subcommands = true)]` - -Builder: replace `cmd.setting(ArgsNegateSubcommands)` with `cmd.args_conflicts_with_subcommands(true)` -" - ) - )] ArgsNegateSubcommands, - - /// Deprecated, replaced with [`Command::subcommand_precedence_over_arg`] and - /// [`Command::is_subcommand_precedence_over_arg_set`] - /// - /// Derive: replace `#[clap(setting = SubcommandPrecedenceOverArg)]` with `#[clap(subcommand_precedence_over_arg = true)]` - /// - /// Builder: replace `cmd.setting(SubcommandPrecedenceOverArg)` with `cmd.subcommand_precedence_over_arg(true)` - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.1.0", - note = "Replaced with `Command::subcommand_precedence_over_arg` and `Command::is_subcommand_precedence_over_arg_set` - -Derive: replace `#[clap(setting = SubcommandPrecedenceOverArg)]` with `#[clap(subcommand_precedence_over_arg = true)]` - -Builder: replace `cmd.setting(SubcommandPrecedenceOverArg)` with `cmd.subcommand_precedence_over_arg(true)` -" - ) - )] SubcommandPrecedenceOverArg, - - /// Deprecated, replaced with [`Command::arg_required_else_help`] and - /// [`Command::is_arg_required_else_help_set`] - /// - /// Derive: replace `#[clap(setting = ArgRequiredElseHelp)]` with `#[clap(arg_required_else_help = true)]` - /// - /// Builder: replace `cmd.setting(ArgRequiredElseHelp)` with `cmd.arg_required_else_help(true)` - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.1.0", - note = "Replaced with `Command::arg_required_else_help` and `Command::is_arg_required_else_help_set` - -Derive: replace `#[clap(setting = ArgRequiredElseHelp)]` with `#[clap(arg_required_else_help = true)]` - -Builder: replace `cmd.setting(ArgRequiredElseHelp)` with `cmd.arg_required_else_help(true)` -" - ) - )] ArgRequiredElseHelp, - - /// Displays the arguments and [`subcommands`] in the help message in the order that they were - /// declared in, and not alphabetically which is the default. 
- /// - /// To override the declaration order, see [`Arg::display_order`] and [`Command::display_order`]. - /// - /// # Examples - /// - /// ```no_run - /// # use clap::{Command, Arg, AppSettings}; - /// Command::new("myprog") - /// .global_setting(AppSettings::DeriveDisplayOrder) - /// .get_matches(); - /// ``` - /// - /// [`subcommands`]: crate::Command::subcommand() - /// [`Arg::display_order`]: crate::Arg::display_order - /// [`Command::display_order`]: crate::Command::display_order - DeriveDisplayOrder, - - /// Deprecated, replaced with [`Command::dont_collapse_args_in_usage`] and - /// [`Command::is_dont_collapse_args_in_usage_set`] - /// - /// Derive: replace `#[clap(setting = DontCollapseArgsInUsage)]` with `#[clap(dont_collapse_args_in_usage = true)]` - /// - /// Builder: replace `cmd.setting(DontCollapseArgsInUsage)` with `cmd.dont_collapse_args_in_usage(true)` - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.1.0", - note = "Replaced with `Command::dont_collapse_args_in_usage` and `Command::is_dont_collapse_args_in_usage_set` - -Derive: replace `#[clap(setting = DontCollapseArgsInUsage)]` with `#[clap(dont_collapse_args_in_usage = true)]` - -Builder: replace `cmd.setting(DontCollapseArgsInUsage)` with `cmd.dont_collapse_args_in_usage(true)` -" - ) - )] - DontCollapseArgsInUsage, - - /// Deprecated, replaced with [`Command::next_line_help`] and [`Command::is_next_line_help_set`] - /// - /// Derive: replace `#[clap(setting = NextLineHelp)]` with `#[clap(next_line_help = true)]` - /// - /// Builder: replace `cmd.setting(NextLineHelp)` with `cmd.next_line_help(true)` - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.1.0", - note = "Replaced with `Command::next_line_help` and `Command::is_next_line_help_set` - -Derive: replace `#[clap(setting = NextLineHelp)]` with `#[clap(next_line_help = true)]` - -Builder: replace `cmd.setting(NextLineHelp)` with `cmd.next_line_help(true)` -" - ) - )] NextLineHelp, - - /// Deprecated, replaced with [`Command::disable_colored_help`] and - /// [`Command::is_disable_colored_help_set`] - /// - /// Derive: replace `#[clap(setting = DisableColoredHelp)]` with `#[clap(disable_colored_help = true)]` - /// - /// Builder: replace `cmd.setting(DisableColoredHelp)` with `cmd.disable_colored_help(true)` - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.1.0", - note = "Replaced with `Command::disable_colored_help` and `Command::is_disable_colored_help_set` - -Derive: replace `#[clap(setting = DisableColoredHelp)]` with `#[clap(disable_colored_help = true)]` - -Builder: replace `cmd.setting(DisableColoredHelp)` with `cmd.disable_colored_help(true)` -" - ) - )] DisableColoredHelp, - - /// Deprecated, replaced with [`Command::disable_help_flag`] and [`Command::is_disable_help_flag_set`] - /// - /// Derive: replace `#[clap(setting = DisableHelpFlag)]` with `#[clap(disable_help_flag = true)]` - /// - /// Builder: replace `cmd.setting(DisableHelpFlag)` with `cmd.disable_help_flag(true)` - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.1.0", - note = "Replaced with `Command::disable_help_flag` and `Command::is_disable_help_flag_set` - -Derive: replace `#[clap(setting = DisableHelpFlag)]` with `#[clap(disable_help_flag = true)]` - -Builder: replace `cmd.setting(DisableHelpFlag)` with `cmd.disable_help_flag(true)` -" - ) - )] DisableHelpFlag, - - /// Deprecated, replaced with [`Command::disable_help_subcommand`] and - /// [`Command::is_disable_help_subcommand_set`] - /// - /// Derive: replace 
`#[clap(setting = DisableHelpSubcommand)]` with `#[clap(disable_help_subcommand = true)]` - /// - /// Builder: replace `cmd.setting(DisableHelpSubcommand)` with `cmd.disable_help_subcommand(true)` - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.1.0", - note = "Replaced with `Command::disable_help_subcommand` and `Command::is_disable_help_subcommand_set` - -Derive: replace `#[clap(setting = DisableHelpSubcommand)]` with `#[clap(disable_help_subcommand = true)]` - -Builder: replace `cmd.setting(DisableHelpSubcommand)` with `cmd.disable_help_subcommand(true)` -" - ) - )] DisableHelpSubcommand, - - /// Deprecated, replaced with [`Command::disable_version_flag`] and - /// [`Command::is_disable_version_flag_set`] - /// - /// Derive: replace `#[clap(setting = DisableVersionFlag)]` with `#[clap(disable_version_flag = true)]` - /// - /// Builder: replace `cmd.setting(DisableVersionFlag)` with `cmd.disable_version_flag(true)` - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.1.0", - note = "Replaced with `Command::disable_version_flag` and `Command::is_disable_version_flag_set` - -Derive: replace `#[clap(setting = DisableVersionFlag)]` with `#[clap(disable_version_flag = true)]` - -Builder: replace `cmd.setting(DisableVersionFlag)` with `cmd.disable_version_flag(true)` -" - ) - )] DisableVersionFlag, - - /// Deprecated, replaced with [`Command::propagate_version`] and [`Command::is_propagate_version_set`] - /// - /// Derive: replace `#[clap(setting = PropagateVersion)]` with `#[clap(propagate_version = true)]` - /// - /// Builder: replace `cmd.setting(PropagateVersion)` with `cmd.propagate_version(true)` - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.1.0", - note = "Replaced with `Command::propagate_version` and `Command::is_propagate_version_set` - -Derive: replace `#[clap(setting = PropagateVersion)]` with `#[clap(propagate_version = true)]` - -Builder: replace `cmd.setting(PropagateVersion)` with `cmd.propagate_version(true)` -" - ) - )] PropagateVersion, - - /// Deprecated, replaced with [`Command::hide`] and [`Command::is_hide_set`] - /// - /// Derive: replace `#[clap(setting = Hidden)]` with `#[clap(hide = true)]` - /// - /// Builder: replace `cmd.setting(Hidden)` with `cmd.hide(true)` - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.1.0", - note = "Replaced with `Command::hide` and `Command::is_hide_set` - -Derive: replace `#[clap(setting = Hidden)]` with `#[clap(hide = true)]` - -Builder: replace `cmd.setting(Hidden)` with `cmd.hide(true)` -" - ) - )] Hidden, - - /// Deprecated, replaced with [`Command::hide_possible_values`] and - /// [`Arg::is_hide_possible_values_set`] - /// - /// Derive: replace `#[clap(setting = HidePossibleValues)]` with `#[clap(hide_possible_values = true)]` - /// - /// Builder: replace `cmd.setting(HidePossibleValues)` with `cmd.hide_possible_values(true)` - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.1.0", - note = "Replaced with `Command::hide_possible_values` and `Arg::is_hide_possible_values_set` - -Derive: replace `#[clap(setting = HidePossibleValues)]` with `#[clap(hide_possible_values = true)]` - -Builder: replace `cmd.setting(HidePossibleValues)` with `cmd.hide_possible_values(true)` -" - ) - )] HidePossibleValues, - - /// Deprecated, replaced with [`Command::help_expected`] - /// - /// Derive: replace `#[clap(setting = HelpExpected)]` with `#[clap(help_expected = true)]` - /// - /// Builder: replace `cmd.setting(HelpExpected)` with `cmd.help_expected(true)` - 
#[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.1.0", - note = "Replaced with `Command::help_expected` - -Derive: replace `#[clap(setting = HelpExpected)]` with `#[clap(help_expected = true)]` - -Builder: replace `cmd.setting(HelpExpected)` with `cmd.help_expected(true)` -" - ) - )] HelpExpected, - - /// Deprecated, replaced with [`Command::no_binary_name`] - /// - /// Derive: replace `#[clap(setting = NoBinaryName)]` with `#[clap(no_binary_name = true)]` - /// - /// Builder: replace `cmd.setting(NoBinaryName)` with `cmd.no_binary_name(true)` - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.1.0", - note = "Replaced with `Command::no_binary_name` - -Derive: replace `#[clap(setting = NoBinaryName)]` with `#[clap(no_binary_name = true)]` - -Builder: replace `cmd.setting(NoBinaryName)` with `cmd.no_binary_name(true)` -" - ) - )] NoBinaryName, - - /// Deprecated, replaced with [`Arg::action`][super::Arg::action] - /// - /// Derive: replace `#[clap(setting = NoAutoHelp)]` with setting an explicit action on your help argument - /// - /// Builder: replace `cmd.setting(NoAutoHelp)` with setting an explicit action on your help argument - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.2.0", - note = "Replaced with `Arg::action` - -Derive: replace `#[clap(setting = NoAutoHelp)]` with setting an explicit action on your help argument - -Builder: replace `cmd.setting(NoAutoHelp)` with setting an explicit action on your help argument -" - ) - )] - NoAutoHelp, - - /// Deprecated, replaced with [`Arg::action`][super::Arg::action] - /// - /// Derive: replace `#[clap(setting = NoAutoVersion)]` with setting an explicit action on your version argument - /// - /// Builder: replace `cmd.setting(NoAutoVersion)` with setting an explicit action on your version argument - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.2.0", - note = "Replaced with `Arg::action` - -Derive: replace `#[clap(setting = NoAutoVersion)]` with setting an explicit action on your version argument - -Builder: replace `cmd.setting(NoAutoVersion)` with setting an explicit action on your version argument -" - ) - )] - NoAutoVersion, - - /// Deprecated, replaced with [`Command::allow_hyphen_values`] - /// - /// Derive: replace `#[clap(setting = AllowLeadingHyphen)]` with `#[clap(allow_hyphen_values = true)]` - /// - /// Builder: replace `cmd.setting(AllowLeadingHyphen)` with `cmd.allow_hyphen_values(true)` - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.0.0", - note = "Replaced with `Command::allow_hyphen_values` - -Derive: replace `#[clap(setting = AllowLeadingHyphen)]` with `#[clap(allow_hyphen_values = true)]` - -Builder: replace `cmd.setting(AllowLeadingHyphen)` with `cmd.allow_hyphen_values(true)` -" - ) - )] - #[doc(hidden)] - AllowLeadingHyphen, - - /// Deprecated, replaced with [`Command::allow_invalid_utf8_for_external_subcommands`] and [`Command::is_allow_invalid_utf8_for_external_subcommands_set`] - /// - /// Derive: replace `#[clap(setting = StrictUtf8)]` with `#[clap(allow_invalid_utf8_for_external_subcommands = true)]` - /// - /// Builder: replace `cmd.setting(StrictUtf8)` with `cmd.allow_invalid_utf8_for_external_subcommands(true)` - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.0.0", - note = "Replaced with `Command::allow_invalid_utf8_for_external_subcommands` and `Command::is_allow_invalid_utf8_for_external_subcommands_set` - -Derive: replace `#[clap(setting = StrictUtf8)]` with 
`#[clap(allow_invalid_utf8_for_external_subcommands = true)]` - -Builder: replace `cmd.setting(StrictUtf8)` with `cmd.allow_invalid_utf8_for_external_subcommands(true)` -" - ) - )] - #[doc(hidden)] - StrictUtf8, - - /// Deprecated, this is now the default - /// - /// Derive: remove `#[clap(setting = UnifiedHelpMessage)]` - /// - /// Builder: remove `cmd.setting(UnifiedHelpMessage)` - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.0.0", - note = "This is now the default - -Derive: remove `#[clap(setting = UnifiedHelpMessage)]` - -Builder: remove `cmd.setting(UnifiedHelpMessage)` -" - ) - )] - #[doc(hidden)] - UnifiedHelpMessage, - - /// Deprecated, this is now the default - /// - /// Derive: remove `#[clap(setting = ColoredHelp)]` - /// - /// Builder: remove `cmd.setting(ColoredHelp)` - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.0.0", - note = "This is now the default - -Derive: remove `#[clap(setting = ColoredHelp)]` - -Builder: remove `cmd.setting(ColoredHelp)` -" - ) - )] - #[doc(hidden)] - ColoredHelp, - - /// Deprecated, see [`Command::color`][crate::Command::color] - /// - /// Derive: replace `#[clap(setting = ColorAuto)]` with `#[clap(color = ColorChoice::Auto)]`` - /// - /// Builder: replace `cmd.setting(ColorAuto)` with `cmd.color(Color::Auto)` - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.0.0", - note = "Replaced with `Command::color` - -Derive: replace `#[clap(setting = ColorAuto)]` with `#[clap(color = ColorChoice::Auto)]`` - -Builder: replace `cmd.setting(ColorAuto)` with `cmd.color(Color::Auto)` -" - ) - )] - #[doc(hidden)] + #[allow(dead_code)] ColorAuto, - - /// Deprecated, replaced with [`Command::color`][crate::Command::color] - /// - /// Derive: replace `#[clap(setting = ColorAlways)]` with `#[clap(color = ColorChoice::Always)]`` - /// - /// Builder: replace `cmd.setting(ColorAlways)` with `cmd.color(Color::Always)` - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.0.0", - note = "Replaced with `Command::color` - -Derive: replace `#[clap(setting = ColorAlways)]` with `#[clap(color = ColorChoice::Always)]`` - -Builder: replace `cmd.setting(ColorAlways)` with `cmd.color(Color::Always)` -" - ) - )] - #[doc(hidden)] ColorAlways, - - /// Deprecated, replaced with [`Command::color`][crate::Command::color] - /// - /// Derive: replace `#[clap(setting = ColorNever)]` with `#[clap(color = ColorChoice::Never)]`` - /// - /// Builder: replace `cmd.setting(ColorNever)` with `cmd.color(Color::Never)` - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.0.0", - note = "Replaced with `Command::color` - -Derive: replace `#[clap(setting = ColorNever)]` with `#[clap(color = ColorChoice::Never)]`` - -Builder: replace `cmd.setting(ColorNever)` with `cmd.color(Color::Never)` -" - ) - )] - #[doc(hidden)] ColorNever, - - /// Deprecated, replaced with [`Command::disable_help_flag`] and [`Command::is_disable_help_flag_set`] - /// - /// Derive: replace `#[clap(setting = DisableHelpFlags)]` with `#[clap(disable_help_flag = true)]` - /// - /// Builder: replace `cmd.setting(DisableHelpFlags)` with `cmd.disable_help_flag(true)` - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.0.0", - note = "Replaced with `Command::disable_help_flag` and `Command::is_disable_help_flag_set` - -Derive: replace `#[clap(setting = DisableHelpFlags)]` with `#[clap(disable_help_flag = true)]` - -Builder: replace `cmd.setting(DisableHelpFlags)` with `cmd.disable_help_flag(true)` -" - ) - )] - #[doc(hidden)] - 
DisableHelpFlags, - - /// Deprecated, replaced with [`Command::disable_version_flag`] and - /// [`Command::is_disable_version_flag_set`] - /// - /// Derive: replace `#[clap(setting = DisableVersion)]` with `#[clap(disable_version_flag = true)]` - /// - /// Builder: replace `cmd.setting(DisableVersion)` with `cmd.disable_version_flag(true)` - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.0.0", - note = "Replaced with `Command::disable_version_flag` and `Command::is_disable_version_flag_set` - -Derive: replace `#[clap(setting = DisableVersion)]` with `#[clap(disable_version_flag = true)]` - -Builder: replace `cmd.setting(DisableVersion)` with `cmd.disable_version_flag(true)` -" - ) - )] - #[doc(hidden)] - DisableVersion, - - /// Deprecated, replaced with [`Command::propagate_version`] and [`Command::is_propagate_version_set`] - /// - /// Derive: replace `#[clap(setting = GlobalVersion)]` with `#[clap(propagate_version = true)]` - /// - /// Builder: replace `cmd.setting(GlobalVersion)` with `cmd.propagate_version(true)` - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.0.0", - note = "Replaced with `Command::propagate_version` and `Command::is_propagate_version_set` - -Derive: replace `#[clap(setting = GlobalVersion)]` with `#[clap(propagate_version = true)]` - -Builder: replace `cmd.setting(GlobalVersion)` with `cmd.propagate_version(true)` -" - ) - )] - #[doc(hidden)] - GlobalVersion, - - /// Deprecated, replaced with [`Command::hide_possible_values`] and - /// [`Arg::is_hide_possible_values_set`] - /// - /// Derive: replace `#[clap(setting = HidePossibleValuesInHelp)]` with `#[clap(hide_possible_values = true)]` - /// - /// Builder: replace `cmd.setting(HidePossibleValuesInHelp)` with `cmd.hide_possible_values(true)` - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.0.0", - note = "Replaced with `Command::hide_possible_values` and `Arg::is_hide_possible_values_set` - -Derive: replace `#[clap(setting = HidePossibleValuesInHelp)]` with `#[clap(hide_possible_values = true)]` - -Builder: replace `cmd.setting(HidePossibleValuesInHelp)` with `cmd.hide_possible_values(true)` -" - ) - )] - #[doc(hidden)] - HidePossibleValuesInHelp, - - /// Deprecated, this is now the default - /// - /// Derive: remove `#[clap(setting = UnifiedHelp)]` - /// - /// Builder: remove `cmd.setting(UnifiedHelp)` - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.0.0", - note = "This is now the default - -Derive: remove `#[clap(setting = UnifiedHelp)]` - -Builder: remove `cmd.setting(UnifiedHelp)` -" - ) - )] - #[doc(hidden)] - UnifiedHelp, - - /// If the cmd is already built, used for caching. - #[doc(hidden)] Built, - - /// If the cmd's bin name is already built, used for caching. 
- #[doc(hidden)] BinNameBuilt, } @@ -974,19 +70,14 @@ const PROPAGATE_VERSION = 1 << 3; const DISABLE_VERSION_FOR_SC = 1 << 4; const WAIT_ON_ERROR = 1 << 6; - const SC_REQUIRED_ELSE_HELP = 1 << 7; - const NO_AUTO_HELP = 1 << 8; - const NO_AUTO_VERSION = 1 << 9; const DISABLE_VERSION_FLAG = 1 << 10; const HIDDEN = 1 << 11; const TRAILING_VARARG = 1 << 12; const NO_BIN_NAME = 1 << 13; const ALLOW_UNK_SC = 1 << 14; - const SC_UTF8_NONE = 1 << 15; const LEADING_HYPHEN = 1 << 16; const NO_POS_VALUES = 1 << 17; const NEXT_LINE_HELP = 1 << 18; - const DERIVE_DISP_ORDER = 1 << 19; const DISABLE_COLORED_HELP = 1 << 20; const COLOR_ALWAYS = 1 << 21; const COLOR_AUTO = 1 << 22; @@ -994,7 +85,6 @@ const DONT_DELIM_TRAIL = 1 << 24; const ALLOW_NEG_NUMS = 1 << 25; const DISABLE_HELP_SC = 1 << 27; - const DONT_COLLAPSE_ARGS = 1 << 28; const ARGS_NEGATE_SCS = 1 << 29; const PROPAGATE_VALS_DOWN = 1 << 30; const ALLOW_MISSING_POS = 1 << 31; @@ -1008,10 +98,10 @@ const HELP_REQUIRED = 1 << 39; const SUBCOMMAND_PRECEDENCE_OVER_ARG = 1 << 40; const DISABLE_HELP_FLAG = 1 << 41; - const USE_LONG_FORMAT_FOR_HELP_SC = 1 << 42; const INFER_LONG_ARGS = 1 << 43; const IGNORE_ERRORS = 1 << 44; const MULTICALL = 1 << 45; + const EXPAND_HELP_SUBCOMMAND_TREES = 1 << 46; const NO_OP = 0; } } @@ -1025,22 +115,12 @@ => Flags::ARGS_NEGATE_SCS, AllowExternalSubcommands => Flags::ALLOW_UNK_SC, - StrictUtf8 - => Flags::NO_OP, - AllowInvalidUtf8ForExternalSubcommands - => Flags::SC_UTF8_NONE, AllowHyphenValues => Flags::LEADING_HYPHEN, - AllowLeadingHyphen - => Flags::LEADING_HYPHEN, AllowNegativeNumbers => Flags::ALLOW_NEG_NUMS, AllowMissingPositional => Flags::ALLOW_MISSING_POS, - UnifiedHelpMessage - => Flags::NO_OP, - ColoredHelp - => Flags::NO_OP, ColorAlways => Flags::COLOR_ALWAYS, ColorAuto @@ -1049,59 +129,36 @@ => Flags::COLOR_NEVER, DontDelimitTrailingValues => Flags::DONT_DELIM_TRAIL, - DontCollapseArgsInUsage - => Flags::DONT_COLLAPSE_ARGS, - DeriveDisplayOrder - => Flags::DERIVE_DISP_ORDER, DisableColoredHelp => Flags::DISABLE_COLORED_HELP, DisableHelpSubcommand => Flags::DISABLE_HELP_SC, DisableHelpFlag => Flags::DISABLE_HELP_FLAG, - DisableHelpFlags - => Flags::DISABLE_HELP_FLAG, DisableVersionFlag => Flags::DISABLE_VERSION_FLAG, - DisableVersion - => Flags::DISABLE_VERSION_FLAG, PropagateVersion => Flags::PROPAGATE_VERSION, - GlobalVersion - => Flags::PROPAGATE_VERSION, HidePossibleValues => Flags::NO_POS_VALUES, - HidePossibleValuesInHelp - => Flags::NO_POS_VALUES, HelpExpected => Flags::HELP_REQUIRED, Hidden => Flags::HIDDEN, Multicall => Flags::MULTICALL, - NoAutoHelp - => Flags::NO_AUTO_HELP, - NoAutoVersion - => Flags::NO_AUTO_VERSION, NoBinaryName => Flags::NO_BIN_NAME, SubcommandsNegateReqs => Flags::SC_NEGATE_REQS, SubcommandRequired => Flags::SC_REQUIRED, - SubcommandRequiredElseHelp - => Flags::SC_REQUIRED_ELSE_HELP, - UseLongFormatForHelpSubcommand - => Flags::USE_LONG_FORMAT_FOR_HELP_SC, TrailingVarArg => Flags::TRAILING_VARARG, - UnifiedHelp => Flags::NO_OP, NextLineHelp => Flags::NEXT_LINE_HELP, IgnoreErrors => Flags::IGNORE_ERRORS, - WaitOnError - => Flags::WAIT_ON_ERROR, Built => Flags::BUILT, BinNameBuilt @@ -1113,196 +170,3 @@ InferLongArgs => Flags::INFER_LONG_ARGS } - -/// Deprecated in [Issue #3087](https://github.com/clap-rs/clap/issues/3087), maybe [`clap::Parser`][crate::Parser] would fit your use case? 
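The removals above drop the long-deprecated `AppSettings` variants in favour of the `Command` builder methods named in each deprecation note. As an illustrative sketch only (the program, subcommand and flag names are invented, not taken from this diff), the equivalent clap 4 builder calls look like this:

```rust
use clap::{Arg, ArgAction, Command};

fn main() {
    // Builder-method replacements for the removed AppSettings variants,
    // per the deprecation notes above. Names here are made up.
    let cmd = Command::new("myprog")
        .version("0.1.0") // propagate_version needs a version to propagate
        .propagate_version(true)
        .subcommand_required(true)
        .arg_required_else_help(true)
        .infer_subcommands(true)
        .infer_long_args(true)
        .disable_colored_help(true)
        .arg(
            Arg::new("verbose")
                .long("verbose")
                .global(true)
                .action(ArgAction::SetTrue),
        )
        .subcommand(Command::new("build"));
    let _ = cmd; // a real program would go on to call get_matches()
}
```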
-#[cfg(feature = "yaml")] -impl FromStr for AppSettings { - type Err = String; - fn from_str(s: &str) -> Result::Err> { - #[allow(deprecated)] - #[allow(unreachable_patterns)] - match &*s.to_ascii_lowercase() { - "argrequiredelsehelp" => Ok(AppSettings::ArgRequiredElseHelp), - "subcommandprecedenceoverarg" => Ok(AppSettings::SubcommandPrecedenceOverArg), - "argsnegatesubcommands" => Ok(AppSettings::ArgsNegateSubcommands), - "allowexternalsubcommands" => Ok(AppSettings::AllowExternalSubcommands), - "strictutf8" => Ok(AppSettings::StrictUtf8), - "allowinvalidutf8forexternalsubcommands" => { - Ok(AppSettings::AllowInvalidUtf8ForExternalSubcommands) - } - "allowhyphenvalues" => Ok(AppSettings::AllowHyphenValues), - "allowleadinghyphen" => Ok(AppSettings::AllowLeadingHyphen), - "allownegativenumbers" => Ok(AppSettings::AllowNegativeNumbers), - "allowmissingpositional" => Ok(AppSettings::AllowMissingPositional), - "unifiedhelpmessage" => Ok(AppSettings::UnifiedHelpMessage), - "coloredhelp" => Ok(AppSettings::ColoredHelp), - "coloralways" => Ok(AppSettings::ColorAlways), - "colorauto" => Ok(AppSettings::ColorAuto), - "colornever" => Ok(AppSettings::ColorNever), - "dontdelimittrailingvalues" => Ok(AppSettings::DontDelimitTrailingValues), - "dontcollapseargsinusage" => Ok(AppSettings::DontCollapseArgsInUsage), - "derivedisplayorder" => Ok(AppSettings::DeriveDisplayOrder), - "disablecoloredhelp" => Ok(AppSettings::DisableColoredHelp), - "disablehelpsubcommand" => Ok(AppSettings::DisableHelpSubcommand), - "disablehelpflag" => Ok(AppSettings::DisableHelpFlag), - "disablehelpflags" => Ok(AppSettings::DisableHelpFlags), - "disableversionflag" => Ok(AppSettings::DisableVersionFlag), - "disableversion" => Ok(AppSettings::DisableVersion), - "propagateversion" => Ok(AppSettings::PropagateVersion), - "propagateversion" => Ok(AppSettings::GlobalVersion), - "hidepossiblevalues" => Ok(AppSettings::HidePossibleValues), - "hidepossiblevaluesinhelp" => Ok(AppSettings::HidePossibleValuesInHelp), - "helpexpected" => Ok(AppSettings::HelpExpected), - "hidden" => Ok(AppSettings::Hidden), - "noautohelp" => Ok(AppSettings::NoAutoHelp), - "noautoversion" => Ok(AppSettings::NoAutoVersion), - "nobinaryname" => Ok(AppSettings::NoBinaryName), - "subcommandsnegatereqs" => Ok(AppSettings::SubcommandsNegateReqs), - "subcommandrequired" => Ok(AppSettings::SubcommandRequired), - "subcommandrequiredelsehelp" => Ok(AppSettings::SubcommandRequiredElseHelp), - "uselongformatforhelpsubcommand" => Ok(AppSettings::UseLongFormatForHelpSubcommand), - "trailingvararg" => Ok(AppSettings::TrailingVarArg), - "unifiedhelp" => Ok(AppSettings::UnifiedHelp), - "nextlinehelp" => Ok(AppSettings::NextLineHelp), - "ignoreerrors" => Ok(AppSettings::IgnoreErrors), - "waitonerror" => Ok(AppSettings::WaitOnError), - "built" => Ok(AppSettings::Built), - "binnamebuilt" => Ok(AppSettings::BinNameBuilt), - "infersubcommands" => Ok(AppSettings::InferSubcommands), - "allargsoverrideself" => Ok(AppSettings::AllArgsOverrideSelf), - "inferlongargs" => Ok(AppSettings::InferLongArgs), - _ => Err(format!("unknown AppSetting: `{}`", s)), - } - } -} - -#[cfg(test)] -mod test { - #[allow(clippy::cognitive_complexity)] - #[test] - #[cfg(feature = "yaml")] - fn app_settings_fromstr() { - use super::AppSettings; - - assert_eq!( - "disablehelpflag".parse::().unwrap(), - AppSettings::DisableHelpFlag - ); - assert_eq!( - "argsnegatesubcommands".parse::().unwrap(), - AppSettings::ArgsNegateSubcommands - ); - assert_eq!( - "argrequiredelsehelp".parse::().unwrap(), - 
AppSettings::ArgRequiredElseHelp - ); - assert_eq!( - "subcommandprecedenceoverarg" - .parse::() - .unwrap(), - AppSettings::SubcommandPrecedenceOverArg - ); - assert_eq!( - "allowexternalsubcommands".parse::().unwrap(), - AppSettings::AllowExternalSubcommands - ); - assert_eq!( - "allowinvalidutf8forexternalsubcommands" - .parse::() - .unwrap(), - AppSettings::AllowInvalidUtf8ForExternalSubcommands - ); - assert_eq!( - "allowhyphenvalues".parse::().unwrap(), - AppSettings::AllowHyphenValues - ); - assert_eq!( - "allownegativenumbers".parse::().unwrap(), - AppSettings::AllowNegativeNumbers - ); - assert_eq!( - "disablehelpsubcommand".parse::().unwrap(), - AppSettings::DisableHelpSubcommand - ); - assert_eq!( - "disableversionflag".parse::().unwrap(), - AppSettings::DisableVersionFlag - ); - assert_eq!( - "dontcollapseargsinusage".parse::().unwrap(), - AppSettings::DontCollapseArgsInUsage - ); - assert_eq!( - "dontdelimittrailingvalues".parse::().unwrap(), - AppSettings::DontDelimitTrailingValues - ); - assert_eq!( - "derivedisplayorder".parse::().unwrap(), - AppSettings::DeriveDisplayOrder - ); - assert_eq!( - "disablecoloredhelp".parse::().unwrap(), - AppSettings::DisableColoredHelp - ); - assert_eq!( - "propagateversion".parse::().unwrap(), - AppSettings::PropagateVersion - ); - assert_eq!( - "hidden".parse::().unwrap(), - AppSettings::Hidden - ); - assert_eq!( - "hidepossiblevalues".parse::().unwrap(), - AppSettings::HidePossibleValues - ); - assert_eq!( - "helpexpected".parse::().unwrap(), - AppSettings::HelpExpected - ); - assert_eq!( - "nobinaryname".parse::().unwrap(), - AppSettings::NoBinaryName - ); - assert_eq!( - "nextlinehelp".parse::().unwrap(), - AppSettings::NextLineHelp - ); - assert_eq!( - "subcommandsnegatereqs".parse::().unwrap(), - AppSettings::SubcommandsNegateReqs - ); - assert_eq!( - "subcommandrequired".parse::().unwrap(), - AppSettings::SubcommandRequired - ); - assert_eq!( - "subcommandrequiredelsehelp".parse::().unwrap(), - AppSettings::SubcommandRequiredElseHelp - ); - assert_eq!( - "uselongformatforhelpsubcommand" - .parse::() - .unwrap(), - AppSettings::UseLongFormatForHelpSubcommand - ); - assert_eq!( - "trailingvararg".parse::().unwrap(), - AppSettings::TrailingVarArg - ); - assert_eq!( - "waitonerror".parse::().unwrap(), - AppSettings::WaitOnError - ); - assert_eq!("built".parse::().unwrap(), AppSettings::Built); - assert_eq!( - "binnamebuilt".parse::().unwrap(), - AppSettings::BinNameBuilt - ); - assert_eq!( - "infersubcommands".parse::().unwrap(), - AppSettings::InferSubcommands - ); - assert!("hahahaha".parse::().is_err()); - } -} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/src/builder/arg_group.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/src/builder/arg_group.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/src/builder/arg_group.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/src/builder/arg_group.rs 2023-02-01 05:24:55.000000000 +0000 @@ -1,8 +1,6 @@ // Internal -use crate::util::{Id, Key}; - -#[cfg(feature = "yaml")] -use yaml_rust::Yaml; +use crate::builder::IntoResettable; +use crate::util::Id; /// Family of related [arguments]. /// @@ -37,14 +35,14 @@ /// the arguments from the specified group is present at runtime. 
/// /// ```rust -/// # use clap::{Command, arg, ArgGroup, ErrorKind}; +/// # use clap::{Command, arg, ArgGroup, error::ErrorKind}; /// let result = Command::new("cmd") -/// .arg(arg!(--"set-ver" "set the version manually").required(false)) +/// .arg(arg!(--"set-ver" "set the version manually")) /// .arg(arg!(--major "auto increase major")) /// .arg(arg!(--minor "auto increase minor")) /// .arg(arg!(--patch "auto increase patch")) /// .group(ArgGroup::new("vers") -/// .args(&["set-ver", "major", "minor", "patch"]) +/// .args(["set-ver", "major", "minor", "patch"]) /// .required(true)) /// .try_get_matches_from(vec!["cmd", "--major", "--patch"]); /// // Because we used two args in the group it's an error @@ -52,23 +50,30 @@ /// let err = result.unwrap_err(); /// assert_eq!(err.kind(), ErrorKind::ArgumentConflict); /// ``` -/// This next example shows a passing parse of the same scenario /// +/// This next example shows a passing parse of the same scenario /// ```rust -/// # use clap::{Command, arg, ArgGroup}; +/// # use clap::{Command, arg, ArgGroup, Id}; /// let result = Command::new("cmd") -/// .arg(arg!(--"set-ver" "set the version manually").required(false)) +/// .arg(arg!(--"set-ver" "set the version manually")) /// .arg(arg!(--major "auto increase major")) /// .arg(arg!(--minor "auto increase minor")) /// .arg(arg!(--patch "auto increase patch")) /// .group(ArgGroup::new("vers") -/// .args(&["set-ver", "major", "minor","patch"]) +/// .args(["set-ver", "major", "minor","patch"]) /// .required(true)) /// .try_get_matches_from(vec!["cmd", "--major"]); /// assert!(result.is_ok()); /// let matches = result.unwrap(); /// // We may not know which of the args was used, so we can test for the group... /// assert!(matches.contains_id("vers")); +/// // We can also ask the group which arg was used +/// assert_eq!(matches +/// .get_one::("vers") +/// .expect("`vers` is required") +/// .as_str(), +/// "major" +/// ); /// // we could also alternatively check each arg individually (not shown here) /// ``` /// [`ArgGroup::multiple(true)`]: ArgGroup::multiple() @@ -77,10 +82,9 @@ /// [arguments]: crate::Arg /// [conflict]: crate::Arg::conflicts_with() /// [requirement]: crate::Arg::requires() -#[derive(Default, Debug, PartialEq, Eq)] -pub struct ArgGroup<'help> { +#[derive(Default, Clone, Debug, PartialEq, Eq)] +pub struct ArgGroup { pub(crate) id: Id, - pub(crate) name: &'help str, pub(crate) args: Vec, pub(crate) required: bool, pub(crate) requires: Vec, @@ -88,14 +92,8 @@ pub(crate) multiple: bool, } -impl<'help> ArgGroup<'help> { - pub(crate) fn with_id(id: Id) -> Self { - ArgGroup { - id, - ..ArgGroup::default() - } - } - +/// # Builder +impl ArgGroup { /// Create a `ArgGroup` using a unique name. /// /// The name will be used to get values from the group or refer to the group inside of conflict @@ -108,8 +106,8 @@ /// ArgGroup::new("config") /// # ; /// ``` - pub fn new>(n: S) -> Self { - ArgGroup::default().id(n) + pub fn new(id: impl Into) -> Self { + ArgGroup::default().id(id) } /// Sets the group name. 
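The updated `ArgGroup` documentation above adds a grouped lookup that returns the `Id` of whichever member argument was used. Below is a self-contained version of that example, with the turbofish written out as `get_one::<Id>` (assuming clap 4's re-export of `Id` at the crate root, as the doc's own import list suggests):

```rust
use clap::{arg, ArgGroup, Command, Id};

fn main() {
    let matches = Command::new("cmd")
        .arg(arg!(--"set-ver" <VER> "set the version manually"))
        .arg(arg!(--major "auto increase major"))
        .arg(arg!(--minor "auto increase minor"))
        .arg(arg!(--patch "auto increase patch"))
        .group(
            ArgGroup::new("vers")
                .args(["set-ver", "major", "minor", "patch"])
                .required(true),
        )
        .get_matches_from(["cmd", "--major"]);

    // The group is present because one of its members was used...
    assert!(matches.contains_id("vers"));
    // ...and it records which member that was.
    let used = matches.get_one::<Id>("vers").expect("`vers` is required");
    assert_eq!(used.as_str(), "major");
}
```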
@@ -118,44 +116,28 @@ /// /// ```rust /// # use clap::{Command, ArgGroup}; - /// ArgGroup::default().name("config") + /// ArgGroup::default().id("config") /// # ; /// ``` #[must_use] - pub fn id>(mut self, n: S) -> Self { - self.name = n.into(); - self.id = Id::from(self.name); + pub fn id(mut self, id: impl Into) -> Self { + self.id = id.into(); self } - /// Deprecated, replaced with [`ArgGroup::id`] - /// - /// Builder: replaced `group.name(...)` with `group.id(...)` - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.1.0", - note = "Replaced with `ArgGroup::id` - -Builder: replaced `group.name(...)` with `group.id(...)` -" - ) - )] - pub fn name>(self, n: S) -> Self { - self.id(n) - } - /// Adds an [argument] to this group by name /// /// # Examples /// /// ```rust - /// # use clap::{Command, Arg, ArgGroup}; + /// # use clap::{Command, Arg, ArgGroup, ArgAction}; /// let m = Command::new("myprog") /// .arg(Arg::new("flag") - /// .short('f')) + /// .short('f') + /// .action(ArgAction::SetTrue)) /// .arg(Arg::new("color") - /// .short('c')) + /// .short('c') + /// .action(ArgAction::SetTrue)) /// .group(ArgGroup::new("req_flags") /// .arg("flag") /// .arg("color")) @@ -167,8 +149,12 @@ /// ``` /// [argument]: crate::Arg #[must_use] - pub fn arg(mut self, arg_id: T) -> Self { - self.args.push(arg_id.into()); + pub fn arg(mut self, arg_id: impl IntoResettable) -> Self { + if let Some(arg_id) = arg_id.into_resettable().into_option() { + self.args.push(arg_id); + } else { + self.args.clear(); + } self } @@ -177,14 +163,16 @@ /// # Examples /// /// ```rust - /// # use clap::{Command, Arg, ArgGroup}; + /// # use clap::{Command, Arg, ArgGroup, ArgAction}; /// let m = Command::new("myprog") /// .arg(Arg::new("flag") - /// .short('f')) + /// .short('f') + /// .action(ArgAction::SetTrue)) /// .arg(Arg::new("color") - /// .short('c')) + /// .short('c') + /// .action(ArgAction::SetTrue)) /// .group(ArgGroup::new("req_flags") - /// .args(&["flag", "color"])) + /// .args(["flag", "color"])) /// .get_matches_from(vec!["myprog", "-f"]); /// // maybe we don't know which of the two flags was used... /// assert!(m.contains_id("req_flags")); @@ -193,13 +181,30 @@ /// ``` /// [arguments]: crate::Arg #[must_use] - pub fn args(mut self, ns: &[T]) -> Self { + pub fn args(mut self, ns: impl IntoIterator>) -> Self { for n in ns { self = self.arg(n); } self } + /// Getters for all args. It will return a vector of `Id` + /// + /// # Example + /// + /// ```rust + /// # use clap::{ArgGroup}; + /// let args: Vec<&str> = vec!["a1".into(), "a4".into()]; + /// let grp = ArgGroup::new("program").args(&args); + /// + /// for (pos, arg) in grp.get_args().enumerate() { + /// assert_eq!(*arg, args[pos]); + /// } + /// ``` + pub fn get_args(&self) -> impl Iterator { + self.args.iter() + } + /// Allows more than one of the [`Arg`]s in this group to be used. (Default: `false`) /// /// # Examples @@ -208,14 +213,16 @@ /// group /// /// ```rust - /// # use clap::{Command, Arg, ArgGroup}; + /// # use clap::{Command, Arg, ArgGroup, ArgAction}; /// let m = Command::new("myprog") /// .arg(Arg::new("flag") - /// .short('f')) + /// .short('f') + /// .action(ArgAction::SetTrue)) /// .arg(Arg::new("color") - /// .short('c')) + /// .short('c') + /// .action(ArgAction::SetTrue)) /// .group(ArgGroup::new("req_flags") - /// .args(&["flag", "color"]) + /// .args(["flag", "color"]) /// .multiple(true)) /// .get_matches_from(vec!["myprog", "-f", "-c"]); /// // maybe we don't know which of the two flags was used... 
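The new `ArgGroup::arg`, `requires`, and `conflicts_with` bodies above all follow the same resettable pattern: a concrete value appends to the list, while a reset clears it. The following is a conceptual sketch of that behaviour only, using stand-in types rather than clap's actual `Resettable`/`IntoResettable` definitions:

```rust
// Stand-in for the reset-or-value distinction used by the new builder methods.
enum Resettable<T> {
    Value(T),
    Reset,
}

impl<T> Resettable<T> {
    fn into_option(self) -> Option<T> {
        match self {
            Resettable::Value(v) => Some(v),
            Resettable::Reset => None,
        }
    }
}

struct Group {
    args: Vec<String>,
}

impl Group {
    // Mirrors the shape of the new ArgGroup::arg body: a value appends, a reset clears.
    fn arg(mut self, id: Resettable<String>) -> Self {
        if let Some(id) = id.into_option() {
            self.args.push(id);
        } else {
            self.args.clear();
        }
        self
    }
}

fn main() {
    let g = Group { args: Vec::new() }
        .arg(Resettable::Value("flag".to_owned()))
        .arg(Resettable::Value("color".to_owned()));
    assert_eq!(g.args, ["flag", "color"]);

    let cleared = Group { args: g.args }.arg(Resettable::Reset);
    assert!(cleared.args.is_empty());
}
```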
@@ -225,14 +232,16 @@ /// an error if more than one of the args in the group was used. /// /// ```rust - /// # use clap::{Command, Arg, ArgGroup, ErrorKind}; + /// # use clap::{Command, Arg, ArgGroup, error::ErrorKind, ArgAction}; /// let result = Command::new("myprog") /// .arg(Arg::new("flag") - /// .short('f')) + /// .short('f') + /// .action(ArgAction::SetTrue)) /// .arg(Arg::new("color") - /// .short('c')) + /// .short('c') + /// .action(ArgAction::SetTrue)) /// .group(ArgGroup::new("req_flags") - /// .args(&["flag", "color"])) + /// .args(["flag", "color"])) /// .try_get_matches_from(vec!["myprog", "-f", "-c"]); /// // Because we used both args in the group it's an error /// assert!(result.is_err()); @@ -248,6 +257,23 @@ self } + /// Return true if the group allows more than one of the arguments + /// in this group to be used. (Default: `false`) + /// + /// # Example + /// + /// ```rust + /// # use clap::{ArgGroup}; + /// let mut group = ArgGroup::new("myprog") + /// .args(["f", "c"]) + /// .multiple(true); + /// + /// assert!(group.is_multiple()); + /// ``` + pub fn is_multiple(&mut self) -> bool { + self.multiple + } + /// Require an argument from the group to be present when parsing. /// /// This is unless conflicting with another argument. A required group will be displayed in @@ -264,14 +290,16 @@ /// # Examples /// /// ```rust - /// # use clap::{Command, Arg, ArgGroup, ErrorKind}; + /// # use clap::{Command, Arg, ArgGroup, error::ErrorKind, ArgAction}; /// let result = Command::new("myprog") /// .arg(Arg::new("flag") - /// .short('f')) + /// .short('f') + /// .action(ArgAction::SetTrue)) /// .arg(Arg::new("color") - /// .short('c')) + /// .short('c') + /// .action(ArgAction::SetTrue)) /// .group(ArgGroup::new("req_flags") - /// .args(&["flag", "color"]) + /// .args(["flag", "color"]) /// .required(true)) /// .try_get_matches_from(vec!["myprog"]); /// // Because we didn't use any of the args in the group, it's an error @@ -301,16 +329,19 @@ /// # Examples /// /// ```rust - /// # use clap::{Command, Arg, ArgGroup, ErrorKind}; + /// # use clap::{Command, Arg, ArgGroup, error::ErrorKind, ArgAction}; /// let result = Command::new("myprog") /// .arg(Arg::new("flag") - /// .short('f')) + /// .short('f') + /// .action(ArgAction::SetTrue)) /// .arg(Arg::new("color") - /// .short('c')) + /// .short('c') + /// .action(ArgAction::SetTrue)) /// .arg(Arg::new("debug") - /// .short('d')) + /// .short('d') + /// .action(ArgAction::SetTrue)) /// .group(ArgGroup::new("req_flags") - /// .args(&["flag", "color"]) + /// .args(["flag", "color"]) /// .requires("debug")) /// .try_get_matches_from(vec!["myprog", "-c"]); /// // because we used an arg from the group, and the group requires "-d" to be used, it's an @@ -322,8 +353,12 @@ /// [required group]: ArgGroup::required() /// [argument requirement rules]: crate::Arg::requires() #[must_use] - pub fn requires(mut self, id: T) -> Self { - self.requires.push(id.into()); + pub fn requires(mut self, id: impl IntoResettable) -> Self { + if let Some(id) = id.into_resettable().into_option() { + self.requires.push(id); + } else { + self.requires.clear(); + } self } @@ -338,19 +373,23 @@ /// # Examples /// /// ```rust - /// # use clap::{Command, Arg, ArgGroup, ErrorKind}; + /// # use clap::{Command, Arg, ArgGroup, error::ErrorKind, ArgAction}; /// let result = Command::new("myprog") /// .arg(Arg::new("flag") - /// .short('f')) + /// .short('f') + /// .action(ArgAction::SetTrue)) /// .arg(Arg::new("color") - /// .short('c')) + /// .short('c') + /// 
.action(ArgAction::SetTrue)) /// .arg(Arg::new("debug") - /// .short('d')) + /// .short('d') + /// .action(ArgAction::SetTrue)) /// .arg(Arg::new("verb") - /// .short('v')) + /// .short('v') + /// .action(ArgAction::SetTrue)) /// .group(ArgGroup::new("req_flags") - /// .args(&["flag", "color"]) - /// .requires_all(&["debug", "verb"])) + /// .args(["flag", "color"]) + /// .requires_all(["debug", "verb"])) /// .try_get_matches_from(vec!["myprog", "-c", "-d"]); /// // because we used an arg from the group, and the group requires "-d" and "-v" to be used, /// // yet we only used "-d" it's an error @@ -359,9 +398,9 @@ /// assert_eq!(err.kind(), ErrorKind::MissingRequiredArgument); /// ``` /// [required group]: ArgGroup::required() - /// [argument requirement rules]: crate::Arg::requires_all() + /// [argument requirement rules]: crate::Arg::requires_ifs() #[must_use] - pub fn requires_all(mut self, ns: &[&'help str]) -> Self { + pub fn requires_all(mut self, ns: impl IntoIterator>) -> Self { for n in ns { self = self.requires(n); } @@ -379,16 +418,19 @@ /// # Examples /// /// ```rust - /// # use clap::{Command, Arg, ArgGroup, ErrorKind}; + /// # use clap::{Command, Arg, ArgGroup, error::ErrorKind, ArgAction}; /// let result = Command::new("myprog") /// .arg(Arg::new("flag") - /// .short('f')) + /// .short('f') + /// .action(ArgAction::SetTrue)) /// .arg(Arg::new("color") - /// .short('c')) + /// .short('c') + /// .action(ArgAction::SetTrue)) /// .arg(Arg::new("debug") - /// .short('d')) + /// .short('d') + /// .action(ArgAction::SetTrue)) /// .group(ArgGroup::new("req_flags") - /// .args(&["flag", "color"]) + /// .args(["flag", "color"]) /// .conflicts_with("debug")) /// .try_get_matches_from(vec!["myprog", "-c", "-d"]); /// // because we used an arg from the group, and the group conflicts with "-d", it's an error @@ -398,8 +440,12 @@ /// ``` /// [argument exclusion rules]: crate::Arg::conflicts_with() #[must_use] - pub fn conflicts_with(mut self, id: T) -> Self { - self.conflicts.push(id.into()); + pub fn conflicts_with(mut self, id: impl IntoResettable) -> Self { + if let Some(id) = id.into_resettable().into_option() { + self.conflicts.push(id); + } else { + self.conflicts.clear(); + } self } @@ -413,19 +459,23 @@ /// # Examples /// /// ```rust - /// # use clap::{Command, Arg, ArgGroup, ErrorKind}; + /// # use clap::{Command, Arg, ArgGroup, error::ErrorKind, ArgAction}; /// let result = Command::new("myprog") /// .arg(Arg::new("flag") - /// .short('f')) + /// .short('f') + /// .action(ArgAction::SetTrue)) /// .arg(Arg::new("color") - /// .short('c')) + /// .short('c') + /// .action(ArgAction::SetTrue)) /// .arg(Arg::new("debug") - /// .short('d')) + /// .short('d') + /// .action(ArgAction::SetTrue)) /// .arg(Arg::new("verb") - /// .short('v')) + /// .short('v') + /// .action(ArgAction::SetTrue)) /// .group(ArgGroup::new("req_flags") - /// .args(&["flag", "color"]) - /// .conflicts_with_all(&["debug", "verb"])) + /// .args(["flag", "color"]) + /// .conflicts_with_all(["debug", "verb"])) /// .try_get_matches_from(vec!["myprog", "-c", "-v"]); /// // because we used an arg from the group, and the group conflicts with either "-v" or "-d" /// // it's an error @@ -436,129 +486,56 @@ /// /// [argument exclusion rules]: crate::Arg::conflicts_with_all() #[must_use] - pub fn conflicts_with_all(mut self, ns: &[&'help str]) -> Self { + pub fn conflicts_with_all(mut self, ns: impl IntoIterator>) -> Self { for n in ns { self = self.conflicts_with(n); } self } +} - /// Deprecated, replaced with 
[`ArgGroup::new`] - #[cfg_attr( - feature = "deprecated", - deprecated(since = "3.0.0", note = "Replaced with `ArgGroup::new`") - )] - #[doc(hidden)] - pub fn with_name>(n: S) -> Self { - Self::new(n) - } - - /// Deprecated in [Issue #3087](https://github.com/clap-rs/clap/issues/3087), maybe [`clap::Parser`][crate::Parser] would fit your use case? - #[cfg(feature = "yaml")] - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.0.0", - note = "Maybe clap::Parser would fit your use case? (Issue #3087)" - ) - )] - #[doc(hidden)] - pub fn from_yaml(yaml: &'help Yaml) -> Self { - Self::from(yaml) +/// # Reflection +impl ArgGroup { + /// Get the name of the group + #[inline] + pub fn get_id(&self) -> &Id { + &self.id } -} -impl<'help> From<&'_ ArgGroup<'help>> for ArgGroup<'help> { - fn from(g: &ArgGroup<'help>) -> Self { - ArgGroup { - id: g.id.clone(), - name: g.name, - required: g.required, - args: g.args.clone(), - requires: g.requires.clone(), - conflicts: g.conflicts.clone(), - multiple: g.multiple, - } + /// Reports whether [`ArgGroup::required`] is set + #[inline] + pub fn is_required_set(&self) -> bool { + self.required } } -/// Deprecated in [Issue #3087](https://github.com/clap-rs/clap/issues/3087), maybe [`clap::Parser`][crate::Parser] would fit your use case? -#[cfg(feature = "yaml")] -impl<'help> From<&'help Yaml> for ArgGroup<'help> { - /// Deprecated in [Issue #3087](https://github.com/clap-rs/clap/issues/3087), maybe [`clap::Parser`][crate::Parser] would fit your use case? - fn from(y: &'help Yaml) -> Self { - let b = y.as_hash().expect("ArgGroup::from:: expects a table"); - // We WANT this to panic on error...so expect() is good. - let mut a = ArgGroup::default(); - let group_settings = if b.len() == 1 { - let name_yaml = b.keys().next().expect("failed to get name"); - let name_str = name_yaml - .as_str() - .expect("failed to convert arg YAML name to str"); - a.name = name_str; - a.id = Id::from(&a.name); - b.get(name_yaml) - .expect("failed to get name_str") - .as_hash() - .expect("failed to convert to a hash") - } else { - b - }; - - for (k, v) in group_settings { - a = match k.as_str().unwrap() { - "required" => a.required(v.as_bool().unwrap()), - "multiple" => a.multiple(v.as_bool().unwrap()), - "args" => yaml_vec_or_str!(a, v, arg), - "arg" => { - if let Some(ys) = v.as_str() { - a = a.arg(ys); - } - a - } - "requires" => yaml_vec_or_str!(a, v, requires), - "conflicts_with" => yaml_vec_or_str!(a, v, conflicts_with), - "name" => { - if let Some(ys) = v.as_str() { - a = a.id(ys); - } - a - } - s => panic!( - "Unknown ArgGroup setting '{}' in YAML file for \ - ArgGroup '{}'", - s, a.name - ), - } - } - - a +impl From<&'_ ArgGroup> for ArgGroup { + fn from(g: &ArgGroup) -> Self { + g.clone() } } #[cfg(test)] mod test { - use super::ArgGroup; - #[cfg(feature = "yaml")] - use yaml_rust::YamlLoader; + use super::*; #[test] fn groups() { let g = ArgGroup::new("test") .arg("a1") .arg("a4") - .args(&["a2", "a3"]) + .args(["a2", "a3"]) .required(true) .conflicts_with("c1") - .conflicts_with_all(&["c2", "c3"]) + .conflicts_with_all(["c2", "c3"]) .conflicts_with("c4") .requires("r1") - .requires_all(&["r2", "r3"]) + .requires_all(["r2", "r3"]) .requires("r4"); - let args = vec!["a1".into(), "a4".into(), "a2".into(), "a3".into()]; - let reqs = vec!["r1".into(), "r2".into(), "r3".into(), "r4".into()]; - let confs = vec!["c1".into(), "c2".into(), "c3".into(), "c4".into()]; + let args: Vec = vec!["a1".into(), "a4".into(), "a2".into(), "a3".into()]; + let reqs: Vec = 
vec!["r1".into(), "r2".into(), "r3".into(), "r4".into()]; + let confs: Vec = vec!["c1".into(), "c2".into(), "c3".into(), "c4".into()]; assert_eq!(g.args, args); assert_eq!(g.requires, reqs); @@ -570,18 +547,18 @@ let g = ArgGroup::new("test") .arg("a1") .arg("a4") - .args(&["a2", "a3"]) + .args(["a2", "a3"]) .required(true) .conflicts_with("c1") - .conflicts_with_all(&["c2", "c3"]) + .conflicts_with_all(["c2", "c3"]) .conflicts_with("c4") .requires("r1") - .requires_all(&["r2", "r3"]) + .requires_all(["r2", "r3"]) .requires("r4"); - let args = vec!["a1".into(), "a4".into(), "a2".into(), "a3".into()]; - let reqs = vec!["r1".into(), "r2".into(), "r3".into(), "r4".into()]; - let confs = vec!["c1".into(), "c2".into(), "c3".into(), "c4".into()]; + let args: Vec = vec!["a1".into(), "a4".into(), "a2".into(), "a3".into()]; + let reqs: Vec = vec!["r1".into(), "r2".into(), "r3".into(), "r4".into()]; + let confs: Vec = vec!["c1".into(), "c2".into(), "c3".into(), "c4".into()]; let g2 = ArgGroup::from(&g); assert_eq!(g2.args, args); @@ -589,53 +566,31 @@ assert_eq!(g2.conflicts, confs); } - #[cfg(feature = "yaml")] - #[test] - fn test_yaml() { - let g_yaml = "name: test -args: -- a1 -- a4 -- a2 -- a3 -conflicts_with: -- c1 -- c2 -- c3 -- c4 -requires: -- r1 -- r2 -- r3 -- r4"; - let yaml = &YamlLoader::load_from_str(g_yaml).expect("failed to load YAML file")[0]; - let g = ArgGroup::from(yaml); - let args = vec!["a1".into(), "a4".into(), "a2".into(), "a3".into()]; - let reqs = vec!["r1".into(), "r2".into(), "r3".into(), "r4".into()]; - let confs = vec!["c1".into(), "c2".into(), "c3".into(), "c4".into()]; - assert_eq!(g.args, args); - assert_eq!(g.requires, reqs); - assert_eq!(g.conflicts, confs); - } - // This test will *fail to compile* if ArgGroup is not Send + Sync #[test] fn arg_group_send_sync() { fn foo(_: T) {} foo(ArgGroup::new("test")) } -} -impl Clone for ArgGroup<'_> { - fn clone(&self) -> Self { - ArgGroup { - id: self.id.clone(), - name: self.name, - required: self.required, - args: self.args.clone(), - requires: self.requires.clone(), - conflicts: self.conflicts.clone(), - multiple: self.multiple, + #[test] + fn arg_group_expose_is_multiple_helper() { + let args: Vec = vec!["a1".into(), "a4".into()]; + + let mut grp_multiple = ArgGroup::new("test_multiple").args(&args).multiple(true); + assert!(grp_multiple.is_multiple()); + + let mut grp_not_multiple = ArgGroup::new("test_multiple").args(&args).multiple(false); + assert!(!grp_not_multiple.is_multiple()); + } + + #[test] + fn arg_group_expose_get_args_helper() { + let args: Vec = vec!["a1".into(), "a4".into()]; + let grp = ArgGroup::new("program").args(&args); + + for (pos, arg) in grp.get_args().enumerate() { + assert_eq!(*arg, args[pos]); } } } diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/src/builder/arg_predicate.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/src/builder/arg_predicate.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/src/builder/arg_predicate.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/src/builder/arg_predicate.rs 2023-02-01 05:24:55.000000000 +0000 @@ -1,14 +1,18 @@ -#[derive(Copy, Clone, Debug, PartialEq, Eq)] -pub(crate) enum ArgPredicate<'help> { +use crate::builder::OsStr; + +/// Operations to perform on argument values +/// +/// These do not apply to [`ValueSource::DefaultValue`][crate::parser::ValueSource::DefaultValue] +#[derive(Clone, Debug, PartialEq, Eq)] +pub enum ArgPredicate { + /// Is the argument present? 
IsPresent, - Equals(&'help std::ffi::OsStr), + /// Does the argument match the specified value? + Equals(OsStr), } -impl<'help> From> for ArgPredicate<'help> { - fn from(other: Option<&'help std::ffi::OsStr>) -> Self { - match other { - Some(other) => Self::Equals(other), - None => Self::IsPresent, - } +impl> From for ArgPredicate { + fn from(other: S) -> Self { + Self::Equals(other.into()) } } diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/src/builder/arg.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/src/builder/arg.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/src/builder/arg.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/src/builder/arg.rs 2023-02-01 05:24:55.000000000 +0000 @@ -1,33 +1,27 @@ -#![allow(deprecated)] - // Std +#[cfg(feature = "env")] +use std::env; +#[cfg(feature = "env")] +use std::ffi::OsString; use std::{ - borrow::Cow, cmp::{Ord, Ordering}, - error::Error, - ffi::OsStr, fmt::{self, Display, Formatter}, str, - sync::{Arc, Mutex}, }; -#[cfg(feature = "env")] -use std::{env, ffi::OsString}; - -#[cfg(feature = "yaml")] -use yaml_rust::Yaml; // Internal -use crate::builder::usage_parser::UsageParser; +use super::{ArgFlags, ArgSettings}; use crate::builder::ArgPredicate; -use crate::util::{Id, Key}; +use crate::builder::IntoResettable; +use crate::builder::OsStr; +use crate::builder::PossibleValue; +use crate::builder::Str; +use crate::builder::StyledStr; +use crate::builder::ValueRange; use crate::ArgAction; -use crate::PossibleValue; +use crate::Id; use crate::ValueHint; use crate::INTERNAL_ERROR_MSG; -use crate::{ArgFlags, ArgSettings}; - -#[cfg(feature = "regex")] -use crate::builder::RegexRef; /// The abstract representation of a command line argument. Used to set all the options and /// relationships that define a valid argument for the program. 
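The `arg_predicate.rs` hunk above turns `ArgPredicate` into a public type with `IsPresent` and `Equals` variants. A hedged usage sketch follows, assuming clap 4's `Arg::default_value_if(arg, predicate, default)` signature; the flag names are invented for illustration:

```rust
use clap::{builder::ArgPredicate, Arg, ArgAction, Command};

fn main() {
    // If --verbose is present, --log-level defaults to "debug" instead of "info".
    let cmd = Command::new("prog")
        .arg(
            Arg::new("verbose")
                .long("verbose")
                .action(ArgAction::SetTrue),
        )
        .arg(
            Arg::new("log-level")
                .long("log-level")
                .default_value("info")
                .default_value_if("verbose", ArgPredicate::IsPresent, Some("debug")),
        );

    let matches = cmd.get_matches_from(["prog", "--verbose"]);
    assert_eq!(
        matches.get_one::<String>("log-level").map(String::as_str),
        Some("debug")
    );
}
```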
@@ -45,69 +39,60 @@ /// # Examples /// /// ```rust -/// # use clap::{Arg, arg}; +/// # use clap::{Arg, arg, ArgAction}; /// // Using the traditional builder pattern and setting each option manually /// let cfg = Arg::new("config") /// .short('c') /// .long("config") -/// .takes_value(true) +/// .action(ArgAction::Set) /// .value_name("FILE") /// .help("Provides a config file to myprog"); /// // Using a usage string (setting a similar argument to the one above) /// let input = arg!(-i --input "Provides an input file to the program"); /// ``` -#[allow(missing_debug_implementations)] #[derive(Default, Clone)] -pub struct Arg<'help> { +pub struct Arg { pub(crate) id: Id, - pub(crate) provider: ArgProvider, - pub(crate) name: &'help str, - pub(crate) help: Option<&'help str>, - pub(crate) long_help: Option<&'help str>, + pub(crate) help: Option, + pub(crate) long_help: Option, pub(crate) action: Option, pub(crate) value_parser: Option, pub(crate) blacklist: Vec, pub(crate) settings: ArgFlags, pub(crate) overrides: Vec, pub(crate) groups: Vec, - pub(crate) requires: Vec<(ArgPredicate<'help>, Id)>, - pub(crate) r_ifs: Vec<(Id, &'help str)>, - pub(crate) r_ifs_all: Vec<(Id, &'help str)>, + pub(crate) requires: Vec<(ArgPredicate, Id)>, + pub(crate) r_ifs: Vec<(Id, OsStr)>, + pub(crate) r_ifs_all: Vec<(Id, OsStr)>, pub(crate) r_unless: Vec, pub(crate) r_unless_all: Vec, pub(crate) short: Option, - pub(crate) long: Option<&'help str>, - pub(crate) aliases: Vec<(&'help str, bool)>, // (name, visible) + pub(crate) long: Option, + pub(crate) aliases: Vec<(Str, bool)>, // (name, visible) pub(crate) short_aliases: Vec<(char, bool)>, // (name, visible) - pub(crate) disp_ord: DisplayOrder, - pub(crate) possible_vals: Vec>, - pub(crate) val_names: Vec<&'help str>, - pub(crate) num_vals: Option, - pub(crate) max_occurs: Option, - pub(crate) max_vals: Option, - pub(crate) min_vals: Option, - pub(crate) validator: Option>>>, - pub(crate) validator_os: Option>>>, + pub(crate) disp_ord: Option, + pub(crate) val_names: Vec, + pub(crate) num_vals: Option, pub(crate) val_delim: Option, - pub(crate) default_vals: Vec<&'help OsStr>, - pub(crate) default_vals_ifs: Vec<(Id, ArgPredicate<'help>, Option<&'help OsStr>)>, - pub(crate) default_missing_vals: Vec<&'help OsStr>, + pub(crate) default_vals: Vec, + pub(crate) default_vals_ifs: Vec<(Id, ArgPredicate, Option)>, + pub(crate) default_missing_vals: Vec, #[cfg(feature = "env")] - pub(crate) env: Option<(&'help OsStr, Option)>, - pub(crate) terminator: Option<&'help str>, + pub(crate) env: Option<(OsStr, Option)>, + pub(crate) terminator: Option, pub(crate) index: Option, - pub(crate) help_heading: Option>, + pub(crate) help_heading: Option>, pub(crate) value_hint: Option, } /// # Basic API -impl<'help> Arg<'help> { +impl Arg { /// Create a new [`Arg`] with a unique name. /// /// The name is used to check whether or not the argument was used at /// runtime, get values, set relationships with other args, etc.. /// - /// **NOTE:** In the case of arguments that take values (i.e. [`Arg::takes_value(true)`]) + /// **NOTE:** In the case of arguments that take values (i.e. [`Arg::action(ArgAction::Set)`]) /// and positional arguments (i.e. those without a preceding `-` or `--`) the name will also /// be displayed when the user prints the usage/help information of the program. 
/// @@ -118,45 +103,26 @@ /// Arg::new("config") /// # ; /// ``` - /// [`Arg::takes_value(true)`]: Arg::takes_value() - pub fn new>(n: S) -> Self { - Arg::default().name(n) + /// [`Arg::action(ArgAction::Set)`]: Arg::action() + pub fn new(id: impl Into) -> Self { + Arg::default().id(id) } /// Set the identifier used for referencing this argument in the clap API. /// /// See [`Arg::new`] for more details. #[must_use] - pub fn id>(mut self, n: S) -> Self { - let name = n.into(); - self.id = Id::from(&*name); - self.name = name; + pub fn id(mut self, id: impl Into) -> Self { + self.id = id.into(); self } - /// Deprecated, replaced with [`Arg::id`] to avoid confusion with [`Arg::value_name`] - /// - /// Builder: replaced `arg.name(...)` with `arg.id(...)` - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.1.0", - note = "Replaced with `Arg::id` to avoid confusion with `Arg::value_name` - -Builder: replaced `arg.name(...)` with `arg.id(...)` -" - ) - )] - pub fn name>(self, n: S) -> Self { - self.id(n) - } - /// Sets the short version of the argument without the preceding `-`. /// /// By default `V` and `h` are used by the auto-generated `version` and `help` arguments, - /// respectively. You may use the uppercase `V` or lowercase `h` for your own arguments, in - /// which case `clap` simply will not assign those to the auto-generated - /// `version` or `help` arguments. + /// respectively. You will need to disable the auto-generated flags + /// ([`disable_help_flag`][crate::Command::disable_help_flag], + /// [`disable_version_flag`][crate::Command::disable_version_flag]) and define your own. /// /// # Examples /// @@ -164,23 +130,45 @@ /// argument via a single hyphen (`-`) such as `-c`: /// /// ```rust - /// # use clap::{Command, Arg}; + /// # use clap::{Command, Arg, ArgAction}; /// let m = Command::new("prog") /// .arg(Arg::new("config") /// .short('c') - /// .takes_value(true)) + /// .action(ArgAction::Set)) /// .get_matches_from(vec![ /// "prog", "-c", "file.toml" /// ]); /// /// assert_eq!(m.get_one::("config").map(String::as_str), Some("file.toml")); /// ``` + /// + /// To use `-h` for your own flag and still have help: + /// ```rust + /// # use clap::{Command, Arg, ArgAction}; + /// let m = Command::new("prog") + /// .disable_help_flag(true) + /// .arg(Arg::new("host") + /// .short('h') + /// .long("host")) + /// .arg(Arg::new("help") + /// .long("help") + /// .global(true) + /// .action(ArgAction::Help)) + /// .get_matches_from(vec![ + /// "prog", "-h", "wikipedia.org" + /// ]); + /// + /// assert_eq!(m.get_one::("host").map(String::as_str), Some("wikipedia.org")); + /// ``` #[inline] #[must_use] - pub fn short(mut self, s: char) -> Self { - assert!(s != '-', "short option name cannot be `-`"); - - self.short = Some(s); + pub fn short(mut self, s: impl IntoResettable) -> Self { + if let Some(s) = s.into_resettable().into_option() { + debug_assert!(s != '-', "short option name cannot be `-`"); + self.short = Some(s); + } else { + self.short = None; + } self } @@ -202,11 +190,11 @@ /// Setting `long` allows using the argument via a double hyphen (`--`) such as `--config` /// /// ```rust - /// # use clap::{Command, Arg}; + /// # use clap::{Command, Arg, ArgAction}; /// let m = Command::new("prog") /// .arg(Arg::new("cfg") /// .long("config") - /// .takes_value(true)) + /// .action(ArgAction::Set)) /// .get_matches_from(vec![ /// "prog", "--config", "file.toml" /// ]); @@ -215,15 +203,8 @@ /// ``` #[inline] #[must_use] - pub fn long(mut self, l: &'help str) -> Self { - 
#[cfg(feature = "unstable-v4")] - { - self.long = Some(l); - } - #[cfg(not(feature = "unstable-v4"))] - { - self.long = Some(l.trim_start_matches(|c| c == '-')); - } + pub fn long(mut self, l: impl IntoResettable) -> Self { + self.long = l.into_resettable().into_option(); self } @@ -235,21 +216,24 @@ /// # Examples /// /// ```rust - /// # use clap::{Command, Arg}; + /// # use clap::{Command, Arg, ArgAction}; /// let m = Command::new("prog") /// .arg(Arg::new("test") /// .long("test") /// .alias("alias") - /// .takes_value(true)) + /// .action(ArgAction::Set)) /// .get_matches_from(vec![ /// "prog", "--alias", "cool" /// ]); - /// assert!(m.contains_id("test")); - /// assert_eq!(m.value_of("test"), Some("cool")); + /// assert_eq!(m.get_one::("test").unwrap(), "cool"); /// ``` #[must_use] - pub fn alias>(mut self, name: S) -> Self { - self.aliases.push((name.into(), false)); + pub fn alias(mut self, name: impl IntoResettable) -> Self { + if let Some(name) = name.into_resettable().into_option() { + self.aliases.push((name, false)); + } else { + self.aliases.clear(); + } self } @@ -261,23 +245,25 @@ /// # Examples /// /// ```rust - /// # use clap::{Command, Arg}; + /// # use clap::{Command, Arg, ArgAction}; /// let m = Command::new("prog") /// .arg(Arg::new("test") /// .short('t') /// .short_alias('e') - /// .takes_value(true)) + /// .action(ArgAction::Set)) /// .get_matches_from(vec![ /// "prog", "-e", "cool" /// ]); - /// assert!(m.contains_id("test")); - /// assert_eq!(m.value_of("test"), Some("cool")); + /// assert_eq!(m.get_one::("test").unwrap(), "cool"); /// ``` #[must_use] - pub fn short_alias(mut self, name: char) -> Self { - assert!(name != '-', "short alias name cannot be `-`"); - - self.short_aliases.push((name, false)); + pub fn short_alias(mut self, name: impl IntoResettable) -> Self { + if let Some(name) = name.into_resettable().into_option() { + debug_assert!(name != '-', "short alias name cannot be `-`"); + self.short_aliases.push((name, false)); + } else { + self.short_aliases.clear(); + } self } @@ -293,7 +279,7 @@ /// let m = Command::new("prog") /// .arg(Arg::new("test") /// .long("test") - /// .aliases(&["do-stuff", "do-tests", "tests"]) + /// .aliases(["do-stuff", "do-tests", "tests"]) /// .action(ArgAction::SetTrue) /// .help("the file to add") /// .required(false)) @@ -303,8 +289,9 @@ /// assert_eq!(*m.get_one::("test").expect("defaulted by clap"), true); /// ``` #[must_use] - pub fn aliases(mut self, names: &[&'help str]) -> Self { - self.aliases.extend(names.iter().map(|&x| (x, false))); + pub fn aliases(mut self, names: impl IntoIterator>) -> Self { + self.aliases + .extend(names.into_iter().map(|x| (x.into(), false))); self } @@ -320,7 +307,7 @@ /// let m = Command::new("prog") /// .arg(Arg::new("test") /// .short('t') - /// .short_aliases(&['e', 's']) + /// .short_aliases(['e', 's']) /// .action(ArgAction::SetTrue) /// .help("the file to add") /// .required(false)) @@ -330,10 +317,10 @@ /// assert_eq!(*m.get_one::("test").expect("defaulted by clap"), true); /// ``` #[must_use] - pub fn short_aliases(mut self, names: &[char]) -> Self { + pub fn short_aliases(mut self, names: impl IntoIterator) -> Self { for s in names { - assert!(s != &'-', "short alias name cannot be `-`"); - self.short_aliases.push((*s, false)); + debug_assert!(s != '-', "short alias name cannot be `-`"); + self.short_aliases.push((s, false)); } self } @@ -345,22 +332,25 @@ /// # Examples /// /// ```rust - /// # use clap::{Command, Arg}; + /// # use clap::{Command, Arg, ArgAction}; /// let m = 
Command::new("prog") /// .arg(Arg::new("test") /// .visible_alias("something-awesome") /// .long("test") - /// .takes_value(true)) + /// .action(ArgAction::Set)) /// .get_matches_from(vec![ /// "prog", "--something-awesome", "coffee" /// ]); - /// assert!(m.contains_id("test")); - /// assert_eq!(m.value_of("test"), Some("coffee")); + /// assert_eq!(m.get_one::("test").unwrap(), "coffee"); /// ``` /// [`Command::alias`]: Arg::alias() #[must_use] - pub fn visible_alias>(mut self, name: S) -> Self { - self.aliases.push((name.into(), true)); + pub fn visible_alias(mut self, name: impl IntoResettable) -> Self { + if let Some(name) = name.into_resettable().into_option() { + self.aliases.push((name, true)); + } else { + self.aliases.clear(); + } self } @@ -371,23 +361,25 @@ /// # Examples /// /// ```rust - /// # use clap::{Command, Arg}; + /// # use clap::{Command, Arg, ArgAction}; /// let m = Command::new("prog") /// .arg(Arg::new("test") /// .long("test") /// .visible_short_alias('t') - /// .takes_value(true)) + /// .action(ArgAction::Set)) /// .get_matches_from(vec![ /// "prog", "-t", "coffee" /// ]); - /// assert!(m.contains_id("test")); - /// assert_eq!(m.value_of("test"), Some("coffee")); + /// assert_eq!(m.get_one::("test").unwrap(), "coffee"); /// ``` #[must_use] - pub fn visible_short_alias(mut self, name: char) -> Self { - assert!(name != '-', "short alias name cannot be `-`"); - - self.short_aliases.push((name, true)); + pub fn visible_short_alias(mut self, name: impl IntoResettable) -> Self { + if let Some(name) = name.into_resettable().into_option() { + debug_assert!(name != '-', "short alias name cannot be `-`"); + self.short_aliases.push((name, true)); + } else { + self.short_aliases.clear(); + } self } @@ -403,7 +395,7 @@ /// .arg(Arg::new("test") /// .long("test") /// .action(ArgAction::SetTrue) - /// .visible_aliases(&["something", "awesome", "cool"])) + /// .visible_aliases(["something", "awesome", "cool"])) /// .get_matches_from(vec![ /// "prog", "--awesome" /// ]); @@ -411,8 +403,9 @@ /// ``` /// [`Command::aliases`]: Arg::aliases() #[must_use] - pub fn visible_aliases(mut self, names: &[&'help str]) -> Self { - self.aliases.extend(names.iter().map(|n| (*n, true))); + pub fn visible_aliases(mut self, names: impl IntoIterator>) -> Self { + self.aliases + .extend(names.into_iter().map(|n| (n.into(), true))); self } @@ -428,17 +421,17 @@ /// .arg(Arg::new("test") /// .long("test") /// .action(ArgAction::SetTrue) - /// .visible_short_aliases(&['t', 'e'])) + /// .visible_short_aliases(['t', 'e'])) /// .get_matches_from(vec![ /// "prog", "-t" /// ]); /// assert_eq!(*m.get_one::("test").expect("defaulted by clap"), true); /// ``` #[must_use] - pub fn visible_short_aliases(mut self, names: &[char]) -> Self { + pub fn visible_short_aliases(mut self, names: impl IntoIterator) -> Self { for n in names { - assert!(n != &'-', "short alias name cannot be `-`"); - self.short_aliases.push((*n, true)); + debug_assert!(n != '-', "short alias name cannot be `-`"); + self.short_aliases.push((n, true)); } self } @@ -455,8 +448,8 @@ /// **NOTE:** This is only meant to be used for positional arguments and shouldn't to be used /// with [`Arg::short`] or [`Arg::long`]. /// - /// **NOTE:** When utilized with [`Arg::multiple_values(true)`], only the **last** positional argument - /// may be defined as multiple (i.e. with the highest index) + /// **NOTE:** When utilized with [`Arg::num_args(1..)`], only the **last** positional argument + /// may be defined as having a variable number of arguments (i.e. 
with the highest index) /// /// # Panics /// @@ -474,32 +467,63 @@ /// ``` /// /// ```rust - /// # use clap::{Command, Arg}; + /// # use clap::{Command, Arg, ArgAction}; /// let m = Command::new("prog") /// .arg(Arg::new("mode") /// .index(1)) /// .arg(Arg::new("debug") - /// .long("debug")) + /// .long("debug") + /// .action(ArgAction::SetTrue)) /// .get_matches_from(vec![ /// "prog", "--debug", "fast" /// ]); /// /// assert!(m.contains_id("mode")); - /// assert_eq!(m.value_of("mode"), Some("fast")); // notice index(1) means "first positional" - /// // *not* first argument + /// assert_eq!(m.get_one::("mode").unwrap(), "fast"); // notice index(1) means "first positional" + /// // *not* first argument /// ``` /// [`Arg::short`]: Arg::short() /// [`Arg::long`]: Arg::long() - /// [`Arg::multiple_values(true)`]: Arg::multiple_values() - /// [`panic!`]: https://doc.rust-lang.org/std/macro.panic!.html + /// [`Arg::num_args(true)`]: Arg::num_args() /// [`Command`]: crate::Command #[inline] #[must_use] - pub fn index(mut self, idx: usize) -> Self { - self.index = Some(idx); + pub fn index(mut self, idx: impl IntoResettable) -> Self { + self.index = idx.into_resettable().into_option(); self } + /// This is a "VarArg" and everything that follows should be captured by it, as if the user had + /// used a `--`. + /// + /// **NOTE:** To start the trailing "VarArg" on unknown flags (and not just a positional + /// value), set [`allow_hyphen_values`][Arg::allow_hyphen_values]. Either way, users still + /// have the option to explicitly escape ambiguous arguments with `--`. + /// + /// **NOTE:** [`Arg::value_delimiter`] still applies if set. + /// + /// **NOTE:** Setting this requires [`Arg::num_args(..)`]. + /// + /// # Examples + /// + /// ```rust + /// # use clap::{Command, arg}; + /// let m = Command::new("myprog") + /// .arg(arg!( ... "commands to run").trailing_var_arg(true)) + /// .get_matches_from(vec!["myprog", "arg1", "-r", "val1"]); + /// + /// let trail: Vec<_> = m.get_many::("cmd").unwrap().collect(); + /// assert_eq!(trail, ["arg1", "-r", "val1"]); + /// ``` + /// [`Arg::num_args(..)`]: crate::Arg::num_args() + pub fn trailing_var_arg(self, yes: bool) -> Self { + if yes { + self.setting(ArgSettings::TrailingVarArg) + } else { + self.unset_setting(ArgSettings::TrailingVarArg) + } + } + /// This arg is the last, or final, positional argument (i.e. has the highest /// index) and is *only* able to be accessed via the `--` syntax (i.e. `$ prog args -- /// last_arg`). @@ -517,7 +541,7 @@ /// /// **NOTE**: This setting only applies to positional arguments, and has no effect on OPTIONS /// - /// **NOTE:** Setting this requires [`Arg::takes_value`] + /// **NOTE:** Setting this requires [taking values][Arg::num_args] /// /// **CAUTION:** Using this setting *and* having child subcommands is not /// recommended with the exception of *also* using @@ -528,9 +552,9 @@ /// # Examples /// /// ```rust - /// # use clap::Arg; + /// # use clap::{Arg, ArgAction}; /// Arg::new("args") - /// .takes_value(true) + /// .action(ArgAction::Set) /// .last(true) /// # ; /// ``` @@ -539,12 +563,12 @@ /// and requires that the `--` syntax be used to access it early. 
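For reference, a builder-only sketch equivalent to the `arg!`-based `trailing_var_arg` example above, assuming the argument id `cmd` that the example retrieves; this is illustrative and not part of the vendored source:

```rust
use clap::{Arg, Command};

let m = Command::new("myprog")
    .arg(
        Arg::new("cmd")
            .help("commands to run")
            .num_args(1..)            // one or more values; trailing_var_arg requires num_args
            .trailing_var_arg(true),  // once started, capture everything that follows
    )
    .get_matches_from(["myprog", "arg1", "-r", "val1"]);

let trail: Vec<&str> = m.get_many::<String>("cmd").unwrap().map(String::as_str).collect();
assert_eq!(trail, ["arg1", "-r", "val1"]);
```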
/// /// ```rust - /// # use clap::{Command, Arg}; + /// # use clap::{Command, Arg, ArgAction}; /// let res = Command::new("prog") /// .arg(Arg::new("first")) /// .arg(Arg::new("second")) /// .arg(Arg::new("third") - /// .takes_value(true) + /// .action(ArgAction::Set) /// .last(true)) /// .try_get_matches_from(vec![ /// "prog", "one", "--", "three" @@ -552,20 +576,20 @@ /// /// assert!(res.is_ok()); /// let m = res.unwrap(); - /// assert_eq!(m.value_of("third"), Some("three")); - /// assert!(m.value_of("second").is_none()); + /// assert_eq!(m.get_one::("third").unwrap(), "three"); + /// assert_eq!(m.get_one::("second"), None); /// ``` /// /// Even if the positional argument marked `Last` is the only argument left to parse, /// failing to use the `--` syntax results in an error. /// /// ```rust - /// # use clap::{Command, Arg, ErrorKind}; + /// # use clap::{Command, Arg, error::ErrorKind, ArgAction}; /// let res = Command::new("prog") /// .arg(Arg::new("first")) /// .arg(Arg::new("second")) /// .arg(Arg::new("third") - /// .takes_value(true) + /// .action(ArgAction::Set) /// .last(true)) /// .try_get_matches_from(vec![ /// "prog", "one", "two", "three" @@ -575,7 +599,7 @@ /// assert_eq!(res.unwrap_err().kind(), ErrorKind::UnknownArgument); /// ``` /// [index]: Arg::index() - /// [`UnknownArgument`]: crate::ErrorKind::UnknownArgument + /// [`UnknownArgument`]: crate::error::ErrorKind::UnknownArgument #[inline] #[must_use] pub fn last(self, yes: bool) -> Self { @@ -610,11 +634,11 @@ /// Setting required requires that the argument be used at runtime. /// /// ```rust - /// # use clap::{Command, Arg}; + /// # use clap::{Command, Arg, ArgAction}; /// let res = Command::new("prog") /// .arg(Arg::new("cfg") /// .required(true) - /// .takes_value(true) + /// .action(ArgAction::Set) /// .long("config")) /// .try_get_matches_from(vec![ /// "prog", "--config", "file.conf", @@ -626,11 +650,11 @@ /// Setting required and then *not* supplying that argument at runtime is an error. /// /// ```rust - /// # use clap::{Command, Arg, ErrorKind}; + /// # use clap::{Command, Arg, error::ErrorKind, ArgAction}; /// let res = Command::new("prog") /// .arg(Arg::new("cfg") /// .required(true) - /// .takes_value(true) + /// .action(ArgAction::Set) /// .long("config")) /// .try_get_matches_from(vec![ /// "prog" @@ -669,10 +693,10 @@ /// required /// /// ```rust - /// # use clap::{Command, Arg}; + /// # use clap::{Command, Arg, ArgAction}; /// let res = Command::new("prog") /// .arg(Arg::new("cfg") - /// .takes_value(true) + /// .action(ArgAction::Set) /// .requires("input") /// .long("config")) /// .arg(Arg::new("input")) @@ -686,10 +710,10 @@ /// Setting [`Arg::requires(name)`] and *not* supplying that argument is an error. /// /// ```rust - /// # use clap::{Command, Arg, ErrorKind}; + /// # use clap::{Command, Arg, error::ErrorKind, ArgAction}; /// let res = Command::new("prog") /// .arg(Arg::new("cfg") - /// .takes_value(true) + /// .action(ArgAction::Set) /// .requires("input") /// .long("config")) /// .arg(Arg::new("input")) @@ -704,8 +728,12 @@ /// [Conflicting]: Arg::conflicts_with() /// [override]: Arg::overrides_with() #[must_use] - pub fn requires(mut self, arg_id: T) -> Self { - self.requires.push((ArgPredicate::IsPresent, arg_id.into())); + pub fn requires(mut self, arg_id: impl IntoResettable) -> Self { + if let Some(arg_id) = arg_id.into_resettable().into_option() { + self.requires.push((ArgPredicate::IsPresent, arg_id)); + } else { + self.requires.clear(); + } self } @@ -724,10 +752,10 @@ /// is an error. 
/// /// ```rust - /// # use clap::{Command, Arg, ErrorKind}; + /// # use clap::{Command, Arg, error::ErrorKind, ArgAction}; /// let res = Command::new("prog") /// .arg(Arg::new("exclusive") - /// .takes_value(true) + /// .action(ArgAction::Set) /// .exclusive(true) /// .long("exclusive")) /// .arg(Arg::new("debug") @@ -793,86 +821,14 @@ } } - /// Deprecated, replaced with [`Arg::action`] ([Issue #3772](https://github.com/clap-rs/clap/issues/3772)) - #[inline] - #[must_use] - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.2.0", - note = "Replaced with `Arg::action` (Issue #3772) - -Builder: replace `arg.multiple_occurrences(true)` with `arg.action(ArgAction::Append)` when taking a value and `arg.action(ArgAction::Count)` with `matches.get_count` when not -" - ) - )] - pub fn multiple_occurrences(self, yes: bool) -> Self { - if yes { - self.setting(ArgSettings::MultipleOccurrences) - } else { - self.unset_setting(ArgSettings::MultipleOccurrences) - } - } - - /// Deprecated, for flags, this is replaced with `RangedI64ValueParser::range` - /// - /// Derive: `#[clap(action = ArgAction::Count, value_parser = value_parser!(u8).range(..max))]` - /// - /// Builder: `arg.action(ArgAction::Count).value_parser(value_parser!(u8).range(..max))` - #[inline] - #[must_use] - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.2.0", - note = "For flags, this is replaced with `RangedI64ValueParser::range` - -Derive: `#[clap(action = ArgAction::Count, value_parser = value_parser!(u8).range(..max))]` - -Builder: `arg.action(ArgAction::Count).value_parser(value_parser!(u8).range(..max))` -" - ) - )] - pub fn max_occurrences(mut self, qty: usize) -> Self { - self.max_occurs = Some(qty); - if qty > 1 { - self.multiple_occurrences(true) - } else { - self - } - } - - /// Check if the [`ArgSettings`] variant is currently set on the argument. - /// - /// [`ArgSettings`]: crate::ArgSettings #[inline] - pub fn is_set(&self, s: ArgSettings) -> bool { + pub(crate) fn is_set(&self, s: ArgSettings) -> bool { self.settings.is_set(s) } - /// Apply a setting to the argument. - /// - /// See [`ArgSettings`] for a full list of possibilities and examples. - /// - /// # Examples - /// - /// ```no_run - /// # use clap::{Arg, ArgSettings}; - /// Arg::new("config") - /// .setting(ArgSettings::Required) - /// .setting(ArgSettings::TakesValue) - /// # ; - /// ``` - /// - /// ```no_run - /// # use clap::{Arg, ArgSettings}; - /// Arg::new("config") - /// .setting(ArgSettings::Required | ArgSettings::TakesValue) - /// # ; - /// ``` #[inline] #[must_use] - pub fn setting(mut self, setting: F) -> Self + pub(crate) fn setting(mut self, setting: F) -> Self where F: Into, { @@ -880,29 +836,9 @@ self } - /// Remove a setting from the argument. - /// - /// See [`ArgSettings`] for a full list of possibilities and examples. - /// - /// # Examples - /// - /// ```no_run - /// # use clap::{Arg, ArgSettings}; - /// Arg::new("config") - /// .unset_setting(ArgSettings::Required) - /// .unset_setting(ArgSettings::TakesValue) - /// # ; - /// ``` - /// - /// ```no_run - /// # use clap::{Arg, ArgSettings}; - /// Arg::new("config") - /// .unset_setting(ArgSettings::Required | ArgSettings::TakesValue) - /// # ; - /// ``` #[inline] #[must_use] - pub fn unset_setting(mut self, setting: F) -> Self + pub(crate) fn unset_setting(mut self, setting: F) -> Self where F: Into, { @@ -912,49 +848,15 @@ } /// # Value Handling -impl<'help> Arg<'help> { - /// Specifies that the argument takes a value at run time. 
- /// - /// **NOTE:** values for arguments may be specified in any of the following methods - /// - /// - Using a space such as `-o value` or `--option value` - /// - Using an equals and no space such as `-o=value` or `--option=value` - /// - Use a short and no space such as `-ovalue` - /// - /// **NOTE:** By default, args which allow [multiple values] are delimited by commas, meaning - /// `--option=val1,val2,val3` is three values for the `--option` argument. If you wish to - /// change the delimiter to another character you can use [`Arg::value_delimiter(char)`], - /// alternatively you can turn delimiting values **OFF** by using - /// [`Arg::use_value_delimiter(false)`][Arg::use_value_delimiter] +impl Arg { + /// Specify how to react to an argument when parsing it. /// - /// # Examples - /// - /// ```rust - /// # use clap::{Command, Arg}; - /// let m = Command::new("prog") - /// .arg(Arg::new("mode") - /// .long("mode") - /// .takes_value(true)) - /// .get_matches_from(vec![ - /// "prog", "--mode", "fast" - /// ]); + /// [ArgAction][crate::ArgAction] controls things like + /// - Overwriting previous values with new ones + /// - Appending new values to all previous ones + /// - Counting how many times a flag occurs /// - /// assert!(m.contains_id("mode")); - /// assert_eq!(m.value_of("mode"), Some("fast")); - /// ``` - /// [`Arg::value_delimiter(char)`]: Arg::value_delimiter() - /// [multiple values]: Arg::multiple_values - #[inline] - #[must_use] - pub fn takes_value(self, yes: bool) -> Self { - if yes { - self.setting(ArgSettings::TakesValue) - } else { - self.unset_setting(ArgSettings::TakesValue) - } - } - - /// Specify the behavior when parsing an argument + /// The default action is `ArgAction::Set` /// /// # Examples /// @@ -965,12 +867,11 @@ /// .arg( /// Arg::new("flag") /// .long("flag") - /// .action(clap::ArgAction::Set) + /// .action(clap::ArgAction::Append) /// ); /// /// let matches = cmd.try_get_matches_from(["mycmd", "--flag", "value"]).unwrap(); /// assert!(matches.contains_id("flag")); - /// assert_eq!(matches.occurrences_of("flag"), 0); /// assert_eq!( /// matches.get_many::("flag").unwrap_or_default().map(|v| v.as_str()).collect::>(), /// vec!["value"] @@ -978,25 +879,29 @@ /// ``` #[inline] #[must_use] - pub fn action(mut self, action: ArgAction) -> Self { - self.action = Some(action); + pub fn action(mut self, action: impl IntoResettable) -> Self { + self.action = action.into_resettable().into_option(); self } - /// Specify the type of the argument. + /// Specify the typed behavior of the argument. /// /// This allows parsing and validating a value before storing it into - /// [`ArgMatches`][crate::ArgMatches]. + /// [`ArgMatches`][crate::ArgMatches] as the given type. /// - /// See also - /// - [`value_parser!`][crate::value_parser!] for auto-selecting a value parser for a given type - /// - [`BoolishValueParser`][crate::builder::BoolishValueParser], and [`FalseyValueParser`][crate::builder::FalseyValueParser] for alternative `bool` implementations - /// - [`NonEmptyStringValueParser`][crate::builder::NonEmptyStringValueParser] for basic validation for strings - /// - [`RangedI64ValueParser`][crate::builder::RangedI64ValueParser] and [`RangedU64ValueParser`][crate::builder::RangedU64ValueParser] for numeric ranges - /// - [`EnumValueParser`][crate::builder::EnumValueParser] and [`PossibleValuesParser`][crate::builder::PossibleValuesParser] for static enumerated values + /// Possible value parsers include: + /// - [`value_parser!(T)`][crate::value_parser!] 
for auto-selecting a value parser for a given type + /// - Or [range expressions like `0..=1`][std::ops::RangeBounds] as a shorthand for [`RangedI64ValueParser`][crate::builder::RangedI64ValueParser] + /// - `Fn(&str) -> Result` + /// - `[&str]` and [`PossibleValuesParser`][crate::builder::PossibleValuesParser] for static enumerated values + /// - [`BoolishValueParser`][crate::builder::BoolishValueParser], and [`FalseyValueParser`][crate::builder::FalseyValueParser] for alternative `bool` implementations + /// - [`NonEmptyStringValueParser`][crate::builder::NonEmptyStringValueParser] for basic validation for strings /// - or any other [`TypedValueParser`][crate::builder::TypedValueParser] implementation /// + /// The default value is [`ValueParser::string`][crate::builder::ValueParser::string]. + /// /// ```rust + /// # use clap::ArgAction; /// let mut cmd = clap::Command::new("raw") /// .arg( /// clap::Arg::new("color") @@ -1008,14 +913,14 @@ /// clap::Arg::new("hostname") /// .long("hostname") /// .value_parser(clap::builder::NonEmptyStringValueParser::new()) - /// .takes_value(true) + /// .action(ArgAction::Set) /// .required(true) /// ) /// .arg( /// clap::Arg::new("port") /// .long("port") /// .value_parser(clap::value_parser!(u16).range(3000..)) - /// .takes_value(true) + /// .action(ArgAction::Set) /// .required(true) /// ); /// @@ -1035,134 +940,145 @@ /// .expect("required"); /// assert_eq!(port, 3001); /// ``` - pub fn value_parser(mut self, parser: impl Into) -> Self { - self.value_parser = Some(parser.into()); + pub fn value_parser(mut self, parser: impl IntoResettable) -> Self { + self.value_parser = parser.into_resettable().into_option(); self } - /// Specifies that the argument may have an unknown number of values + /// Specifies the number of arguments parsed per occurrence /// - /// Without any other settings, this argument may appear only *once*. + /// For example, if you had a `-f ` argument where you wanted exactly 3 'files' you would + /// set `.num_args(3)`, and this argument wouldn't be satisfied unless the user + /// provided 3 and only 3 values. /// - /// For example, `--opt val1 val2` is allowed, but `--opt val1 val2 --opt val3` is not. + /// Users may specify values for arguments in any of the following methods /// - /// **NOTE:** Setting this requires [`Arg::takes_value`]. + /// - Using a space such as `-o value` or `--option value` + /// - Using an equals and no space such as `-o=value` or `--option=value` + /// - Use a short and no space such as `-ovalue` /// /// **WARNING:** /// - /// Setting `multiple_values` for an argument that takes a value, but with no other details can - /// be dangerous in some circumstances. Because multiple values are allowed, - /// `--option val1 val2 val3` is perfectly valid. Be careful when designing a CLI where - /// positional arguments are *also* expected as `clap` will continue parsing *values* until one - /// of the following happens: + /// Setting a variable number of values (e.g. `1..=10`) for an argument without + /// other details can be dangerous in some circumstances. Because multiple values are + /// allowed, `--option val1 val2 val3` is perfectly valid. 
Be careful when designing a CLI + /// where **positional arguments** or **subcommands** are *also* expected as `clap` will continue + /// parsing *values* until one of the following happens: /// - /// - It reaches the [maximum number of values] - /// - It reaches a [specific number of values] + /// - It reaches the maximum number of values + /// - It reaches a specific number of values /// - It finds another flag or option (i.e. something that starts with a `-`) - /// - It reaches a [value terminator][Arg::value_terminator] is reached - /// - /// Alternatively, [require a delimiter between values][Arg::require_delimiter]. - /// - /// **WARNING:** - /// - /// When using args with `multiple_values` and [`subcommands`], one needs to consider the - /// possibility of an argument value being the same as a valid subcommand. By default `clap` will - /// parse the argument in question as a value *only if* a value is possible at that moment. - /// Otherwise it will be parsed as a subcommand. In effect, this means using `multiple_values` with no - /// additional parameters and a value that coincides with a subcommand name, the subcommand - /// cannot be called unless another argument is passed between them. - /// - /// As an example, consider a CLI with an option `--ui-paths=...` and subcommand `signer` - /// - /// The following would be parsed as values to `--ui-paths`. - /// - /// ```text - /// $ program --ui-paths path1 path2 signer - /// ``` + /// - It reaches the [`Arg::value_terminator`] if set /// - /// This is because `--ui-paths` accepts multiple values. `clap` will continue parsing values - /// until another argument is reached and it knows `--ui-paths` is done parsing. - /// - /// By adding additional parameters to `--ui-paths` we can solve this issue. Consider adding - /// [`Arg::number_of_values(1)`] or using *only* [`ArgAction::Append`]. The following are all - /// valid, and `signer` is parsed as a subcommand in the first case, but a value in the second - /// case. - /// - /// ```text - /// $ program --ui-paths path1 signer - /// $ program --ui-paths path1 --ui-paths signer signer - /// ``` + /// Alternatively, + /// - Use a delimiter between values with [Arg::value_delimiter] + /// - Require a flag occurrence per value with [`ArgAction::Append`] + /// - Require positional arguments to appear after `--` with [`Arg::last`] /// /// # Examples /// - /// An example with options - /// + /// Option: /// ```rust /// # use clap::{Command, Arg}; /// let m = Command::new("prog") - /// .arg(Arg::new("file") - /// .takes_value(true) - /// .multiple_values(true) - /// .short('F')) + /// .arg(Arg::new("mode") + /// .long("mode") + /// .num_args(1)) /// .get_matches_from(vec![ - /// "prog", "-F", "file1", "file2", "file3" + /// "prog", "--mode", "fast" /// ]); /// - /// assert!(m.contains_id("file")); - /// let files: Vec<_> = m.values_of("file").unwrap().collect(); - /// assert_eq!(files, ["file1", "file2", "file3"]); + /// assert_eq!(m.get_one::("mode").unwrap(), "fast"); /// ``` /// - /// Although `multiple_values` has been specified, we cannot use the argument more than once. 
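`num_args` also accepts ranges and combines with typed value parsers; a short sketch (names are illustrative, not from the diff) of a bounded range collected as integers:

```rust
use clap::{value_parser, Arg, ArgAction, Command};

let m = Command::new("prog")
    .arg(
        Arg::new("dim")
            .long("dim")
            .action(ArgAction::Set)
            .num_args(1..=3)                   // between one and three values per occurrence
            .value_parser(value_parser!(u32)), // each value is parsed and validated as u32
    )
    .get_matches_from(["prog", "--dim", "800", "600"]);

let dims: Vec<u32> = m.get_many::<u32>("dim").unwrap().copied().collect();
assert_eq!(dims, [800, 600]);
```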
+ /// Flag/option hybrid (see also [default_missing_value][Arg::default_missing_value]) + /// ```rust + /// # use clap::{Command, Arg, error::ErrorKind, ArgAction}; + /// let cmd = Command::new("prog") + /// .arg(Arg::new("mode") + /// .long("mode") + /// .default_missing_value("slow") + /// .default_value("plaid") + /// .num_args(0..=1)); + /// + /// let m = cmd.clone() + /// .get_matches_from(vec![ + /// "prog", "--mode", "fast" + /// ]); + /// assert_eq!(m.get_one::("mode").unwrap(), "fast"); + /// + /// let m = cmd.clone() + /// .get_matches_from(vec![ + /// "prog", "--mode", + /// ]); + /// assert_eq!(m.get_one::("mode").unwrap(), "slow"); + /// + /// let m = cmd.clone() + /// .get_matches_from(vec![ + /// "prog", + /// ]); + /// assert_eq!(m.get_one::("mode").unwrap(), "plaid"); + /// ``` /// + /// Tuples /// ```rust - /// # use clap::{Command, Arg, ErrorKind}; - /// let res = Command::new("prog") + /// # use clap::{Command, Arg, error::ErrorKind, ArgAction}; + /// let cmd = Command::new("prog") /// .arg(Arg::new("file") - /// .takes_value(true) - /// .multiple_values(true) - /// .short('F')) - /// .try_get_matches_from(vec![ - /// "prog", "-F", "file1", "-F", "file2", "-F", "file3" + /// .action(ArgAction::Set) + /// .num_args(2) + /// .short('F')); + /// + /// let m = cmd.clone() + /// .get_matches_from(vec![ + /// "prog", "-F", "in-file", "out-file" /// ]); + /// assert_eq!( + /// m.get_many::("file").unwrap_or_default().map(|v| v.as_str()).collect::>(), + /// vec!["in-file", "out-file"] + /// ); /// - /// assert!(res.is_err()); - /// assert_eq!(res.unwrap_err().kind(), ErrorKind::UnexpectedMultipleUsage) + /// let res = cmd.clone() + /// .try_get_matches_from(vec![ + /// "prog", "-F", "file1" + /// ]); + /// assert_eq!(res.unwrap_err().kind(), ErrorKind::WrongNumberOfValues); /// ``` /// - /// A common mistake is to define an option which allows multiple values, and a positional + /// A common mistake is to define an option which allows multiple values and a positional /// argument. - /// /// ```rust - /// # use clap::{Command, Arg}; - /// let m = Command::new("prog") + /// # use clap::{Command, Arg, ArgAction}; + /// let cmd = Command::new("prog") /// .arg(Arg::new("file") - /// .takes_value(true) - /// .multiple_values(true) + /// .action(ArgAction::Set) + /// .num_args(0..) /// .short('F')) - /// .arg(Arg::new("word")) - /// .get_matches_from(vec![ - /// "prog", "-F", "file1", "file2", "file3", "word" - /// ]); + /// .arg(Arg::new("word")); /// - /// assert!(m.contains_id("file")); - /// let files: Vec<_> = m.values_of("file").unwrap().collect(); + /// let m = cmd.clone().get_matches_from(vec![ + /// "prog", "-F", "file1", "file2", "file3", "word" + /// ]); + /// let files: Vec<_> = m.get_many::("file").unwrap().collect(); /// assert_eq!(files, ["file1", "file2", "file3", "word"]); // wait...what?! /// assert!(!m.contains_id("word")); // but we clearly used word! - /// ``` - /// - /// The problem is `clap` doesn't know when to stop parsing values for "files". This is further - /// compounded by if we'd said `word -F file1 file2` it would have worked fine, so it would - /// appear to only fail sometimes...not good! /// - /// A solution for the example above is to limit how many values with a [maximum], or [specific] - /// number, or to say [`ArgAction::Append`] is ok, but multiple values is not. 
+ /// // but this works + /// let m = cmd.clone().get_matches_from(vec![ + /// "prog", "word", "-F", "file1", "file2", "file3", + /// ]); + /// let files: Vec<_> = m.get_many::("file").unwrap().collect(); + /// assert_eq!(files, ["file1", "file2", "file3"]); + /// assert_eq!(m.get_one::("word").unwrap(), "word"); + /// ``` + /// The problem is `clap` doesn't know when to stop parsing values for "file". /// + /// A solution for the example above is to limit how many values with a maximum, or specific + /// number, or to say [`ArgAction::Append`] is ok, but multiple values are not. /// ```rust /// # use clap::{Command, Arg, ArgAction}; /// let m = Command::new("prog") /// .arg(Arg::new("file") - /// .takes_value(true) /// .action(ArgAction::Append) /// .short('F')) /// .arg(Arg::new("word")) @@ -1170,278 +1086,80 @@ /// "prog", "-F", "file1", "-F", "file2", "-F", "file3", "word" /// ]); /// - /// assert!(m.contains_id("file")); - /// let files: Vec<_> = m.values_of("file").unwrap().collect(); + /// let files: Vec<_> = m.get_many::("file").unwrap().collect(); /// assert_eq!(files, ["file1", "file2", "file3"]); - /// assert!(m.contains_id("word")); - /// assert_eq!(m.value_of("word"), Some("word")); + /// assert_eq!(m.get_one::("word").unwrap(), "word"); /// ``` - /// - /// As a final example, let's fix the above error and get a pretty message to the user :) - /// - /// ```rust - /// # use clap::{Command, Arg, ErrorKind, ArgAction}; - /// let res = Command::new("prog") - /// .arg(Arg::new("file") - /// .takes_value(true) - /// .action(ArgAction::Append) - /// .short('F')) - /// .arg(Arg::new("word")) - /// .try_get_matches_from(vec![ - /// "prog", "-F", "file1", "file2", "file3", "word" - /// ]); - /// - /// assert!(res.is_err()); - /// assert_eq!(res.unwrap_err().kind(), ErrorKind::UnknownArgument); - /// ``` - /// - /// [`subcommands`]: crate::Command::subcommand() - /// [`Arg::number_of_values(1)`]: Arg::number_of_values() - /// [maximum number of values]: Arg::max_values() - /// [specific number of values]: Arg::number_of_values() - /// [maximum]: Arg::max_values() - /// [specific]: Arg::number_of_values() #[inline] #[must_use] - pub fn multiple_values(self, yes: bool) -> Self { - if yes { - self.setting(ArgSettings::MultipleValues) - } else { - self.unset_setting(ArgSettings::MultipleValues) - } + pub fn num_args(mut self, qty: impl IntoResettable) -> Self { + self.num_vals = qty.into_resettable().into_option(); + self } - /// The number of values allowed for this argument. - /// - /// For example, if you had a - /// `-f ` argument where you wanted exactly 3 'files' you would set - /// `.number_of_values(3)`, and this argument wouldn't be satisfied unless the user provided - /// 3 and only 3 values. + #[doc(hidden)] + #[cfg_attr( + feature = "deprecated", + deprecated(since = "4.0.0", note = "Replaced with `Arg::num_args`") + )] + pub fn number_of_values(self, qty: usize) -> Self { + self.num_args(qty) + } + + /// Placeholder for the argument's value in the help message / usage. /// - /// **NOTE:** Does *not* require [`Arg::multiple_occurrences(true)`] to be set. Setting - /// [`Arg::multiple_occurrences(true)`] would allow `-f -f ` where - /// as *not* setting it would only allow one occurrence of this argument. + /// This name is cosmetic only; the name is **not** used to access arguments. + /// This setting can be very helpful when describing the type of input the user should be + /// using, such as `FILE`, `INTERFACE`, etc. 
Although not required, it's somewhat convention to + /// use all capital letters for the value name. /// - /// **NOTE:** implicitly sets [`Arg::takes_value(true)`] and [`Arg::multiple_values(true)`]. + /// **NOTE:** implicitly sets [`Arg::action(ArgAction::Set)`] /// /// # Examples /// /// ```rust /// # use clap::{Command, Arg}; - /// Arg::new("file") - /// .short('f') - /// .number_of_values(3); + /// Arg::new("cfg") + /// .long("config") + /// .value_name("FILE") + /// # ; /// ``` /// - /// Not supplying the correct number of values is an error - /// - /// ```rust - /// # use clap::{Command, Arg, ErrorKind}; - /// let res = Command::new("prog") - /// .arg(Arg::new("file") - /// .takes_value(true) - /// .number_of_values(2) - /// .short('F')) - /// .try_get_matches_from(vec![ - /// "prog", "-F", "file1" - /// ]); - /// - /// assert!(res.is_err()); - /// assert_eq!(res.unwrap_err().kind(), ErrorKind::WrongNumberOfValues); - /// ``` - /// [`Arg::multiple_occurrences(true)`]: Arg::multiple_occurrences() - #[inline] - #[must_use] - pub fn number_of_values(mut self, qty: usize) -> Self { - self.num_vals = Some(qty); - self.takes_value(true).multiple_values(true) - } - - /// The *maximum* number of values are for this argument. - /// - /// For example, if you had a - /// `-f ` argument where you wanted up to 3 'files' you would set `.max_values(3)`, and - /// this argument would be satisfied if the user provided, 1, 2, or 3 values. - /// - /// **NOTE:** This does *not* implicitly set [`Arg::multiple_occurrences(true)`]. This is because - /// `-o val -o val` is multiple occurrences but a single value and `-o val1 val2` is a single - /// occurrence with multiple values. For positional arguments this **does** set - /// [`Arg::multiple_occurrences(true)`] because there is no way to determine the difference between multiple - /// occurrences and multiple values. - /// - /// # Examples - /// - /// ```rust - /// # use clap::{Command, Arg}; - /// Arg::new("file") - /// .short('f') - /// .max_values(3); - /// ``` - /// - /// Supplying less than the maximum number of values is allowed - /// - /// ```rust - /// # use clap::{Command, Arg}; - /// let res = Command::new("prog") - /// .arg(Arg::new("file") - /// .takes_value(true) - /// .max_values(3) - /// .short('F')) - /// .try_get_matches_from(vec![ - /// "prog", "-F", "file1", "file2" - /// ]); - /// - /// assert!(res.is_ok()); - /// let m = res.unwrap(); - /// let files: Vec<_> = m.values_of("file").unwrap().collect(); - /// assert_eq!(files, ["file1", "file2"]); - /// ``` - /// - /// Supplying more than the maximum number of values is an error - /// - /// ```rust - /// # use clap::{Command, Arg, ErrorKind}; - /// let res = Command::new("prog") - /// .arg(Arg::new("file") - /// .takes_value(true) - /// .max_values(2) - /// .short('F')) - /// .try_get_matches_from(vec![ - /// "prog", "-F", "file1", "file2", "file3" - /// ]); - /// - /// assert!(res.is_err()); - /// assert_eq!(res.unwrap_err().kind(), ErrorKind::UnknownArgument); - /// ``` - /// [`Arg::multiple_occurrences(true)`]: Arg::multiple_occurrences() - #[inline] - #[must_use] - pub fn max_values(mut self, qty: usize) -> Self { - self.max_vals = Some(qty); - self.takes_value(true).multiple_values(true) - } - - /// The *minimum* number of values for this argument. - /// - /// For example, if you had a - /// `-f ` argument where you wanted at least 2 'files' you would set - /// `.min_values(2)`, and this argument would be satisfied if the user provided, 2 or more - /// values. 
- /// - /// **NOTE:** This does not implicitly set [`Arg::multiple_occurrences(true)`]. This is because - /// `-o val -o val` is multiple occurrences but a single value and `-o val1 val2` is a single - /// occurrence with multiple values. For positional arguments this **does** set - /// [`Arg::multiple_occurrences(true)`] because there is no way to determine the difference between multiple - /// occurrences and multiple values. - /// - /// **NOTE:** Passing a non-zero value is not the same as specifying [`Arg::required(true)`]. - /// This is due to min and max validation only being performed for present arguments, - /// marking them as required will thus perform validation and a min value of 1 - /// is unnecessary, ignored if not required. - /// - /// # Examples - /// - /// ```rust - /// # use clap::{Command, Arg}; - /// Arg::new("file") - /// .short('f') - /// .min_values(3); - /// ``` - /// - /// Supplying more than the minimum number of values is allowed - /// - /// ```rust - /// # use clap::{Command, Arg}; - /// let res = Command::new("prog") - /// .arg(Arg::new("file") - /// .takes_value(true) - /// .min_values(2) - /// .short('F')) - /// .try_get_matches_from(vec![ - /// "prog", "-F", "file1", "file2", "file3" - /// ]); - /// - /// assert!(res.is_ok()); - /// let m = res.unwrap(); - /// let files: Vec<_> = m.values_of("file").unwrap().collect(); - /// assert_eq!(files, ["file1", "file2", "file3"]); - /// ``` - /// - /// Supplying less than the minimum number of values is an error - /// - /// ```rust - /// # use clap::{Command, Arg, ErrorKind}; - /// let res = Command::new("prog") - /// .arg(Arg::new("file") - /// .takes_value(true) - /// .min_values(2) - /// .short('F')) - /// .try_get_matches_from(vec![ - /// "prog", "-F", "file1" - /// ]); - /// - /// assert!(res.is_err()); - /// assert_eq!(res.unwrap_err().kind(), ErrorKind::TooFewValues); - /// ``` - /// [`Arg::multiple_occurrences(true)`]: Arg::multiple_occurrences() - /// [`Arg::required(true)`]: Arg::required() - #[inline] - #[must_use] - pub fn min_values(mut self, qty: usize) -> Self { - self.min_vals = Some(qty); - self.takes_value(true).multiple_values(true) - } - - /// Placeholder for the argument's value in the help message / usage. - /// - /// This name is cosmetic only; the name is **not** used to access arguments. - /// This setting can be very helpful when describing the type of input the user should be - /// using, such as `FILE`, `INTERFACE`, etc. Although not required, it's somewhat convention to - /// use all capital letters for the value name. 
- /// - /// **NOTE:** implicitly sets [`Arg::takes_value(true)`] - /// - /// # Examples - /// - /// ```rust - /// # use clap::{Command, Arg}; - /// Arg::new("cfg") - /// .long("config") - /// .value_name("FILE") - /// # ; - /// ``` - /// - /// ```rust - /// # use clap::{Command, Arg}; - /// let m = Command::new("prog") - /// .arg(Arg::new("config") - /// .long("config") - /// .value_name("FILE") - /// .help("Some help text")) - /// .get_matches_from(vec![ - /// "prog", "--help" - /// ]); - /// ``` - /// Running the above program produces the following output + #[cfg_attr(not(feature = "help"), doc = " ```ignore")] + #[cfg_attr(feature = "help", doc = " ```")] + /// # use clap::{Command, Arg}; + /// let m = Command::new("prog") + /// .arg(Arg::new("config") + /// .long("config") + /// .value_name("FILE") + /// .help("Some help text")) + /// .get_matches_from(vec![ + /// "prog", "--help" + /// ]); + /// ``` + /// Running the above program produces the following output /// /// ```text /// valnames /// - /// USAGE: - /// valnames [OPTIONS] + /// Usage: valnames [OPTIONS] /// - /// OPTIONS: + /// Options: /// --config Some help text /// -h, --help Print help information /// -V, --version Print version information /// ``` - /// [option]: Arg::takes_value() /// [positional]: Arg::index() - /// [`Arg::takes_value(true)`]: Arg::takes_value() + /// [`Arg::action(ArgAction::Set)`]: Arg::action() #[inline] #[must_use] - pub fn value_name(self, name: &'help str) -> Self { - self.value_names(&[name]) + pub fn value_name(mut self, name: impl IntoResettable) -> Self { + if let Some(name) = name.into_resettable().into_option() { + self.value_names([name]) + } else { + self.val_names.clear(); + self + } } /// Placeholders for the argument's values in the help message / usage. @@ -1458,7 +1176,7 @@ /// **Pro Tip:** It may help to use [`Arg::next_line_help(true)`] if there are long, or /// multiple value names in order to not throw off the help text alignment of all options. /// - /// **NOTE:** implicitly sets [`Arg::takes_value(true)`] and [`Arg::multiple_values(true)`]. + /// **NOTE:** implicitly sets [`Arg::action(ArgAction::Set)`] and [`Arg::num_args(1..)`]. 
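Since `value_name`/`value_names` are cosmetic, values are still looked up by the argument id; a small sketch with illustrative names:

```rust
use clap::{Arg, ArgAction, Command};

let m = Command::new("prog")
    .arg(
        Arg::new("io")                          // the id used for lookups
            .long("io-files")
            .value_names(["INFILE", "OUTFILE"]) // display-only placeholders for --help
            .num_args(2)
            .action(ArgAction::Set),
    )
    .get_matches_from(["prog", "--io-files", "in.txt", "out.txt"]);

// Retrieval goes through the id, not the value names.
let files: Vec<&str> = m.get_many::<String>("io").unwrap().map(String::as_str).collect();
assert_eq!(files, ["in.txt", "out.txt"]);
```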
/// /// # Examples /// @@ -1466,15 +1184,16 @@ /// # use clap::{Command, Arg}; /// Arg::new("speed") /// .short('s') - /// .value_names(&["fast", "slow"]); + /// .value_names(["fast", "slow"]); /// ``` /// - /// ```rust + #[cfg_attr(not(feature = "help"), doc = " ```ignore")] + #[cfg_attr(feature = "help", doc = " ```")] /// # use clap::{Command, Arg}; /// let m = Command::new("prog") /// .arg(Arg::new("io") /// .long("io-files") - /// .value_names(&["INFILE", "OUTFILE"])) + /// .value_names(["INFILE", "OUTFILE"])) /// .get_matches_from(vec![ /// "prog", "--help" /// ]); @@ -1485,29 +1204,28 @@ /// ```text /// valnames /// - /// USAGE: - /// valnames [OPTIONS] + /// Usage: valnames [OPTIONS] /// - /// OPTIONS: + /// Options: /// -h, --help Print help information /// --io-files Some help text /// -V, --version Print version information /// ``` /// [`Arg::next_line_help(true)`]: Arg::next_line_help() - /// [`Arg::number_of_values`]: Arg::number_of_values() - /// [`Arg::takes_value(true)`]: Arg::takes_value() - /// [`Arg::multiple_values(true)`]: Arg::multiple_values() - #[must_use] - pub fn value_names(mut self, names: &[&'help str]) -> Self { - self.val_names = names.to_vec(); - self.takes_value(true) + /// [`Arg::num_args`]: Arg::num_args() + /// [`Arg::action(ArgAction::Set)`]: Arg::action() + /// [`Arg::num_args(1..)`]: Arg::num_args() + #[must_use] + pub fn value_names(mut self, names: impl IntoIterator>) -> Self { + self.val_names = names.into_iter().map(|s| s.into()).collect(); + self } /// Provide the shell a hint about how to complete this argument. /// /// See [`ValueHint`][crate::ValueHint] for more information. /// - /// **NOTE:** implicitly sets [`Arg::takes_value(true)`]. + /// **NOTE:** implicitly sets [`Arg::action(ArgAction::Set)`]. /// /// For example, to take a username as argument: /// @@ -1522,173 +1240,23 @@ /// To take a full command line and its arguments (for example, when writing a command wrapper): /// /// ``` - /// # use clap::{Command, Arg, ValueHint}; + /// # use clap::{Command, Arg, ValueHint, ArgAction}; /// Command::new("prog") /// .trailing_var_arg(true) /// .arg( /// Arg::new("command") - /// .takes_value(true) - /// .multiple_values(true) + /// .action(ArgAction::Set) + /// .num_args(1..) 
/// .value_hint(ValueHint::CommandWithArguments) /// ); /// ``` #[must_use] - pub fn value_hint(mut self, value_hint: ValueHint) -> Self { - self.value_hint = Some(value_hint); - self.takes_value(true) - } - - /// Deprecated, replaced with [`Arg::value_parser(...)`] - /// - /// Derive: replace `#[clap(validator = ...)]` with `#[clap(value_parser = ...)]` - /// - /// Builder: replace `arg.validator(...)` with `arg.value_parser(...)` and `matches.value_of` with - /// `matches.get_one::` or `matches.values_of` with `matches.get_many::` - #[inline] - #[must_use] - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.2.0", - note = "Replaced with `Arg::value_parser(...)` - -Derive: replace `#[clap(validator = )]` with `#[clap(value_parser = )]` - -Builder: replace `arg.validator()` with `arg.value_parser()` and `matches.value_of` with -`matches.get_one::` or `matches.values_of` with `matches.get_many::` -" - ) - )] - pub fn validator(mut self, mut f: F) -> Self - where - F: FnMut(&str) -> Result + Send + 'help, - E: Into>, - { - self.validator = Some(Arc::new(Mutex::new(move |s: &str| { - f(s).map(|_| ()).map_err(|e| e.into()) - }))); - self - } - - /// Deprecated, replaced with [`Arg::value_parser(...)`] - #[must_use] - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.2.0", - note = "Replaced with `Arg::value_parser(...)` - -Derive: replace `#[clap(validator = )]` with `#[clap(value_parser = )]` - -Builder: replace `arg.validator()` with `arg.value_parser()` and `matches.value_of_os` with -`matches.get_one::` or `matches.values_of_os` with `matches.get_many::` -" - ) - )] - pub fn validator_os(mut self, mut f: F) -> Self - where - F: FnMut(&OsStr) -> Result + Send + 'help, - E: Into>, - { - self.validator_os = Some(Arc::new(Mutex::new(move |s: &OsStr| { - f(s).map(|_| ()).map_err(|e| e.into()) - }))); + pub fn value_hint(mut self, value_hint: impl IntoResettable) -> Self { + self.value_hint = value_hint.into_resettable().into_option(); self } - /// Deprecated in [Issue #3743](https://github.com/clap-rs/clap/issues/3743), replaced with [`Arg::value_parser(...)`] - /// - /// Derive: replace `#[clap(validator_regex = ...)]` with `#[clap(value_parser = |s: &str| regex.is_match(s).then(|| s.to_owned()).ok_or_else(|| ...))]` - /// - /// Builder: replace `arg.validator_regex(...)` with `arg.value_parser(|s: &str| regex.is_match(s).then(|| s.to_owned()).ok_or_else(|| ...))` - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.2.0", - note = "Deprecated in Issue #3743; replaced with `Arg::value_parser(...)` - -Derive: replace `#[clap(validator_regex = ...)]` with `#[clap(value_parser = |s: &str| regex.is_match(s).then(|| s.to_owned()).ok_or_else(|| ...))]` - -Builder: replace `arg.validator_regex(...)` with `arg.value_parser(|s: &str| regex.is_match(s).then(|| s.to_owned()).ok_or_else(|| ...))` -" - ) - )] - #[cfg(feature = "regex")] - #[must_use] - pub fn validator_regex( - self, - regex: impl Into>, - err_message: &'help str, - ) -> Self { - let regex = regex.into(); - self.validator(move |s: &str| { - if regex.is_match(s) { - Ok(()) - } else { - Err(err_message) - } - }) - } - - /// Deprecated, replaced with [`Arg::value_parser(PossibleValuesParser::new(...))`] - /// - /// Derive: replace `#[clap(possible_value = <1>, possible_value = <2>, ...)]` with `#[clap(value_parser = [<1>, <2>])]`. 
- /// If the field is not a `String`, instead do `#[clap(value_parser = PossibleValueParser::new([<1>, <2>]).map(T::from_str))]` - /// - /// Builder: replace `arg.possible_value(<1>).possible_value(<2>) with `arg.value_parser([<1>, <2>])` - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.2.0", - note = "Replaced with `Arg::value_parser(PossibleValuesParser::new(...)).takes_value(true)` - -Derive: replace `#[clap(possible_value = <1>, possible_value = <2>, ...)]` with `#[clap(value_parser = [<1>, <2>])]`. -If the field is not a `String`, instead do `#[clap(value_parser = PossibleValueParser::new([<1>, <2>]).map(T::from_str))]` - -Builder: replace `arg.possible_value(<1>).possible_value(<2>) with `arg.value_parser([<1>, <2>])` -" - ) - )] - #[must_use] - pub fn possible_value(mut self, value: T) -> Self - where - T: Into>, - { - self.possible_vals.push(value.into()); - self.takes_value(true) - } - - /// Deprecated, replaced with [`Arg::value_parser(PossibleValuesParser::new(...))`] - /// - /// Derive: replace `#[clap(possible_values = [<1>, <2>])]` with `#[clap(value_parser = [<1>, <2>])]`. - /// If the field is not a `String`, instead do `#[clap(value_parser = PossibleValueParser::new([<1>, <2>]).map(T::from_str))]` - /// - /// Builder: replace `arg.possible_values([<1>, <2>) with `arg.value_parser([<1>, <2>])` - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.2.0", - note = "Replaced with `Arg::value_parser(PossibleValuesParser::new(...)).takes_value(true)` - -Derive: replace `#[clap(possible_values = [<1>, <2>])]` with `#[clap(value_parser = [<1>, <2>])]`. -If the field is not a `String`, instead do `#[clap(value_parser = PossibleValueParser::new([<1>, <2>]).map(T::from_str))]` - -Builder: replace `arg.possible_values([<1>, <2>) with `arg.value_parser([<1>, <2>])` -" - ) - )] - #[must_use] - pub fn possible_values(mut self, values: I) -> Self - where - I: IntoIterator, - T: Into>, - { - self.possible_vals - .extend(values.into_iter().map(|value| value.into())); - self.takes_value(true) - } - - /// Match values against [`Arg::possible_values`] without matching case. + /// Match values against [`PossibleValuesParser`][crate::builder::PossibleValuesParser] without matching case. /// /// When other arguments are conditionally required based on the /// value of a case-insensitive argument, the equality check done @@ -1696,44 +1264,44 @@ /// [`Arg::required_if_eq_all`] is case-insensitive. /// /// - /// **NOTE:** Setting this requires [`Arg::takes_value`] + /// **NOTE:** Setting this requires [taking values][Arg::num_args] /// /// **NOTE:** To do unicode case folding, enable the `unicode` feature flag. 
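The deprecated `validator`/`validator_os` callbacks map onto closure-based value parsers, which validate and convert in one step; a sketch assuming a hypothetical `--port` option not taken from the diff:

```rust
use clap::{Arg, ArgAction, Command};

let m = Command::new("prog")
    .arg(
        Arg::new("port")
            .long("port")
            .action(ArgAction::Set)
            // what clap 3 called a validator now returns the typed value itself
            .value_parser(|s: &str| -> Result<u16, String> {
                s.parse::<u16>().map_err(|e| format!("invalid port `{s}`: {e}"))
            }),
    )
    .get_matches_from(["prog", "--port", "8080"]);

assert_eq!(*m.get_one::<u16>("port").unwrap(), 8080);
```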
/// /// # Examples /// /// ```rust - /// # use clap::{Command, Arg}; + /// # use clap::{Command, Arg, ArgAction}; /// let m = Command::new("pv") /// .arg(Arg::new("option") /// .long("option") - /// .takes_value(true) + /// .action(ArgAction::Set) /// .ignore_case(true) /// .value_parser(["test123"])) /// .get_matches_from(vec![ /// "pv", "--option", "TeSt123", /// ]); /// - /// assert!(m.value_of("option").unwrap().eq_ignore_ascii_case("test123")); + /// assert!(m.get_one::("option").unwrap().eq_ignore_ascii_case("test123")); /// ``` /// /// This setting also works when multiple values can be defined: /// /// ```rust - /// # use clap::{Command, Arg}; + /// # use clap::{Command, Arg, ArgAction}; /// let m = Command::new("pv") /// .arg(Arg::new("option") /// .short('o') /// .long("option") - /// .takes_value(true) + /// .action(ArgAction::Set) /// .ignore_case(true) - /// .multiple_values(true) + /// .num_args(1..) /// .value_parser(["test123", "test321"])) /// .get_matches_from(vec![ /// "pv", "--option", "TeSt123", "teST123", "tESt321" /// ]); /// - /// let matched_vals = m.values_of("option").unwrap().collect::>(); + /// let matched_vals = m.get_many::("option").unwrap().collect::>(); /// assert_eq!(&*matched_vals, &["TeSt123", "teST123", "tESt321"]); /// ``` #[inline] @@ -1748,45 +1316,47 @@ /// Allows values which start with a leading hyphen (`-`) /// - /// **NOTE:** Setting this requires [`Arg::takes_value`] + /// To limit values to just numbers, see + /// [`allow_negative_numbers`][Arg::allow_negative_numbers]. /// - /// **WARNING**: Take caution when using this setting combined with - /// [`Arg::multiple_values`], as this becomes ambiguous `$ prog --arg -- -- val`. All - /// three `--, --, val` will be values when the user may have thought the second `--` would - /// constitute the normal, "Only positional args follow" idiom. To fix this, consider using - /// [`Arg::multiple_occurrences`] which only allows a single value at a time. + /// See also [`trailing_var_arg`][Arg::trailing_var_arg]. + /// + /// **NOTE:** Setting this requires [taking values][Arg::num_args] + /// + /// **WARNING:** Prior arguments with `allow_hyphen_values(true)` get precedence over known + /// flags but known flags get precedence over the next possible positional argument with + /// `allow_hyphen_values(true)`. When combined with [`Arg::num_args(..)`], + /// [`Arg::value_terminator`] is one way to ensure processing stops. /// - /// **WARNING**: When building your CLIs, consider the effects of allowing leading hyphens and - /// the user passing in a value that matches a valid short. For example, `prog -opt -F` where - /// `-F` is supposed to be a value, yet `-F` is *also* a valid short for another arg. - /// Care should be taken when designing these args. This is compounded by the ability to "stack" - /// short args. I.e. if `-val` is supposed to be a value, but `-v`, `-a`, and `-l` are all valid - /// shorts. + /// **WARNING**: Take caution when using this setting combined with another argument using + /// [`Arg::num_args`], as this becomes ambiguous `$ prog --arg -- -- val`. All + /// three `--, --, val` will be values when the user may have thought the second `--` would + /// constitute the normal, "Only positional args follow" idiom. 
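As the warning above notes, `Arg::value_terminator` is one way to stop a hyphen-accepting, variable-length argument; a sketch using a literal `;` terminator, with illustrative argument names:

```rust
use clap::{Arg, ArgAction, Command};

let m = Command::new("prog")
    .arg(
        Arg::new("cmds")
            .action(ArgAction::Set)
            .num_args(1..)
            .allow_hyphen_values(true)
            .value_terminator(";"),   // stop collecting values at a literal ";"
    )
    .arg(Arg::new("location"))
    .get_matches_from(["prog", "find", "-type", "f", ";", "/home"]);

let cmds: Vec<&str> = m.get_many::<String>("cmds").unwrap().map(String::as_str).collect();
assert_eq!(cmds, ["find", "-type", "f"]);
assert_eq!(m.get_one::<String>("location").unwrap(), "/home");
```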
/// /// # Examples /// /// ```rust - /// # use clap::{Command, Arg}; + /// # use clap::{Command, Arg, ArgAction}; /// let m = Command::new("prog") /// .arg(Arg::new("pat") - /// .takes_value(true) + /// .action(ArgAction::Set) /// .allow_hyphen_values(true) /// .long("pattern")) /// .get_matches_from(vec![ /// "prog", "--pattern", "-file" /// ]); /// - /// assert_eq!(m.value_of("pat"), Some("-file")); + /// assert_eq!(m.get_one::("pat").unwrap(), "-file"); /// ``` /// /// Not setting `Arg::allow_hyphen_values(true)` and supplying a value which starts with a /// hyphen is an error. /// /// ```rust - /// # use clap::{Command, Arg, ErrorKind}; + /// # use clap::{Command, Arg, error::ErrorKind, ArgAction}; /// let res = Command::new("prog") /// .arg(Arg::new("pat") - /// .takes_value(true) + /// .action(ArgAction::Set) /// .long("pattern")) /// .try_get_matches_from(vec![ /// "prog", "--pattern", "-file" @@ -1795,7 +1365,7 @@ /// assert!(res.is_err()); /// assert_eq!(res.unwrap_err().kind(), ErrorKind::UnknownArgument); /// ``` - /// [`Arg::number_of_values(1)`]: Arg::number_of_values() + /// [`Arg::num_args(1)`]: Arg::num_args() #[inline] #[must_use] pub fn allow_hyphen_values(self, yes: bool) -> Self { @@ -1806,63 +1376,32 @@ } } - /// Deprecated, replaced with [`value_parser`][Arg::value_parser] + /// Allows negative numbers to pass as values. /// - /// Derive: replace `#[clap(allow_invalid_utf8 = true)]` with `#[clap(action)]` (which opts-in to the - /// new clap v4 behavior which gets the type via `value_parser!`) + /// This is similar to [`Arg::allow_hyphen_values`] except that it only allows numbers, + /// all other undefined leading hyphens will fail to parse. /// - /// Builder: replace `arg.allow_invalid_utf8(true)` with `arg.value_parser(value_parser!(T))` where - /// `T` is the type of interest, like `OsString` or `PathBuf`, and `matches.value_of_os` with - /// `matches.get_one::` or `matches.values_of_os` with `matches.get_many::` - #[inline] - #[must_use] - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.2.0", - note = "Replaced with `value_parser` - -Derive: replace `#[clap(allow_invalid_utf8 = true)]` with `#[clap(action)]` (which opts-in to the -new clap v4 behavior which gets the type via `value_parser!`) - -Builder: replace `arg.allow_invalid_utf8(true)` with `arg.value_parser(value_parser!(T))` where -`T` is the type of interest, like `OsString` or `PathBuf`, and `matches.value_of_os` with -`matches.get_one::` or `matches.values_of_os` with `matches.get_many::` -" - ) - )] - pub fn allow_invalid_utf8(self, yes: bool) -> Self { - if yes { - self.setting(ArgSettings::AllowInvalidUtf8) - } else { - self.unset_setting(ArgSettings::AllowInvalidUtf8) - } - } - - /// Deprecated, replaced with [`Arg::value_parser(NonEmptyStringValueParser::new())`] + /// **NOTE:** Setting this requires [taking values][Arg::num_args] /// - /// Derive: replace `#[clap(forbid_empty_values = true)]` with `#[clap(value_parser = NonEmptyStringValueParser::new())]` + /// # Examples /// - /// Builder: replace `arg.forbid_empty_values(true)` with `arg.value_parser(NonEmptyStringValueParser::new())` + /// ```rust + /// # use clap::{Command, Arg}; + /// let res = Command::new("myprog") + /// .arg(Arg::new("num").allow_negative_numbers(true)) + /// .try_get_matches_from(vec![ + /// "myprog", "-20" + /// ]); + /// assert!(res.is_ok()); + /// let m = res.unwrap(); + /// assert_eq!(m.get_one::("num").unwrap(), "-20"); + /// ``` #[inline] - #[must_use] - #[cfg_attr( - feature = "deprecated", - 
deprecated( - since = "3.2.0", - note = "Replaced with `Arg::value_parser(NonEmptyStringValueParser::new())` - -Derive: replace `#[clap(forbid_empty_values = true)]` with `#[clap(value_parser = NonEmptyStringValueParser::new())]` - -Builder: replace `arg.forbid_empty_values(true)` with `arg.value_parser(NonEmptyStringValueParser::new())` -" - ) - )] - pub fn forbid_empty_values(self, yes: bool) -> Self { + pub fn allow_negative_numbers(self, yes: bool) -> Self { if yes { - self.setting(ArgSettings::ForbidEmptyValues) + self.setting(ArgSettings::AllowNegativeNumbers) } else { - self.unset_setting(ArgSettings::ForbidEmptyValues) + self.unset_setting(ArgSettings::AllowNegativeNumbers) } } @@ -1870,7 +1409,7 @@ /// /// i.e. an equals between the option and associated value. /// - /// **NOTE:** Setting this requires [`Arg::takes_value`] + /// **NOTE:** Setting this requires [taking values][Arg::num_args] /// /// # Examples /// @@ -1878,10 +1417,10 @@ /// it and the associated value. /// /// ```rust - /// # use clap::{Command, Arg}; + /// # use clap::{Command, Arg, ArgAction}; /// let res = Command::new("prog") /// .arg(Arg::new("cfg") - /// .takes_value(true) + /// .action(ArgAction::Set) /// .require_equals(true) /// .long("config")) /// .try_get_matches_from(vec![ @@ -1895,10 +1434,10 @@ /// error. /// /// ```rust - /// # use clap::{Command, Arg, ErrorKind}; + /// # use clap::{Command, Arg, error::ErrorKind, ArgAction}; /// let res = Command::new("prog") /// .arg(Arg::new("cfg") - /// .takes_value(true) + /// .action(ArgAction::Set) /// .require_equals(true) /// .long("config")) /// .try_get_matches_from(vec![ @@ -1918,237 +1457,58 @@ } } - /// Specifies that an argument should allow grouping of multiple values via a - /// delimiter. + #[doc(hidden)] + #[cfg_attr( + feature = "deprecated", + deprecated(since = "4.0.0", note = "Replaced with `Arg::value_delimiter`") + )] + pub fn use_value_delimiter(mut self, yes: bool) -> Self { + if yes { + self.val_delim.get_or_insert(','); + } else { + self.val_delim = None; + } + self + } + + /// Allow grouping of multiple values via a delimiter. /// /// i.e. should `--option=val1,val2,val3` be parsed as three values (`val1`, `val2`, /// and `val3`) or as a single value (`val1,val2,val3`). Defaults to using `,` (comma) as the /// value delimiter for all arguments that accept values (options and positional arguments) /// - /// **NOTE:** When this setting is used, it will default [`Arg::value_delimiter`] - /// to the comma `,`. - /// - /// **NOTE:** Implicitly sets [`Arg::takes_value`] + /// **NOTE:** implicitly sets [`Arg::action(ArgAction::Set)`] /// /// # Examples /// - /// The following example shows the default behavior. - /// - /// ```rust - /// # use clap::{Command, Arg}; - /// let delims = Command::new("prog") - /// .arg(Arg::new("option") - /// .long("option") - /// .use_value_delimiter(true) - /// .takes_value(true)) - /// .get_matches_from(vec![ - /// "prog", "--option=val1,val2,val3", - /// ]); - /// - /// assert!(delims.contains_id("option")); - /// assert_eq!(delims.values_of("option").unwrap().collect::>(), ["val1", "val2", "val3"]); - /// ``` - /// The next example shows the difference when turning delimiters off. 
This is the default - /// behavior - /// /// ```rust /// # use clap::{Command, Arg}; - /// let nodelims = Command::new("prog") - /// .arg(Arg::new("option") - /// .long("option") - /// .takes_value(true)) + /// let m = Command::new("prog") + /// .arg(Arg::new("config") + /// .short('c') + /// .long("config") + /// .value_delimiter(',')) /// .get_matches_from(vec![ - /// "prog", "--option=val1,val2,val3", + /// "prog", "--config=val1,val2,val3" /// ]); /// - /// assert!(nodelims.contains_id("option")); - /// assert_eq!(nodelims.value_of("option").unwrap(), "val1,val2,val3"); + /// assert_eq!(m.get_many::("config").unwrap().collect::>(), ["val1", "val2", "val3"]) /// ``` - /// [`Arg::value_delimiter`]: Arg::value_delimiter() + /// [`Arg::value_delimiter(',')`]: Arg::value_delimiter() + /// [`Arg::action(ArgAction::Set)`]: Arg::action() #[inline] #[must_use] - pub fn use_value_delimiter(mut self, yes: bool) -> Self { - if yes { - if self.val_delim.is_none() { - self.val_delim = Some(','); - } - self.takes_value(true) - .setting(ArgSettings::UseValueDelimiter) - } else { - self.val_delim = None; - self.unset_setting(ArgSettings::UseValueDelimiter) - } + pub fn value_delimiter(mut self, d: impl IntoResettable) -> Self { + self.val_delim = d.into_resettable().into_option(); + self } - /// Deprecated, replaced with [`Arg::use_value_delimiter`] + /// Sentinel to **stop** parsing multiple values of a given argument. /// - /// Derive: replace `#[clap(use_delimiter = true)]` with `#[clap(use_value_delimiter = true)]` - /// - /// Builder: replace `arg.use_delimiter(true)` with `arg.use_value_delimiter(true)` - #[inline] - #[must_use] - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.1.0", - note = "Replaced with `Arg::use_value_delimiter` - -Derive: replace `#[clap(use_delimiter = true)]` with `#[clap(use_value_delimiter = true)]` - -Builder: replace `arg.use_delimiter(true)` with `arg.use_value_delimiter(true)` -" - ) - )] - pub fn use_delimiter(self, yes: bool) -> Self { - self.use_value_delimiter(yes) - } - - /// Separator between the arguments values, defaults to `,` (comma). - /// - /// **NOTE:** implicitly sets [`Arg::use_value_delimiter(true)`] - /// - /// **NOTE:** implicitly sets [`Arg::takes_value(true)`] - /// - /// # Examples - /// - /// ```rust - /// # use clap::{Command, Arg}; - /// let m = Command::new("prog") - /// .arg(Arg::new("config") - /// .short('c') - /// .long("config") - /// .value_delimiter(';')) - /// .get_matches_from(vec![ - /// "prog", "--config=val1;val2;val3" - /// ]); - /// - /// assert_eq!(m.values_of("config").unwrap().collect::>(), ["val1", "val2", "val3"]) - /// ``` - /// [`Arg::use_value_delimiter(true)`]: Arg::use_value_delimiter() - /// [`Arg::takes_value(true)`]: Arg::takes_value() - #[inline] - #[must_use] - pub fn value_delimiter(mut self, d: char) -> Self { - self.val_delim = Some(d); - self.takes_value(true).use_value_delimiter(true) - } - - /// Specifies that *multiple values* may only be set using the delimiter. - /// - /// This means if an option is encountered, and no delimiter is found, it is assumed that no - /// additional values for that option follow. This is unlike the default, where it is generally - /// assumed that more values will follow regardless of whether or not a delimiter is used. - /// - /// **NOTE:** The default is `false`. 
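With `use_value_delimiter` and `require_value_delimiter` folded into the single `value_delimiter` setter above, splitting and typed parsing compose directly. A small sketch, assuming clap 4.0; the `--ports` option and its values are made up for illustration:

```rust
use clap::{value_parser, Arg, Command};

fn main() {
    // "--ports=80,443,8080" is split on ',' first; each piece then goes
    // through the u16 value parser.
    let m = Command::new("prog")
        .arg(
            Arg::new("ports")
                .long("ports")
                .value_delimiter(',')
                .value_parser(value_parser!(u16)),
        )
        .get_matches_from(["prog", "--ports=80,443,8080"]);

    let ports: Vec<u16> = m.get_many::<u16>("ports").unwrap().copied().collect();
    assert_eq!(ports, [80, 443, 8080]);
}
```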
- /// - /// **NOTE:** Setting this requires [`Arg::use_value_delimiter`] and - /// [`Arg::takes_value`] - /// - /// **NOTE:** It's a good idea to inform the user that use of a delimiter is required, either - /// through help text or other means. - /// - /// # Examples - /// - /// These examples demonstrate what happens when `require_delimiter(true)` is used. Notice - /// everything works in this first example, as we use a delimiter, as expected. - /// - /// ```rust - /// # use clap::{Command, Arg}; - /// let delims = Command::new("prog") - /// .arg(Arg::new("opt") - /// .short('o') - /// .takes_value(true) - /// .use_value_delimiter(true) - /// .require_delimiter(true) - /// .multiple_values(true)) - /// .get_matches_from(vec![ - /// "prog", "-o", "val1,val2,val3", - /// ]); - /// - /// assert!(delims.contains_id("opt")); - /// assert_eq!(delims.values_of("opt").unwrap().collect::>(), ["val1", "val2", "val3"]); - /// ``` - /// - /// In this next example, we will *not* use a delimiter. Notice it's now an error. - /// - /// ```rust - /// # use clap::{Command, Arg, ErrorKind}; - /// let res = Command::new("prog") - /// .arg(Arg::new("opt") - /// .short('o') - /// .takes_value(true) - /// .use_value_delimiter(true) - /// .require_delimiter(true)) - /// .try_get_matches_from(vec![ - /// "prog", "-o", "val1", "val2", "val3", - /// ]); - /// - /// assert!(res.is_err()); - /// let err = res.unwrap_err(); - /// assert_eq!(err.kind(), ErrorKind::UnknownArgument); - /// ``` - /// - /// What's happening is `-o` is getting `val1`, and because delimiters are required yet none - /// were present, it stops parsing `-o`. At this point it reaches `val2` and because no - /// positional arguments have been defined, it's an error of an unexpected argument. - /// - /// In this final example, we contrast the above with `clap`'s default behavior where the above - /// is *not* an error. - /// - /// ```rust - /// # use clap::{Command, Arg}; - /// let delims = Command::new("prog") - /// .arg(Arg::new("opt") - /// .short('o') - /// .takes_value(true) - /// .multiple_values(true)) - /// .get_matches_from(vec![ - /// "prog", "-o", "val1", "val2", "val3", - /// ]); - /// - /// assert!(delims.contains_id("opt")); - /// assert_eq!(delims.values_of("opt").unwrap().collect::>(), ["val1", "val2", "val3"]); - /// ``` - #[inline] - #[must_use] - pub fn require_value_delimiter(self, yes: bool) -> Self { - if yes { - self.setting(ArgSettings::RequireDelimiter) - } else { - self.unset_setting(ArgSettings::RequireDelimiter) - } - } - - /// Deprecated, replaced with [`Arg::require_value_delimiter`] - /// - /// Derive: replace `#[clap(require_delimiter = true)]` with `#[clap(require_value_delimiter = true)]` - /// - /// Builder: replace `arg.require_delimiter(true)` with `arg.require_value_delimiter(true)` - #[inline] - #[must_use] - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.1.0", - note = "Replaced with `Arg::require_value_delimiter` - -Derive: replace `#[clap(require_delimiter = true)]` with `#[clap(require_value_delimiter = true)]` - -Builder: replace `arg.require_delimiter(true)` with `arg.require_value_delimiter(true)` -" - ) - )] - pub fn require_delimiter(self, yes: bool) -> Self { - self.require_value_delimiter(yes) - } - - /// Sentinel to **stop** parsing multiple values of a give argument. 
- /// - /// By default when - /// one sets [`multiple_values(true)`] on an argument, clap will continue parsing values for that - /// argument until it reaches another valid argument, or one of the other more specific settings - /// for multiple values is used (such as [`min_values`], [`max_values`] or - /// [`number_of_values`]). + /// By default when + /// one sets [`num_args(1..)`] on an argument, clap will continue parsing values for that + /// argument until it reaches another valid argument, or one of the other more specific settings + /// for multiple values is used (such as [`num_args`]). /// /// **NOTE:** This setting only applies to [options] and [positional arguments] /// @@ -2158,10 +1518,10 @@ /// # Examples /// /// ```rust - /// # use clap::{Command, Arg}; + /// # use clap::{Command, Arg, ArgAction}; /// Arg::new("vals") - /// .takes_value(true) - /// .multiple_values(true) + /// .action(ArgAction::Set) + /// .num_args(1..) /// .value_terminator(";") /// # ; /// ``` @@ -2170,32 +1530,30 @@ /// to perform them /// /// ```rust - /// # use clap::{Command, Arg}; + /// # use clap::{Command, Arg, ArgAction}; /// let m = Command::new("prog") /// .arg(Arg::new("cmds") - /// .takes_value(true) - /// .multiple_values(true) + /// .action(ArgAction::Set) + /// .num_args(1..) /// .allow_hyphen_values(true) /// .value_terminator(";")) /// .arg(Arg::new("location")) /// .get_matches_from(vec![ /// "prog", "find", "-type", "f", "-name", "special", ";", "/home/clap" /// ]); - /// let cmds: Vec<_> = m.values_of("cmds").unwrap().collect(); + /// let cmds: Vec<_> = m.get_many::("cmds").unwrap().collect(); /// assert_eq!(&cmds, &["find", "-type", "f", "-name", "special"]); - /// assert_eq!(m.value_of("location"), Some("/home/clap")); + /// assert_eq!(m.get_one::("location").unwrap(), "/home/clap"); /// ``` - /// [options]: Arg::takes_value() + /// [options]: Arg::action /// [positional arguments]: Arg::index() - /// [`multiple_values(true)`]: Arg::multiple_values() - /// [`min_values`]: Arg::min_values() - /// [`number_of_values`]: Arg::number_of_values() - /// [`max_values`]: Arg::max_values() + /// [`num_args(1..)`]: Arg::num_args() + /// [`num_args`]: Arg::num_args() #[inline] #[must_use] - pub fn value_terminator(mut self, term: &'help str) -> Self { - self.terminator = Some(term); - self.takes_value(true) + pub fn value_terminator(mut self, term: impl IntoResettable) -> Self { + self.terminator = term.into_resettable().into_option(); + self } /// Consume all following arguments. @@ -2213,27 +1571,24 @@ /// may not be exactly what you are expecting and using [`crate::Command::trailing_var_arg`] /// may be more appropriate. /// - /// **NOTE:** Implicitly sets [`Arg::takes_value(true)`] [`Arg::multiple_values(true)`], + /// **NOTE:** Implicitly sets [`Arg::action(ArgAction::Set)`] [`Arg::num_args(1..)`], /// [`Arg::allow_hyphen_values(true)`], and [`Arg::last(true)`] when set to `true`. 
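As the hunk above shows, `raw(true)` is now expressed through `num_args(1..)`, `allow_hyphen_values(true)` and `last(true)` rather than `takes_value`/`multiple_values`. A sketch of the wrapper-style use case it targets, assuming clap 4.0 (the `wrapper` command and the trailing `rustc` invocation are invented):

```rust
use clap::{Arg, Command};

fn main() {
    // `raw(true)` collects everything after a bare `--`, hyphens and all.
    let m = Command::new("wrapper")
        .arg(Arg::new("cmd").raw(true))
        .get_matches_from(["wrapper", "--", "rustc", "--edition", "2021"]);

    let cmd: Vec<&str> = m
        .get_many::<String>("cmd")
        .unwrap()
        .map(String::as_str)
        .collect();
    assert_eq!(cmd, ["rustc", "--edition", "2021"]);
}
```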
/// - /// [`Arg::takes_value(true)`]: Arg::takes_value() - /// [`Arg::multiple_values(true)`]: Arg::multiple_values() + /// [`Arg::action(ArgAction::Set)`]: Arg::action() + /// [`Arg::num_args(1..)`]: Arg::num_args() /// [`Arg::allow_hyphen_values(true)`]: Arg::allow_hyphen_values() /// [`Arg::last(true)`]: Arg::last() #[inline] #[must_use] - pub fn raw(self, yes: bool) -> Self { - self.takes_value(yes) - .multiple_values(yes) - .allow_hyphen_values(yes) - .last(yes) + pub fn raw(mut self, yes: bool) -> Self { + if yes { + self.num_vals.get_or_insert_with(|| (1..).into()); + } + self.allow_hyphen_values(yes).last(yes) } /// Value for the argument when not present. /// - /// **NOTE:** If the user *does not* use this argument at runtime, [`ArgMatches::occurrences_of`] - /// will return `0` even though the [`ArgMatches::value_of`] will return the default specified. - /// /// **NOTE:** If the user *does not* use this argument at runtime [`ArgMatches::contains_id`] will /// still return `true`. If you wish to determine whether the argument was used at runtime or /// not, consider [`ArgMatches::value_source`][crate::ArgMatches::value_source]. @@ -2246,14 +1601,14 @@ /// at runtime, nor were the conditions met for `Arg::default_value_if`, the `Arg::default_value` /// will be applied. /// - /// **NOTE:** This implicitly sets [`Arg::takes_value(true)`]. + /// **NOTE:** This implicitly sets [`Arg::action(ArgAction::Set)`]. /// /// # Examples /// /// First we use the default value without providing any value at runtime. /// /// ```rust - /// # use clap::{Command, Arg, ValueSource}; + /// # use clap::{Command, Arg, parser::ValueSource}; /// let m = Command::new("prog") /// .arg(Arg::new("opt") /// .long("myopt") @@ -2262,7 +1617,7 @@ /// "prog" /// ]); /// - /// assert_eq!(m.value_of("opt"), Some("myval")); + /// assert_eq!(m.get_one::("opt").unwrap(), "myval"); /// assert!(m.contains_id("opt")); /// assert_eq!(m.value_source("opt"), Some(ValueSource::DefaultValue)); /// ``` @@ -2270,7 +1625,7 @@ /// Next we provide a value at runtime to override the default. /// /// ```rust - /// # use clap::{Command, Arg, ValueSource}; + /// # use clap::{Command, Arg, parser::ValueSource}; /// let m = Command::new("prog") /// .arg(Arg::new("opt") /// .long("myopt") @@ -2279,31 +1634,33 @@ /// "prog", "--myopt=non_default" /// ]); /// - /// assert_eq!(m.value_of("opt"), Some("non_default")); + /// assert_eq!(m.get_one::("opt").unwrap(), "non_default"); /// assert!(m.contains_id("opt")); /// assert_eq!(m.value_source("opt"), Some(ValueSource::CommandLine)); /// ``` - /// [`ArgMatches::occurrences_of`]: crate::ArgMatches::occurrences_of() - /// [`ArgMatches::value_of`]: crate::ArgMatches::value_of() - /// [`Arg::takes_value(true)`]: Arg::takes_value() + /// [`Arg::action(ArgAction::Set)`]: Arg::action() /// [`ArgMatches::contains_id`]: crate::ArgMatches::contains_id() /// [`Arg::default_value_if`]: Arg::default_value_if() #[inline] #[must_use] - pub fn default_value(self, val: &'help str) -> Self { - self.default_values_os(&[OsStr::new(val)]) + pub fn default_value(mut self, val: impl IntoResettable) -> Self { + if let Some(val) = val.into_resettable().into_option() { + self.default_values([val]) + } else { + self.default_vals.clear(); + self + } } - /// Value for the argument when not present. - /// - /// See [`Arg::default_value`]. 
- /// - /// [`Arg::default_value`]: Arg::default_value() - /// [`OsStr`]: std::ffi::OsStr #[inline] #[must_use] - pub fn default_value_os(self, val: &'help OsStr) -> Self { - self.default_values_os(&[val]) + #[doc(hidden)] + #[cfg_attr( + feature = "deprecated", + deprecated(since = "4.0.0", note = "Replaced with `Arg::default_value`") + )] + pub fn default_value_os(self, val: impl Into) -> Self { + self.default_values([val]) } /// Value for the argument when not present. @@ -2313,22 +1670,20 @@ /// [`Arg::default_value`]: Arg::default_value() #[inline] #[must_use] - pub fn default_values(self, vals: &[&'help str]) -> Self { - let vals_vec: Vec<_> = vals.iter().map(|val| OsStr::new(*val)).collect(); - self.default_values_os(&vals_vec[..]) + pub fn default_values(mut self, vals: impl IntoIterator>) -> Self { + self.default_vals = vals.into_iter().map(|s| s.into()).collect(); + self } - /// Value for the argument when not present. - /// - /// See [`Arg::default_values`]. - /// - /// [`Arg::default_values`]: Arg::default_values() - /// [`OsStr`]: std::ffi::OsStr #[inline] #[must_use] - pub fn default_values_os(mut self, vals: &[&'help OsStr]) -> Self { - self.default_vals = vals.to_vec(); - self.takes_value(true) + #[doc(hidden)] + #[cfg_attr( + feature = "deprecated", + deprecated(since = "4.0.0", note = "Replaced with `Arg::default_values`") + )] + pub fn default_values_os(self, vals: impl IntoIterator>) -> Self { + self.default_values(vals) } /// Value for the argument when the flag is present but no value is specified. @@ -2338,22 +1693,23 @@ /// argument is a common example. By, supplying an default, such as `default_missing_value("always")`, /// the user can quickly just add `--color` to the command line to produce the desired color output. /// - /// **NOTE:** using this configuration option requires the use of the `.min_values(0)` and the - /// `.require_equals(true)` configuration option. These are required in order to unambiguously - /// determine what, if any, value was supplied for the argument. + /// **NOTE:** using this configuration option requires the use of the + /// [`.num_args(0..N)`][Arg::num_args] and the + /// [`.require_equals(true)`][Arg::require_equals] configuration option. These are required in + /// order to unambiguously determine what, if any, value was supplied for the argument. /// /// # Examples /// /// For POSIX style `--color`: /// ```rust - /// # use clap::{Command, Arg, ValueSource}; - /// fn cli() -> Command<'static> { + /// # use clap::{Command, Arg, parser::ValueSource}; + /// fn cli() -> Command { /// Command::new("prog") /// .arg(Arg::new("color").long("color") /// .value_name("WHEN") /// .value_parser(["always", "auto", "never"]) /// .default_value("auto") - /// .min_values(0) + /// .num_args(0..=1) /// .require_equals(true) /// .default_missing_value("always") /// .help("Specify WHEN to colorize output.") @@ -2364,33 +1720,33 @@ /// let m = cli().get_matches_from(vec![ /// "prog" /// ]); - /// assert_eq!(m.value_of("color"), Some("auto")); + /// assert_eq!(m.get_one::("color").unwrap(), "auto"); /// assert_eq!(m.value_source("color"), Some(ValueSource::DefaultValue)); /// /// // next, we'll provide a runtime value to override the default (as usually done). 
/// let m = cli().get_matches_from(vec![ /// "prog", "--color=never" /// ]); - /// assert_eq!(m.value_of("color"), Some("never")); + /// assert_eq!(m.get_one::("color").unwrap(), "never"); /// assert_eq!(m.value_source("color"), Some(ValueSource::CommandLine)); /// /// // finally, we will use the shortcut and only provide the argument without a value. /// let m = cli().get_matches_from(vec![ /// "prog", "--color" /// ]); - /// assert_eq!(m.value_of("color"), Some("always")); + /// assert_eq!(m.get_one::("color").unwrap(), "always"); /// assert_eq!(m.value_source("color"), Some(ValueSource::CommandLine)); /// ``` /// /// For bool literals: /// ```rust - /// # use clap::{Command, Arg, ValueSource, value_parser}; - /// fn cli() -> Command<'static> { + /// # use clap::{Command, Arg, parser::ValueSource, value_parser}; + /// fn cli() -> Command { /// Command::new("prog") /// .arg(Arg::new("create").long("create") /// .value_name("BOOL") /// .value_parser(value_parser!(bool)) - /// .min_values(0) + /// .num_args(0..=1) /// .require_equals(true) /// .default_missing_value("true") /// ) @@ -2417,13 +1773,17 @@ /// assert_eq!(m.value_source("create"), Some(ValueSource::CommandLine)); /// ``` /// - /// [`ArgMatches::value_of`]: ArgMatches::value_of() - /// [`Arg::takes_value(true)`]: Arg::takes_value() + /// [`Arg::action(ArgAction::Set)`]: Arg::action() /// [`Arg::default_value`]: Arg::default_value() #[inline] #[must_use] - pub fn default_missing_value(self, val: &'help str) -> Self { - self.default_missing_values_os(&[OsStr::new(val)]) + pub fn default_missing_value(mut self, val: impl IntoResettable) -> Self { + if let Some(val) = val.into_resettable().into_option() { + self.default_missing_values_os([val]) + } else { + self.default_missing_vals.clear(); + self + } } /// Value for the argument when the flag is present but no value is specified. @@ -2434,8 +1794,8 @@ /// [`OsStr`]: std::ffi::OsStr #[inline] #[must_use] - pub fn default_missing_value_os(self, val: &'help OsStr) -> Self { - self.default_missing_values_os(&[val]) + pub fn default_missing_value_os(self, val: impl Into) -> Self { + self.default_missing_values_os([val]) } /// Value for the argument when the flag is present but no value is specified. @@ -2445,9 +1805,8 @@ /// [`Arg::default_missing_value`]: Arg::default_missing_value() #[inline] #[must_use] - pub fn default_missing_values(self, vals: &[&'help str]) -> Self { - let vals_vec: Vec<_> = vals.iter().map(|val| OsStr::new(*val)).collect(); - self.default_missing_values_os(&vals_vec[..]) + pub fn default_missing_values(self, vals: impl IntoIterator>) -> Self { + self.default_missing_values_os(vals) } /// Value for the argument when the flag is present but no value is specified. @@ -2458,9 +1817,12 @@ /// [`OsStr`]: std::ffi::OsStr #[inline] #[must_use] - pub fn default_missing_values_os(mut self, vals: &[&'help OsStr]) -> Self { - self.default_missing_vals = vals.to_vec(); - self.takes_value(true) + pub fn default_missing_values_os( + mut self, + vals: impl IntoIterator>, + ) -> Self { + self.default_missing_vals = vals.into_iter().map(|s| s.into()).collect(); + self } /// Read from `name` environment variable when argument is not present. @@ -2469,13 +1831,15 @@ /// rules will apply. /// /// If user sets the argument in the environment: - /// - When [`Arg::takes_value(true)`] is not set, the flag is considered raised. 
- /// - When [`Arg::takes_value(true)`] is set, [`ArgMatches::value_of`] will + /// - When [`Arg::action(ArgAction::Set)`] is not set, the flag is considered raised. + /// - When [`Arg::action(ArgAction::Set)`] is set, + /// [`ArgMatches::get_one`][crate::ArgMatches::get_one] will /// return value of the environment variable. /// /// If user doesn't set the argument in the environment: - /// - When [`Arg::takes_value(true)`] is not set, the flag is considered off. - /// - When [`Arg::takes_value(true)`] is set, [`ArgMatches::value_of`] will + /// - When [`Arg::action(ArgAction::Set)`] is not set, the flag is considered off. + /// - When [`Arg::action(ArgAction::Set)`] is set, + /// [`ArgMatches::get_one`][crate::ArgMatches::get_one] will /// return the default specified. /// /// # Examples @@ -2484,7 +1848,7 @@ /// /// ```rust /// # use std::env; - /// # use clap::{Command, Arg}; + /// # use clap::{Command, Arg, ArgAction}; /// /// env::set_var("MY_FLAG", "env"); /// @@ -2492,23 +1856,26 @@ /// .arg(Arg::new("flag") /// .long("flag") /// .env("MY_FLAG") - /// .takes_value(true)) + /// .action(ArgAction::Set)) /// .get_matches_from(vec![ /// "prog" /// ]); /// - /// assert_eq!(m.value_of("flag"), Some("env")); + /// assert_eq!(m.get_one::("flag").unwrap(), "env"); /// ``` /// - /// In this example, because [`Arg::takes_value(false)`] (by default), - /// `prog` is a flag that accepts an optional, case-insensitive boolean literal. - /// A `false` literal is `n`, `no`, `f`, `false`, `off` or `0`. - /// An absent environment variable will also be considered as `false`. - /// Anything else will considered as `true`. + /// In this example, because `prog` is a flag that accepts an optional, case-insensitive + /// boolean literal. + /// + /// Note that the value parser controls how flags are parsed. In this case we've selected + /// [`FalseyValueParser`][crate::builder::FalseyValueParser]. A `false` literal is `n`, `no`, + /// `f`, `false`, `off` or `0`. An absent environment variable will also be considered as + /// `false`. Anything else will considered as `true`. 
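The bullets above describe how `env` now interacts with the argument's action; environment values also run through the same typed `value_parser` pipeline as command-line values. A sketch assuming clap 4.0 with its optional `env` feature enabled (`APP_PORT` is a made-up variable name):

```rust
use clap::{value_parser, Arg, ArgAction, Command};

fn main() {
    // Illustrative only: pretend the user exported APP_PORT=8080.
    std::env::set_var("APP_PORT", "8080");

    let m = Command::new("prog")
        .arg(
            Arg::new("port")
                .long("port")
                .env("APP_PORT")
                .action(ArgAction::Set)
                .value_parser(value_parser!(u16)),
        )
        .get_matches_from(["prog"]); // no --port on the command line

    // The environment variable fills in and is parsed like any other value.
    assert_eq!(*m.get_one::<u16>("port").unwrap(), 8080);
}
```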
/// /// ```rust /// # use std::env; - /// # use clap::{Command, Arg}; + /// # use clap::{Command, Arg, ArgAction}; + /// # use clap::builder::FalseyValueParser; /// /// env::set_var("TRUE_FLAG", "true"); /// env::set_var("FALSE_FLAG", "0"); @@ -2516,28 +1883,33 @@ /// let m = Command::new("prog") /// .arg(Arg::new("true_flag") /// .long("true_flag") + /// .action(ArgAction::SetTrue) + /// .value_parser(FalseyValueParser::new()) /// .env("TRUE_FLAG")) /// .arg(Arg::new("false_flag") /// .long("false_flag") + /// .action(ArgAction::SetTrue) + /// .value_parser(FalseyValueParser::new()) /// .env("FALSE_FLAG")) /// .arg(Arg::new("absent_flag") /// .long("absent_flag") + /// .action(ArgAction::SetTrue) + /// .value_parser(FalseyValueParser::new()) /// .env("ABSENT_FLAG")) /// .get_matches_from(vec![ /// "prog" /// ]); /// - /// assert!(m.is_present("true_flag")); - /// assert_eq!(m.value_of("true_flag"), None); - /// assert!(!m.is_present("false_flag")); - /// assert!(!m.is_present("absent_flag")); + /// assert!(*m.get_one::("true_flag").unwrap()); + /// assert!(!*m.get_one::("false_flag").unwrap()); + /// assert!(!*m.get_one::("absent_flag").unwrap()); /// ``` /// /// In this example, we show the variable coming from an option on the CLI: /// /// ```rust /// # use std::env; - /// # use clap::{Command, Arg}; + /// # use clap::{Command, Arg, ArgAction}; /// /// env::set_var("MY_FLAG", "env"); /// @@ -2545,12 +1917,12 @@ /// .arg(Arg::new("flag") /// .long("flag") /// .env("MY_FLAG") - /// .takes_value(true)) + /// .action(ArgAction::Set)) /// .get_matches_from(vec![ /// "prog", "--flag", "opt" /// ]); /// - /// assert_eq!(m.value_of("flag"), Some("opt")); + /// assert_eq!(m.get_one::("flag").unwrap(), "opt"); /// ``` /// /// In this example, we show the variable coming from the environment even with the @@ -2558,7 +1930,7 @@ /// /// ```rust /// # use std::env; - /// # use clap::{Command, Arg}; + /// # use clap::{Command, Arg, ArgAction}; /// /// env::set_var("MY_FLAG", "env"); /// @@ -2566,20 +1938,20 @@ /// .arg(Arg::new("flag") /// .long("flag") /// .env("MY_FLAG") - /// .takes_value(true) + /// .action(ArgAction::Set) /// .default_value("default")) /// .get_matches_from(vec![ /// "prog" /// ]); /// - /// assert_eq!(m.value_of("flag"), Some("env")); + /// assert_eq!(m.get_one::("flag").unwrap(), "env"); /// ``` /// /// In this example, we show the use of multiple values in a single environment variable: /// /// ```rust /// # use std::env; - /// # use clap::{Command, Arg}; + /// # use clap::{Command, Arg, ArgAction}; /// /// env::set_var("MY_FLAG_MULTI", "env1,env2"); /// @@ -2587,39 +1959,43 @@ /// .arg(Arg::new("flag") /// .long("flag") /// .env("MY_FLAG_MULTI") - /// .takes_value(true) - /// .multiple_values(true) - /// .use_value_delimiter(true)) + /// .action(ArgAction::Set) + /// .num_args(1..) 
+ /// .value_delimiter(',')) /// .get_matches_from(vec![ /// "prog" /// ]); /// - /// assert_eq!(m.values_of("flag").unwrap().collect::>(), vec!["env1", "env2"]); + /// assert_eq!(m.get_many::("flag").unwrap().collect::>(), vec!["env1", "env2"]); /// ``` - /// [`ArgMatches::value_of`]: crate::ArgMatches::value_of() - /// [`Arg::takes_value(true)`]: Arg::takes_value() - /// [`Arg::use_value_delimiter(true)`]: Arg::use_value_delimiter() + /// [`Arg::action(ArgAction::Set)`]: Arg::action() + /// [`Arg::value_delimiter(',')`]: Arg::value_delimiter() #[cfg(feature = "env")] #[inline] #[must_use] - pub fn env(self, name: &'help str) -> Self { - self.env_os(OsStr::new(name)) + pub fn env(mut self, name: impl IntoResettable) -> Self { + if let Some(name) = name.into_resettable().into_option() { + let value = env::var_os(&name); + self.env = Some((name, value)); + } else { + self.env = None; + } + self } - /// Read from `name` environment variable when argument is not present. - /// - /// See [`Arg::env`]. #[cfg(feature = "env")] - #[inline] - #[must_use] - pub fn env_os(mut self, name: &'help OsStr) -> Self { - self.env = Some((name, env::var_os(name))); - self + #[doc(hidden)] + #[cfg_attr( + feature = "deprecated", + deprecated(since = "4.0.0", note = "Replaced with `Arg::env`") + )] + pub fn env_os(self, name: impl Into) -> Self { + self.env(name) } } /// # Help -impl<'help> Arg<'help> { +impl Arg { /// Sets the description of the argument for short help (`-h`). /// /// Typically, this is a short (one line) description of the arg. @@ -2637,7 +2013,8 @@ /// Setting `help` displays a short message to the side of the argument when the user passes /// `-h` or `--help` (by default). /// - /// ```rust + #[cfg_attr(not(feature = "help"), doc = " ```ignore")] + #[cfg_attr(feature = "help", doc = " ```")] /// # use clap::{Command, Arg}; /// let m = Command::new("prog") /// .arg(Arg::new("cfg") @@ -2653,10 +2030,9 @@ /// ```notrust /// helptest /// - /// USAGE: - /// helptest [OPTIONS] + /// Usage: helptest [OPTIONS] /// - /// OPTIONS: + /// Options: /// --config Some help text describing the --config arg /// -h, --help Print help information /// -V, --version Print version information @@ -2664,8 +2040,8 @@ /// [`Arg::long_help`]: Arg::long_help() #[inline] #[must_use] - pub fn help(mut self, h: impl Into>) -> Self { - self.help = h.into(); + pub fn help(mut self, h: impl IntoResettable) -> Self { + self.help = h.into_resettable().into_option(); self } @@ -2687,7 +2063,8 @@ /// Setting `help` displays a short message to the side of the argument when the user passes /// `-h` or `--help` (by default). 
/// - /// ```rust + #[cfg_attr(not(feature = "help"), doc = " ```ignore")] + #[cfg_attr(feature = "help", doc = " ```")] /// # use clap::{Command, Arg}; /// let m = Command::new("prog") /// .arg(Arg::new("cfg") @@ -2707,10 +2084,9 @@ /// ```text /// prog /// - /// USAGE: - /// prog [OPTIONS] + /// Usage: prog [OPTIONS] /// - /// OPTIONS: + /// Options: /// --config /// The config file used by the myprog must be in JSON format /// with only valid keys and may not contain other nonsense @@ -2726,8 +2102,8 @@ /// [`Arg::help`]: Arg::help() #[inline] #[must_use] - pub fn long_help(mut self, h: impl Into>) -> Self { - self.long_help = h.into(); + pub fn long_help(mut self, h: impl IntoResettable) -> Self { + self.long_help = h.into_resettable().into_option(); self } @@ -2744,20 +2120,21 @@ /// /// # Examples /// - /// ```rust - /// # use clap::{Command, Arg}; + #[cfg_attr(not(feature = "help"), doc = " ```ignore")] + #[cfg_attr(feature = "help", doc = " ```")] + /// # use clap::{Command, Arg, ArgAction}; /// let m = Command::new("prog") /// .arg(Arg::new("a") // Typically args are grouped alphabetically by name. /// // Args without a display_order have a value of 999 and are /// // displayed alphabetically with all other 999 valued args. /// .long("long-option") /// .short('o') - /// .takes_value(true) + /// .action(ArgAction::Set) /// .help("Some help and text")) /// .arg(Arg::new("b") /// .long("other-option") /// .short('O') - /// .takes_value(true) + /// .action(ArgAction::Set) /// .display_order(1) // In order to force this arg to appear *first* /// // all we have to do is give it a value lower than 999. /// // Any other args with a value of 1 will be displayed @@ -2773,10 +2150,9 @@ /// ```text /// cust-ord /// - /// USAGE: - /// cust-ord [OPTIONS] + /// Usage: cust-ord [OPTIONS] /// - /// OPTIONS: + /// Options: /// -h, --help Print help information /// -V, --version Print version information /// -O, --other-option I should be first! @@ -2786,21 +2162,18 @@ /// [index]: Arg::index() #[inline] #[must_use] - pub fn display_order(mut self, ord: usize) -> Self { - self.disp_ord.set_explicit(ord); + pub fn display_order(mut self, ord: impl IntoResettable) -> Self { + self.disp_ord = ord.into_resettable().into_option(); self } /// Override the [current] help section. 
/// - /// [current]: crate::Command::help_heading + /// [current]: crate::Command::next_help_heading #[inline] #[must_use] - pub fn help_heading(mut self, heading: O) -> Self - where - O: Into>, - { - self.help_heading = Some(heading.into()); + pub fn help_heading(mut self, heading: impl IntoResettable) -> Self { + self.help_heading = Some(heading.into_resettable().into_option()); self } @@ -2814,15 +2187,16 @@ /// /// # Examples /// - /// ```rust - /// # use clap::{Command, Arg}; + #[cfg_attr(not(feature = "help"), doc = " ```ignore")] + #[cfg_attr(feature = "help", doc = " ```")] + /// # use clap::{Command, Arg, ArgAction}; /// let m = Command::new("prog") /// .arg(Arg::new("opt") /// .long("long-option-flag") /// .short('o') - /// .takes_value(true) + /// .action(ArgAction::Set) /// .next_line_help(true) - /// .value_names(&["value1", "value2"]) + /// .value_names(["value1", "value2"]) /// .help("Some really long help and complex\n\ /// help that makes more sense to be\n\ /// on a line after the option")) @@ -2836,10 +2210,9 @@ /// ```text /// nlh /// - /// USAGE: - /// nlh [OPTIONS] + /// Usage: nlh [OPTIONS] /// - /// OPTIONS: + /// Options: /// -h, --help Print help information /// -V, --version Print version information /// -o, --long-option-flag @@ -2865,7 +2238,8 @@ /// /// Setting `Hidden` will hide the argument when displaying help text /// - /// ```rust + #[cfg_attr(not(feature = "help"), doc = " ```ignore")] + #[cfg_attr(feature = "help", doc = " ```")] /// # use clap::{Command, Arg}; /// let m = Command::new("prog") /// .arg(Arg::new("cfg") @@ -2882,10 +2256,9 @@ /// ```text /// helptest /// - /// USAGE: - /// helptest [OPTIONS] + /// Usage: helptest [OPTIONS] /// - /// OPTIONS: + /// Options: /// -h, --help Print help information /// -V, --version Print version information /// ``` @@ -2904,7 +2277,7 @@ /// This is useful for args with many values, or ones which are explained elsewhere in the /// help text. /// - /// **NOTE:** Setting this requires [`Arg::takes_value`] + /// **NOTE:** Setting this requires [taking values][Arg::num_args] /// /// To set this for all arguments, see /// [`Command::hide_possible_values`][crate::Command::hide_possible_values]. @@ -2912,12 +2285,12 @@ /// # Examples /// /// ```rust - /// # use clap::{Command, Arg}; + /// # use clap::{Command, Arg, ArgAction}; /// let m = Command::new("prog") /// .arg(Arg::new("mode") /// .long("mode") /// .value_parser(["fast", "slow"]) - /// .takes_value(true) + /// .action(ArgAction::Set) /// .hide_possible_values(true)); /// ``` /// If we were to run the above program with `--help` the `[values: fast, slow]` portion of @@ -2936,17 +2309,17 @@ /// /// This is useful when default behavior of an arg is explained elsewhere in the help text. 
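`help_heading`, seen earlier in this hunk, now takes an `IntoResettable` string; it is the hook for grouping related options under their own section of `--help`. A minimal sketch, assuming clap 4.0 with its default `help` feature (the "Network" heading and option names are invented):

```rust
use clap::{Arg, ArgAction, Command};

fn main() {
    let cmd = Command::new("prog")
        .arg(
            Arg::new("host")
                .long("host")
                .action(ArgAction::Set)
                .help("Host to connect to")
                .help_heading("Network"), // listed under a "Network" section of --help
        )
        .arg(
            Arg::new("verbose")
                .long("verbose")
                .action(ArgAction::SetTrue)
                .help("Verbose output"), // stays under the default "Options" heading
        );

    // debug_assert() validates the builder configuration; running the binary
    // with --help would show --host grouped under the custom heading.
    cmd.debug_assert();
}
```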
/// - /// **NOTE:** Setting this requires [`Arg::takes_value`] + /// **NOTE:** Setting this requires [taking values][Arg::num_args] /// /// # Examples /// /// ```rust - /// # use clap::{Command, Arg}; + /// # use clap::{Command, Arg, ArgAction}; /// let m = Command::new("connect") /// .arg(Arg::new("host") /// .long("host") /// .default_value("localhost") - /// .takes_value(true) + /// .action(ArgAction::Set) /// .hide_default_value(true)); /// /// ``` @@ -2970,12 +2343,12 @@ /// # Examples /// /// ```rust - /// # use clap::{Command, Arg}; + /// # use clap::{Command, Arg, ArgAction}; /// let m = Command::new("prog") /// .arg(Arg::new("mode") /// .long("mode") /// .env("MODE") - /// .takes_value(true) + /// .action(ArgAction::Set) /// .hide_env(true)); /// ``` /// @@ -2999,12 +2372,12 @@ /// # Examples /// /// ```rust - /// # use clap::{Command, Arg}; + /// # use clap::{Command, Arg, ArgAction}; /// let m = Command::new("connect") /// .arg(Arg::new("host") /// .long("host") /// .env("CONNECT") - /// .takes_value(true) + /// .action(ArgAction::Set) /// .hide_env_values(true)); /// /// ``` @@ -3039,7 +2412,8 @@ /// /// Setting `hide_short_help(true)` will hide the argument when displaying short help text /// - /// ```rust + #[cfg_attr(not(feature = "help"), doc = " ```ignore")] + #[cfg_attr(feature = "help", doc = " ```")] /// # use clap::{Command, Arg}; /// let m = Command::new("prog") /// .arg(Arg::new("cfg") @@ -3056,17 +2430,17 @@ /// ```text /// helptest /// - /// USAGE: - /// helptest [OPTIONS] + /// Usage: helptest [OPTIONS] /// - /// OPTIONS: + /// Options: /// -h, --help Print help information /// -V, --version Print version information /// ``` /// /// However, when --help is called /// - /// ```rust + #[cfg_attr(not(feature = "help"), doc = " ```ignore")] + #[cfg_attr(feature = "help", doc = " ```")] /// # use clap::{Command, Arg}; /// let m = Command::new("prog") /// .arg(Arg::new("cfg") @@ -3083,10 +2457,9 @@ /// ```text /// helptest /// - /// USAGE: - /// helptest [OPTIONS] + /// Usage: helptest [OPTIONS] /// - /// OPTIONS: + /// Options: /// --config Some help text describing the --config arg /// -h, --help Print help information /// -V, --version Print version information @@ -3112,7 +2485,8 @@ /// /// Setting `hide_long_help(true)` will hide the argument when displaying long help text /// - /// ```rust + #[cfg_attr(not(feature = "help"), doc = " ```ignore")] + #[cfg_attr(feature = "help", doc = " ```")] /// # use clap::{Command, Arg}; /// let m = Command::new("prog") /// .arg(Arg::new("cfg") @@ -3129,17 +2503,17 @@ /// ```text /// helptest /// - /// USAGE: - /// helptest [OPTIONS] + /// Usage: helptest [OPTIONS] /// - /// OPTIONS: + /// Options: /// -h, --help Print help information /// -V, --version Print version information /// ``` /// /// However, when -h is called /// - /// ```rust + #[cfg_attr(not(feature = "help"), doc = " ```ignore")] + #[cfg_attr(feature = "help", doc = " ```")] /// # use clap::{Command, Arg}; /// let m = Command::new("prog") /// .arg(Arg::new("cfg") @@ -3156,8 +2530,7 @@ /// ```text /// helptest /// - /// USAGE: - /// helptest [OPTIONS] + /// Usage: helptest [OPTIONS] /// /// OPTIONS: /// --config Some help text describing the --config arg @@ -3176,15 +2549,16 @@ } /// # Advanced Argument Relations -impl<'help> Arg<'help> { +impl Arg { /// The name of the [`ArgGroup`] the argument belongs to. 
/// /// # Examples /// /// ```rust - /// # use clap::{Command, Arg}; + /// # use clap::{Command, Arg, ArgAction}; /// Arg::new("debug") /// .long("debug") + /// .action(ArgAction::SetTrue) /// .group("mode") /// # ; /// ``` @@ -3193,13 +2567,15 @@ /// was one of said arguments. /// /// ```rust - /// # use clap::{Command, Arg}; + /// # use clap::{Command, Arg, ArgAction}; /// let m = Command::new("prog") /// .arg(Arg::new("debug") /// .long("debug") + /// .action(ArgAction::SetTrue) /// .group("mode")) /// .arg(Arg::new("verbose") /// .long("verbose") + /// .action(ArgAction::SetTrue) /// .group("mode")) /// .get_matches_from(vec![ /// "prog", "--debug" @@ -3209,8 +2585,12 @@ /// /// [`ArgGroup`]: crate::ArgGroup #[must_use] - pub fn group(mut self, group_id: T) -> Self { - self.groups.push(group_id.into()); + pub fn group(mut self, group_id: impl IntoResettable) -> Self { + if let Some(group_id) = group_id.into_resettable().into_option() { + self.groups.push(group_id); + } else { + self.groups.clear(); + } self } @@ -3219,10 +2599,11 @@ /// # Examples /// /// ```rust - /// # use clap::{Command, Arg}; + /// # use clap::{Command, Arg, ArgAction}; /// Arg::new("debug") /// .long("debug") - /// .groups(&["mode", "verbosity"]) + /// .action(ArgAction::SetTrue) + /// .groups(["mode", "verbosity"]) /// # ; /// ``` /// @@ -3230,14 +2611,16 @@ /// was one of said arguments. /// /// ```rust - /// # use clap::{Command, Arg}; + /// # use clap::{Command, Arg, ArgAction}; /// let m = Command::new("prog") /// .arg(Arg::new("debug") /// .long("debug") - /// .groups(&["mode", "verbosity"])) + /// .action(ArgAction::SetTrue) + /// .groups(["mode", "verbosity"])) /// .arg(Arg::new("verbose") /// .long("verbose") - /// .groups(&["mode", "verbosity"])) + /// .action(ArgAction::SetTrue) + /// .groups(["mode", "verbosity"])) /// .get_matches_from(vec![ /// "prog", "--debug" /// ]); @@ -3247,16 +2630,13 @@ /// /// [`ArgGroup`]: crate::ArgGroup #[must_use] - pub fn groups(mut self, group_ids: &[T]) -> Self { - self.groups.extend(group_ids.iter().map(Id::from)); + pub fn groups(mut self, group_ids: impl IntoIterator>) -> Self { + self.groups.extend(group_ids.into_iter().map(Into::into)); self } /// Specifies the value of the argument if `arg` has been used at runtime. /// - /// If `val` is set to `None`, `arg` only needs to be present. If `val` is set to `"some-val"` - /// then `arg` must be present at runtime **and** have the value `val`. - /// /// If `default` is set to `None`, `default_value` will be removed. /// /// **NOTE:** This setting is perfectly compatible with [`Arg::default_value`] but slightly @@ -3266,131 +2646,138 @@ /// and `Arg::default_value_if`, and the user **did not** provide this arg at runtime, nor were /// the conditions met for `Arg::default_value_if`, the `Arg::default_value` will be applied. /// - /// **NOTE:** This implicitly sets [`Arg::takes_value(true)`]. + /// **NOTE:** This implicitly sets [`Arg::action(ArgAction::Set)`]. /// /// # Examples /// /// First we use the default value only if another arg is present at runtime. 
/// /// ```rust - /// # use clap::{Command, Arg}; + /// # use clap::{Command, Arg, ArgAction}; + /// # use clap::builder::{ArgPredicate}; /// let m = Command::new("prog") /// .arg(Arg::new("flag") - /// .long("flag")) + /// .long("flag") + /// .action(ArgAction::SetTrue)) /// .arg(Arg::new("other") /// .long("other") - /// .default_value_if("flag", None, Some("default"))) + /// .default_value_if("flag", ArgPredicate::IsPresent, Some("default"))) /// .get_matches_from(vec![ /// "prog", "--flag" /// ]); /// - /// assert_eq!(m.value_of("other"), Some("default")); + /// assert_eq!(m.get_one::("other").unwrap(), "default"); /// ``` /// /// Next we run the same test, but without providing `--flag`. /// /// ```rust - /// # use clap::{Command, Arg}; + /// # use clap::{Command, Arg, ArgAction}; /// let m = Command::new("prog") /// .arg(Arg::new("flag") - /// .long("flag")) + /// .long("flag") + /// .action(ArgAction::SetTrue)) /// .arg(Arg::new("other") /// .long("other") - /// .default_value_if("flag", None, Some("default"))) + /// .default_value_if("flag", "true", Some("default"))) /// .get_matches_from(vec![ /// "prog" /// ]); /// - /// assert_eq!(m.value_of("other"), None); + /// assert_eq!(m.get_one::("other"), None); /// ``` /// /// Now lets only use the default value if `--opt` contains the value `special`. /// /// ```rust - /// # use clap::{Command, Arg}; + /// # use clap::{Command, Arg, ArgAction}; /// let m = Command::new("prog") /// .arg(Arg::new("opt") - /// .takes_value(true) + /// .action(ArgAction::Set) /// .long("opt")) /// .arg(Arg::new("other") /// .long("other") - /// .default_value_if("opt", Some("special"), Some("default"))) + /// .default_value_if("opt", "special", Some("default"))) /// .get_matches_from(vec![ /// "prog", "--opt", "special" /// ]); /// - /// assert_eq!(m.value_of("other"), Some("default")); + /// assert_eq!(m.get_one::("other").unwrap(), "default"); /// ``` /// /// We can run the same test and provide any value *other than* `special` and we won't get a /// default value. /// /// ```rust - /// # use clap::{Command, Arg}; + /// # use clap::{Command, Arg, ArgAction}; /// let m = Command::new("prog") /// .arg(Arg::new("opt") - /// .takes_value(true) + /// .action(ArgAction::Set) /// .long("opt")) /// .arg(Arg::new("other") /// .long("other") - /// .default_value_if("opt", Some("special"), Some("default"))) + /// .default_value_if("opt", "special", Some("default"))) /// .get_matches_from(vec![ /// "prog", "--opt", "hahaha" /// ]); /// - /// assert_eq!(m.value_of("other"), None); + /// assert_eq!(m.get_one::("other"), None); /// ``` /// /// If we want to unset the default value for an Arg based on the presence or /// value of some other Arg. 
/// /// ```rust - /// # use clap::{Command, Arg}; + /// # use clap::{Command, Arg, ArgAction}; /// let m = Command::new("prog") /// .arg(Arg::new("flag") - /// .long("flag")) + /// .long("flag") + /// .action(ArgAction::SetTrue)) /// .arg(Arg::new("other") /// .long("other") /// .default_value("default") - /// .default_value_if("flag", None, None)) + /// .default_value_if("flag", "true", None)) /// .get_matches_from(vec![ /// "prog", "--flag" /// ]); /// - /// assert_eq!(m.value_of("other"), None); + /// assert_eq!(m.get_one::("other"), None); /// ``` - /// [`Arg::takes_value(true)`]: Arg::takes_value() + /// [`Arg::action(ArgAction::Set)`]: Arg::action() /// [`Arg::default_value`]: Arg::default_value() #[must_use] - pub fn default_value_if( - self, - arg_id: T, - val: Option<&'help str>, - default: Option<&'help str>, + pub fn default_value_if( + mut self, + arg_id: impl Into, + predicate: impl Into, + default: impl IntoResettable, ) -> Self { - self.default_value_if_os(arg_id, val.map(OsStr::new), default.map(OsStr::new)) + self.default_vals_ifs.push(( + arg_id.into(), + predicate.into(), + default.into_resettable().into_option(), + )); + self } - /// Provides a conditional default value in the exact same manner as [`Arg::default_value_if`] - /// only using [`OsStr`]s instead. - /// - /// [`Arg::default_value_if`]: Arg::default_value_if() - /// [`OsStr`]: std::ffi::OsStr #[must_use] - pub fn default_value_if_os( - mut self, - arg_id: T, - val: Option<&'help OsStr>, - default: Option<&'help OsStr>, + #[doc(hidden)] + #[cfg_attr( + feature = "deprecated", + deprecated(since = "4.0.0", note = "Replaced with `Arg::default_value_if`") + )] + pub fn default_value_if_os( + self, + arg_id: impl Into, + predicate: impl Into, + default: impl IntoResettable, ) -> Self { - self.default_vals_ifs - .push((arg_id.into(), val.into(), default)); - self.takes_value(true) + self.default_value_if(arg_id, predicate, default) } /// Specifies multiple values and conditions in the same manner as [`Arg::default_value_if`]. /// - /// The method takes a slice of tuples in the `(arg, Option, default)` format. + /// The method takes a slice of tuples in the `(arg, predicate, default)` format. /// /// **NOTE**: The conditions are stored in order and evaluated in the same order. I.e. the first /// if multiple conditions are true, the first one found will be applied and the ultimate value. @@ -3400,96 +2787,109 @@ /// First we use the default value only if another arg is present at runtime. /// /// ```rust - /// # use clap::{Command, Arg}; + /// # use clap::{Command, Arg, ArgAction}; /// let m = Command::new("prog") /// .arg(Arg::new("flag") - /// .long("flag")) + /// .long("flag") + /// .action(ArgAction::SetTrue)) /// .arg(Arg::new("opt") /// .long("opt") - /// .takes_value(true)) + /// .action(ArgAction::Set)) /// .arg(Arg::new("other") /// .long("other") - /// .default_value_ifs(&[ - /// ("flag", None, Some("default")), - /// ("opt", Some("channal"), Some("chan")), + /// .default_value_ifs([ + /// ("flag", "true", Some("default")), + /// ("opt", "channal", Some("chan")), /// ])) /// .get_matches_from(vec![ /// "prog", "--opt", "channal" /// ]); /// - /// assert_eq!(m.value_of("other"), Some("chan")); + /// assert_eq!(m.get_one::("other").unwrap(), "chan"); /// ``` /// /// Next we run the same test, but without providing `--flag`. 
/// /// ```rust - /// # use clap::{Command, Arg}; + /// # use clap::{Command, Arg, ArgAction}; /// let m = Command::new("prog") /// .arg(Arg::new("flag") - /// .long("flag")) + /// .long("flag") + /// .action(ArgAction::SetTrue)) /// .arg(Arg::new("other") /// .long("other") - /// .default_value_ifs(&[ - /// ("flag", None, Some("default")), - /// ("opt", Some("channal"), Some("chan")), + /// .default_value_ifs([ + /// ("flag", "true", Some("default")), + /// ("opt", "channal", Some("chan")), /// ])) /// .get_matches_from(vec![ /// "prog" /// ]); /// - /// assert_eq!(m.value_of("other"), None); + /// assert_eq!(m.get_one::("other"), None); /// ``` /// /// We can also see that these values are applied in order, and if more than one condition is /// true, only the first evaluated "wins" /// /// ```rust - /// # use clap::{Command, Arg}; + /// # use clap::{Command, Arg, ArgAction}; + /// # use clap::builder::ArgPredicate; /// let m = Command::new("prog") /// .arg(Arg::new("flag") - /// .long("flag")) + /// .long("flag") + /// .action(ArgAction::SetTrue)) /// .arg(Arg::new("opt") /// .long("opt") - /// .takes_value(true)) + /// .action(ArgAction::Set)) /// .arg(Arg::new("other") /// .long("other") - /// .default_value_ifs(&[ - /// ("flag", None, Some("default")), - /// ("opt", Some("channal"), Some("chan")), + /// .default_value_ifs([ + /// ("flag", ArgPredicate::IsPresent, Some("default")), + /// ("opt", ArgPredicate::Equals("channal".into()), Some("chan")), /// ])) /// .get_matches_from(vec![ /// "prog", "--opt", "channal", "--flag" /// ]); /// - /// assert_eq!(m.value_of("other"), Some("default")); + /// assert_eq!(m.get_one::("other").unwrap(), "default"); /// ``` - /// [`Arg::takes_value(true)`]: Arg::takes_value() + /// [`Arg::action(ArgAction::Set)`]: Arg::action() /// [`Arg::default_value_if`]: Arg::default_value_if() #[must_use] - pub fn default_value_ifs( + pub fn default_value_ifs( mut self, - ifs: &[(T, Option<&'help str>, Option<&'help str>)], + ifs: impl IntoIterator< + Item = ( + impl Into, + impl Into, + impl IntoResettable, + ), + >, ) -> Self { - for (arg, val, default) in ifs { - self = self.default_value_if_os(arg, val.map(OsStr::new), default.map(OsStr::new)); + for (arg, predicate, default) in ifs { + self = self.default_value_if(arg, predicate, default); } self } - /// Provides multiple conditional default values in the exact same manner as - /// [`Arg::default_value_ifs`] only using [`OsStr`]s instead. - /// - /// [`Arg::default_value_ifs`]: Arg::default_value_ifs() - /// [`OsStr`]: std::ffi::OsStr #[must_use] - pub fn default_value_ifs_os( - mut self, - ifs: &[(T, Option<&'help OsStr>, Option<&'help OsStr>)], + #[doc(hidden)] + #[cfg_attr( + feature = "deprecated", + deprecated(since = "4.0.0", note = "Replaced with `Arg::default_value_ifs`") + )] + pub fn default_value_ifs_os( + self, + ifs: impl IntoIterator< + Item = ( + impl Into, + impl Into, + impl IntoResettable, + ), + >, ) -> Self { - for (arg, val, default) in ifs { - self = self.default_value_if_os(arg, *val, *default); - } - self + self.default_value_ifs(ifs) } /// Set this arg as [required] as long as the specified argument is not present at runtime. @@ -3510,14 +2910,15 @@ /// but it's not an error because the `unless` arg has been supplied. 
/// /// ```rust - /// # use clap::{Command, Arg}; + /// # use clap::{Command, Arg, ArgAction}; /// let res = Command::new("prog") /// .arg(Arg::new("cfg") /// .required_unless_present("dbg") - /// .takes_value(true) + /// .action(ArgAction::Set) /// .long("config")) /// .arg(Arg::new("dbg") - /// .long("debug")) + /// .long("debug") + /// .action(ArgAction::SetTrue)) /// .try_get_matches_from(vec![ /// "prog", "--debug" /// ]); @@ -3528,11 +2929,11 @@ /// Setting `Arg::required_unless_present(name)` and *not* supplying `name` or this arg is an error. /// /// ```rust - /// # use clap::{Command, Arg, ErrorKind}; + /// # use clap::{Command, Arg, error::ErrorKind, ArgAction}; /// let res = Command::new("prog") /// .arg(Arg::new("cfg") /// .required_unless_present("dbg") - /// .takes_value(true) + /// .action(ArgAction::Set) /// .long("config")) /// .arg(Arg::new("dbg") /// .long("debug")) @@ -3545,8 +2946,12 @@ /// ``` /// [required]: Arg::required() #[must_use] - pub fn required_unless_present(mut self, arg_id: T) -> Self { - self.r_unless.push(arg_id.into()); + pub fn required_unless_present(mut self, arg_id: impl IntoResettable) -> Self { + if let Some(arg_id) = arg_id.into_resettable().into_option() { + self.r_unless.push(arg_id); + } else { + self.r_unless.clear(); + } self } @@ -3564,7 +2969,7 @@ /// ```rust /// # use clap::Arg; /// Arg::new("config") - /// .required_unless_present_all(&["cfg", "dbg"]) + /// .required_unless_present_all(["cfg", "dbg"]) /// # ; /// ``` /// @@ -3572,17 +2977,18 @@ /// because *all* of the `names` args have been supplied. /// /// ```rust - /// # use clap::{Command, Arg}; + /// # use clap::{Command, Arg, ArgAction}; /// let res = Command::new("prog") /// .arg(Arg::new("cfg") - /// .required_unless_present_all(&["dbg", "infile"]) - /// .takes_value(true) + /// .required_unless_present_all(["dbg", "infile"]) + /// .action(ArgAction::Set) /// .long("config")) /// .arg(Arg::new("dbg") - /// .long("debug")) + /// .long("debug") + /// .action(ArgAction::SetTrue)) /// .arg(Arg::new("infile") /// .short('i') - /// .takes_value(true)) + /// .action(ArgAction::Set)) /// .try_get_matches_from(vec![ /// "prog", "--debug", "-i", "file" /// ]); @@ -3594,17 +3000,18 @@ /// either *all* of `unless` args or the `self` arg is an error. 
/// /// ```rust - /// # use clap::{Command, Arg, ErrorKind}; + /// # use clap::{Command, Arg, error::ErrorKind, ArgAction}; /// let res = Command::new("prog") /// .arg(Arg::new("cfg") - /// .required_unless_present_all(&["dbg", "infile"]) - /// .takes_value(true) + /// .required_unless_present_all(["dbg", "infile"]) + /// .action(ArgAction::Set) /// .long("config")) /// .arg(Arg::new("dbg") - /// .long("debug")) + /// .long("debug") + /// .action(ArgAction::SetTrue)) /// .arg(Arg::new("infile") /// .short('i') - /// .takes_value(true)) + /// .action(ArgAction::Set)) /// .try_get_matches_from(vec![ /// "prog" /// ]); @@ -3616,12 +3023,11 @@ /// [`Arg::required_unless_present_any`]: Arg::required_unless_present_any() /// [`Arg::required_unless_present_all(names)`]: Arg::required_unless_present_all() #[must_use] - pub fn required_unless_present_all(mut self, names: I) -> Self - where - I: IntoIterator, - T: Key, - { - self.r_unless_all.extend(names.into_iter().map(Id::from)); + pub fn required_unless_present_all( + mut self, + names: impl IntoIterator>, + ) -> Self { + self.r_unless_all.extend(names.into_iter().map(Into::into)); self } @@ -3639,7 +3045,7 @@ /// ```rust /// # use clap::Arg; /// Arg::new("config") - /// .required_unless_present_any(&["cfg", "dbg"]) + /// .required_unless_present_any(["cfg", "dbg"]) /// # ; /// ``` /// @@ -3649,17 +3055,18 @@ /// have been supplied. /// /// ```rust - /// # use clap::{Command, Arg}; + /// # use clap::{Command, Arg, ArgAction}; /// let res = Command::new("prog") /// .arg(Arg::new("cfg") - /// .required_unless_present_any(&["dbg", "infile"]) - /// .takes_value(true) + /// .required_unless_present_any(["dbg", "infile"]) + /// .action(ArgAction::Set) /// .long("config")) /// .arg(Arg::new("dbg") - /// .long("debug")) + /// .long("debug") + /// .action(ArgAction::SetTrue)) /// .arg(Arg::new("infile") /// .short('i') - /// .takes_value(true)) + /// .action(ArgAction::Set)) /// .try_get_matches_from(vec![ /// "prog", "--debug" /// ]); @@ -3671,17 +3078,18 @@ /// or this arg is an error. 
/// /// ```rust - /// # use clap::{Command, Arg, ErrorKind}; + /// # use clap::{Command, Arg, error::ErrorKind, ArgAction}; /// let res = Command::new("prog") /// .arg(Arg::new("cfg") - /// .required_unless_present_any(&["dbg", "infile"]) - /// .takes_value(true) + /// .required_unless_present_any(["dbg", "infile"]) + /// .action(ArgAction::Set) /// .long("config")) /// .arg(Arg::new("dbg") - /// .long("debug")) + /// .long("debug") + /// .action(ArgAction::SetTrue)) /// .arg(Arg::new("infile") /// .short('i') - /// .takes_value(true)) + /// .action(ArgAction::Set)) /// .try_get_matches_from(vec![ /// "prog" /// ]); @@ -3693,12 +3101,11 @@ /// [`Arg::required_unless_present_any(names)`]: Arg::required_unless_present_any() /// [`Arg::required_unless_present_all`]: Arg::required_unless_present_all() #[must_use] - pub fn required_unless_present_any(mut self, names: I) -> Self - where - I: IntoIterator, - T: Key, - { - self.r_unless.extend(names.into_iter().map(Id::from)); + pub fn required_unless_present_any( + mut self, + names: impl IntoIterator>, + ) -> Self { + self.r_unless.extend(names.into_iter().map(Into::into)); self } @@ -3715,15 +3122,15 @@ /// ``` /// /// ```rust - /// # use clap::{Command, Arg, ErrorKind}; + /// # use clap::{Command, Arg, error::ErrorKind, ArgAction}; /// let res = Command::new("prog") /// .arg(Arg::new("cfg") - /// .takes_value(true) + /// .action(ArgAction::Set) /// .required_if_eq("other", "special") /// .long("config")) /// .arg(Arg::new("other") /// .long("other") - /// .takes_value(true)) + /// .action(ArgAction::Set)) /// .try_get_matches_from(vec![ /// "prog", "--other", "not-special" /// ]); @@ -3732,12 +3139,12 @@ /// /// let res = Command::new("prog") /// .arg(Arg::new("cfg") - /// .takes_value(true) + /// .action(ArgAction::Set) /// .required_if_eq("other", "special") /// .long("config")) /// .arg(Arg::new("other") /// .long("other") - /// .takes_value(true)) + /// .action(ArgAction::Set)) /// .try_get_matches_from(vec![ /// "prog", "--other", "special" /// ]); @@ -3748,12 +3155,12 @@ /// /// let res = Command::new("prog") /// .arg(Arg::new("cfg") - /// .takes_value(true) + /// .action(ArgAction::Set) /// .required_if_eq("other", "special") /// .long("config")) /// .arg(Arg::new("other") /// .long("other") - /// .takes_value(true)) + /// .action(ArgAction::Set)) /// .try_get_matches_from(vec![ /// "prog", "--other", "SPECIAL" /// ]); @@ -3763,13 +3170,13 @@ /// /// let res = Command::new("prog") /// .arg(Arg::new("cfg") - /// .takes_value(true) + /// .action(ArgAction::Set) /// .required_if_eq("other", "special") /// .long("config")) /// .arg(Arg::new("other") /// .long("other") /// .ignore_case(true) - /// .takes_value(true)) + /// .action(ArgAction::Set)) /// .try_get_matches_from(vec![ /// "prog", "--other", "SPECIAL" /// ]); @@ -3782,8 +3189,8 @@ /// [Conflicting]: Arg::conflicts_with() /// [required]: Arg::required() #[must_use] - pub fn required_if_eq(mut self, arg_id: T, val: &'help str) -> Self { - self.r_ifs.push((arg_id.into(), val)); + pub fn required_if_eq(mut self, arg_id: impl Into, val: impl Into) -> Self { + self.r_ifs.push((arg_id.into(), val.into())); self } @@ -3797,32 +3204,32 @@ /// ```rust /// # use clap::Arg; /// Arg::new("config") - /// .required_if_eq_any(&[ + /// .required_if_eq_any([ /// ("extra", "val"), /// ("option", "spec") /// ]) /// # ; /// ``` /// - /// Setting `Arg::required_if_eq_any(&[(arg, val)])` makes this arg required if any of the `arg`s + /// Setting `Arg::required_if_eq_any([(arg, val)])` makes this arg 
required if any of the `arg`s /// are used at runtime and it's corresponding value is equal to `val`. If the `arg`'s value is /// anything other than `val`, this argument isn't required. /// /// ```rust - /// # use clap::{Command, Arg}; + /// # use clap::{Command, Arg, ArgAction}; /// let res = Command::new("prog") /// .arg(Arg::new("cfg") - /// .required_if_eq_any(&[ + /// .required_if_eq_any([ /// ("extra", "val"), /// ("option", "spec") /// ]) - /// .takes_value(true) + /// .action(ArgAction::Set) /// .long("config")) /// .arg(Arg::new("extra") - /// .takes_value(true) + /// .action(ArgAction::Set) /// .long("extra")) /// .arg(Arg::new("option") - /// .takes_value(true) + /// .action(ArgAction::Set) /// .long("option")) /// .try_get_matches_from(vec![ /// "prog", "--option", "other" @@ -3831,24 +3238,24 @@ /// assert!(res.is_ok()); // We didn't use --option=spec, or --extra=val so "cfg" isn't required /// ``` /// - /// Setting `Arg::required_if_eq_any(&[(arg, val)])` and having any of the `arg`s used with its + /// Setting `Arg::required_if_eq_any([(arg, val)])` and having any of the `arg`s used with its /// value of `val` but *not* using this arg is an error. /// /// ```rust - /// # use clap::{Command, Arg, ErrorKind}; + /// # use clap::{Command, Arg, error::ErrorKind, ArgAction}; /// let res = Command::new("prog") /// .arg(Arg::new("cfg") - /// .required_if_eq_any(&[ + /// .required_if_eq_any([ /// ("extra", "val"), /// ("option", "spec") /// ]) - /// .takes_value(true) + /// .action(ArgAction::Set) /// .long("config")) /// .arg(Arg::new("extra") - /// .takes_value(true) + /// .action(ArgAction::Set) /// .long("extra")) /// .arg(Arg::new("option") - /// .takes_value(true) + /// .action(ArgAction::Set) /// .long("option")) /// .try_get_matches_from(vec![ /// "prog", "--option", "spec" @@ -3861,9 +3268,12 @@ /// [Conflicting]: Arg::conflicts_with() /// [required]: Arg::required() #[must_use] - pub fn required_if_eq_any(mut self, ifs: &[(T, &'help str)]) -> Self { + pub fn required_if_eq_any( + mut self, + ifs: impl IntoIterator, impl Into)>, + ) -> Self { self.r_ifs - .extend(ifs.iter().map(|(id, val)| (Id::from_ref(id), *val))); + .extend(ifs.into_iter().map(|(id, val)| (id.into(), val.into()))); self } @@ -3877,32 +3287,32 @@ /// ```rust /// # use clap::Arg; /// Arg::new("config") - /// .required_if_eq_all(&[ + /// .required_if_eq_all([ /// ("extra", "val"), /// ("option", "spec") /// ]) /// # ; /// ``` /// - /// Setting `Arg::required_if_eq_all(&[(arg, val)])` makes this arg required if all of the `arg`s + /// Setting `Arg::required_if_eq_all([(arg, val)])` makes this arg required if all of the `arg`s /// are used at runtime and every value is equal to its corresponding `val`. If the `arg`'s value is /// anything other than `val`, this argument isn't required. 
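The `required_if_eq` signature above now accepts `impl Into<Id>` and `impl Into<OsStr>` instead of `&'help str`. A short clap 4 sketch of the behavior described in the migrated docs (names are illustrative):

```rust
use clap::{error::ErrorKind, Arg, ArgAction, Command};

fn main() {
    // "cfg" becomes required only when --other is given the exact value "special".
    let res = Command::new("prog")
        .arg(
            Arg::new("cfg")
                .long("config")
                .action(ArgAction::Set)
                .required_if_eq("other", "special"),
        )
        .arg(Arg::new("other").long("other").action(ArgAction::Set))
        .try_get_matches_from(["prog", "--other", "special"]);

    // --other=special was used without --config, so parsing fails.
    assert!(res.is_err());
    assert_eq!(res.unwrap_err().kind(), ErrorKind::MissingRequiredArgument);
}
```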
/// /// ```rust - /// # use clap::{Command, Arg}; + /// # use clap::{Command, Arg, ArgAction}; /// let res = Command::new("prog") /// .arg(Arg::new("cfg") - /// .required_if_eq_all(&[ + /// .required_if_eq_all([ /// ("extra", "val"), /// ("option", "spec") /// ]) - /// .takes_value(true) + /// .action(ArgAction::Set) /// .long("config")) /// .arg(Arg::new("extra") - /// .takes_value(true) + /// .action(ArgAction::Set) /// .long("extra")) /// .arg(Arg::new("option") - /// .takes_value(true) + /// .action(ArgAction::Set) /// .long("option")) /// .try_get_matches_from(vec![ /// "prog", "--option", "spec" @@ -3911,24 +3321,24 @@ /// assert!(res.is_ok()); // We didn't use --option=spec --extra=val so "cfg" isn't required /// ``` /// - /// Setting `Arg::required_if_eq_all(&[(arg, val)])` and having all of the `arg`s used with its + /// Setting `Arg::required_if_eq_all([(arg, val)])` and having all of the `arg`s used with its /// value of `val` but *not* using this arg is an error. /// /// ```rust - /// # use clap::{Command, Arg, ErrorKind}; + /// # use clap::{Command, Arg, error::ErrorKind, ArgAction}; /// let res = Command::new("prog") /// .arg(Arg::new("cfg") - /// .required_if_eq_all(&[ + /// .required_if_eq_all([ /// ("extra", "val"), /// ("option", "spec") /// ]) - /// .takes_value(true) + /// .action(ArgAction::Set) /// .long("config")) /// .arg(Arg::new("extra") - /// .takes_value(true) + /// .action(ArgAction::Set) /// .long("extra")) /// .arg(Arg::new("option") - /// .takes_value(true) + /// .action(ArgAction::Set) /// .long("option")) /// .try_get_matches_from(vec![ /// "prog", "--extra", "val", "--option", "spec" @@ -3939,16 +3349,19 @@ /// ``` /// [required]: Arg::required() #[must_use] - pub fn required_if_eq_all(mut self, ifs: &[(T, &'help str)]) -> Self { + pub fn required_if_eq_all( + mut self, + ifs: impl IntoIterator, impl Into)>, + ) -> Self { self.r_ifs_all - .extend(ifs.iter().map(|(id, val)| (Id::from_ref(id), *val))); + .extend(ifs.into_iter().map(|(id, val)| (id.into(), val.into()))); self } - /// Require another argument if this arg was present at runtime and its value equals to `val`. + /// Require another argument if this arg matches the [`ArgPredicate`] /// /// This method takes `value, another_arg` pair. At runtime, clap will check - /// if this arg (`self`) is present and its value equals to `val`. + /// if this arg (`self`) matches the [`ArgPredicate`]. /// If it does, `another_arg` will be marked as required. /// /// # Examples @@ -3965,10 +3378,10 @@ /// `val`, the other argument isn't required. /// /// ```rust - /// # use clap::{Command, Arg}; + /// # use clap::{Command, Arg, ArgAction}; /// let res = Command::new("prog") /// .arg(Arg::new("cfg") - /// .takes_value(true) + /// .action(ArgAction::Set) /// .requires_if("my.cfg", "other") /// .long("config")) /// .arg(Arg::new("other")) @@ -3983,10 +3396,10 @@ /// `arg` is an error. 
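`required_if_eq_any` and `required_if_eq_all` now take an iterator of `(impl Into<Id>, impl Into<OsStr>)` tuples rather than a borrowed slice. A minimal clap 4 sketch of the "all" variant from the docs above (names are illustrative):

```rust
use clap::{error::ErrorKind, Arg, ArgAction, Command};

fn main() {
    // "cfg" is required only when --extra=val *and* --option=spec are both given.
    let res = Command::new("prog")
        .arg(
            Arg::new("cfg")
                .long("config")
                .action(ArgAction::Set)
                .required_if_eq_all([("extra", "val"), ("option", "spec")]),
        )
        .arg(Arg::new("extra").long("extra").action(ArgAction::Set))
        .arg(Arg::new("option").long("option").action(ArgAction::Set))
        .try_get_matches_from(["prog", "--extra", "val", "--option", "spec"]);

    // Both conditions matched and --config is missing, so parsing fails.
    assert!(res.is_err());
    assert_eq!(res.unwrap_err().kind(), ErrorKind::MissingRequiredArgument);
}
```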
/// /// ```rust - /// # use clap::{Command, Arg, ErrorKind}; + /// # use clap::{Command, Arg, error::ErrorKind, ArgAction}; /// let res = Command::new("prog") /// .arg(Arg::new("cfg") - /// .takes_value(true) + /// .action(ArgAction::Set) /// .requires_if("my.cfg", "input") /// .long("config")) /// .arg(Arg::new("input")) @@ -4001,45 +3414,45 @@ /// [Conflicting]: Arg::conflicts_with() /// [override]: Arg::overrides_with() #[must_use] - pub fn requires_if(mut self, val: &'help str, arg_id: T) -> Self { - self.requires - .push((ArgPredicate::Equals(OsStr::new(val)), arg_id.into())); + pub fn requires_if(mut self, val: impl Into, arg_id: impl Into) -> Self { + self.requires.push((val.into(), arg_id.into())); self } /// Allows multiple conditional requirements. /// - /// The requirement will only become valid if this arg's value equals `val`. + /// The requirement will only become valid if this arg's value matches the + /// [`ArgPredicate`]. /// /// # Examples /// /// ```rust /// # use clap::Arg; /// Arg::new("config") - /// .requires_ifs(&[ + /// .requires_ifs([ /// ("val", "arg"), /// ("other_val", "arg2"), /// ]) /// # ; /// ``` /// - /// Setting `Arg::requires_ifs(&["val", "arg"])` requires that the `arg` be used at runtime if the + /// Setting `Arg::requires_ifs(["val", "arg"])` requires that the `arg` be used at runtime if the /// defining argument's value is equal to `val`. If the defining argument's value is anything other /// than `val`, `arg` isn't required. /// /// ```rust - /// # use clap::{Command, Arg, ErrorKind}; + /// # use clap::{Command, Arg, error::ErrorKind, ArgAction}; /// let res = Command::new("prog") /// .arg(Arg::new("cfg") - /// .takes_value(true) - /// .requires_ifs(&[ + /// .action(ArgAction::Set) + /// .requires_ifs([ /// ("special.conf", "opt"), /// ("other.conf", "other"), /// ]) /// .long("config")) /// .arg(Arg::new("opt") /// .long("option") - /// .takes_value(true)) + /// .action(ArgAction::Set)) /// .arg(Arg::new("other")) /// .try_get_matches_from(vec![ /// "prog", "--config", "special.conf" @@ -4048,63 +3461,19 @@ /// assert!(res.is_err()); // We used --config=special.conf so --option is required /// assert_eq!(res.unwrap_err().kind(), ErrorKind::MissingRequiredArgument); /// ``` - /// [`Arg::requires(name)`]: Arg::requires() - /// [Conflicting]: Arg::conflicts_with() - /// [override]: Arg::overrides_with() - #[must_use] - pub fn requires_ifs(mut self, ifs: &[(&'help str, T)]) -> Self { - self.requires.extend( - ifs.iter() - .map(|(val, arg)| (ArgPredicate::Equals(OsStr::new(*val)), Id::from(arg))), - ); - self - } - - /// Require these arguments names when this one is presen /// - /// i.e. when using this argument, the following arguments *must* be present. - /// - /// **NOTE:** [Conflicting] rules and [override] rules take precedence over being required - /// by default. - /// - /// # Examples + /// Setting `Arg::requires_ifs` with [`ArgPredicate::IsPresent`] and *not* supplying all the + /// arguments is an error. /// /// ```rust - /// # use clap::Arg; - /// Arg::new("config") - /// .requires_all(&["input", "output"]) - /// # ; - /// ``` - /// - /// Setting `Arg::requires_all(&[arg, arg2])` requires that all the arguments be used at - /// runtime if the defining argument is used. 
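`requires_if` above changes from taking a bare `&'help str` (wrapped internally in `ArgPredicate::Equals`) to `impl Into<ArgPredicate>`, so plain strings still work as equality predicates. A small clap 4 sketch matching the doc example (names are illustrative):

```rust
use clap::{error::ErrorKind, Arg, ArgAction, Command};

fn main() {
    // If --config is given the value "my.cfg", then "input" must also be present.
    let res = Command::new("prog")
        .arg(
            Arg::new("cfg")
                .long("config")
                .action(ArgAction::Set)
                .requires_if("my.cfg", "input"),
        )
        .arg(Arg::new("input"))
        .try_get_matches_from(["prog", "--config", "my.cfg"]);

    // The predicate matched but no "input" positional was supplied.
    assert!(res.is_err());
    assert_eq!(res.unwrap_err().kind(), ErrorKind::MissingRequiredArgument);
}
```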
If the defining argument isn't used, the other - /// argument isn't required - /// - /// ```rust - /// # use clap::{Command, Arg}; + /// # use clap::{Command, Arg, error::ErrorKind, ArgAction, builder::ArgPredicate}; /// let res = Command::new("prog") /// .arg(Arg::new("cfg") - /// .takes_value(true) - /// .requires("input") - /// .long("config")) - /// .arg(Arg::new("input")) - /// .arg(Arg::new("output")) - /// .try_get_matches_from(vec![ - /// "prog" - /// ]); - /// - /// assert!(res.is_ok()); // We didn't use cfg, so input and output weren't required - /// ``` - /// - /// Setting `Arg::requires_all(&[arg, arg2])` and *not* supplying all the arguments is an - /// error. - /// - /// ```rust - /// # use clap::{Command, Arg, ErrorKind}; - /// let res = Command::new("prog") - /// .arg(Arg::new("cfg") - /// .takes_value(true) - /// .requires_all(&["input", "output"]) + /// .action(ArgAction::Set) + /// .requires_ifs([ + /// (ArgPredicate::IsPresent, "input"), + /// (ArgPredicate::IsPresent, "output"), + /// ]) /// .long("config")) /// .arg(Arg::new("input")) /// .arg(Arg::new("output")) @@ -4116,15 +3485,29 @@ /// // We didn't use output /// assert_eq!(res.unwrap_err().kind(), ErrorKind::MissingRequiredArgument); /// ``` + /// + /// [`Arg::requires(name)`]: Arg::requires() /// [Conflicting]: Arg::conflicts_with() /// [override]: Arg::overrides_with() #[must_use] - pub fn requires_all(mut self, names: &[T]) -> Self { + pub fn requires_ifs( + mut self, + ifs: impl IntoIterator, impl Into)>, + ) -> Self { self.requires - .extend(names.iter().map(|s| (ArgPredicate::IsPresent, s.into()))); + .extend(ifs.into_iter().map(|(val, arg)| (val.into(), arg.into()))); self } + #[doc(hidden)] + #[cfg_attr( + feature = "deprecated", + deprecated(since = "4.0.0", note = "Replaced with `Arg::requires_ifs`") + )] + pub fn requires_all(self, ids: impl IntoIterator>) -> Self { + self.requires_ifs(ids.into_iter().map(|id| (ArgPredicate::IsPresent, id))) + } + /// This argument is mutually exclusive with the specified argument. /// /// **NOTE:** Conflicting rules take precedence over being required by default. Conflict rules @@ -4138,6 +3521,8 @@ /// /// **NOTE** [`Arg::exclusive(true)`] allows specifying an argument which conflicts with every other argument. /// + /// **NOTE:** All arguments implicitly conflict with themselves. + /// /// # Examples /// /// ```rust @@ -4150,14 +3535,15 @@ /// Setting conflicting argument, and having both arguments present at runtime is an error. 
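The hunk above folds the old `requires_all` into `requires_ifs` by pairing each required id with `ArgPredicate::IsPresent`; `requires_all` itself is kept only as a hidden, deprecated shim. A minimal clap 4 sketch of that replacement (names are illustrative):

```rust
use clap::{builder::ArgPredicate, error::ErrorKind, Arg, ArgAction, Command};

fn main() {
    // Equivalent of the removed `requires_all(["input", "output"])`:
    // whenever --config is present at all, "input" and "output" are required.
    let res = Command::new("prog")
        .arg(
            Arg::new("cfg")
                .long("config")
                .action(ArgAction::Set)
                .requires_ifs([
                    (ArgPredicate::IsPresent, "input"),
                    (ArgPredicate::IsPresent, "output"),
                ]),
        )
        .arg(Arg::new("input"))
        .arg(Arg::new("output"))
        .try_get_matches_from(["prog", "--config", "file.conf", "in.txt"]);

    // "output" was never supplied, so this is an error.
    assert!(res.is_err());
    assert_eq!(res.unwrap_err().kind(), ErrorKind::MissingRequiredArgument);
}
```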
/// /// ```rust - /// # use clap::{Command, Arg, ErrorKind}; + /// # use clap::{Command, Arg, error::ErrorKind, ArgAction}; /// let res = Command::new("prog") /// .arg(Arg::new("cfg") - /// .takes_value(true) + /// .action(ArgAction::Set) /// .conflicts_with("debug") /// .long("config")) /// .arg(Arg::new("debug") - /// .long("debug")) + /// .long("debug") + /// .action(ArgAction::SetTrue)) /// .try_get_matches_from(vec![ /// "prog", "--debug", "--config", "file.conf" /// ]); @@ -4169,8 +3555,12 @@ /// [`Arg::conflicts_with_all(names)`]: Arg::conflicts_with_all() /// [`Arg::exclusive(true)`]: Arg::exclusive() #[must_use] - pub fn conflicts_with(mut self, arg_id: T) -> Self { - self.blacklist.push(arg_id.into()); + pub fn conflicts_with(mut self, arg_id: impl IntoResettable) -> Self { + if let Some(arg_id) = arg_id.into_resettable().into_option() { + self.blacklist.push(arg_id); + } else { + self.blacklist.clear(); + } self } @@ -4192,7 +3582,7 @@ /// ```rust /// # use clap::Arg; /// Arg::new("config") - /// .conflicts_with_all(&["debug", "input"]) + /// .conflicts_with_all(["debug", "input"]) /// # ; /// ``` /// @@ -4200,11 +3590,11 @@ /// conflicting argument is an error. /// /// ```rust - /// # use clap::{Command, Arg, ErrorKind}; + /// # use clap::{Command, Arg, error::ErrorKind, ArgAction}; /// let res = Command::new("prog") /// .arg(Arg::new("cfg") - /// .takes_value(true) - /// .conflicts_with_all(&["debug", "input"]) + /// .action(ArgAction::Set) + /// .conflicts_with_all(["debug", "input"]) /// .long("config")) /// .arg(Arg::new("debug") /// .long("debug")) @@ -4219,8 +3609,8 @@ /// [`Arg::conflicts_with`]: Arg::conflicts_with() /// [`Arg::exclusive(true)`]: Arg::exclusive() #[must_use] - pub fn conflicts_with_all(mut self, names: &[&str]) -> Self { - self.blacklist.extend(names.iter().copied().map(Id::from)); + pub fn conflicts_with_all(mut self, names: impl IntoIterator>) -> Self { + self.blacklist.extend(names.into_iter().map(Into::into)); self } @@ -4235,12 +3625,6 @@ /// /// **NOTE:** Overriding an argument implies they [conflict][Arg::conflicts_with`]. /// - /// **WARNING:** Positional arguments and options which accept - /// [`Arg::multiple_occurrences`] cannot override themselves (or we - /// would never be able to advance to the next positional). If a positional - /// argument or option with one of the [`Arg::multiple_occurrences`] - /// settings lists itself as an override, it is simply ignored. - /// /// # Examples /// /// ```rust @@ -4255,85 +3639,18 @@ /// "prog", "-f", "-d", "-c"]); /// // ^~~~~~~~~~~~^~~~~ flag is overridden by color /// - /// assert!(m.is_present("color")); - /// assert!(m.is_present("debug")); // even though flag conflicts with debug, it's as if flag + /// assert!(*m.get_one::("color").unwrap()); + /// assert!(*m.get_one::("debug").unwrap()); // even though flag conflicts with debug, it's as if flag /// // was never used because it was overridden with color - /// assert!(!m.is_present("flag")); - /// ``` - /// Care must be taken when using this setting, and having an arg override with itself. This - /// is common practice when supporting things like shell aliases, config files, etc. - /// However, when combined with multiple values, it can get dicy. 
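`conflicts_with` now takes `impl IntoResettable<Id>` and the migrated examples read flags back with `get_one::<bool>` instead of `is_present`. A short clap 4 sketch of the conflict case shown in the docs above (names are illustrative):

```rust
use clap::{error::ErrorKind, Arg, ArgAction, Command};

fn main() {
    // --config conflicts with --debug; supplying both on one command line is an error.
    let res = Command::new("prog")
        .arg(
            Arg::new("cfg")
                .long("config")
                .action(ArgAction::Set)
                .conflicts_with("debug"),
        )
        .arg(Arg::new("debug").long("debug").action(ArgAction::SetTrue))
        .try_get_matches_from(["prog", "--debug", "--config", "file.conf"]);

    assert!(res.is_err());
    assert_eq!(res.unwrap_err().kind(), ErrorKind::ArgumentConflict);
}
```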
- /// Here is how clap handles such situations: - /// - /// When a flag overrides itself, it's as if the flag was only ever used once (essentially - /// preventing a "Unexpected multiple usage" error): - /// - /// ```rust - /// # use clap::{Command, arg}; - /// let m = Command::new("posix") - /// .arg(arg!(--flag "some flag").overrides_with("flag")) - /// .get_matches_from(vec!["posix", "--flag", "--flag"]); - /// assert!(m.is_present("flag")); - /// ``` - /// - /// Making an arg [`Arg::multiple_occurrences`] and override itself - /// is essentially meaningless. Therefore clap ignores an override of self - /// if it's a flag and it already accepts multiple occurrences. - /// - /// ``` - /// # use clap::{Command, arg}; - /// let m = Command::new("posix") - /// .arg(arg!(--flag ... "some flag").overrides_with("flag")) - /// .get_matches_from(vec!["", "--flag", "--flag", "--flag", "--flag"]); - /// assert!(m.is_present("flag")); - /// ``` - /// - /// Now notice with options (which *do not* set - /// [`Arg::multiple_occurrences`]), it's as if only the last - /// occurrence happened. - /// - /// ``` - /// # use clap::{Command, arg}; - /// let m = Command::new("posix") - /// .arg(arg!(--opt "some option").overrides_with("opt")) - /// .get_matches_from(vec!["", "--opt=some", "--opt=other"]); - /// assert!(m.is_present("opt")); - /// assert_eq!(m.value_of("opt"), Some("other")); - /// ``` - /// - /// This will also work when [`Arg::multiple_values`] is enabled: - /// - /// ``` - /// # use clap::{Command, Arg}; - /// let m = Command::new("posix") - /// .arg( - /// Arg::new("opt") - /// .long("opt") - /// .takes_value(true) - /// .multiple_values(true) - /// .overrides_with("opt") - /// ) - /// .get_matches_from(vec!["", "--opt", "1", "2", "--opt", "3", "4", "5"]); - /// assert!(m.is_present("opt")); - /// assert_eq!(m.values_of("opt").unwrap().collect::>(), &["3", "4", "5"]); - /// ``` - /// - /// Just like flags, options with [`Arg::multiple_occurrences`] set - /// will ignore the "override self" setting. - /// - /// ``` - /// # use clap::{Command, arg}; - /// let m = Command::new("posix") - /// .arg(arg!(--opt ... 
"some option") - /// .multiple_values(true) - /// .overrides_with("opt")) - /// .get_matches_from(vec!["", "--opt", "first", "over", "--opt", "other", "val"]); - /// assert!(m.is_present("opt")); - /// assert_eq!(m.values_of("opt").unwrap().collect::>(), &["first", "over", "other", "val"]); + /// assert!(!*m.get_one::("flag").unwrap()); /// ``` #[must_use] - pub fn overrides_with(mut self, arg_id: T) -> Self { - self.overrides.push(arg_id.into()); + pub fn overrides_with(mut self, arg_id: impl IntoResettable) -> Self { + if let Some(arg_id) = arg_id.into_resettable().into_option() { + self.overrides.push(arg_id); + } else { + self.overrides.clear(); + } self } @@ -4356,45 +3673,36 @@ /// .conflicts_with("color")) /// .arg(arg!(-d --debug "other flag")) /// .arg(arg!(-c --color "third flag") - /// .overrides_with_all(&["flag", "debug"])) + /// .overrides_with_all(["flag", "debug"])) /// .get_matches_from(vec![ /// "prog", "-f", "-d", "-c"]); /// // ^~~~~~^~~~~~~~~ flag and debug are overridden by color /// - /// assert!(m.is_present("color")); // even though flag conflicts with color, it's as if flag + /// assert!(*m.get_one::("color").unwrap()); // even though flag conflicts with color, it's as if flag /// // and debug were never used because they were overridden /// // with color - /// assert!(!m.is_present("debug")); - /// assert!(!m.is_present("flag")); + /// assert!(!*m.get_one::("debug").unwrap()); + /// assert!(!*m.get_one::("flag").unwrap()); /// ``` #[must_use] - pub fn overrides_with_all(mut self, names: &[T]) -> Self { - self.overrides.extend(names.iter().map(Id::from)); + pub fn overrides_with_all(mut self, names: impl IntoIterator>) -> Self { + self.overrides.extend(names.into_iter().map(Into::into)); self } } /// # Reflection -impl<'help> Arg<'help> { +impl Arg { /// Get the name of the argument #[inline] - pub fn get_id(&self) -> &'help str { - self.name - } - - /// Deprecated, replaced with [`Arg::get_id`] - #[cfg_attr( - feature = "deprecated", - deprecated(since = "3.1.0", note = "Replaced with `Arg::get_id`") - )] - pub fn get_name(&self) -> &'help str { - self.get_id() + pub fn get_id(&self) -> &Id { + &self.id } /// Get the help specified for this argument, if any #[inline] - pub fn get_help(&self) -> Option<&'help str> { - self.help + pub fn get_help(&self) -> Option<&StyledStr> { + self.help.as_ref() } /// Get the long help specified for this argument, if any @@ -4404,18 +3712,21 @@ /// ```rust /// # use clap::Arg; /// let arg = Arg::new("foo").long_help("long help"); - /// assert_eq!(Some("long help"), arg.get_long_help()); + /// assert_eq!(Some("long help".to_owned()), arg.get_long_help().map(|s| s.to_string())); /// ``` /// #[inline] - pub fn get_long_help(&self) -> Option<&'help str> { - self.long_help + pub fn get_long_help(&self) -> Option<&StyledStr> { + self.long_help.as_ref() } /// Get the help heading specified for this argument, if any #[inline] - pub fn get_help_heading(&self) -> Option<&'help str> { - self.help_heading.unwrap_or_default() + pub fn get_help_heading(&self) -> Option<&str> { + self.help_heading + .as_ref() + .map(|s| s.as_deref()) + .unwrap_or_default() } /// Get the short option name for this argument, if any @@ -4465,21 +3776,20 @@ /// Get the long option name for this argument, if any #[inline] - pub fn get_long(&self) -> Option<&'help str> { - self.long + pub fn get_long(&self) -> Option<&str> { + self.long.as_deref() } /// Get visible aliases for this argument, if any #[inline] - pub fn get_visible_aliases(&self) -> Option> { + pub fn 
get_visible_aliases(&self) -> Option> { if self.aliases.is_empty() { None } else { Some( self.aliases .iter() - .filter_map(|(s, v)| if *v { Some(s) } else { None }) - .copied() + .filter_map(|(s, v)| if *v { Some(s.as_str()) } else { None }) .collect(), ) } @@ -4487,18 +3797,18 @@ /// Get *all* aliases for this argument, if any, both visible and hidden. #[inline] - pub fn get_all_aliases(&self) -> Option> { + pub fn get_all_aliases(&self) -> Option> { if self.aliases.is_empty() { None } else { - Some(self.aliases.iter().map(|(s, _)| s).copied().collect()) + Some(self.aliases.iter().map(|(s, _)| s.as_str()).collect()) } } /// Get the long option name and its visible aliases, if any #[inline] - pub fn get_long_and_visible_aliases(&self) -> Option> { - let mut longs = match self.long { + pub fn get_long_and_visible_aliases(&self) -> Option> { + let mut longs = match self.get_long() { Some(long) => vec![long], None => return None, }; @@ -4508,30 +3818,11 @@ Some(longs) } - /// Deprecated, replaced with [`Arg::get_value_parser().possible_values()`] - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.2.0", - note = "Replaced with `Arg::get_value_parser().possible_values()`" - ) - )] - pub fn get_possible_values(&self) -> Option<&[PossibleValue<'help>]> { - if self.possible_vals.is_empty() { - None - } else { - Some(&self.possible_vals) - } - } - - pub(crate) fn get_possible_values2(&self) -> Vec> { - #![allow(deprecated)] + /// Get the names of possible values for this argument. Only useful for user + /// facing applications, such as building help messages or man files + pub fn get_possible_values(&self) -> Vec { if !self.is_takes_value_set() { vec![] - } else if let Some(pvs) = self.get_possible_values() { - // Check old first in case the user explicitly set possible values and the derive inferred - // a `ValueParser` with some. - pvs.to_vec() } else { self.get_value_parser() .possible_values() @@ -4542,7 +3833,7 @@ /// Get the names of values for this argument. #[inline] - pub fn get_value_names(&self) -> Option<&[&'help str]> { + pub fn get_value_names(&self) -> Option<&[Str]> { if self.val_names.is_empty() { None } else { @@ -4552,10 +3843,15 @@ /// Get the number of values for this argument. 
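The reflection getters above now return owned-id and parser-derived data: `get_id()` yields an `Id`, long/alias getters yield `&str`, and `get_possible_values()` is computed from the configured value parser rather than a stored list. A minimal sketch of querying an `Arg` this way in clap 4 (the argument and its values are illustrative):

```rust
use clap::{Arg, ArgAction};

fn main() {
    // Possible values are now derived from the value parser.
    let arg = Arg::new("mode")
        .long("mode")
        .action(ArgAction::Set)
        .value_parser(["fast", "slow"]);

    assert_eq!(arg.get_id().as_str(), "mode");
    assert_eq!(arg.get_long(), Some("mode"));

    let names: Vec<&str> = arg
        .get_possible_values()
        .iter()
        .map(|v| v.get_name())
        .collect();
    assert_eq!(names, ["fast", "slow"]);
}
```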
#[inline] - pub fn get_num_vals(&self) -> Option { + pub fn get_num_args(&self) -> Option { self.num_vals } + #[inline] + pub(crate) fn get_min_vals(&self) -> usize { + self.get_num_args().expect(INTERNAL_ERROR_MSG).min_values() + } + /// Get the delimiter between multiple values #[inline] pub fn get_value_delimiter(&self) -> Option { @@ -4584,15 +3880,6 @@ }) } - /// Deprecated, replaced with [`Arg::is_global_set`] - #[cfg_attr( - feature = "deprecated", - deprecated(since = "3.1.0", note = "Replaced with `Arg::is_global_set`") - )] - pub fn get_global(&self) -> bool { - self.is_global_set() - } - /// Get the environment variable name specified for this argument, if any /// /// # Examples @@ -4601,11 +3888,11 @@ /// # use std::ffi::OsStr; /// # use clap::Arg; /// let arg = Arg::new("foo").env("ENVIRONMENT"); - /// assert_eq!(Some(OsStr::new("ENVIRONMENT")), arg.get_env()); + /// assert_eq!(arg.get_env(), Some(OsStr::new("ENVIRONMENT"))); /// ``` #[cfg(feature = "env")] - pub fn get_env(&self) -> Option<&OsStr> { - self.env.as_ref().map(|x| x.0) + pub fn get_env(&self) -> Option<&std::ffi::OsStr> { + self.env.as_ref().map(|x| x.0.as_os_str()) } /// Get the default values specified for this argument, if any @@ -4615,9 +3902,9 @@ /// ```rust /// # use clap::Arg; /// let arg = Arg::new("foo").default_value("default value"); - /// assert_eq!(&["default value"], arg.get_default_values()); + /// assert_eq!(arg.get_default_values(), &["default value"]); /// ``` - pub fn get_default_values(&self) -> &[&OsStr] { + pub fn get_default_values(&self) -> &[OsStr] { &self.default_vals } @@ -4628,13 +3915,13 @@ /// ``` /// # use clap::Arg; /// let arg = Arg::new("foo"); - /// assert_eq!(true, arg.is_positional()); + /// assert_eq!(arg.is_positional(), true); /// /// let arg = Arg::new("foo").long("foo"); - /// assert_eq!(false, arg.is_positional()); + /// assert_eq!(arg.is_positional(), false); /// ``` pub fn is_positional(&self) -> bool { - self.long.is_none() && self.short.is_none() + self.get_long().is_none() && self.get_short().is_none() } /// Reports whether [`Arg::required`] is set @@ -4642,23 +3929,12 @@ self.is_set(ArgSettings::Required) } - /// Report whether [`Arg::multiple_values`] is set - pub fn is_multiple_values_set(&self) -> bool { - self.is_set(ArgSettings::MultipleValues) - } - - /// [`Arg::multiple_occurrences`] is going away ([Issue #3772](https://github.com/clap-rs/clap/issues/3772)) - #[cfg_attr( - feature = "deprecated", - deprecated(since = "3.2.0", note = "`multiple_occurrences` away (Issue #3772)") - )] - pub fn is_multiple_occurrences_set(&self) -> bool { - self.is_set(ArgSettings::MultipleOccurrences) + pub(crate) fn is_multiple_values_set(&self) -> bool { + self.get_num_args().unwrap_or_default().is_multiple() } - /// Report whether [`Arg::is_takes_value_set`] is set - pub fn is_takes_value_set(&self) -> bool { - self.is_set(ArgSettings::TakesValue) + pub(crate) fn is_takes_value_set(&self) -> bool { + self.get_action().takes_values() } /// Report whether [`Arg::allow_hyphen_values`] is set @@ -4666,27 +3942,14 @@ self.is_set(ArgSettings::AllowHyphenValues) } - /// Deprecated, replaced with [`Arg::get_value_parser()`] - #[cfg_attr( - feature = "deprecated", - deprecated(since = "3.2.0", note = "Replaced with `Arg::get_value_parser()`") - )] - pub fn is_forbid_empty_values_set(&self) -> bool { - self.is_set(ArgSettings::ForbidEmptyValues) - } - - /// Deprecated, replaced with [`Arg::get_value_parser()` - #[cfg_attr( - feature = "deprecated", - deprecated(since = "3.2.0", note = 
"Replaced with `Arg::get_value_parser()`") - )] - pub fn is_allow_invalid_utf8_set(&self) -> bool { - self.is_set(ArgSettings::AllowInvalidUtf8) + /// Report whether [`Arg::allow_negative_numbers`] is set + pub fn is_allow_negative_numbers_set(&self) -> bool { + self.is_set(ArgSettings::AllowNegativeNumbers) } /// Behavior when parsing the argument pub fn get_action(&self) -> &super::ArgAction { - const DEFAULT: super::ArgAction = super::ArgAction::StoreValue; + const DEFAULT: super::ArgAction = super::ArgAction::Set; self.action.as_ref().unwrap_or(&DEFAULT) } @@ -4708,9 +3971,6 @@ pub fn get_value_parser(&self) -> &super::ValueParser { if let Some(value_parser) = self.value_parser.as_ref() { value_parser - } else if self.is_allow_invalid_utf8_set() { - static DEFAULT: super::ValueParser = super::ValueParser::os_string(); - &DEFAULT } else { static DEFAULT: super::ValueParser = super::ValueParser::string(); &DEFAULT @@ -4764,16 +4024,6 @@ self.is_set(ArgSettings::HiddenLongHelp) } - /// Report whether [`Arg::use_value_delimiter`] is set - pub fn is_use_value_delimiter_set(&self) -> bool { - self.is_set(ArgSettings::UseValueDelimiter) - } - - /// Report whether [`Arg::require_value_delimiter`] is set - pub fn is_require_value_delimiter_set(&self) -> bool { - self.is_set(ArgSettings::RequireDelimiter) - } - /// Report whether [`Arg::require_equals`] is set pub fn is_require_equals_set(&self) -> bool { self.is_set(ArgSettings::RequireEquals) @@ -4784,6 +4034,11 @@ self.is_set(ArgSettings::Exclusive) } + /// Report whether [`Arg::trailing_var_arg`] is set + pub fn is_trailing_var_arg_set(&self) -> bool { + self.is_set(ArgSettings::TrailingVarArg) + } + /// Reports whether [`Arg::last`] is set pub fn is_last_set(&self) -> bool { self.is_set(ArgSettings::Last) @@ -4795,301 +4050,36 @@ } } -/// # Deprecated -impl<'help> Arg<'help> { - /// Deprecated, replaced with [`Arg::new`] - #[cfg_attr( - feature = "deprecated", - deprecated(since = "3.0.0", note = "Replaced with `Arg::new`") - )] - #[doc(hidden)] - pub fn with_name>(n: S) -> Self { - Self::new(n) - } - - /// Deprecated in [Issue #3087](https://github.com/clap-rs/clap/issues/3087), maybe [`clap::Parser`][crate::Parser] would fit your use case? - #[cfg(feature = "yaml")] - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.0.0", - note = "Deprecated in Issue #3087, maybe clap::Parser would fit your use case?" - ) - )] - #[doc(hidden)] - pub fn from_yaml(y: &'help Yaml) -> Self { - #![allow(deprecated)] - let yaml_file_hash = y.as_hash().expect("YAML file must be a hash"); - // We WANT this to panic on error...so expect() is good. 
- let (name_yaml, yaml) = yaml_file_hash - .iter() - .next() - .expect("There must be one arg in the YAML file"); - let name_str = name_yaml.as_str().expect("Arg name must be a string"); - let mut a = Arg::new(name_str); - - for (k, v) in yaml.as_hash().expect("Arg must be a hash") { - a = match k.as_str().expect("Arg fields must be strings") { - "short" => yaml_to_char!(a, v, short), - "long" => yaml_to_str!(a, v, long), - "aliases" => yaml_vec_or_str!(a, v, alias), - "help" => yaml_to_str!(a, v, help), - "long_help" => yaml_to_str!(a, v, long_help), - "required" => yaml_to_bool!(a, v, required), - "required_if" => yaml_tuple2!(a, v, required_if_eq), - "required_ifs" => yaml_tuple2!(a, v, required_if_eq), - "takes_value" => yaml_to_bool!(a, v, takes_value), - "index" => yaml_to_usize!(a, v, index), - "global" => yaml_to_bool!(a, v, global), - "multiple" => yaml_to_bool!(a, v, multiple), - "hidden" => yaml_to_bool!(a, v, hide), - "next_line_help" => yaml_to_bool!(a, v, next_line_help), - "group" => yaml_to_str!(a, v, group), - "number_of_values" => yaml_to_usize!(a, v, number_of_values), - "max_values" => yaml_to_usize!(a, v, max_values), - "min_values" => yaml_to_usize!(a, v, min_values), - "value_name" => yaml_to_str!(a, v, value_name), - "use_delimiter" => yaml_to_bool!(a, v, use_delimiter), - "allow_hyphen_values" => yaml_to_bool!(a, v, allow_hyphen_values), - "last" => yaml_to_bool!(a, v, last), - "require_delimiter" => yaml_to_bool!(a, v, require_delimiter), - "value_delimiter" => yaml_to_char!(a, v, value_delimiter), - "required_unless" => yaml_to_str!(a, v, required_unless_present), - "display_order" => yaml_to_usize!(a, v, display_order), - "default_value" => yaml_to_str!(a, v, default_value), - "default_value_if" => yaml_tuple3!(a, v, default_value_if), - "default_value_ifs" => yaml_tuple3!(a, v, default_value_if), - #[cfg(feature = "env")] - "env" => yaml_to_str!(a, v, env), - "value_names" => yaml_vec_or_str!(a, v, value_name), - "groups" => yaml_vec_or_str!(a, v, group), - "requires" => yaml_vec_or_str!(a, v, requires), - "requires_if" => yaml_tuple2!(a, v, requires_if), - "requires_ifs" => yaml_tuple2!(a, v, requires_if), - "conflicts_with" => yaml_vec_or_str!(a, v, conflicts_with), - "overrides_with" => yaml_to_str!(a, v, overrides_with), - "possible_values" => yaml_vec_or_str!(a, v, possible_value), - "case_insensitive" => yaml_to_bool!(a, v, ignore_case), - "required_unless_one" => yaml_vec!(a, v, required_unless_present_any), - "required_unless_all" => yaml_vec!(a, v, required_unless_present_all), - s => { - panic!( - "Unknown setting '{}' in YAML file for arg '{}'", - s, name_str - ) - } - } - } - - a - } - - /// Deprecated in [Issue #3086](https://github.com/clap-rs/clap/issues/3086), see [`arg!`][crate::arg!]. 
- #[cfg_attr( - feature = "deprecated", - deprecated(since = "3.0.0", note = "Deprecated in Issue #3086, see `clap::arg!") - )] - #[doc(hidden)] - pub fn from_usage(u: &'help str) -> Self { - UsageParser::from_usage(u).parse() - } - - /// Deprecated, replaced with [`Arg::required_unless_present`] - #[cfg_attr( - feature = "deprecated", - deprecated(since = "3.0.0", note = "Replaced with `Arg::required_unless_present`") - )] - #[doc(hidden)] - #[must_use] - pub fn required_unless(self, arg_id: T) -> Self { - self.required_unless_present(arg_id) - } - - /// Deprecated, replaced with [`Arg::required_unless_present_all`] - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.0.0", - note = "Replaced with `Arg::required_unless_present_all`" - ) - )] - #[doc(hidden)] - #[must_use] - pub fn required_unless_all(self, names: I) -> Self - where - I: IntoIterator, - T: Key, - { - self.required_unless_present_all(names) - } - - /// Deprecated, replaced with [`Arg::required_unless_present_any`] - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.0.0", - note = "Replaced with `Arg::required_unless_present_any`" - ) - )] - #[doc(hidden)] - #[must_use] - pub fn required_unless_one(self, names: I) -> Self - where - I: IntoIterator, - T: Key, - { - self.required_unless_present_any(names) - } - - /// Deprecated, replaced with [`Arg::required_if_eq`] - #[cfg_attr( - feature = "deprecated", - deprecated(since = "3.0.0", note = "Replaced with `Arg::required_if_eq`") - )] - #[doc(hidden)] - #[must_use] - pub fn required_if(self, arg_id: T, val: &'help str) -> Self { - self.required_if_eq(arg_id, val) - } - - /// Deprecated, replaced with [`Arg::required_if_eq_any`] - #[cfg_attr( - feature = "deprecated", - deprecated(since = "3.0.0", note = "Replaced with `Arg::required_if_eq_any`") - )] - #[doc(hidden)] - #[must_use] - pub fn required_ifs(self, ifs: &[(T, &'help str)]) -> Self { - self.required_if_eq_any(ifs) - } - - /// Deprecated, replaced with [`Arg::hide`] - #[cfg_attr( - feature = "deprecated", - deprecated(since = "3.0.0", note = "Replaced with `Arg::hide`") - )] - #[doc(hidden)] - #[inline] - #[must_use] - pub fn hidden(self, yes: bool) -> Self { - self.hide(yes) - } - - /// Deprecated, replaced with [`Arg::ignore_case`] - #[cfg_attr( - feature = "deprecated", - deprecated(since = "3.0.0", note = "Replaced with `Arg::ignore_case`") - )] - #[doc(hidden)] - #[inline] - #[must_use] - pub fn case_insensitive(self, yes: bool) -> Self { - self.ignore_case(yes) - } - - /// Deprecated, replaced with [`Arg::forbid_empty_values`] - #[cfg_attr( - feature = "deprecated", - deprecated(since = "3.0.0", note = "Replaced with `Arg::forbid_empty_values`") - )] - #[doc(hidden)] - #[must_use] - pub fn empty_values(self, yes: bool) -> Self { - self.forbid_empty_values(!yes) - } - - /// Deprecated, replaced with [`Arg::multiple_occurrences`] (most likely what you want) and - /// [`Arg::multiple_values`] - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.0.0", - note = "Split into `Arg::multiple_occurrences` (most likely what you want) and `Arg::multiple_values`" - ) - )] - #[doc(hidden)] - #[must_use] - pub fn multiple(self, yes: bool) -> Self { - self.multiple_occurrences(yes).multiple_values(yes) - } - - /// Deprecated, replaced with [`Arg::hide_short_help`] - #[cfg_attr( - feature = "deprecated", - deprecated(since = "3.0.0", note = "Replaced with `Arg::hide_short_help`") - )] - #[doc(hidden)] - #[inline] - #[must_use] - pub fn hidden_short_help(self, yes: bool) -> Self { - 
self.hide_short_help(yes) - } - - /// Deprecated, replaced with [`Arg::hide_long_help`] - #[cfg_attr( - feature = "deprecated", - deprecated(since = "3.0.0", note = "Replaced with `Arg::hide_long_help`") - )] - #[doc(hidden)] - #[inline] - #[must_use] - pub fn hidden_long_help(self, yes: bool) -> Self { - self.hide_long_help(yes) - } - - /// Deprecated, replaced with [`Arg::setting`] - #[cfg_attr( - feature = "deprecated", - deprecated(since = "3.0.0", note = "Replaced with `Arg::setting`") - )] - #[doc(hidden)] - #[must_use] - pub fn set(self, s: ArgSettings) -> Self { - self.setting(s) - } - - /// Deprecated, replaced with [`Arg::unset_setting`] - #[cfg_attr( - feature = "deprecated", - deprecated(since = "3.0.0", note = "Replaced with `Arg::unset_setting`") - )] - #[doc(hidden)] - #[must_use] - pub fn unset(self, s: ArgSettings) -> Self { - self.unset_setting(s) - } -} - /// # Internally used only -impl<'help> Arg<'help> { +impl Arg { pub(crate) fn _build(&mut self) { - if self.is_positional() { - self.settings.set(ArgSettings::TakesValue); + if self.action.is_none() { + if self.num_vals == Some(ValueRange::EMPTY) { + let action = super::ArgAction::SetTrue; + self.action = Some(action); + } else { + let action = + if self.is_positional() && self.num_vals.unwrap_or_default().is_unbounded() { + // Allow collecting arguments interleaved with flags + // + // Bounded values are probably a group and the user should explicitly opt-in to + // Append + super::ArgAction::Append + } else { + super::ArgAction::Set + }; + self.action = Some(action); + } } if let Some(action) = self.action.as_ref() { if let Some(default_value) = action.default_value() { if self.default_vals.is_empty() { - self.default_vals = vec![default_value]; + self.default_vals = vec![default_value.into()]; } } - if action.takes_values() { - self.settings.set(ArgSettings::TakesValue); - } else { - self.settings.unset(ArgSettings::TakesValue); - } - match action { - ArgAction::StoreValue - | ArgAction::IncOccurrence - | ArgAction::Help - | ArgAction::Version => {} - ArgAction::Set - | ArgAction::Append - | ArgAction::SetTrue - | ArgAction::SetFalse - | ArgAction::Count => { - if !self.is_positional() { - self.settings.set(ArgSettings::MultipleOccurrences); - } + if let Some(default_value) = action.default_missing_value() { + if self.default_missing_vals.is_empty() { + self.default_missing_vals = vec![default_value.into()]; } } } @@ -5097,170 +4087,191 @@ if self.value_parser.is_none() { if let Some(default) = self.action.as_ref().and_then(|a| a.default_value_parser()) { self.value_parser = Some(default); - } else if self.is_allow_invalid_utf8_set() { - self.value_parser = Some(super::ValueParser::os_string()); } else { self.value_parser = Some(super::ValueParser::string()); } } - if (self.is_use_value_delimiter_set() || self.is_require_value_delimiter_set()) - && self.val_delim.is_none() - { - self.val_delim = Some(','); - } - let val_names_len = self.val_names.len(); - if val_names_len > 1 { - self.settings.set(ArgSettings::MultipleValues); - - if self.num_vals.is_none() { - self.num_vals = Some(val_names_len); - } + self.num_vals.get_or_insert(val_names_len.into()); + } else { + let nargs = if self.get_action().takes_values() { + ValueRange::SINGLE + } else { + ValueRange::EMPTY + }; + self.num_vals.get_or_insert(nargs); } + } + + // Used for positionals when printing + pub(crate) fn name_no_brackets(&self) -> String { + debug!("Arg::name_no_brackets:{}", self.get_id()); + let delim = " "; + if !self.val_names.is_empty() { + 
debug!("Arg::name_no_brackets: val_names={:#?}", self.val_names); - let self_id = self.id.clone(); - if self.is_positional() || self.is_multiple_occurrences_set() { - // Remove self-overrides where they don't make sense. - // - // We can evaluate switching this to a debug assert at a later time (though it will - // require changing propagation of `AllArgsOverrideSelf`). Being conservative for now - // due to where we are at in the release. - self.overrides.retain(|e| *e != self_id); + if self.val_names.len() > 1 { + self.val_names + .iter() + .map(|n| format!("<{}>", n)) + .collect::>() + .join(delim) + } else { + self.val_names + .first() + .expect(INTERNAL_ERROR_MSG) + .as_str() + .to_owned() + } + } else { + debug!("Arg::name_no_brackets: just name"); + self.get_id().as_str().to_owned() } } - pub(crate) fn generated(mut self) -> Self { - self.provider = ArgProvider::Generated; - self + pub(crate) fn stylized(&self, required: Option) -> StyledStr { + let mut styled = StyledStr::new(); + // Write the name such --long or -l + if let Some(l) = self.get_long() { + styled.literal("--"); + styled.literal(l); + } else if let Some(s) = self.get_short() { + styled.literal("-"); + styled.literal(s); + } + styled.extend(self.stylize_arg_suffix(required).into_iter()); + styled } - pub(crate) fn longest_filter(&self) -> bool { - self.is_takes_value_set() || self.long.is_some() || self.short.is_none() - } + pub(crate) fn stylize_arg_suffix(&self, required: Option) -> StyledStr { + let mut styled = StyledStr::new(); - // Used for positionals when printing - pub(crate) fn multiple_str(&self) -> &str { - let mult_vals = self.val_names.len() > 1; - if (self.is_multiple_values_set() || self.is_multiple_occurrences_set()) && !mult_vals { - "..." - } else { - "" + let mut need_closing_bracket = false; + if self.is_takes_value_set() && !self.is_positional() { + let is_optional_val = self.get_min_vals() == 0; + if self.is_require_equals_set() { + if is_optional_val { + need_closing_bracket = true; + styled.placeholder("[="); + } else { + styled.literal("="); + } + } else if is_optional_val { + need_closing_bracket = true; + styled.placeholder(" ["); + } else { + styled.placeholder(" "); + } } + if self.is_takes_value_set() || self.is_positional() { + let required = required.unwrap_or_else(|| self.is_required_set()); + let arg_val = self.render_arg_val(required); + styled.placeholder(arg_val); + } else if matches!(*self.get_action(), ArgAction::Count) { + styled.placeholder("..."); + } + if need_closing_bracket { + styled.placeholder("]"); + } + + styled } - // Used for positionals when printing - pub(crate) fn name_no_brackets(&self) -> Cow { - debug!("Arg::name_no_brackets:{}", self.name); - let delim = if self.is_require_value_delimiter_set() { - self.val_delim.expect(INTERNAL_ERROR_MSG) + /// Write the values such as + fn render_arg_val(&self, required: bool) -> String { + let mut rendered = String::new(); + + let num_vals = self.get_num_args().expect(INTERNAL_ERROR_MSG); + + let mut val_names = if self.val_names.is_empty() { + vec![self.id.as_internal_str().to_owned()] } else { - ' ' + self.val_names.clone() + }; + if val_names.len() == 1 { + let min = num_vals.min_values().max(1); + let val_name = val_names.pop().unwrap(); + val_names = vec![val_name; min]; } - .to_string(); - if !self.val_names.is_empty() { - debug!("Arg::name_no_brackets: val_names={:#?}", self.val_names); - if self.val_names.len() > 1 { - Cow::Owned( - self.val_names - .iter() - .map(|n| format!("<{}>", n)) - .collect::>() - 
.join(&*delim), - ) + debug_assert!(self.is_takes_value_set()); + for (n, val_name) in val_names.iter().enumerate() { + let arg_name = if self.is_positional() && (num_vals.min_values() == 0 || !required) { + format!("[{}]", val_name) } else { - Cow::Borrowed(self.val_names.get(0).expect(INTERNAL_ERROR_MSG)) + format!("<{}>", val_name) + }; + + if n != 0 { + rendered.push(' '); } - } else { - debug!("Arg::name_no_brackets: just name"); - Cow::Borrowed(self.name) + rendered.push_str(&arg_name); + } + + let mut extra_values = false; + extra_values |= val_names.len() < num_vals.max_values(); + if self.is_positional() && matches!(*self.get_action(), ArgAction::Append) { + extra_values = true; } + if extra_values { + rendered.push_str("..."); + } + + rendered } /// Either multiple values or occurrences pub(crate) fn is_multiple(&self) -> bool { - self.is_multiple_values_set() | self.is_multiple_occurrences_set() + self.is_multiple_values_set() || matches!(*self.get_action(), ArgAction::Append) } + #[cfg(feature = "help")] pub(crate) fn get_display_order(&self) -> usize { - self.disp_ord.get_explicit() + self.disp_ord.unwrap_or(999) } } -impl<'help> From<&'_ Arg<'help>> for Arg<'help> { - fn from(a: &Arg<'help>) -> Self { +impl From<&'_ Arg> for Arg { + fn from(a: &Arg) -> Self { a.clone() } } -impl<'help> PartialEq for Arg<'help> { - fn eq(&self, other: &Arg<'help>) -> bool { - self.name == other.name +impl PartialEq for Arg { + fn eq(&self, other: &Arg) -> bool { + self.get_id() == other.get_id() } } -impl<'help> PartialOrd for Arg<'help> { +impl PartialOrd for Arg { fn partial_cmp(&self, other: &Self) -> Option { Some(self.cmp(other)) } } -impl<'help> Ord for Arg<'help> { +impl Ord for Arg { fn cmp(&self, other: &Arg) -> Ordering { - self.name.cmp(other.name) + self.get_id().cmp(other.get_id()) } } -impl<'help> Eq for Arg<'help> {} +impl Eq for Arg {} -impl<'help> Display for Arg<'help> { +impl Display for Arg { fn fmt(&self, f: &mut Formatter) -> fmt::Result { - // Write the name such --long or -l - if let Some(l) = self.long { - write!(f, "--{}", l)?; - } else if let Some(s) = self.short { - write!(f, "-{}", s)?; - } - let mut need_closing_bracket = false; - if !self.is_positional() && self.is_takes_value_set() { - let is_optional_val = self.min_vals == Some(0); - let sep = if self.is_require_equals_set() { - if is_optional_val { - need_closing_bracket = true; - "[=" - } else { - "=" - } - } else if is_optional_val { - need_closing_bracket = true; - " [" - } else { - " " - }; - f.write_str(sep)?; - } - if self.is_takes_value_set() || self.is_positional() { - display_arg_val(self, |s, _| f.write_str(s))?; - } - if need_closing_bracket { - f.write_str("]")?; - } - - Ok(()) + self.stylized(None).fmt(f) } } -impl<'help> fmt::Debug for Arg<'help> { +impl fmt::Debug for Arg { fn fmt(&self, f: &mut Formatter) -> Result<(), fmt::Error> { let mut ds = f.debug_struct("Arg"); #[allow(unused_mut)] let mut ds = ds .field("id", &self.id) - .field("provider", &self.provider) - .field("name", &self.name) .field("help", &self.help) .field("long_help", &self.long_help) .field("action", &self.action) @@ -5277,19 +4288,8 @@ .field("aliases", &self.aliases) .field("short_aliases", &self.short_aliases) .field("disp_ord", &self.disp_ord) - .field("possible_vals", &self.possible_vals) .field("val_names", &self.val_names) .field("num_vals", &self.num_vals) - .field("max_vals", &self.max_vals) - .field("min_vals", &self.min_vals) - .field( - "validator", - &self.validator.as_ref().map_or("None", |_| "Some(FnMut)"), - 
) - .field( - "validator_os", - &self.validator_os.as_ref().map_or("None", |_| "Some(FnMut)"), - ) .field("val_delim", &self.val_delim) .field("default_vals", &self.default_vals) .field("default_vals_ifs", &self.default_vals_ifs) @@ -5308,183 +4308,76 @@ } } -type Validator<'a> = dyn FnMut(&str) -> Result<(), Box> + Send + 'a; -type ValidatorOs<'a> = dyn FnMut(&OsStr) -> Result<(), Box> + Send + 'a; - -#[derive(Debug, Clone, Eq, PartialEq)] -pub(crate) enum ArgProvider { - Generated, - GeneratedMutated, - User, -} - -impl Default for ArgProvider { - fn default() -> Self { - ArgProvider::User - } -} - -/// Write the values such as -pub(crate) fn display_arg_val(arg: &Arg, mut write: F) -> Result<(), E> -where - F: FnMut(&str, bool) -> Result, -{ - let mult_val = arg.is_multiple_values_set(); - let mult_occ = arg.is_multiple_occurrences_set(); - let delim = if arg.is_require_value_delimiter_set() { - arg.val_delim.expect(INTERNAL_ERROR_MSG) - } else { - ' ' - } - .to_string(); - if !arg.val_names.is_empty() { - // If have val_name. - match (arg.val_names.len(), arg.num_vals) { - (1, Some(num_vals)) => { - // If single value name with multiple num_of_vals, display all - // the values with the single value name. - let arg_name = format!("<{}>", arg.val_names.get(0).unwrap()); - for n in 1..=num_vals { - write(&arg_name, true)?; - if n != num_vals { - write(&delim, false)?; - } - } - } - (num_val_names, _) => { - // If multiple value names, display them sequentially(ignore num of vals). - let mut it = arg.val_names.iter().peekable(); - while let Some(val) = it.next() { - write(&format!("<{}>", val), true)?; - if it.peek().is_some() { - write(&delim, false)?; - } - } - if (num_val_names == 1 && mult_val) - || (arg.is_positional() && mult_occ) - || num_val_names < arg.num_vals.unwrap_or(0) - { - write("...", true)?; - } - } - } - } else if let Some(num_vals) = arg.num_vals { - // If number_of_values is specified, display the value multiple times. - let arg_name = format!("<{}>", arg.name); - for n in 1..=num_vals { - write(&arg_name, true)?; - if n != num_vals { - write(&delim, false)?; - } - } - } else if arg.is_positional() { - // Value of positional argument with no num_vals and val_names. - write(&format!("<{}>", arg.name), true)?; - - if mult_val || mult_occ { - write("...", true)?; - } - } else { - // value of flag argument with no num_vals and val_names. 
- write(&format!("<{}>", arg.name), true)?; - if mult_val { - write("...", true)?; - } - } - Ok(()) -} - -#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)] -pub(crate) enum DisplayOrder { - None, - Implicit(usize), - Explicit(usize), -} - -impl DisplayOrder { - pub(crate) fn set_explicit(&mut self, explicit: usize) { - *self = Self::Explicit(explicit) - } - - pub(crate) fn set_implicit(&mut self, implicit: usize) { - *self = (*self).max(Self::Implicit(implicit)) - } - - pub(crate) fn make_explicit(&mut self) { - match *self { - Self::None | Self::Explicit(_) => {} - Self::Implicit(disp) => self.set_explicit(disp), - } - } - - pub(crate) fn get_explicit(self) -> usize { - match self { - Self::None | Self::Implicit(_) => 999, - Self::Explicit(disp) => disp, - } - } -} - -impl Default for DisplayOrder { - fn default() -> Self { - Self::None - } -} - // Flags #[cfg(test)] mod test { use super::Arg; + use super::ArgAction; #[test] - fn flag_display() { - let mut f = Arg::new("flg").multiple_occurrences(true); - f.long = Some("flag"); + fn flag_display_long() { + let mut f = Arg::new("flg").long("flag").action(ArgAction::SetTrue); + f._build(); assert_eq!(f.to_string(), "--flag"); + } - let mut f2 = Arg::new("flg"); - f2.short = Some('f'); + #[test] + fn flag_display_short() { + let mut f2 = Arg::new("flg").short('f').action(ArgAction::SetTrue); + f2._build(); assert_eq!(f2.to_string(), "-f"); } #[test] + fn flag_display_count() { + let mut f2 = Arg::new("flg").long("flag").action(ArgAction::Count); + f2._build(); + + assert_eq!(f2.to_string(), "--flag..."); + } + + #[test] fn flag_display_single_alias() { - let mut f = Arg::new("flg"); - f.long = Some("flag"); - f.aliases = vec![("als", true)]; + let mut f = Arg::new("flg") + .long("flag") + .visible_alias("als") + .action(ArgAction::SetTrue); + f._build(); assert_eq!(f.to_string(), "--flag") } #[test] fn flag_display_multiple_aliases() { - let mut f = Arg::new("flg"); - f.short = Some('f'); + let mut f = Arg::new("flg").short('f').action(ArgAction::SetTrue); f.aliases = vec![ - ("alias_not_visible", false), - ("f2", true), - ("f3", true), - ("f4", true), + ("alias_not_visible".into(), false), + ("f2".into(), true), + ("f3".into(), true), + ("f4".into(), true), ]; + f._build(); + assert_eq!(f.to_string(), "-f"); } #[test] fn flag_display_single_short_alias() { - let mut f = Arg::new("flg"); - f.short = Some('a'); + let mut f = Arg::new("flg").short('a').action(ArgAction::SetTrue); f.short_aliases = vec![('b', true)]; + f._build(); assert_eq!(f.to_string(), "-a") } #[test] fn flag_display_multiple_short_aliases() { - let mut f = Arg::new("flg"); - f.short = Some('a'); + let mut f = Arg::new("flg").short('a').action(ArgAction::SetTrue); f.short_aliases = vec![('b', false), ('c', true), ('d', true), ('e', true)]; + f._build(); + assert_eq!(f.to_string(), "-a"); } @@ -5492,80 +4385,145 @@ #[test] fn option_display_multiple_occurrences() { - let o = Arg::new("opt") - .long("option") - .takes_value(true) - .multiple_occurrences(true); + let mut o = Arg::new("opt").long("option").action(ArgAction::Append); + o._build(); assert_eq!(o.to_string(), "--option "); } #[test] fn option_display_multiple_values() { - let o = Arg::new("opt") + let mut o = Arg::new("opt") .long("option") - .takes_value(true) - .multiple_values(true); + .action(ArgAction::Set) + .num_args(1..); + o._build(); assert_eq!(o.to_string(), "--option ..."); } #[test] - fn option_display2() { - let o2 = Arg::new("opt").short('o').value_names(&["file", "name"]); + fn 
option_display_zero_or_more_values() { + let mut o = Arg::new("opt") + .long("option") + .action(ArgAction::Set) + .num_args(0..); + o._build(); - assert_eq!(o2.to_string(), "-o "); + assert_eq!(o.to_string(), "--option [...]"); + } + + #[test] + fn option_display_one_or_more_values() { + let mut o = Arg::new("opt") + .long("option") + .action(ArgAction::Set) + .num_args(1..); + o._build(); + + assert_eq!(o.to_string(), "--option ..."); + } + + #[test] + fn option_display_zero_or_more_values_with_value_name() { + let mut o = Arg::new("opt") + .short('o') + .action(ArgAction::Set) + .num_args(0..) + .value_names(["file"]); + o._build(); + + assert_eq!(o.to_string(), "-o [...]"); + } + + #[test] + fn option_display_one_or_more_values_with_value_name() { + let mut o = Arg::new("opt") + .short('o') + .action(ArgAction::Set) + .num_args(1..) + .value_names(["file"]); + o._build(); + + assert_eq!(o.to_string(), "-o ..."); + } + + #[test] + fn option_display_optional_value() { + let mut o = Arg::new("opt") + .long("option") + .action(ArgAction::Set) + .num_args(0..=1); + o._build(); + + assert_eq!(o.to_string(), "--option []"); + } + + #[test] + fn option_display_value_names() { + let mut o = Arg::new("opt") + .short('o') + .action(ArgAction::Set) + .value_names(["file", "name"]); + o._build(); + + assert_eq!(o.to_string(), "-o "); } #[test] fn option_display3() { - let o2 = Arg::new("opt") + let mut o = Arg::new("opt") .short('o') - .takes_value(true) - .multiple_values(true) - .value_names(&["file", "name"]); + .num_args(1..) + .action(ArgAction::Set) + .value_names(["file", "name"]); + o._build(); - assert_eq!(o2.to_string(), "-o "); + assert_eq!(o.to_string(), "-o ..."); } #[test] fn option_display_single_alias() { - let o = Arg::new("opt") - .takes_value(true) + let mut o = Arg::new("opt") .long("option") + .action(ArgAction::Set) .visible_alias("als"); + o._build(); assert_eq!(o.to_string(), "--option "); } #[test] fn option_display_multiple_aliases() { - let o = Arg::new("opt") + let mut o = Arg::new("opt") .long("option") - .takes_value(true) - .visible_aliases(&["als2", "als3", "als4"]) + .action(ArgAction::Set) + .visible_aliases(["als2", "als3", "als4"]) .alias("als_not_visible"); + o._build(); assert_eq!(o.to_string(), "--option "); } #[test] fn option_display_single_short_alias() { - let o = Arg::new("opt") - .takes_value(true) + let mut o = Arg::new("opt") .short('a') + .action(ArgAction::Set) .visible_short_alias('b'); + o._build(); assert_eq!(o.to_string(), "-a "); } #[test] fn option_display_multiple_short_aliases() { - let o = Arg::new("opt") + let mut o = Arg::new("opt") .short('a') - .takes_value(true) - .visible_short_aliases(&['b', 'c', 'd']) + .action(ArgAction::Set) + .visible_short_aliases(['b', 'c', 'd']) .short_alias('e'); + o._build(); assert_eq!(o.to_string(), "-a "); } @@ -5574,45 +4532,109 @@ #[test] fn positional_display_multiple_values() { - let p = Arg::new("pos") - .index(1) - .takes_value(true) - .multiple_values(true); + let mut p = Arg::new("pos").index(1).num_args(1..); + p._build(); + + assert_eq!(p.to_string(), "[pos]..."); + } + + #[test] + fn positional_display_multiple_values_required() { + let mut p = Arg::new("pos").index(1).num_args(1..).required(true); + p._build(); assert_eq!(p.to_string(), "..."); } #[test] + fn positional_display_zero_or_more_values() { + let mut p = Arg::new("pos").index(1).num_args(0..); + p._build(); + + assert_eq!(p.to_string(), "[pos]..."); + } + + #[test] + fn positional_display_one_or_more_values() { + let mut p = 
Arg::new("pos").index(1).num_args(1..); + p._build(); + + assert_eq!(p.to_string(), "[pos]..."); + } + + #[test] + fn positional_display_one_or_more_values_required() { + let mut p = Arg::new("pos").index(1).num_args(1..).required(true); + p._build(); + + assert_eq!(p.to_string(), "..."); + } + + #[test] + fn positional_display_optional_value() { + let mut p = Arg::new("pos") + .index(1) + .num_args(0..=1) + .action(ArgAction::Set); + p._build(); + + assert_eq!(p.to_string(), "[pos]"); + } + + #[test] fn positional_display_multiple_occurrences() { - let p = Arg::new("pos") + let mut p = Arg::new("pos").index(1).action(ArgAction::Append); + p._build(); + + assert_eq!(p.to_string(), "[pos]..."); + } + + #[test] + fn positional_display_multiple_occurrences_required() { + let mut p = Arg::new("pos") .index(1) - .takes_value(true) - .multiple_occurrences(true); + .action(ArgAction::Append) + .required(true); + p._build(); assert_eq!(p.to_string(), "..."); } #[test] fn positional_display_required() { - let p2 = Arg::new("pos").index(1).required(true); + let mut p = Arg::new("pos").index(1).required(true); + p._build(); - assert_eq!(p2.to_string(), ""); + assert_eq!(p.to_string(), ""); } #[test] fn positional_display_val_names() { - let p2 = Arg::new("pos").index(1).value_names(&["file1", "file2"]); + let mut p = Arg::new("pos").index(1).value_names(["file1", "file2"]); + p._build(); + + assert_eq!(p.to_string(), "[file1] [file2]"); + } + + #[test] + fn positional_display_val_names_required() { + let mut p = Arg::new("pos") + .index(1) + .value_names(["file1", "file2"]) + .required(true); + p._build(); - assert_eq!(p2.to_string(), " "); + assert_eq!(p.to_string(), " "); } #[test] fn positional_display_val_names_req() { - let p2 = Arg::new("pos") + let mut p = Arg::new("pos") .index(1) .required(true) - .value_names(&["file1", "file2"]); + .value_names(["file1", "file2"]); + p._build(); - assert_eq!(p2.to_string(), " "); + assert_eq!(p.to_string(), " "); } } diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/src/builder/arg_settings.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/src/builder/arg_settings.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/src/builder/arg_settings.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/src/builder/arg_settings.rs 2023-02-01 05:24:55.000000000 +0000 @@ -1,9 +1,5 @@ -#![allow(deprecated)] - // Std use std::ops::BitOr; -#[cfg(feature = "yaml")] -use std::str::FromStr; // Third party use bitflags::bitflags; @@ -11,9 +7,8 @@ #[allow(unused)] use crate::Arg; -#[doc(hidden)] #[derive(Debug, Clone, Copy, PartialEq, Eq)] -pub struct ArgFlags(Flags); +pub(crate) struct ArgFlags(Flags); impl Default for ArgFlags { fn default() -> Self { @@ -31,481 +26,36 @@ /// [`Arg::is_set`]: crate::Arg::is_set() #[derive(Debug, PartialEq, Copy, Clone)] #[non_exhaustive] -pub enum ArgSettings { - /// Deprecated, replaced with [`Arg::required`] and [`Arg::is_required_set`] - /// - /// Derive: replace `#[clap(setting = Required)]` with `#[clap(required = true)]` - /// - /// Builder: replace `arg.setting(Required)` with `arg.required(true)` - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.1.0", - note = "Replaced with `Arg::required` and `Arg::is_required_set` - -Derive: replace `#[clap(setting = Required)]` with `#[clap(required = true)]` - -Builder: replace `arg.setting(Required)` with `arg.required(true)` -" - ) - )] +pub(crate) enum ArgSettings { Required, - /// Deprecated, replaced with [`Arg::multiple_values`] and 
[`Arg::is_multiple_values_set`] - /// - /// Derive: replace `#[clap(setting = MultipleValues)]` with `#[clap(multiple_values = true)]` - /// - /// Builder: replace `arg.setting(MultipleValues)` with `arg.multiple_values(true)` - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.1.0", - note = "Replaced with `Arg::multiple_values` and `Arg::`is_multiple_values_set` - -Derive: replace `#[clap(setting = MultipleValues)]` with `#[clap(multiple_values = true)]` - -Builder: replace `arg.setting(MultipleValues)` with `arg.multiple_values(true)` -" - ) - )] - MultipleValues, - /// Deprecated, replaced with [`Arg::action`] ([Issue #3772](https://github.com/clap-rs/clap/issues/3772)) - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.1.0", - note = "Replaced with `Arg::action` (Issue #3772) - -Builder: replace `arg.setting(MultipleOccurrences)` with `arg.action(ArgAction::Append)` when taking a value and `arg.action(ArgAction::Count)` with `matches.get_count` when not -" - ) - )] - MultipleOccurrences, - /// Deprecated, see [`ArgSettings::MultipleOccurrences`] (most likely what you want) and - /// [`ArgSettings::MultipleValues`] - /// - /// Derive: replace `#[clap(setting = Multiple)]` with `#[clap(multiple_values = true, multiple_occurrences = true)]` - /// - /// Builder: replace `arg.setting(Multiple)` with `arg.multiple_values(true).multiple_occurrences(true)` - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.0.0", - note = "Split into `Arg::multiple_occurrences` (most likely what you want) and `Arg::multiple_values` - -Derive: replace `#[clap(setting = Multiple)]` with `#[clap(multiple_values = true, multiple_occurrences = true)]` - -Builder: replace `arg.setting(Multiple)` with `arg.multiple_values(true).multiple_occurrences(true)` -" - ) - )] - #[doc(hidden)] - Multiple, - /// Deprecated, replaced with [`Arg::value_parser(NonEmptyStringValueParser::new())`] - /// - /// Derive: replace `#[clap(setting = ForbidEmptyValues)]` with `#[clap(value_parser = NonEmptyStringValueParser::new())]` - /// - /// Builder: replace `arg.setting(Multiple)` with `arg.value_parser(NonEmptyStringValueParser::new())` - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.1.0", - note = "Replaced with `Arg::value_parser(NonEmptyStringValueParser::new())` - -Derive: replace `#[clap(setting = ForbidEmptyValues)]` with `#[clap(value_parser = NonEmptyStringValueParser::new())]` - -Builder: replace `arg.setting(Multiple)` with `arg.value_parser(NonEmptyStringValueParser::new())` -" - ) - )] - ForbidEmptyValues, - /// Deprecated, replaced with [`Arg::global`] and [`Arg::is_global_set`] - /// - /// Derive: replace `#[clap(setting = Global)]` with `#[clap(global = true)]` - /// - /// Builder: replace `arg.setting(Global)` with `arg.global(true)` - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.1.0", - note = "Replaced with `Arg::global` and `Arg::is_global_set` - -Derive: replace `#[clap(setting = Global)]` with `#[clap(global = true)]` - -Builder: replace `arg.setting(Global)` with `arg.global(true)` -" - ) - )] Global, - /// Deprecated, replaced with [`Arg::hide`] and [`Arg::is_hide_set`] - /// - /// Derive: replace `#[clap(setting = Hidden)]` with `#[clap(hide = true)]` - /// - /// Builder: replace `arg.setting(Hidden)` with `arg.hide(true)` - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.1.0", - note = "Replaced with `Arg::hide` and `Arg::is_hide_set` - -Derive: replace `#[clap(setting = Hidden)]` with `#[clap(hide = true)]` 
- -Builder: replace `arg.setting(Hidden)` with `arg.hide(true)` -" - ) - )] Hidden, - /// Deprecated, replaced with [`Arg::takes_value`] and [`Arg::is_takes_value_set`] - /// - /// Derive: this setting shouldn't be needed - /// - /// Builder: replace `arg.setting(TakesValue)` with `arg.takes_value(true)` - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.1.0", - note = "Replaced with `Arg::takes_value` and `Arg::is_takes_value_set` - -Derive: this setting shouldn't be needed - -Builder: replace `arg.setting(TakesValue)` with `arg.takes_value(true)` -" - ) - )] - TakesValue, - /// Deprecated, replaced with [`Arg::use_value_delimiter`] and - /// [`Arg::is_use_value_delimiter_set`] - /// - /// Derive: replace `#[clap(setting = UseValueDelimiter)]` with `#[clap(use_value_delimiter = true)]` - /// - /// Builder: replace `arg.setting(UseValueDelimiter)` with `arg.use_value_delimiter(true)` - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.1.0", - note = "Replaced with `Arg::use_value_delimiter` and `Arg::is_use_value_delimiter_set` - -Derive: replace `#[clap(setting = UseValueDelimiter)]` with `#[clap(use_value_delimiter = true)]` - -Builder: replace `arg.setting(UseValueDelimiter)` with `arg.use_value_delimiter(true)` -" - ) - )] - UseValueDelimiter, - /// Deprecated, replaced with [`Arg::next_line_help`] and [`Arg::is_next_line_help_set`] - /// - /// Derive: replace `#[clap(setting = NextLineHelp)]` with `#[clap(next_line_help = true)]` - /// - /// Builder: replace `arg.setting(NextLineHelp)` with `arg.next_line_help(true)` - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.1.0", - note = "Replaced with `Arg::next_line_help` and `Arg::is_next_line_help_set` - -Derive: replace `#[clap(setting = NextLineHelp)]` with `#[clap(next_line_help = true)]` - -Builder: replace `arg.setting(NextLineHelp)` with `arg.next_line_help(true)` -" - ) - )] NextLineHelp, - /// Deprecated, replaced with [`Arg::require_value_delimiter`] and - /// [`Arg::is_require_value_delimiter_set`] - /// - /// Derive: replace `#[clap(setting = RequireDelimiter)]` with `#[clap(require_value_delimiter = true)]` - /// - /// Builder: replace `arg.setting(RequireDelimiter)` with `arg.require_value_delimiter(true)` - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.1.0", - note = "Replaced with `Arg::require_value_delimiter` and `Arg::is_require_value_delimiter_set` - -Derive: replace `#[clap(setting = RequireDelimiter)]` with `#[clap(require_value_delimiter = true)]` - -Builder: replace `arg.setting(RequireDelimiter)` with `arg.require_value_delimiter(true)` -" - ) - )] - RequireDelimiter, - /// Deprecated, replaced with [`Arg::hide_possible_values`] and - /// [`Arg::is_hide_possible_values_set`] - /// - /// Derive: replace `#[clap(setting = HidePossibleValues)]` with `#[clap(hide_possible_values = true)]` - /// - /// Builder: replace `arg.setting(HidePossibleValues)` with `arg.hide_possible_values(true)` - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.1.0", - note = "Replaced with `Arg::hide_possible_values` and `Arg::is_hide_possible_values_set` - -Derive: replace `#[clap(setting = HidePossibleValues)]` with `#[clap(hide_possible_values = true)]` - -Builder: replace `arg.setting(HidePossibleValues)` with `arg.hide_possible_values(true)` -" - ) - )] HidePossibleValues, - /// Deprecated, replaced with [`Arg::allow_hyphen_values`] and - /// [`Arg::is_allow_hyphen_values_set`] - /// - /// Derive: replace `#[clap(setting = AllowHyphenValues)]` with 
`#[clap(allow_hyphen_values = true)]` - /// - /// Builder: replace `arg.setting(AllowHyphenValues)` with `arg.allow_hyphen_values(true)` - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.1.0", - note = "Replaced with `Arg::allow_hyphen_values` and `Arg::is_allow_hyphen_values_set` - -Derive: replace `#[clap(setting = AllowHyphenValues)]` with `#[clap(allow_hyphen_values = true)]` - -Builder: replace `arg.setting(AllowHyphenValues)` with `arg.allow_hyphen_values(true)` -" - ) - )] AllowHyphenValues, - /// Deprecated, replaced with [`Arg::allow_hyphen_values`] and - /// [`Arg::is_allow_hyphen_values_set`] - /// - /// Derive: replace `#[clap(setting = AllowLeadingHyphen)]` with `#[clap(allow_hyphen_values = true)]` - /// - /// Builder: replace `arg.setting(AllowLeadingHyphen)` with `arg.allow_hyphen_values(true)` - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.0.0", - note = "Replaced with `Arg::allow_hyphen_values` and `Arg::is_allow_hyphen_values_set` - -Derive: replace `#[clap(setting = AllowLeadingHyphen)]` with `#[clap(allow_hyphen_values = true)]` - -Builder: replace `arg.setting(AllowLeadingHyphen)` with `arg.allow_hyphen_values(true)` -" - ) - )] - #[doc(hidden)] - AllowLeadingHyphen, - /// Deprecated, replaced with [`Arg::require_equals`] and [`Arg::is_require_equals_set`] - /// - /// Derive: replace `#[clap(setting = RequireEquals)]` with `#[clap(require_equals = true)]` - /// - /// Builder: replace `arg.setting(RequireEquals)` with `arg.require_equals(true)` - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.1.0", - note = "Replaced with `Arg::require_equals` and `Arg::is_require_equals_set` - -Derive: replace `#[clap(setting = RequireEquals)]` with `#[clap(require_equals = true)]` - -Builder: replace `arg.setting(RequireEquals)` with `arg.require_equals(true)` -" - ) - )] + AllowNegativeNumbers, RequireEquals, - /// Deprecated, replaced with [`Arg::last`] and [`Arg::is_last_set`] - /// - /// Derive: replace `#[clap(setting = Last)]` with `#[clap(last = true)]` - /// - /// Builder: replace `arg.setting(Last)` with `arg.last(true)` - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.1.0", - note = "Replaced with `Arg::last` and `Arg::is_last_set` - -Derive: replace `#[clap(setting = Last)]` with `#[clap(last = true)]` - -Builder: replace `arg.setting(Last)` with `arg.last(true)` -" - ) - )] Last, - /// Deprecated, replaced with [`Arg::hide_default_value`] and [`Arg::is_hide_default_value_set`] - /// - /// Derive: replace `#[clap(setting = HideDefaultValue)]` with `#[clap(hide_default_value = true)]` - /// - /// Builder: replace `arg.setting(HideDefaultValue)` with `arg.hide_default_value(true)` - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.1.0", - note = "Replaced with `Arg::hide_default_value` and `Arg::is_hide_default_value_set` - -Derive: replace `#[clap(setting = HideDefaultValue)]` with `#[clap(hide_default_value = true)]` - -Builder: replace `arg.setting(HideDefaultValue)` with `arg.hide_default_value(true)` -" - ) - )] + TrailingVarArg, HideDefaultValue, - /// Deprecated, replaced with [`Arg::ignore_case`] and [`Arg::is_ignore_case_set`] - /// - /// Derive: replace `#[clap(setting = IgnoreCase)]` with `#[clap(ignore_case = true)]` - /// - /// Builder: replace `arg.setting(IgnoreCase)` with `arg.ignore_case(true)` - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.1.0", - note = "Replaced with `Arg::ignore_case` and `Arg::is_ignore_case_set` - -Derive: replace 
`#[clap(setting = IgnoreCase)]` with `#[clap(ignore_case = true)]` - -Builder: replace `arg.setting(IgnoreCase)` with `arg.ignore_case(true)` -" - ) - )] IgnoreCase, - /// Deprecated, replaced with [`Arg::ignore_case`] and [`Arg::is_ignore_case_set`] - /// - /// Derive: replace `#[clap(setting = CaseInsensitive)]` with `#[clap(ignore_case = true)]` - /// - /// Builder: replace `arg.setting(CaseInsensitive)` with `arg.ignore_case(true)` - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.0.0", - note = "Replaced with `Arg::ignore_case` and `Arg::is_ignore_case_set` - -Derive: replace `#[clap(setting = CaseInsensitive)]` with `#[clap(ignore_case = true)]` - -Builder: replace `arg.setting(CaseInsensitive)` with `arg.ignore_case(true)` -" - ) - )] - #[doc(hidden)] - CaseInsensitive, - /// Deprecated, replaced with [`Arg::hide_env`] and [`Arg::is_hide_env_set`] - /// - /// Derive: replace `#[clap(setting = HideEnv)]` with `#[clap(hide_env = true)]` - /// - /// Builder: replace `arg.setting(HideEnv)` with `arg.hide_env(true)` - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.1.0", - note = "Replaced with `Arg::hide_env` and `Arg::is_hide_env_set` - -Derive: replace `#[clap(setting = HideEnv)]` with `#[clap(hide_env = true)]` - -Builder: replace `arg.setting(HideEnv)` with `arg.hide_env(true)` -" - ) - )] #[cfg(feature = "env")] HideEnv, - /// Deprecated, replaced with [`Arg::hide_env_values`] and [`Arg::is_hide_env_values_set`] - /// - /// Derive: replace `#[clap(setting = HideEnvValues)]` with `#[clap(hide_env_values = true)]` - /// - /// Builder: replace `arg.setting(HideEnvValues)` with `arg.hide_env_values(true)` - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.1.0", - note = "Replaced with `Arg::hide_env_values` and `Arg::is_hide_env_values_set` - -Derive: replace `#[clap(setting = HideEnvValues)]` with `#[clap(hide_env_values = true)]` - -Builder: replace `arg.setting(HideEnvValues)` with `arg.hide_env_values(true)` -" - ) - )] #[cfg(feature = "env")] HideEnvValues, - /// Deprecated, replaced with [`Arg::hide_short_help`] and [`Arg::is_hide_short_help_set`] - /// - /// Derive: replace `#[clap(setting = HiddenShortHelp)]` with `#[clap(hide_short_help = true)]` - /// - /// Builder: replace `arg.setting(HiddenShortHelp)` with `arg.hide_short_help(true)` - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.1.0", - note = "Replaced with `Arg::hide_short_help` and `Arg::is_hide_short_help_set` - -Derive: replace `#[clap(setting = HiddenShortHelp)]` with `#[clap(hide_short_help = true)]` - -Builder: replace `arg.setting(HiddenShortHelp)` with `arg.hide_short_help(true)` -" - ) - )] HiddenShortHelp, - /// Deprecated, replaced with [`Arg::hide_long_help`] and [`Arg::is_hide_long_help_set`] - /// - /// Derive: replace `#[clap(setting = HiddenLongHelp)]` with `#[clap(hide_long_help = true)]` - /// - /// Builder: replace `arg.setting(HiddenLongHelp)` with `arg.hide_long_help(true)` - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.1.0", - note = "Replaced with `Arg::hide_long_help` and `Arg::is_hide_long_help_set` - -Derive: replace `#[clap(setting = HiddenLongHelp)]` with `#[clap(hide_long_help = true)]` - -Builder: replace `arg.setting(HiddenLongHelp)` with `arg.hide_long_help(true)` -" - ) - )] HiddenLongHelp, - /// Deprecated, replaced with [`Arg::value_parser`] - /// - /// Derive: replace `#[clap(setting = AllowInvalidUtf8)]` with `#[clap(action)]` (which opts-in to the - /// new clap v4 behavior which gets the type 
via `value_parser!`) - /// - /// Builder: replace `arg.setting(AllowInvalidUtf8)` with `arg.value_parser(value_parser!(T))` where - /// `T` is the type of interest, like `OsString` or `PathBuf`, and `matches.value_of_os` with - /// `matches.get_one::` or `matches.values_of_os` with `matches.get_many::` - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.1.0", - note = "Replaced with `value_parser` - -Derive: replace `#[clap(setting = AllowInvalidUtf8)]` with `#[clap(action)]` (which opts-in to the -new clap v4 behavior which gets the type via `value_parser!`) - -Builder: replace `arg.setting(AllowInvalidUtf8)` with `arg.value_parser(value_parser!(T))` where -`T` is the type of interest, like `OsString` or `PathBuf`, and `matches.value_of_os` with -`matches.get_one::` or `matches.values_of_os` with `matches.get_many::` -" - ) - )] - AllowInvalidUtf8, - /// Deprecated, replaced with [`Arg::exclusive`] and [`Arg::is_exclusive_set`] - /// - /// Derive: replace `#[clap(setting = Exclusive)]` with `#[clap(exclusive = true)]` - /// - /// Builder: replace `arg.setting(Exclusive)` with `arg.exclusive(true)` - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.1.0", - note = "Replaced with `Arg::exclusive` and `Arg::is_exclusive_set` - -Derive: replace `#[clap(setting = Exclusive)]` with `#[clap(exclusive = true)]` - -Builder: replace `arg.setting(Exclusive)` with `arg.exclusive(true)` -" - ) - )] Exclusive, } bitflags! { struct Flags: u32 { const REQUIRED = 1; - const MULTIPLE_OCC = 1 << 1; - const NO_EMPTY_VALS = 1 << 2; const GLOBAL = 1 << 3; const HIDDEN = 1 << 4; - const TAKES_VAL = 1 << 5; - const USE_DELIM = 1 << 6; + const TRAILING_VARARG = 1 << 5; + const ALLOW_NEG_NUMS = 1 << 6; const NEXT_LINE_HELP = 1 << 7; - const REQ_DELIM = 1 << 9; const DELIM_NOT_SET = 1 << 10; const HIDE_POS_VALS = 1 << 11; const ALLOW_TAC_VALS = 1 << 12; @@ -517,11 +67,8 @@ const HIDE_ENV_VALS = 1 << 17; const HIDDEN_SHORT_H = 1 << 18; const HIDDEN_LONG_H = 1 << 19; - const MULTIPLE_VALS = 1 << 20; - const MULTIPLE = Self::MULTIPLE_OCC.bits | Self::MULTIPLE_VALS.bits; #[cfg(feature = "env")] const HIDE_ENV = 1 << 21; - const UTF8_NONE = 1 << 22; const EXCLUSIVE = 1 << 23; const NO_OP = 0; } @@ -529,23 +76,16 @@ impl_settings! { ArgSettings, ArgFlags, Required => Flags::REQUIRED, - MultipleOccurrences => Flags::MULTIPLE_OCC, - MultipleValues => Flags::MULTIPLE_VALS, - Multiple => Flags::MULTIPLE, - ForbidEmptyValues => Flags::NO_EMPTY_VALS, Global => Flags::GLOBAL, Hidden => Flags::HIDDEN, - TakesValue => Flags::TAKES_VAL, - UseValueDelimiter => Flags::USE_DELIM, NextLineHelp => Flags::NEXT_LINE_HELP, - RequireDelimiter => Flags::REQ_DELIM, HidePossibleValues => Flags::HIDE_POS_VALS, AllowHyphenValues => Flags::ALLOW_TAC_VALS, - AllowLeadingHyphen => Flags::ALLOW_TAC_VALS, + AllowNegativeNumbers => Flags::ALLOW_NEG_NUMS, RequireEquals => Flags::REQUIRE_EQUALS, Last => Flags::LAST, + TrailingVarArg => Flags::TRAILING_VARARG, IgnoreCase => Flags::CASE_INSENSITIVE, - CaseInsensitive => Flags::CASE_INSENSITIVE, #[cfg(feature = "env")] HideEnv => Flags::HIDE_ENV, #[cfg(feature = "env")] @@ -553,132 +93,53 @@ HideDefaultValue => Flags::HIDE_DEFAULT_VAL, HiddenShortHelp => Flags::HIDDEN_SHORT_H, HiddenLongHelp => Flags::HIDDEN_LONG_H, - AllowInvalidUtf8 => Flags::UTF8_NONE, Exclusive => Flags::EXCLUSIVE } -/// Deprecated in [Issue #3087](https://github.com/clap-rs/clap/issues/3087), maybe [`clap::Parser`][crate::Parser] would fit your use case? 
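Read together, the deprecation notes removed above all point the same way: each `ArgSettings` variant is replaced by a dedicated builder method on `Arg`. A rough, stand-alone sketch of that style against the clap 4 builder (argument names and values here are illustrative, not taken from cargo):

    use std::path::PathBuf;
    use clap::{value_parser, Arg, ArgAction, Command};

    let cmd = Command::new("demo")
        .arg(
            Arg::new("config")
                .long("config")
                .action(ArgAction::Set)       // was arg.setting(TakesValue)
                .required(true)               // was arg.setting(Required)
                .ignore_case(true)            // was arg.setting(IgnoreCase) / CaseInsensitive
                .hide_default_value(true),    // was arg.setting(HideDefaultValue)
        )
        .arg(
            Arg::new("paths")
                .num_args(1..)                            // was MultipleValues / multiple_values(true)
                .value_parser(value_parser!(PathBuf)),    // was arg.setting(AllowInvalidUtf8)
        );

    let m = cmd.get_matches_from(["demo", "--config", "App.toml", "a.txt", "b.txt"]);
    assert_eq!(m.get_one::<String>("config").unwrap(), "App.toml");
    assert_eq!(m.get_many::<PathBuf>("paths").unwrap().count(), 2);
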
-#[cfg(feature = "yaml")] -impl FromStr for ArgSettings { - type Err = String; - fn from_str(s: &str) -> Result::Err> { - #[allow(deprecated)] - #[allow(unreachable_patterns)] - match &*s.to_ascii_lowercase() { - "required" => Ok(ArgSettings::Required), - "multipleoccurrences" => Ok(ArgSettings::MultipleOccurrences), - "multiplevalues" => Ok(ArgSettings::MultipleValues), - "multiple" => Ok(ArgSettings::Multiple), - "forbidemptyvalues" => Ok(ArgSettings::ForbidEmptyValues), - "global" => Ok(ArgSettings::Global), - "hidden" => Ok(ArgSettings::Hidden), - "takesvalue" => Ok(ArgSettings::TakesValue), - "usevaluedelimiter" => Ok(ArgSettings::UseValueDelimiter), - "nextlinehelp" => Ok(ArgSettings::NextLineHelp), - "requiredelimiter" => Ok(ArgSettings::RequireDelimiter), - "hidepossiblevalues" => Ok(ArgSettings::HidePossibleValues), - "allowhyphenvalues" => Ok(ArgSettings::AllowHyphenValues), - "allowleadinghypyhen" => Ok(ArgSettings::AllowLeadingHyphen), - "requireequals" => Ok(ArgSettings::RequireEquals), - "last" => Ok(ArgSettings::Last), - "ignorecase" => Ok(ArgSettings::IgnoreCase), - "caseinsensitive" => Ok(ArgSettings::CaseInsensitive), - #[cfg(feature = "env")] - "hideenv" => Ok(ArgSettings::HideEnv), - #[cfg(feature = "env")] - "hideenvvalues" => Ok(ArgSettings::HideEnvValues), - "hidedefaultvalue" => Ok(ArgSettings::HideDefaultValue), - "hiddenshorthelp" => Ok(ArgSettings::HiddenShortHelp), - "hiddenlonghelp" => Ok(ArgSettings::HiddenLongHelp), - "allowinvalidutf8" => Ok(ArgSettings::AllowInvalidUtf8), - "exclusive" => Ok(ArgSettings::Exclusive), - _ => Err(format!("unknown AppSetting: `{}`", s)), - } - } -} - #[cfg(test)] mod test { + use super::*; + use crate::Arg; + #[test] - #[cfg(feature = "yaml")] - fn arg_settings_fromstr() { - use super::ArgSettings; - - assert_eq!( - "allowhyphenvalues".parse::().unwrap(), - ArgSettings::AllowHyphenValues - ); - assert_eq!( - "forbidemptyvalues".parse::().unwrap(), - ArgSettings::ForbidEmptyValues - ); - assert_eq!( - "hidepossiblevalues".parse::().unwrap(), - ArgSettings::HidePossibleValues - ); - assert_eq!( - "hidden".parse::().unwrap(), - ArgSettings::Hidden - ); - assert_eq!( - "nextlinehelp".parse::().unwrap(), - ArgSettings::NextLineHelp - ); - assert_eq!( - "requiredelimiter".parse::().unwrap(), - ArgSettings::RequireDelimiter - ); - assert_eq!( - "required".parse::().unwrap(), - ArgSettings::Required - ); - assert_eq!( - "takesvalue".parse::().unwrap(), - ArgSettings::TakesValue - ); - assert_eq!( - "usevaluedelimiter".parse::().unwrap(), - ArgSettings::UseValueDelimiter - ); - assert_eq!( - "requireequals".parse::().unwrap(), - ArgSettings::RequireEquals - ); - assert_eq!("last".parse::().unwrap(), ArgSettings::Last); - assert_eq!( - "hidedefaultvalue".parse::().unwrap(), - ArgSettings::HideDefaultValue - ); - assert_eq!( - "ignorecase".parse::().unwrap(), - ArgSettings::IgnoreCase - ); - #[cfg(feature = "env")] - assert_eq!( - "hideenv".parse::().unwrap(), - ArgSettings::HideEnv - ); - #[cfg(feature = "env")] - assert_eq!( - "hideenvvalues".parse::().unwrap(), - ArgSettings::HideEnvValues - ); - assert_eq!( - "hiddenshorthelp".parse::().unwrap(), - ArgSettings::HiddenShortHelp - ); - assert_eq!( - "hiddenlonghelp".parse::().unwrap(), - ArgSettings::HiddenLongHelp - ); - assert_eq!( - "allowinvalidutf8".parse::().unwrap(), - ArgSettings::AllowInvalidUtf8 - ); - assert_eq!( - "exclusive".parse::().unwrap(), - ArgSettings::Exclusive - ); - assert!("hahahaha".parse::().is_err()); + fn setting() { + let m = 
Arg::new("setting").setting(ArgSettings::Required); + assert!(m.is_required_set()); + } + + #[test] + fn unset_setting() { + let m = Arg::new("unset_setting").setting(ArgSettings::Required); + assert!(m.is_required_set()); + + let m = m.unset_setting(ArgSettings::Required); + assert!(!m.is_required_set(), "{:#?}", m); + } + + #[test] + fn setting_bitor() { + let m = Arg::new("setting_bitor") + .setting(ArgSettings::Required | ArgSettings::Hidden | ArgSettings::Last); + + assert!(m.is_required_set()); + assert!(m.is_hide_set()); + assert!(m.is_last_set()); + } + + #[test] + fn unset_setting_bitor() { + let m = Arg::new("unset_setting_bitor") + .setting(ArgSettings::Required) + .setting(ArgSettings::Hidden) + .setting(ArgSettings::Last); + + assert!(m.is_required_set()); + assert!(m.is_hide_set()); + assert!(m.is_last_set()); + + let m = m.unset_setting(ArgSettings::Required | ArgSettings::Hidden | ArgSettings::Last); + assert!(!m.is_required_set(), "{:#?}", m); + assert!(!m.is_hide_set(), "{:#?}", m); + assert!(!m.is_last_set(), "{:#?}", m); } } diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/src/builder/command.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/src/builder/command.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/src/builder/command.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/src/builder/command.rs 2023-02-01 05:24:55.000000000 +0000 @@ -1,7 +1,6 @@ -#![allow(deprecated)] +#![cfg_attr(not(feature = "usage"), allow(unused_mut))] // Std -use std::collections::HashMap; use std::env; use std::ffi::OsString; use std::fmt; @@ -9,23 +8,24 @@ use std::ops::Index; use std::path::Path; -// Third Party -#[cfg(feature = "yaml")] -use yaml_rust::Yaml; - // Internal use crate::builder::app_settings::{AppFlags, AppSettings}; use crate::builder::arg_settings::ArgSettings; -use crate::builder::{arg::ArgProvider, Arg, ArgGroup, ArgPredicate}; +use crate::builder::ArgAction; +use crate::builder::IntoResettable; +use crate::builder::PossibleValue; +use crate::builder::Str; +use crate::builder::StyledStr; +use crate::builder::{Arg, ArgGroup, ArgPredicate}; use crate::error::ErrorKind; use crate::error::Result as ClapResult; use crate::mkeymap::MKeyMap; use crate::output::fmt::Stream; -use crate::output::{fmt::Colorizer, Help, HelpWriter, Usage}; +use crate::output::{fmt::Colorizer, write_help, Usage}; use crate::parser::{ArgMatcher, ArgMatches, Parser}; use crate::util::ChildGraph; -use crate::util::{color::ColorChoice, Id, Key}; -use crate::PossibleValue; +use crate::util::FlatMap; +use crate::util::{color::ColorChoice, Id}; use crate::{Error, INTERNAL_ERROR_MSG}; #[cfg(debug_assertions)] @@ -43,11 +43,11 @@ /// [`CommandFactory::command`][crate::CommandFactory::command] to access the /// `Command`. /// -/// - [Basic API][crate::App#basic-api] -/// - [Application-wide Settings][crate::App#application-wide-settings] -/// - [Command-specific Settings][crate::App#command-specific-settings] -/// - [Subcommand-specific Settings][crate::App#subcommand-specific-settings] -/// - [Reflection][crate::App#reflection] +/// - [Basic API][crate::Command#basic-api] +/// - [Application-wide Settings][crate::Command#application-wide-settings] +/// - [Command-specific Settings][crate::Command#command-specific-settings] +/// - [Subcommand-specific Settings][crate::Command#subcommand-specific-settings] +/// - [Reflection][crate::Command#reflection] /// /// # Examples /// @@ -66,55 +66,50 @@ /// /// // Your program logic starts here... 
/// ``` -/// [`App::get_matches`]: Command::get_matches() -pub type Command<'help> = App<'help>; - -/// Deprecated, replaced with [`Command`] -#[cfg_attr( - feature = "deprecated", - deprecated(since = "3.1.0", note = "Replaced with `Command`") -)] -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct App<'help> { - id: Id, - name: String, - long_flag: Option<&'help str>, +/// [`Command::get_matches`]: Command::get_matches() +#[derive(Debug, Clone)] +pub struct Command { + name: Str, + long_flag: Option, short_flag: Option, display_name: Option, bin_name: Option, - author: Option<&'help str>, - version: Option<&'help str>, - long_version: Option<&'help str>, - about: Option<&'help str>, - long_about: Option<&'help str>, - before_help: Option<&'help str>, - before_long_help: Option<&'help str>, - after_help: Option<&'help str>, - after_long_help: Option<&'help str>, - aliases: Vec<(&'help str, bool)>, // (name, visible) - short_flag_aliases: Vec<(char, bool)>, // (name, visible) - long_flag_aliases: Vec<(&'help str, bool)>, // (name, visible) - usage_str: Option<&'help str>, + author: Option, + version: Option, + long_version: Option, + about: Option, + long_about: Option, + before_help: Option, + before_long_help: Option, + after_help: Option, + after_long_help: Option, + aliases: Vec<(Str, bool)>, // (name, visible) + short_flag_aliases: Vec<(char, bool)>, // (name, visible) + long_flag_aliases: Vec<(Str, bool)>, // (name, visible) + usage_str: Option, usage_name: Option, - help_str: Option<&'help str>, + help_str: Option, disp_ord: Option, term_w: Option, max_w: Option, - template: Option<&'help str>, + #[cfg(feature = "help")] + template: Option, settings: AppFlags, g_settings: AppFlags, - args: MKeyMap<'help>, - subcommands: Vec>, - replacers: HashMap<&'help str, &'help [&'help str]>, - groups: Vec>, - current_help_heading: Option<&'help str>, + args: MKeyMap, + subcommands: Vec, + replacers: FlatMap>, + groups: Vec, + current_help_heading: Option, current_disp_ord: Option, - subcommand_value_name: Option<&'help str>, - subcommand_heading: Option<&'help str>, + subcommand_value_name: Option, + subcommand_heading: Option, + external_value_parser: Option, + long_help_exists: bool, } /// # Basic API -impl<'help> App<'help> { +impl Command { /// Creates a new instance of an `Command`. /// /// It is common, but not required, to use binary name as the `name`. This @@ -130,31 +125,16 @@ /// Command::new("My Program") /// # ; /// ``` - pub fn new>(name: S) -> Self { + pub fn new(name: impl Into) -> Self { /// The actual implementation of `new`, non-generic to save code size. /// /// If we don't do this rustc will unnecessarily generate multiple versions /// of this code. 
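The comment kept above describes a general monomorphization trick: the public generic method only converts its argument and forwards to a private non-generic body, so the compiler duplicates just the thin conversion shim per caller type. A minimal stand-alone sketch of the same pattern, using made-up types rather than clap's:

    pub struct Registry {
        names: Vec<String>,
    }

    impl Registry {
        // Generic shim: instantiated once per caller type, but trivially small.
        pub fn add(&mut self, name: impl Into<String>) {
            self.add_inner(name.into());
        }

        // Non-generic body: compiled exactly once, no matter how many
        // distinct `Into<String>` types callers pass to `add`.
        fn add_inner(&mut self, name: String) {
            self.names.push(name);
        }
    }
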
- fn new_inner<'help>(name: String) -> App<'help> { - App { - id: Id::from(&*name), + fn new_inner(name: Str) -> Command { + Command { name, ..Default::default() } - .arg( - Arg::new("help") - .long("help") - .help("Print help information") - .global(true) - .generated(), - ) - .arg( - Arg::new("version") - .long("version") - .help("Print version information") - .global(true) - .generated(), - ) } new_inner(name.into()) @@ -182,19 +162,24 @@ /// ``` /// [argument]: Arg #[must_use] - pub fn arg>>(mut self, a: A) -> Self { - let mut arg = a.into(); + pub fn arg(mut self, a: impl Into) -> Self { + let arg = a.into(); + self.arg_internal(arg); + self + } + + fn arg_internal(&mut self, mut arg: Arg) { if let Some(current_disp_ord) = self.current_disp_ord.as_mut() { - if !arg.is_positional() && arg.provider != ArgProvider::Generated { + if !arg.is_positional() { let current = *current_disp_ord; - arg.disp_ord.set_implicit(current); + arg.disp_ord.get_or_insert(current); *current_disp_ord = current + 1; } } - arg.help_heading.get_or_insert(self.current_help_heading); + arg.help_heading + .get_or_insert_with(|| self.current_help_heading.clone()); self.args.push(arg); - self } /// Adds multiple [arguments] to the list of valid possibilities. @@ -204,7 +189,7 @@ /// ```no_run /// # use clap::{Command, arg, Arg}; /// Command::new("myprog") - /// .args(&[ + /// .args([ /// arg!("[debug] -d 'turns on debugging info'"), /// Arg::new("input").help("the input file to use") /// ]) @@ -212,15 +197,7 @@ /// ``` /// [arguments]: Arg #[must_use] - pub fn args(mut self, args: I) -> Self - where - I: IntoIterator, - T: Into>, - { - let args = args.into_iter(); - let (lower, _) = args.size_hint(); - self.args.reserve(lower); - + pub fn args(mut self, args: impl IntoIterator>) -> Self { for arg in args { self = self.arg(arg); } @@ -231,14 +208,19 @@ /// /// This can be useful for modifying the auto-generated help or version arguments. /// + /// # Panics + /// + /// If the argument is undefined + /// /// # Examples /// /// ```rust - /// # use clap::{Command, Arg}; + /// # use clap::{Command, Arg, ArgAction}; /// /// let mut cmd = Command::new("foo") /// .arg(Arg::new("bar") - /// .short('b')) + /// .short('b') + /// .action(ArgAction::SetTrue)) /// .mut_arg("bar", |a| a.short('B')); /// /// let res = cmd.try_get_matches_from_mut(vec!["foo", "-b"]); @@ -252,23 +234,16 @@ /// assert!(res.is_ok()); /// ``` #[must_use] - pub fn mut_arg(mut self, arg_id: T, f: F) -> Self + #[cfg_attr(debug_assertions, track_caller)] + pub fn mut_arg(mut self, arg_id: impl AsRef, f: F) -> Self where - F: FnOnce(Arg<'help>) -> Arg<'help>, - T: Key + Into<&'help str>, + F: FnOnce(Arg) -> Arg, { - let arg_id: &str = arg_id.into(); - let id = Id::from(arg_id); - - let mut a = self.args.remove_by_name(&id).unwrap_or_else(|| Arg { - id, - name: arg_id, - ..Arg::default() - }); - - if a.provider == ArgProvider::Generated { - a.provider = ArgProvider::GeneratedMutated; - } + let id = arg_id.as_ref(); + let a = self + .args + .remove_by_name(id) + .unwrap_or_else(|| panic!("Argument `{}` is undefined", id)); self.args.push(f(a)); self @@ -279,6 +254,10 @@ /// This can be useful for modifying auto-generated arguments of nested subcommands with /// [`Command::mut_arg`]. 
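Note the new `# Panics` contracts: `mut_arg` above (and `mut_subcommand`, just below) now only mutate items that were explicitly defined and panic otherwise, instead of silently creating a fresh one. A short illustrative sketch of the new behaviour (hypothetical program, not from this diff):

    use clap::{Arg, ArgAction, Command};

    let cmd = Command::new("foo")
        .arg(Arg::new("verbose").short('v').action(ArgAction::SetTrue))
        // "verbose" is defined above, so rewriting its short flag is fine.
        .mut_arg("verbose", |a| a.short('V'));

    let m = cmd.try_get_matches_from(["foo", "-V"]).unwrap();
    assert!(m.get_flag("verbose"));

    // By contrast, `.mut_arg("missing", |a| a)` would now panic with
    // "Argument `missing` is undefined" rather than quietly adding an arg.
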
/// + /// # Panics + /// + /// If the subcommand is undefined + /// /// # Examples /// /// ```rust @@ -298,20 +277,17 @@ /// assert!(res.is_ok()); /// ``` #[must_use] - pub fn mut_subcommand<'a, T, F>(mut self, subcmd_id: T, f: F) -> Self + pub fn mut_subcommand(mut self, name: impl AsRef, f: F) -> Self where - F: FnOnce(App<'help>) -> App<'help>, - T: Into<&'a str>, + F: FnOnce(Self) -> Self, { - let subcmd_id: &str = subcmd_id.into(); - let id = Id::from(subcmd_id); - - let pos = self.subcommands.iter().position(|s| s.id == id); + let name = name.as_ref(); + let pos = self.subcommands.iter().position(|s| s.name == name); let subcmd = if let Some(idx) = pos { self.subcommands.remove(idx) } else { - App::new(subcmd_id) + panic!("Command `{}` is undefined", name) }; self.subcommands.push(f(subcmd)); @@ -345,13 +321,13 @@ /// .arg(arg!("--minor 'auto increase minor'")) /// .arg(arg!("--patch 'auto increase patch'")) /// .group(ArgGroup::new("vers") - /// .args(&["set-ver", "major", "minor","patch"]) + /// .args(["set-ver", "major", "minor","patch"]) /// .required(true)) /// # ; /// ``` #[inline] #[must_use] - pub fn group>>(mut self, group: G) -> Self { + pub fn group(mut self, group: impl Into) -> Self { self.groups.push(group.into()); self } @@ -369,21 +345,17 @@ /// .arg(arg!("--patch 'auto increase patch'")) /// .arg(arg!("-c [FILE] 'a config file'")) /// .arg(arg!("-i [IFACE] 'an interface'")) - /// .groups(&[ + /// .groups([ /// ArgGroup::new("vers") - /// .args(&["set-ver", "major", "minor","patch"]) + /// .args(["set-ver", "major", "minor","patch"]) /// .required(true), /// ArgGroup::new("input") - /// .args(&["c", "i"]) + /// .args(["c", "i"]) /// ]) /// # ; /// ``` #[must_use] - pub fn groups(mut self, groups: I) -> Self - where - I: IntoIterator, - T: Into>, - { + pub fn groups(mut self, groups: impl IntoIterator>) -> Self { for g in groups.into_iter() { self = self.group(g.into()); } @@ -412,8 +384,18 @@ /// ``` #[inline] #[must_use] - pub fn subcommand>>(mut self, subcmd: S) -> Self { - self.subcommands.push(subcmd.into()); + pub fn subcommand(self, subcmd: impl Into) -> Self { + let subcmd = subcmd.into(); + self.subcommand_internal(subcmd) + } + + fn subcommand_internal(mut self, mut subcmd: Self) -> Self { + if let Some(current_disp_ord) = self.current_disp_ord.as_mut() { + let current = *current_disp_ord; + subcmd.disp_ord.get_or_insert(current); + *current_disp_ord = current + 1; + } + self.subcommands.push(subcmd); self } @@ -424,7 +406,7 @@ /// ```rust /// # use clap::{Command, Arg, }; /// # Command::new("myprog") - /// .subcommands( vec![ + /// .subcommands( [ /// Command::new("config").about("Controls configuration functionality") /// .arg(Arg::new("config_file")), /// Command::new("debug").about("Controls debug functionality")]) @@ -432,13 +414,9 @@ /// ``` /// [`IntoIterator`]: std::iter::IntoIterator #[must_use] - pub fn subcommands(mut self, subcmds: I) -> Self - where - I: IntoIterator, - T: Into>, - { - for subcmd in subcmds.into_iter() { - self.subcommands.push(subcmd.into()); + pub fn subcommands(mut self, subcmds: impl IntoIterator>) -> Self { + for subcmd in subcmds { + self = self.subcommand(subcmd); } self } @@ -457,7 +435,7 @@ /// /// ```rust /// # use clap::{Command, Arg, ArgAction}; - /// fn cmd() -> Command<'static> { + /// fn cmd() -> Command { /// Command::new("foo") /// .arg( /// Arg::new("bar").short('b').action(ArgAction::SetTrue) @@ -475,7 +453,7 @@ /// } /// ``` pub fn debug_assert(mut self) { - self._build_all(); + self.build(); } /// Custom 
error message for post-parsing validation @@ -483,7 +461,7 @@ /// # Examples /// /// ```rust - /// # use clap::{Command, ErrorKind}; + /// # use clap::{Command, error::ErrorKind}; /// let mut cmd = Command::new("myprog"); /// let err = cmd.error(ErrorKind::InvalidValue, "Some failure case"); /// ``` @@ -506,7 +484,7 @@ /// .get_matches(); /// ``` /// [`env::args_os`]: std::env::args_os() - /// [`App::try_get_matches_from_mut`]: Command::try_get_matches_from_mut() + /// [`Command::try_get_matches_from_mut`]: Command::try_get_matches_from_mut() #[inline] pub fn get_matches(self) -> ArgMatches { self.get_matches_from(&mut env::args_os()) @@ -514,7 +492,7 @@ /// Parse [`env::args_os`], exiting on failure. /// - /// Like [`App::get_matches`] but doesn't consume the `Command`. + /// Like [`Command::get_matches`] but doesn't consume the `Command`. /// /// # Panics /// @@ -530,7 +508,7 @@ /// let matches = cmd.get_matches_mut(); /// ``` /// [`env::args_os`]: std::env::args_os() - /// [`App::get_matches`]: Command::get_matches() + /// [`Command::get_matches`]: Command::get_matches() pub fn get_matches_mut(&mut self) -> ArgMatches { self.try_get_matches_from_mut(&mut env::args_os()) .unwrap_or_else(|e| e.exit()) @@ -562,8 +540,8 @@ /// [`clap::Result`]: Result /// [`clap::Error`]: crate::Error /// [`kind`]: crate::Error - /// [`ErrorKind::DisplayHelp`]: crate::ErrorKind::DisplayHelp - /// [`ErrorKind::DisplayVersion`]: crate::ErrorKind::DisplayVersion + /// [`ErrorKind::DisplayHelp`]: crate::error::ErrorKind::DisplayHelp + /// [`ErrorKind::DisplayVersion`]: crate::error::ErrorKind::DisplayVersion #[inline] pub fn try_get_matches(self) -> ClapResult { // Start the parsing @@ -589,7 +567,7 @@ /// // Args and options go here... /// .get_matches_from(arg_vec); /// ``` - /// [`App::get_matches`]: Command::get_matches() + /// [`Command::get_matches`]: Command::get_matches() /// [`clap::Result`]: Result /// [`Vec`]: std::vec::Vec pub fn get_matches_from(mut self, itr: I) -> ArgMatches @@ -628,15 +606,15 @@ /// .try_get_matches_from(arg_vec) /// .unwrap_or_else(|e| e.exit()); /// ``` - /// [`App::get_matches_from`]: Command::get_matches_from() - /// [`App::try_get_matches`]: Command::try_get_matches() + /// [`Command::get_matches_from`]: Command::get_matches_from() + /// [`Command::try_get_matches`]: Command::try_get_matches() /// [`Error::exit`]: crate::Error::exit() /// [`std::process::exit`]: std::process::exit() /// [`clap::Error`]: crate::Error /// [`Error::exit`]: crate::Error::exit() /// [`kind`]: crate::Error - /// [`ErrorKind::DisplayHelp`]: crate::ErrorKind::DisplayHelp - /// [`ErrorKind::DisplayVersion`]: crate::ErrorKind::DisplayVersion + /// [`ErrorKind::DisplayHelp`]: crate::error::ErrorKind::DisplayHelp + /// [`ErrorKind::DisplayVersion`]: crate::error::ErrorKind::DisplayVersion /// [`clap::Result`]: Result pub fn try_get_matches_from(mut self, itr: I) -> ClapResult where @@ -648,7 +626,7 @@ /// Parse the specified arguments, returning a [`clap::Result`] on failure. /// - /// Like [`App::try_get_matches_from`] but doesn't consume the `Command`. + /// Like [`Command::try_get_matches_from`] but doesn't consume the `Command`. /// /// **NOTE:** This method WILL NOT exit when `--help` or `--version` (or short versions) are /// used. 
It will return a [`clap::Error`], where the [`kind`] is a [`ErrorKind::DisplayHelp`] @@ -673,7 +651,7 @@ /// let matches = cmd.try_get_matches_from_mut(arg_vec) /// .unwrap_or_else(|e| e.exit()); /// ``` - /// [`App::try_get_matches_from`]: Command::try_get_matches_from() + /// [`Command::try_get_matches_from`]: Command::try_get_matches_from() /// [`clap::Result`]: Result /// [`clap::Error`]: crate::Error /// [`kind`]: crate::Error @@ -699,7 +677,7 @@ debug!("Command::try_get_matches_from_mut: Reinserting command into arguments so subcommand parser matches it"); raw_args.insert(&cursor, &[&command]); debug!("Command::try_get_matches_from_mut: Clearing name and bin_name so that displayed command name starts with applet name"); - self.name.clear(); + self.name = "".into(); self.bin_name = None; return self._do_parse(&mut raw_args, cursor); } @@ -743,12 +721,14 @@ /// ``` /// [`io::stdout()`]: std::io::stdout() pub fn print_help(&mut self) -> io::Result<()> { - self._build_self(); + self._build_self(false); let color = self.color_help(); - let mut c = Colorizer::new(Stream::Stdout, color); + let mut styled = StyledStr::new(); let usage = Usage::new(self); - Help::new(HelpWriter::Buffer(&mut c), self, &usage, false).write_help()?; + write_help(&mut styled, self, &usage, false); + + let c = Colorizer::new(Stream::Stdout, color).with_content(styled); c.print() } @@ -768,18 +748,20 @@ /// [`-h` (short)]: Arg::help() /// [`--help` (long)]: Arg::long_help() pub fn print_long_help(&mut self) -> io::Result<()> { - self._build_self(); + self._build_self(false); let color = self.color_help(); - let mut c = Colorizer::new(Stream::Stdout, color); + let mut styled = StyledStr::new(); let usage = Usage::new(self); - Help::new(HelpWriter::Buffer(&mut c), self, &usage, true).write_help()?; + write_help(&mut styled, self, &usage, true); + + let c = Colorizer::new(Stream::Stdout, color).with_content(styled); c.print() } - /// Writes the short help message (`-h`) to a [`io::Write`] object. + /// Render the short help message (`-h`) to a [`StyledStr`] /// - /// See also [`Command::write_long_help`]. + /// See also [`Command::render_long_help`]. /// /// # Examples /// @@ -788,22 +770,24 @@ /// use std::io; /// let mut cmd = Command::new("myprog"); /// let mut out = io::stdout(); - /// cmd.write_help(&mut out).expect("failed to write to stdout"); + /// let help = cmd.render_help(); + /// println!("{}", help); /// ``` /// [`io::Write`]: std::io::Write /// [`-h` (short)]: Arg::help() /// [`--help` (long)]: Arg::long_help() - pub fn write_help(&mut self, w: &mut W) -> io::Result<()> { - self._build_self(); + pub fn render_help(&mut self) -> StyledStr { + self._build_self(false); + let mut styled = StyledStr::new(); let usage = Usage::new(self); - Help::new(HelpWriter::Normal(w), self, &usage, false).write_help()?; - w.flush() + write_help(&mut styled, self, &usage, false); + styled } - /// Writes the long help message (`--help`) to a [`io::Write`] object. + /// Render the long help message (`--help`) to a [`StyledStr`]. /// - /// See also [`Command::write_help`]. + /// See also [`Command::render_help`]. 
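As the renamed methods above indicate, help is now rendered into a `StyledStr` that the caller prints, rather than written directly into an `io::Write` handle. A minimal before/after sketch, assuming an otherwise default `Command`:

    use clap::Command;

    let mut cmd = Command::new("myprog");

    // clap 3 style (now deprecated):
    //     let mut out = std::io::stdout();
    //     cmd.write_help(&mut out)?;

    // clap 4 style: render first, then print (or log, or capture in tests).
    let help = cmd.render_help();
    println!("{}", help);
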
/// /// # Examples /// @@ -812,16 +796,48 @@ /// use std::io; /// let mut cmd = Command::new("myprog"); /// let mut out = io::stdout(); - /// cmd.write_long_help(&mut out).expect("failed to write to stdout"); + /// let help = cmd.render_long_help(); + /// println!("{}", help); /// ``` /// [`io::Write`]: std::io::Write /// [`-h` (short)]: Arg::help() /// [`--help` (long)]: Arg::long_help() + pub fn render_long_help(&mut self) -> StyledStr { + self._build_self(false); + + let mut styled = StyledStr::new(); + let usage = Usage::new(self); + write_help(&mut styled, self, &usage, true); + styled + } + + #[doc(hidden)] + #[cfg_attr( + feature = "deprecated", + deprecated(since = "4.0.0", note = "Replaced with `Command::render_help`") + )] + pub fn write_help(&mut self, w: &mut W) -> io::Result<()> { + self._build_self(false); + + let mut styled = StyledStr::new(); + let usage = Usage::new(self); + write_help(&mut styled, self, &usage, false); + ok!(write!(w, "{}", styled)); + w.flush() + } + + #[doc(hidden)] + #[cfg_attr( + feature = "deprecated", + deprecated(since = "4.0.0", note = "Replaced with `Command::render_long_help`") + )] pub fn write_long_help(&mut self, w: &mut W) -> io::Result<()> { - self._build_self(); + self._build_self(false); + let mut styled = StyledStr::new(); let usage = Usage::new(self); - Help::new(HelpWriter::Normal(w), self, &usage, true).write_help()?; + write_help(&mut styled, self, &usage, true); + ok!(write!(w, "{}", styled)); w.flush() } @@ -883,10 +899,14 @@ /// let mut cmd = Command::new("myprog"); /// println!("{}", cmd.render_usage()); /// ``` - pub fn render_usage(&mut self) -> String { + pub fn render_usage(&mut self) -> StyledStr { + self.render_usage_().unwrap_or_default() + } + + pub(crate) fn render_usage_(&mut self) -> Option { // If there are global arguments, or settings we need to propagate them down to subcommands // before parsing incase we run into a subcommand - self._build_self(); + self._build_self(false); Usage::new(self).create_usage_with_title(&[]) } @@ -896,11 +916,11 @@ /// /// These settings will apply to the top-level command and all subcommands, by default. Some /// settings can be overridden in subcommands. -impl<'help> App<'help> { +impl Command { /// Specifies that the parser should not assume the first argument passed is the binary name. /// /// This is normally the case when using a "daemon" style mode. For shells / REPLs, see - /// [`Command::multicall`][App::multicall]. + /// [`Command::multicall`][Command::multicall]. /// /// # Examples /// @@ -911,7 +931,7 @@ /// .arg(arg!( ... "commands to run")) /// .get_matches_from(vec!["command", "set"]); /// - /// let cmds: Vec<&str> = m.values_of("cmd").unwrap().collect(); + /// let cmds: Vec<_> = m.get_many::("cmd").unwrap().collect(); /// assert_eq!(cmds, ["command", "set"]); /// ``` /// [`try_get_matches_from_mut`]: crate::Command::try_get_matches_from_mut() @@ -926,9 +946,6 @@ /// Try not to fail on parse errors, like missing option values. /// - /// **Note:** Make sure you apply it as `global_setting` if you want this setting - /// to be propagated to subcommands and sub-subcommands! - /// /// **NOTE:** This choice is propagated to all child subcommands. 
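The reworked example below also shows the typed `ArgMatches` accessors that replace `value_of`/`is_present`. A small stand-alone sketch of the same lookup style (flag and option names are illustrative):

    use clap::{arg, Command};

    let m = Command::new("cmd")
        .arg(arg!(-c --config <FILE> "Sets a custom config file"))
        .arg(arg!(-f --force "Overwrite existing output"))
        .get_matches_from(["cmd", "-c", "file.toml", "-f"]);

    // clap 3: m.value_of("config") / m.is_present("force")
    assert_eq!(m.get_one::<String>("config").unwrap(), "file.toml");
    assert!(m.get_flag("force"));
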
/// /// # Examples @@ -937,17 +954,17 @@ /// # use clap::{Command, arg}; /// let cmd = Command::new("cmd") /// .ignore_errors(true) - /// .arg(arg!(-c --config "Sets a custom config file").required(false)) - /// .arg(arg!(-x --stuff "Sets a custom stuff file").required(false)) + /// .arg(arg!(-c --config "Sets a custom config file")) + /// .arg(arg!(-x --stuff "Sets a custom stuff file")) /// .arg(arg!(f: -f "Flag")); /// /// let r = cmd.try_get_matches_from(vec!["cmd", "-c", "file", "-f", "-x"]); /// /// assert!(r.is_ok(), "unexpected error: {:?}", r); /// let m = r.unwrap(); - /// assert_eq!(m.value_of("config"), Some("file")); - /// assert!(m.is_present("f")); - /// assert_eq!(m.value_of("stuff"), None); + /// assert_eq!(m.get_one::("config").unwrap(), "file"); + /// assert!(*m.get_one::("f").expect("defaulted")); + /// assert_eq!(m.get_one::("stuff"), None); /// ``` #[inline] pub fn ignore_errors(self, yes: bool) -> Self { @@ -958,23 +975,18 @@ } } - /// Deprecated, replaced with [`ArgAction::Set`][super::ArgAction::Set] + /// Replace prior occurrences of arguments rather than error /// - /// The new actions (`ArgAction::Set`, `ArgAction::SetTrue`) do this by default. + /// For any argument that would conflict with itself by default (e.g. + /// [`ArgAction::Set`][ArgAction::Set], it will now override itself. /// - /// See `ArgAction::StoreValue` and `ArgAction::IncOccurrence` for how to migrate - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.2.0", - note = "Replaced with `Arg::action(ArgAction::...)` - -The new actions (`ArgAction::Set`, `ArgAction::SetTrue`) do this by default. - -See `ArgAction::StoreValue` and `ArgAction::IncOccurrence` for how to migrate -" - ) - )] + /// This is the equivalent to saying the `foo` arg using [`Arg::overrides_with("foo")`] for all + /// defined arguments. + /// + /// **NOTE:** This choice is propagated to all child subcommands. + /// + /// [`Arg::overrides_with("foo")`]: crate::Arg::overrides_with() + #[inline] pub fn args_override_self(self, yes: bool) -> Self { if yes { self.global_setting(AppSettings::AllArgsOverrideSelf) @@ -987,7 +999,7 @@ /// was used. /// /// **NOTE:** The same thing can be done manually by setting the final positional argument to - /// [`Arg::use_value_delimiter(false)`]. Using this setting is safer, because it's easier to locate + /// [`Arg::value_delimiter(None)`]. Using this setting is safer, because it's easier to locate /// when making changes. /// /// **NOTE:** This choice is propagated to all child subcommands. @@ -1001,7 +1013,7 @@ /// .get_matches(); /// ``` /// - /// [`Arg::use_value_delimiter(false)`]: crate::Arg::use_value_delimiter() + /// [`Arg::value_delimiter(None)`]: crate::Arg::value_delimiter() #[inline] pub fn dont_delimit_trailing_values(self, yes: bool) -> Self { if yes { @@ -1030,7 +1042,6 @@ #[inline] #[must_use] pub fn color(self, color: ColorChoice) -> Self { - #![allow(deprecated)] let cmd = self .unset_global_setting(AppSettings::ColorAuto) .unset_global_setting(AppSettings::ColorAlways) @@ -1051,6 +1062,8 @@ /// /// **NOTE:** This setting applies globally and *not* on a per-command basis. /// + /// **NOTE:** This requires the [`wrap_help` feature][crate::_features] + /// /// # Examples /// /// ```no_run @@ -1066,15 +1079,17 @@ self } - /// Sets the maximum terminal width at which to wrap help messages. + /// Limit the line length for wrapping help when using the current terminal's width. /// - /// This only applies when setting the current terminal width. 
See [`Command::term_width`] for - /// more details. + /// This only applies when [`term_width`][Command::term_width] is unset so that the current + /// terminal's width will be used. See [`Command::term_width`] for more details. /// - /// Using `0` will ignore terminal widths and use source formatting. + /// Using `0` will ignore terminal widths and use source formatting (default). /// /// **NOTE:** This setting applies globally and *not* on a per-command basis. /// + /// **NOTE:** This requires the [`wrap_help` feature][crate::_features] + /// /// # Examples /// /// ```no_run @@ -1095,7 +1110,7 @@ /// # Examples /// /// ```rust - /// # use clap::{Command, ErrorKind}; + /// # use clap::{Command, error::ErrorKind}; /// let res = Command::new("myprog") /// .disable_version_flag(true) /// .try_get_matches_from(vec![ @@ -1117,9 +1132,6 @@ /// /// Defaults to `false`; subcommands have independent version strings from their parents. /// - /// **Note:** Make sure you apply it as `global_setting` if you want this setting - /// to be propagated to subcommands and sub-subcommands! - /// /// **NOTE:** This choice is propagated to all child subcommands. /// /// # Examples @@ -1173,7 +1185,7 @@ /// # Examples /// /// ```rust - /// # use clap::{Command, ErrorKind}; + /// # use clap::{Command, error::ErrorKind}; /// let res = Command::new("myprog") /// .disable_help_flag(true) /// .try_get_matches_from(vec![ @@ -1196,7 +1208,7 @@ /// # Examples /// /// ```rust - /// # use clap::{Command, ErrorKind}; + /// # use clap::{Command, error::ErrorKind}; /// let res = Command::new("myprog") /// .disable_help_subcommand(true) /// // Normally, creating a subcommand causes a `help` subcommand to automatically @@ -1206,7 +1218,7 @@ /// "myprog", "help" /// ]); /// assert!(res.is_err()); - /// assert_eq!(res.unwrap_err().kind(), ErrorKind::UnknownArgument); + /// assert_eq!(res.unwrap_err().kind(), ErrorKind::InvalidSubcommand); /// ``` /// /// [`subcommand`]: crate::Command::subcommand() @@ -1283,25 +1295,13 @@ } } - /// Disables the automatic collapsing of positional args into `[ARGS]` inside the usage string. - /// - /// **NOTE:** This choice is propagated to all child subcommands. - /// - /// # Examples - /// - /// ```no_run - /// # use clap::{Command, Arg}; - /// Command::new("myprog") - /// .dont_collapse_args_in_usage(true) - /// .get_matches(); - /// ``` - #[inline] - pub fn dont_collapse_args_in_usage(self, yes: bool) -> Self { - if yes { - self.global_setting(AppSettings::DontCollapseArgsInUsage) - } else { - self.unset_global_setting(AppSettings::DontCollapseArgsInUsage) - } + #[doc(hidden)] + #[cfg_attr( + feature = "deprecated", + deprecated(since = "4.0.0", note = "This is now the default") + )] + pub fn dont_collapse_args_in_usage(self, _yes: bool) -> Self { + self } /// Tells `clap` *not* to print possible values when displaying help information. @@ -1387,7 +1387,7 @@ /// # Command-specific Settings /// /// These apply only to the current command and are not inherited by subcommands. -impl<'help> App<'help> { +impl Command { /// (Re)Sets the program's name. /// /// See [`Command::new`] for more details. @@ -1395,15 +1395,13 @@ /// # Examples /// /// ```ignore - /// # use clap::{Command, load_yaml}; - /// let yaml = load_yaml!("cmd.yaml"); - /// let cmd = Command::from(yaml) - /// .name(crate_name!()); + /// let cmd = clap::command!() + /// .name("foo"); /// /// // continued logic goes here, such as `cmd.get_matches()` etc. 
/// ``` #[must_use] - pub fn name>(mut self, name: S) -> Self { + pub fn name(mut self, name: impl Into) -> Self { self.name = name.into(); self } @@ -1429,8 +1427,8 @@ /// # ; /// ``` #[must_use] - pub fn bin_name>(mut self, name: S) -> Self { - self.bin_name = Some(name.into()); + pub fn bin_name(mut self, name: impl IntoResettable) -> Self { + self.bin_name = name.into_resettable().into_option(); self } @@ -1445,8 +1443,8 @@ /// # ; /// ``` #[must_use] - pub fn display_name>(mut self, name: S) -> Self { - self.display_name = Some(name.into()); + pub fn display_name(mut self, name: impl IntoResettable) -> Self { + self.display_name = name.into_resettable().into_option(); self } @@ -1456,6 +1454,9 @@ /// automatically set your application's author(s) to the same thing as your /// crate at compile time. /// + /// **NOTE:** A custom [`help_template`][Command::help_template] is needed for author to show + /// up. + /// /// # Examples /// /// ```no_run @@ -1464,10 +1465,9 @@ /// .author("Me, me@mymain.com") /// # ; /// ``` - /// [`crate_authors!`]: ./macro.crate_authors!.html #[must_use] - pub fn author>(mut self, author: S) -> Self { - self.author = Some(author.into()); + pub fn author(mut self, author: impl IntoResettable) -> Self { + self.author = author.into_resettable().into_option(); self } @@ -1489,8 +1489,8 @@ /// # ; /// ``` #[must_use] - pub fn about>>(mut self, about: O) -> Self { - self.about = about.into(); + pub fn about(mut self, about: impl IntoResettable) -> Self { + self.about = about.into_resettable().into_option(); self } @@ -1512,10 +1512,10 @@ /// a few lines of text, but that's ok!") /// # ; /// ``` - /// [`App::about`]: Command::about() + /// [`Command::about`]: Command::about() #[must_use] - pub fn long_about>>(mut self, long_about: O) -> Self { - self.long_about = long_about.into(); + pub fn long_about(mut self, long_about: impl IntoResettable) -> Self { + self.long_about = long_about.into_resettable().into_option(); self } @@ -1536,8 +1536,8 @@ /// ``` /// #[must_use] - pub fn after_help>(mut self, help: S) -> Self { - self.after_help = Some(help.into()); + pub fn after_help(mut self, help: impl IntoResettable) -> Self { + self.after_help = help.into_resettable().into_option(); self } @@ -1558,8 +1558,8 @@ /// # ; /// ``` #[must_use] - pub fn after_long_help>(mut self, help: S) -> Self { - self.after_long_help = Some(help.into()); + pub fn after_long_help(mut self, help: impl IntoResettable) -> Self { + self.after_long_help = help.into_resettable().into_option(); self } @@ -1578,8 +1578,8 @@ /// # ; /// ``` #[must_use] - pub fn before_help>(mut self, help: S) -> Self { - self.before_help = Some(help.into()); + pub fn before_help(mut self, help: impl IntoResettable) -> Self { + self.before_help = help.into_resettable().into_option(); self } @@ -1598,8 +1598,8 @@ /// # ; /// ``` #[must_use] - pub fn before_long_help>(mut self, help: S) -> Self { - self.before_long_help = Some(help.into()); + pub fn before_long_help(mut self, help: impl IntoResettable) -> Self { + self.before_long_help = help.into_resettable().into_option(); self } @@ -1619,10 +1619,9 @@ /// .version("v0.1.24") /// # ; /// ``` - /// [`crate_version!`]: ./macro.crate_version!.html #[must_use] - pub fn version>(mut self, ver: S) -> Self { - self.version = Some(ver.into()); + pub fn version(mut self, ver: impl IntoResettable) -> Self { + self.version = ver.into_resettable().into_option(); self } @@ -1647,10 +1646,9 @@ /// binary: myprog") /// # ; /// ``` - /// [`crate_version!`]: 
./macro.crate_version!.html #[must_use] - pub fn long_version>(mut self, ver: S) -> Self { - self.long_version = Some(ver.into()); + pub fn long_version(mut self, ver: impl IntoResettable) -> Self { + self.long_version = ver.into_resettable().into_option(); self } @@ -1665,7 +1663,7 @@ /// correctly by the default help formatter: /// /// - Do not indent the first usage line. - /// - Indent all subsequent usage lines with four spaces. + /// - Indent all subsequent usage lines with seven spaces. /// - The last line must not end with a newline. /// /// # Examples @@ -1683,8 +1681,8 @@ /// # use clap::{Command, Arg}; /// Command::new("myprog") /// .override_usage( - /// "myapp -X [-a] [-b] \n \ - /// myapp -Y [-c] \n \ + /// "myapp -X [-a] [-b] \n \ + /// myapp -Y [-c] \n \ /// myapp -Z [-d|-e]" /// ) /// # ; @@ -1692,8 +1690,8 @@ /// /// [`ArgMatches::usage`]: ArgMatches::usage() #[must_use] - pub fn override_usage>(mut self, usage: S) -> Self { - self.usage_str = Some(usage.into()); + pub fn override_usage(mut self, usage: impl IntoResettable) -> Self { + self.usage_str = usage.into_resettable().into_option(); self } @@ -1715,7 +1713,7 @@ /// Does awesome things\n\ /// (C) me@mail.com\n\n\ /// - /// USAGE: myapp \n\n\ + /// Usage: myapp \n\n\ /// /// Options:\n\ /// -h, --help Display this message\n\ @@ -1729,8 +1727,8 @@ /// # ; /// ``` #[must_use] - pub fn override_help>(mut self, help: S) -> Self { - self.help_str = Some(help.into()); + pub fn override_help(mut self, help: impl IntoResettable) -> Self { + self.help_str = help.into_resettable().into_option(); self } @@ -1760,11 +1758,14 @@ /// * `{options}` - Help for options. /// * `{positionals}` - Help for positional arguments. /// * `{subcommands}` - Help for subcommands. - /// * `{after-help}` - Help from [`App::after_help`] or [`Command::after_long_help`]. - /// * `{before-help}` - Help from [`App::before_help`] or [`Command::before_long_help`]. + /// * `{tag}` - Standard tab sized used within clap + /// * `{after-help}` - Help from [`Command::after_help`] or [`Command::after_long_help`]. + /// * `{before-help}` - Help from [`Command::before_help`] or [`Command::before_long_help`]. /// /// # Examples /// + /// For a very brief help: + /// /// ```no_run /// # use clap::Command; /// Command::new("myprog") @@ -1772,43 +1773,38 @@ /// .help_template("{bin} ({version}) - {usage}") /// # ; /// ``` - /// [`App::about`]: Command::about() - /// [`App::long_about`]: Command::long_about() - /// [`App::after_help`]: Command::after_help() - /// [`App::after_long_help`]: Command::after_long_help() - /// [`App::before_help`]: Command::before_help() - /// [`App::before_long_help`]: Command::before_long_help() - #[must_use] - pub fn help_template>(mut self, s: S) -> Self { - self.template = Some(s.into()); - self - } - - /// Apply a setting for the current command or subcommand. - /// - /// See [`Command::global_setting`] to apply a setting to this command and all subcommands. /// - /// See [`AppSettings`] for a full list of possibilities and examples. 
- /// - /// # Examples + /// For showing more application context: /// /// ```no_run - /// # use clap::{Command, AppSettings}; - /// Command::new("myprog") - /// .setting(AppSettings::SubcommandRequired) - /// .setting(AppSettings::AllowLeadingHyphen) - /// # ; - /// ``` - /// or - /// ```no_run - /// # use clap::{Command, AppSettings}; + /// # use clap::Command; /// Command::new("myprog") - /// .setting(AppSettings::SubcommandRequired | AppSettings::AllowLeadingHyphen) + /// .version("1.0") + /// .help_template("\ + /// {before-help}{name} {version} + /// {author-with-newline}{about-with-newline} + /// {usage-heading} {usage} + /// + /// {all-args}{after-help} + /// ") /// # ; /// ``` + /// [`Command::about`]: Command::about() + /// [`Command::long_about`]: Command::long_about() + /// [`Command::after_help`]: Command::after_help() + /// [`Command::after_long_help`]: Command::after_long_help() + /// [`Command::before_help`]: Command::before_help() + /// [`Command::before_long_help`]: Command::before_long_help() + #[must_use] + #[cfg(feature = "help")] + pub fn help_template(mut self, s: impl IntoResettable) -> Self { + self.template = s.into_resettable().into_option(); + self + } + #[inline] #[must_use] - pub fn setting(mut self, setting: F) -> Self + pub(crate) fn setting(mut self, setting: F) -> Self where F: Into, { @@ -1816,29 +1812,9 @@ self } - /// Remove a setting for the current command or subcommand. - /// - /// See [`AppSettings`] for a full list of possibilities and examples. - /// - /// # Examples - /// - /// ```no_run - /// # use clap::{Command, AppSettings}; - /// Command::new("myprog") - /// .unset_setting(AppSettings::SubcommandRequired) - /// .setting(AppSettings::AllowLeadingHyphen) - /// # ; - /// ``` - /// or - /// ```no_run - /// # use clap::{Command, AppSettings}; - /// Command::new("myprog") - /// .unset_setting(AppSettings::SubcommandRequired | AppSettings::AllowLeadingHyphen) - /// # ; - /// ``` #[inline] #[must_use] - pub fn unset_setting(mut self, setting: F) -> Self + pub(crate) fn unset_setting(mut self, setting: F) -> Self where F: Into, { @@ -1846,81 +1822,37 @@ self } - /// Apply a setting for the current command and all subcommands. - /// - /// See [`Command::setting`] to apply a setting only to this command. - /// - /// See [`AppSettings`] for a full list of possibilities and examples. - /// - /// # Examples - /// - /// ```no_run - /// # use clap::{Command, AppSettings}; - /// Command::new("myprog") - /// .global_setting(AppSettings::AllowNegativeNumbers) - /// # ; - /// ``` #[inline] #[must_use] - pub fn global_setting(mut self, setting: AppSettings) -> Self { + pub(crate) fn global_setting(mut self, setting: AppSettings) -> Self { self.settings.set(setting); self.g_settings.set(setting); self } - /// Remove a setting and stop propagating down to subcommands. - /// - /// See [`AppSettings`] for a full list of possibilities and examples. 
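
With `setting`/`unset_setting`/`global_setting` made `pub(crate)` in the hunks above, the public way to express the old `AppSettings` examples is the dedicated builder methods. A rough sketch of the removed `AppSettings::SubcommandRequired` example (leading-hyphen handling now lives on `Arg`, shown in a later sketch):

```rust
use clap::Command;

fn main() {
    // Public-API equivalent of the removed `.setting(AppSettings::SubcommandRequired)`.
    let cmd = Command::new("myprog")
        .subcommand_required(true)
        .subcommand(Command::new("test"));
    // Sanity-check the builder configuration in debug builds.
    cmd.debug_assert();
}
```
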
- /// - /// # Examples - /// - /// ```no_run - /// # use clap::{Command, AppSettings}; - /// Command::new("myprog") - /// .unset_global_setting(AppSettings::AllowNegativeNumbers) - /// # ; - /// ``` - /// [global]: Command::global_setting() #[inline] #[must_use] - pub fn unset_global_setting(mut self, setting: AppSettings) -> Self { + pub(crate) fn unset_global_setting(mut self, setting: AppSettings) -> Self { self.settings.unset(setting); self.g_settings.unset(setting); self } - /// Deprecated, replaced with [`Command::next_help_heading`] - #[inline] - #[must_use] - #[cfg_attr( - feature = "deprecated", - deprecated(since = "3.1.0", note = "Replaced with `App::next_help_heading`") - )] - pub fn help_heading(self, heading: O) -> Self - where - O: Into>, - { - self.next_help_heading(heading) - } - /// Set the default section heading for future args. /// /// This will be used for any arg that hasn't had [`Arg::help_heading`] called. /// - /// This is useful if the default `OPTIONS` or `ARGS` headings are + /// This is useful if the default `Options` or `Arguments` headings are /// not specific enough for one's use case. /// /// For subcommands, see [`Command::subcommand_help_heading`] /// - /// [`App::arg`]: Command::arg() + /// [`Command::arg`]: Command::arg() /// [`Arg::help_heading`]: crate::Arg::help_heading() #[inline] #[must_use] - pub fn next_help_heading(mut self, heading: O) -> Self - where - O: Into>, - { - self.current_help_heading = heading.into(); + pub fn next_help_heading(mut self, heading: impl IntoResettable) -> Self { + self.current_help_heading = heading.into_resettable().into_option(); self } @@ -1929,8 +1861,8 @@ /// This will be used for any arg that hasn't had [`Arg::display_order`] called. #[inline] #[must_use] - pub fn next_display_order(mut self, disp_ord: impl Into>) -> Self { - self.current_disp_ord = disp_ord.into(); + pub fn next_display_order(mut self, disp_ord: impl IntoResettable) -> Self { + self.current_disp_ord = disp_ord.into_resettable().into_option(); self } @@ -2030,22 +1962,27 @@ /// .action(ArgAction::SetTrue)) /// .arg(Arg::new("format") /// .long("format") - /// .takes_value(true) + /// .action(ArgAction::Set) /// .value_parser(["txt", "json"])) /// .replace("--save-all", &["--save-context", "--save-runtime", "--format=json"]) /// .get_matches_from(vec!["cmd", "--save-all"]); /// /// assert!(*m.get_one::("save-context").expect("defaulted by clap")); /// assert!(*m.get_one::("save-runtime").expect("defaulted by clap")); - /// assert_eq!(m.value_of("format"), Some("json")); + /// assert_eq!(m.get_one::("format").unwrap(), "json"); /// ``` /// - /// [`App::replace`]: Command::replace() + /// [`Command::replace`]: Command::replace() #[inline] #[cfg(feature = "unstable-replace")] #[must_use] - pub fn replace(mut self, name: &'help str, target: &'help [&'help str]) -> Self { - self.replacers.insert(name, target); + pub fn replace( + mut self, + name: impl Into, + target: impl IntoIterator>, + ) -> Self { + self.replacers + .insert(name.into(), target.into_iter().map(Into::into).collect()); self } @@ -2072,32 +2009,11 @@ } } - /// Specifies that leading hyphens are allowed in all argument *values* (e.g. `-10`). - /// - /// Otherwise they will be parsed as another flag or option. See also - /// [`Command::allow_negative_numbers`]. - /// - /// **NOTE:** Use this setting with caution as it silences certain circumstances which would - /// otherwise be an error (such as accidentally forgetting to specify a value for leading - /// option). 
It is preferred to set this on a per argument basis, via [`Arg::allow_hyphen_values`]. - /// - /// # Examples - /// - /// ```rust - /// # use clap::{Arg, Command}; - /// // Imagine you needed to represent negative numbers as well, such as -10 - /// let m = Command::new("nums") - /// .allow_hyphen_values(true) - /// .arg(Arg::new("neg")) - /// .get_matches_from(vec![ - /// "nums", "-20" - /// ]); - /// - /// assert_eq!(m.value_of("neg"), Some("-20")); - /// # ; - /// ``` - /// [`Arg::allow_hyphen_values`]: crate::Arg::allow_hyphen_values() - #[inline] + #[doc(hidden)] + #[cfg_attr( + feature = "deprecated", + deprecated(since = "4.0.0", note = "Replaced with `Arg::allow_hyphen_values`") + )] pub fn allow_hyphen_values(self, yes: bool) -> Self { if yes { self.setting(AppSettings::AllowHyphenValues) @@ -2106,26 +2022,11 @@ } } - /// Allows negative numbers to pass as values. - /// - /// This is similar to [`Command::allow_hyphen_values`] except that it only allows numbers, - /// all other undefined leading hyphens will fail to parse. - /// - /// # Examples - /// - /// ```rust - /// # use clap::{Command, Arg}; - /// let res = Command::new("myprog") - /// .allow_negative_numbers(true) - /// .arg(Arg::new("num")) - /// .try_get_matches_from(vec![ - /// "myprog", "-20" - /// ]); - /// assert!(res.is_ok()); - /// let m = res.unwrap(); - /// assert_eq!(m.value_of("num").unwrap(), "-20"); - /// ``` - #[inline] + #[doc(hidden)] + #[cfg_attr( + feature = "deprecated", + deprecated(since = "4.0.0", note = "Replaced with `Arg::allow_negative_numbers`") + )] pub fn allow_negative_numbers(self, yes: bool) -> Self { if yes { self.setting(AppSettings::AllowNegativeNumbers) @@ -2134,27 +2035,11 @@ } } - /// Specifies that the final positional argument is a "VarArg" and that `clap` should not - /// attempt to parse any further args. - /// - /// The values of the trailing positional argument will contain all args from itself on. - /// - /// **NOTE:** The final positional argument **must** have [`Arg::multiple_values(true)`] or the usage - /// string equivalent. - /// - /// # Examples - /// - /// ```rust - /// # use clap::{Command, arg}; - /// let m = Command::new("myprog") - /// .trailing_var_arg(true) - /// .arg(arg!( ... 
"commands to run")) - /// .get_matches_from(vec!["myprog", "arg1", "-r", "val1"]); - /// - /// let trail: Vec<&str> = m.values_of("cmd").unwrap().collect(); - /// assert_eq!(trail, ["arg1", "-r", "val1"]); - /// ``` - /// [`Arg::multiple_values(true)`]: crate::Arg::multiple_values() + #[doc(hidden)] + #[cfg_attr( + feature = "deprecated", + deprecated(since = "4.0.0", note = "Replaced with `Arg::trailing_var_arg`") + )] pub fn trailing_var_arg(self, yes: bool) -> Self { if yes { self.setting(AppSettings::TrailingVarArg) @@ -2208,8 +2093,8 @@ /// "prog", "other" /// ]); /// - /// assert_eq!(m.value_of("arg1"), None); - /// assert_eq!(m.value_of("arg2"), Some("other")); + /// assert_eq!(m.get_one::("arg1"), None); + /// assert_eq!(m.get_one::("arg2").unwrap(), "other"); /// ``` /// /// Now the same example, but using a default value for the first optional positional argument @@ -2227,46 +2112,46 @@ /// "prog", "other" /// ]); /// - /// assert_eq!(m.value_of("arg1"), Some("something")); - /// assert_eq!(m.value_of("arg2"), Some("other")); + /// assert_eq!(m.get_one::("arg1").unwrap(), "something"); + /// assert_eq!(m.get_one::("arg2").unwrap(), "other"); /// ``` /// /// Style number two from above: /// /// ```rust - /// # use clap::{Command, Arg}; + /// # use clap::{Command, Arg, ArgAction}; /// // Assume there is an external subcommand named "subcmd" /// let m = Command::new("myprog") /// .allow_missing_positional(true) /// .arg(Arg::new("foo")) /// .arg(Arg::new("bar")) - /// .arg(Arg::new("baz").takes_value(true).multiple_values(true)) + /// .arg(Arg::new("baz").action(ArgAction::Set).num_args(1..)) /// .get_matches_from(vec![ /// "prog", "foo", "bar", "baz1", "baz2", "baz3" /// ]); /// - /// assert_eq!(m.value_of("foo"), Some("foo")); - /// assert_eq!(m.value_of("bar"), Some("bar")); - /// assert_eq!(m.values_of("baz").unwrap().collect::>(), &["baz1", "baz2", "baz3"]); + /// assert_eq!(m.get_one::("foo").unwrap(), "foo"); + /// assert_eq!(m.get_one::("bar").unwrap(), "bar"); + /// assert_eq!(m.get_many::("baz").unwrap().collect::>(), &["baz1", "baz2", "baz3"]); /// ``` /// /// Now nofice if we don't specify `foo` or `baz` but use the `--` operator. /// /// ```rust - /// # use clap::{Command, Arg}; + /// # use clap::{Command, Arg, ArgAction}; /// // Assume there is an external subcommand named "subcmd" /// let m = Command::new("myprog") /// .allow_missing_positional(true) /// .arg(Arg::new("foo")) /// .arg(Arg::new("bar")) - /// .arg(Arg::new("baz").takes_value(true).multiple_values(true)) + /// .arg(Arg::new("baz").action(ArgAction::Set).num_args(1..)) /// .get_matches_from(vec![ /// "prog", "--", "baz1", "baz2", "baz3" /// ]); /// - /// assert_eq!(m.value_of("foo"), None); - /// assert_eq!(m.value_of("bar"), None); - /// assert_eq!(m.values_of("baz").unwrap().collect::>(), &["baz1", "baz2", "baz3"]); + /// assert_eq!(m.get_one::("foo"), None); + /// assert_eq!(m.get_one::("bar"), None); + /// assert_eq!(m.get_many::("baz").unwrap().collect::>(), &["baz1", "baz2", "baz3"]); /// ``` /// /// [required]: crate::Arg::required() @@ -2281,7 +2166,7 @@ } /// # Subcommand-specific Settings -impl<'help> App<'help> { +impl Command { /// Sets the short version of the subcommand flag without the preceding `-`. /// /// Allows the subcommand to be used as if it were an [`Arg::short`]. 
@@ -2308,8 +2193,8 @@ /// ``` /// [`Arg::short`]: Arg::short() #[must_use] - pub fn short_flag(mut self, short: char) -> Self { - self.short_flag = Some(short); + pub fn short_flag(mut self, short: impl IntoResettable) -> Self { + self.short_flag = short.into_resettable().into_option(); self } @@ -2346,15 +2231,8 @@ /// /// [`Arg::long`]: Arg::long() #[must_use] - pub fn long_flag(mut self, long: &'help str) -> Self { - #[cfg(feature = "unstable-v4")] - { - self.long_flag = Some(long); - } - #[cfg(not(feature = "unstable-v4"))] - { - self.long_flag = Some(long.trim_start_matches(|c| c == '-')); - } + pub fn long_flag(mut self, long: impl Into) -> Self { + self.long_flag = Some(long.into()); self } @@ -2382,10 +2260,14 @@ /// .get_matches_from(vec!["myprog", "do-stuff"]); /// assert_eq!(m.subcommand_name(), Some("test")); /// ``` - /// [`App::visible_alias`]: Command::visible_alias() + /// [`Command::visible_alias`]: Command::visible_alias() #[must_use] - pub fn alias>(mut self, name: S) -> Self { - self.aliases.push((name.into(), false)); + pub fn alias(mut self, name: impl IntoResettable) -> Self { + if let Some(name) = name.into_resettable().into_option() { + self.aliases.push((name, false)); + } else { + self.aliases.clear(); + } self } @@ -2406,9 +2288,13 @@ /// assert_eq!(m.subcommand_name(), Some("test")); /// ``` #[must_use] - pub fn short_flag_alias(mut self, name: char) -> Self { - assert!(name != '-', "short alias name cannot be `-`"); - self.short_flag_aliases.push((name, false)); + pub fn short_flag_alias(mut self, name: impl IntoResettable) -> Self { + if let Some(name) = name.into_resettable().into_option() { + debug_assert!(name != '-', "short alias name cannot be `-`"); + self.short_flag_aliases.push((name, false)); + } else { + self.short_flag_aliases.clear(); + } self } @@ -2429,8 +2315,12 @@ /// assert_eq!(m.subcommand_name(), Some("test")); /// ``` #[must_use] - pub fn long_flag_alias(mut self, name: &'help str) -> Self { - self.long_flag_aliases.push((name, false)); + pub fn long_flag_alias(mut self, name: impl IntoResettable) -> Self { + if let Some(name) = name.into_resettable().into_option() { + self.long_flag_aliases.push((name, false)); + } else { + self.long_flag_aliases.clear(); + } self } @@ -2454,17 +2344,18 @@ /// # use clap::{Command, Arg}; /// let m = Command::new("myprog") /// .subcommand(Command::new("test") - /// .aliases(&["do-stuff", "do-tests", "tests"])) + /// .aliases(["do-stuff", "do-tests", "tests"])) /// .arg(Arg::new("input") /// .help("the file to add") /// .required(false)) /// .get_matches_from(vec!["myprog", "do-tests"]); /// assert_eq!(m.subcommand_name(), Some("test")); /// ``` - /// [`App::visible_aliases`]: Command::visible_aliases() + /// [`Command::visible_aliases`]: Command::visible_aliases() #[must_use] - pub fn aliases(mut self, names: &[&'help str]) -> Self { - self.aliases.extend(names.iter().map(|n| (*n, false))); + pub fn aliases(mut self, names: impl IntoIterator>) -> Self { + self.aliases + .extend(names.into_iter().map(|n| (n.into(), false))); self } @@ -2480,7 +2371,7 @@ /// # use clap::{Command, Arg, }; /// let m = Command::new("myprog") /// .subcommand(Command::new("test").short_flag('t') - /// .short_flag_aliases(&['a', 'b', 'c'])) + /// .short_flag_aliases(['a', 'b', 'c'])) /// .arg(Arg::new("input") /// .help("the file to add") /// .required(false)) @@ -2488,10 +2379,10 @@ /// assert_eq!(m.subcommand_name(), Some("test")); /// ``` #[must_use] - pub fn short_flag_aliases(mut self, names: &[char]) -> Self { + pub fn 
short_flag_aliases(mut self, names: impl IntoIterator) -> Self { for s in names { - assert!(s != &'-', "short alias name cannot be `-`"); - self.short_flag_aliases.push((*s, false)); + debug_assert!(s != '-', "short alias name cannot be `-`"); + self.short_flag_aliases.push((s, false)); } self } @@ -2508,7 +2399,7 @@ /// # use clap::{Command, Arg, }; /// let m = Command::new("myprog") /// .subcommand(Command::new("test").long_flag("test") - /// .long_flag_aliases(&["testing", "testall", "test_all"])) + /// .long_flag_aliases(["testing", "testall", "test_all"])) /// .arg(Arg::new("input") /// .help("the file to add") /// .required(false)) @@ -2516,9 +2407,9 @@ /// assert_eq!(m.subcommand_name(), Some("test")); /// ``` #[must_use] - pub fn long_flag_aliases(mut self, names: &[&'help str]) -> Self { + pub fn long_flag_aliases(mut self, names: impl IntoIterator>) -> Self { for s in names { - self.long_flag_aliases.push((s, false)); + self = self.long_flag_alias(s) } self } @@ -2549,10 +2440,14 @@ /// .get_matches_from(vec!["myprog", "do-stuff"]); /// assert_eq!(m.subcommand_name(), Some("test")); /// ``` - /// [`App::alias`]: Command::alias() + /// [`Command::alias`]: Command::alias() #[must_use] - pub fn visible_alias>(mut self, name: S) -> Self { - self.aliases.push((name.into(), true)); + pub fn visible_alias(mut self, name: impl IntoResettable) -> Self { + if let Some(name) = name.into_resettable().into_option() { + self.aliases.push((name, true)); + } else { + self.aliases.clear(); + } self } @@ -2574,11 +2469,15 @@ /// .get_matches_from(vec!["myprog", "-d"]); /// assert_eq!(m.subcommand_name(), Some("test")); /// ``` - /// [`App::short_flag_alias`]: Command::short_flag_alias() + /// [`Command::short_flag_alias`]: Command::short_flag_alias() #[must_use] - pub fn visible_short_flag_alias(mut self, name: char) -> Self { - assert!(name != '-', "short alias name cannot be `-`"); - self.short_flag_aliases.push((name, true)); + pub fn visible_short_flag_alias(mut self, name: impl IntoResettable) -> Self { + if let Some(name) = name.into_resettable().into_option() { + debug_assert!(name != '-', "short alias name cannot be `-`"); + self.short_flag_aliases.push((name, true)); + } else { + self.short_flag_aliases.clear(); + } self } @@ -2600,10 +2499,14 @@ /// .get_matches_from(vec!["myprog", "--testing"]); /// assert_eq!(m.subcommand_name(), Some("test")); /// ``` - /// [`App::long_flag_alias`]: Command::long_flag_alias() + /// [`Command::long_flag_alias`]: Command::long_flag_alias() #[must_use] - pub fn visible_long_flag_alias(mut self, name: &'help str) -> Self { - self.long_flag_aliases.push((name, true)); + pub fn visible_long_flag_alias(mut self, name: impl IntoResettable) -> Self { + if let Some(name) = name.into_resettable().into_option() { + self.long_flag_aliases.push((name, true)); + } else { + self.long_flag_aliases.clear(); + } self } @@ -2629,14 +2532,15 @@ /// # use clap::{Command, Arg, }; /// let m = Command::new("myprog") /// .subcommand(Command::new("test") - /// .visible_aliases(&["do-stuff", "tests"])) + /// .visible_aliases(["do-stuff", "tests"])) /// .get_matches_from(vec!["myprog", "do-stuff"]); /// assert_eq!(m.subcommand_name(), Some("test")); /// ``` - /// [`App::alias`]: Command::alias() + /// [`Command::alias`]: Command::alias() #[must_use] - pub fn visible_aliases(mut self, names: &[&'help str]) -> Self { - self.aliases.extend(names.iter().map(|n| (*n, true))); + pub fn visible_aliases(mut self, names: impl IntoIterator>) -> Self { + self.aliases + 
.extend(names.into_iter().map(|n| (n.into(), true))); self } @@ -2650,16 +2554,16 @@ /// # use clap::{Command, Arg, }; /// let m = Command::new("myprog") /// .subcommand(Command::new("test").short_flag('b') - /// .visible_short_flag_aliases(&['t'])) + /// .visible_short_flag_aliases(['t'])) /// .get_matches_from(vec!["myprog", "-t"]); /// assert_eq!(m.subcommand_name(), Some("test")); /// ``` - /// [`App::short_flag_aliases`]: Command::short_flag_aliases() + /// [`Command::short_flag_aliases`]: Command::short_flag_aliases() #[must_use] - pub fn visible_short_flag_aliases(mut self, names: &[char]) -> Self { + pub fn visible_short_flag_aliases(mut self, names: impl IntoIterator) -> Self { for s in names { - assert!(s != &'-', "short alias name cannot be `-`"); - self.short_flag_aliases.push((*s, true)); + debug_assert!(s != '-', "short alias name cannot be `-`"); + self.short_flag_aliases.push((s, true)); } self } @@ -2674,15 +2578,18 @@ /// # use clap::{Command, Arg, }; /// let m = Command::new("myprog") /// .subcommand(Command::new("test").long_flag("test") - /// .visible_long_flag_aliases(&["testing", "testall", "test_all"])) + /// .visible_long_flag_aliases(["testing", "testall", "test_all"])) /// .get_matches_from(vec!["myprog", "--testing"]); /// assert_eq!(m.subcommand_name(), Some("test")); /// ``` - /// [`App::long_flag_aliases`]: Command::long_flag_aliases() + /// [`Command::long_flag_aliases`]: Command::long_flag_aliases() #[must_use] - pub fn visible_long_flag_aliases(mut self, names: &[&'help str]) -> Self { + pub fn visible_long_flag_aliases( + mut self, + names: impl IntoIterator>, + ) -> Self { for s in names { - self.long_flag_aliases.push((s, true)); + self = self.visible_long_flag_alias(s); } self } @@ -2699,7 +2606,8 @@ /// /// # Examples /// - /// ```rust + #[cfg_attr(not(feature = "help"), doc = " ```ignore")] + #[cfg_attr(feature = "help", doc = " ```")] /// # use clap::{Command, }; /// let m = Command::new("cust-ord") /// .subcommand(Command::new("alpha") // typically subcommands are grouped @@ -2724,21 +2632,20 @@ /// ```text /// cust-ord /// - /// USAGE: - /// cust-ord [OPTIONS] - /// - /// OPTIONS: - /// -h, --help Print help information - /// -V, --version Print version information + /// Usage: cust-ord [OPTIONS] /// - /// SUBCOMMANDS: + /// Commands: /// beta I should be first! 
/// alpha Some help and text + /// + /// Options: + /// -h, --help Print help information + /// -V, --version Print version information /// ``` #[inline] #[must_use] - pub fn display_order(mut self, ord: usize) -> Self { - self.disp_ord = Some(ord); + pub fn display_order(mut self, ord: impl IntoResettable) -> Self { + self.disp_ord = ord.into_resettable().into_option(); self } @@ -2770,7 +2677,7 @@ /// # Examples /// /// ```rust - /// # use clap::{Command, ErrorKind}; + /// # use clap::{Command, error::ErrorKind}; /// let err = Command::new("myprog") /// .subcommand_required(true) /// .subcommand(Command::new("test")) @@ -2806,6 +2713,7 @@ /// # Examples /// /// ```rust + /// # use std::ffi::OsString; /// # use clap::Command; /// // Assume there is an external subcommand named "subcmd" /// let m = Command::new("myprog") @@ -2818,7 +2726,7 @@ /// // string argument name /// match m.subcommand() { /// Some((external, ext_m)) => { - /// let ext_args: Vec<&str> = ext_m.values_of("").unwrap().collect(); + /// let ext_args: Vec<_> = ext_m.get_many::("").unwrap().collect(); /// assert_eq!(external, "subcmd"); /// assert_eq!(ext_args, ["--option", "value", "-fff", "--flag"]); /// }, @@ -2828,7 +2736,7 @@ /// /// [`subcommand`]: crate::Command::subcommand() /// [`ArgMatches`]: crate::ArgMatches - /// [`ErrorKind::UnknownArgument`]: crate::ErrorKind::UnknownArgument + /// [`ErrorKind::UnknownArgument`]: crate::error::ErrorKind::UnknownArgument pub fn allow_external_subcommands(self, yes: bool) -> Self { if yes { self.setting(AppSettings::AllowExternalSubcommands) @@ -2837,26 +2745,22 @@ } } - /// Specifies that external subcommands that are invalid UTF-8 should *not* be treated as an error. + /// Specifies how to parse external subcommand arguments. /// - /// **NOTE:** Using external subcommand argument values with invalid UTF-8 requires using - /// [`ArgMatches::values_of_os`] or [`ArgMatches::values_of_lossy`] for those particular - /// arguments which may contain invalid UTF-8 values + /// The default parser is for `OsString`. This can be used to switch it to `String` or another + /// type. 
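
Since external-subcommand values now default to `OsString` (switchable via `external_subcommand_value_parser`), a common follow-up is forwarding them to a separate binary. A sketch under assumptions: the `myprog-<name>` binary naming scheme below is purely illustrative, not something the diff prescribes.

```rust
use std::ffi::OsString;
use std::process::Command as Process;

use clap::Command;

fn main() {
    let m = Command::new("myprog")
        .allow_external_subcommands(true)
        .get_matches();

    if let Some((external, ext_m)) = m.subcommand() {
        // Default parser stores the trailing args as `OsString` under "".
        let args: Vec<&OsString> = ext_m
            .get_many::<OsString>("")
            .map(|vals| vals.collect())
            .unwrap_or_default();
        // Hypothetical dispatch to an external "myprog-<subcmd>" binary.
        let status = Process::new(format!("myprog-{external}"))
            .args(args)
            .status()
            .expect("failed to spawn external subcommand");
        std::process::exit(status.code().unwrap_or(1));
    }
}
```
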
/// /// **NOTE:** Setting this requires [`Command::allow_external_subcommands`] /// - /// # Platform Specific - /// - /// Non Windows systems only - /// /// # Examples /// #[cfg_attr(not(unix), doc = " ```ignore")] #[cfg_attr(unix, doc = " ```")] + /// # use std::ffi::OsString; /// # use clap::Command; + /// # use clap::value_parser; /// // Assume there is an external subcommand named "subcmd" /// let m = Command::new("myprog") - /// .allow_invalid_utf8_for_external_subcommands(true) /// .allow_external_subcommands(true) /// .get_matches_from(vec![ /// "myprog", "subcmd", "--option", "value", "-fff", "--flag" @@ -2866,7 +2770,29 @@ /// // string argument name /// match m.subcommand() { /// Some((external, ext_m)) => { - /// let ext_args: Vec<&std::ffi::OsStr> = ext_m.values_of_os("").unwrap().collect(); + /// let ext_args: Vec<_> = ext_m.get_many::("").unwrap().collect(); + /// assert_eq!(external, "subcmd"); + /// assert_eq!(ext_args, ["--option", "value", "-fff", "--flag"]); + /// }, + /// _ => {}, + /// } + /// ``` + /// + /// ``` + /// # use clap::Command; + /// # use clap::value_parser; + /// // Assume there is an external subcommand named "subcmd" + /// let m = Command::new("myprog") + /// .external_subcommand_value_parser(value_parser!(String)) + /// .get_matches_from(vec![ + /// "myprog", "subcmd", "--option", "value", "-fff", "--flag" + /// ]); + /// + /// // All trailing arguments will be stored under the subcommand's sub-matches using an empty + /// // string argument name + /// match m.subcommand() { + /// Some((external, ext_m)) => { + /// let ext_args: Vec<_> = ext_m.get_many::("").unwrap().collect(); /// assert_eq!(external, "subcmd"); /// assert_eq!(ext_args, ["--option", "value", "-fff", "--flag"]); /// }, @@ -2874,15 +2800,13 @@ /// } /// ``` /// - /// [`ArgMatches::values_of_os`]: crate::ArgMatches::values_of_os() - /// [`ArgMatches::values_of_lossy`]: crate::ArgMatches::values_of_lossy() /// [`subcommands`]: crate::Command::subcommand() - pub fn allow_invalid_utf8_for_external_subcommands(self, yes: bool) -> Self { - if yes { - self.setting(AppSettings::AllowInvalidUtf8ForExternalSubcommands) - } else { - self.unset_setting(AppSettings::AllowInvalidUtf8ForExternalSubcommands) - } + pub fn external_subcommand_value_parser( + mut self, + parser: impl IntoResettable, + ) -> Self { + self.external_value_parser = parser.into_resettable().into_option(); + self } /// Specifies that use of an argument prevents the use of [`subcommands`]. @@ -2935,18 +2859,15 @@ /// values subcommand /// ``` /// - /// **Note:** Make sure you apply it as `global_setting` if you want this setting - /// to be propagated to subcommands and sub-subcommands! - /// /// # Examples /// /// ```rust - /// # use clap::{Command, Arg}; + /// # use clap::{Command, Arg, ArgAction}; /// let cmd = Command::new("cmd").subcommand(Command::new("sub")).arg( /// Arg::new("arg") /// .long("arg") - /// .multiple_values(true) - /// .takes_value(true), + /// .num_args(1..) 
+ /// .action(ArgAction::Set), /// ); /// /// let matches = cmd @@ -2954,7 +2875,7 @@ /// .try_get_matches_from(&["cmd", "--arg", "1", "2", "3", "sub"]) /// .unwrap(); /// assert_eq!( - /// matches.values_of("arg").unwrap().collect::>(), + /// matches.get_many::("arg").unwrap().collect::>(), /// &["1", "2", "3", "sub"] /// ); /// assert!(matches.subcommand_matches("sub").is_none()); @@ -2964,7 +2885,7 @@ /// .try_get_matches_from(&["cmd", "--arg", "1", "2", "3", "sub"]) /// .unwrap(); /// assert_eq!( - /// matches.values_of("arg").unwrap().collect::>(), + /// matches.get_many::("arg").unwrap().collect::>(), /// &["1", "2", "3"] /// ); /// assert!(matches.subcommand_matches("sub").is_some()); @@ -2991,7 +2912,7 @@ /// This first example shows that it is an error to not use a required argument /// /// ```rust - /// # use clap::{Command, Arg, ErrorKind}; + /// # use clap::{Command, Arg, error::ErrorKind}; /// let err = Command::new("myprog") /// .subcommand_negates_reqs(true) /// .arg(Arg::new("opt").required(true)) @@ -3008,7 +2929,7 @@ /// valid subcommand is used. /// /// ```rust - /// # use clap::{Command, Arg, ErrorKind}; + /// # use clap::{Command, Arg, error::ErrorKind}; /// let noerr = Command::new("myprog") /// .subcommand_negates_reqs(true) /// .arg(Arg::new("opt").required(true)) @@ -3046,7 +2967,7 @@ /// /// Setting `multicall` will cause /// - `argv[0]` to be stripped to the base name and parsed as the first argument, as if - /// [`Command::no_binary_name`][App::no_binary_name] was set. + /// [`Command::no_binary_name`][Command::no_binary_name] was set. /// - Help and errors to report subcommands as if they were the top-level command /// /// When the subcommand is not present, there are several strategies you may employ, depending @@ -3054,13 +2975,13 @@ /// - Let the error percolate up normally /// - Print a specialized error message using the /// [`Error::context`][crate::Error::context] - /// - Print the [help][App::write_help] but this might be ambiguous + /// - Print the [help][Command::write_help] but this might be ambiguous /// - Disable `multicall` and re-parse it /// - Disable `multicall` and re-parse it with a specific subcommand /// /// When detecting the error condition, the [`ErrorKind`] isn't sufficient as a sub-subcommand /// might report the same error. Enable - /// [`allow_external_subcommands`][App::allow_external_subcommands] if you want to specifically + /// [`allow_external_subcommands`][Command::allow_external_subcommands] if you want to specifically /// get the unrecognized binary name. /// /// **NOTE:** Multicall can't be used with [`no_binary_name`] since they interpret @@ -3092,7 +3013,7 @@ /// This does not allow the subcommand to be passed as the first non-path argument. 
/// /// ```rust - /// # use clap::{Command, ErrorKind}; + /// # use clap::{Command, error::ErrorKind}; /// let mut cmd = Command::new("hostname") /// .multicall(true) /// .subcommand(Command::new("hostname")) @@ -3120,7 +3041,7 @@ /// /// ```rust /// # use clap::Command; - /// fn applet_commands() -> [Command<'static>; 2] { + /// fn applet_commands() -> [Command; 2] { /// [Command::new("true"), Command::new("false")] /// } /// let mut cmd = Command::new("busybox") @@ -3143,8 +3064,8 @@ /// ``` /// /// [`no_binary_name`]: crate::Command::no_binary_name - /// [`App::subcommand_value_name`]: crate::Command::subcommand_value_name - /// [`App::subcommand_help_heading`]: crate::Command::subcommand_help_heading + /// [`Command::subcommand_value_name`]: crate::Command::subcommand_value_name + /// [`Command::subcommand_help_heading`]: crate::Command::subcommand_help_heading #[inline] pub fn multicall(self, yes: bool) -> Self { if yes { @@ -3156,7 +3077,7 @@ /// Sets the value name used for subcommands when printing usage and help. /// - /// By default, this is "SUBCOMMAND". + /// By default, this is "COMMAND". /// /// See also [`Command::subcommand_help_heading`] /// @@ -3175,16 +3096,15 @@ /// ```text /// myprog /// - /// USAGE: - /// myprog [SUBCOMMAND] - /// - /// OPTIONS: - /// -h, --help Print help information - /// -V, --version Print version information + /// Usage: myprog [COMMAND] /// - /// SUBCOMMANDS: + /// Commands: /// help Print this message or the help of the given subcommand(s) /// sub1 + /// + /// Options: + /// -h, --help Print help information + /// -V, --version Print version information /// ``` /// /// but usage of `subcommand_value_name` @@ -3203,29 +3123,25 @@ /// ```text /// myprog /// - /// USAGE: - /// myprog [THING] - /// - /// OPTIONS: - /// -h, --help Print help information - /// -V, --version Print version information + /// Usage: myprog [THING] /// - /// SUBCOMMANDS: + /// Commands: /// help Print this message or the help of the given subcommand(s) /// sub1 + /// + /// Options: + /// -h, --help Print help information + /// -V, --version Print version information /// ``` #[must_use] - pub fn subcommand_value_name(mut self, value_name: S) -> Self - where - S: Into<&'help str>, - { - self.subcommand_value_name = Some(value_name.into()); + pub fn subcommand_value_name(mut self, value_name: impl IntoResettable) -> Self { + self.subcommand_value_name = value_name.into_resettable().into_option(); self } /// Sets the help heading used for subcommands when printing usage and help. /// - /// By default, this is "SUBCOMMANDS". + /// By default, this is "Commands". 
/// /// See also [`Command::subcommand_value_name`] /// @@ -3244,16 +3160,15 @@ /// ```text /// myprog /// - /// USAGE: - /// myprog [SUBCOMMAND] - /// - /// OPTIONS: - /// -h, --help Print help information - /// -V, --version Print version information + /// Usage: myprog [COMMAND] /// - /// SUBCOMMANDS: + /// Commands: /// help Print this message or the help of the given subcommand(s) /// sub1 + /// + /// Options: + /// -h, --help Print help information + /// -V, --version Print version information /// ``` /// /// but usage of `subcommand_help_heading` @@ -3262,7 +3177,7 @@ /// # use clap::{Command, Arg}; /// Command::new("myprog") /// .subcommand(Command::new("sub1")) - /// .subcommand_help_heading("THINGS") + /// .subcommand_help_heading("Things") /// .print_help() /// # ; /// ``` @@ -3272,30 +3187,27 @@ /// ```text /// myprog /// - /// USAGE: - /// myprog [SUBCOMMAND] - /// - /// OPTIONS: - /// -h, --help Print help information - /// -V, --version Print version information + /// Usage: myprog [COMMAND] /// - /// THINGS: + /// Things: /// help Print this message or the help of the given subcommand(s) /// sub1 + /// + /// Options: + /// -h, --help Print help information + /// -V, --version Print version information /// ``` #[must_use] - pub fn subcommand_help_heading(mut self, heading: T) -> Self - where - T: Into<&'help str>, - { - self.subcommand_heading = Some(heading.into()); + pub fn subcommand_help_heading(mut self, heading: impl IntoResettable) -> Self { + self.subcommand_heading = heading.into_resettable().into_option(); self } } /// # Reflection -impl<'help> App<'help> { +impl Command { #[inline] + #[cfg(feature = "usage")] pub(crate) fn get_usage_name(&self) -> Option<&str> { self.usage_name.as_deref() } @@ -3313,32 +3225,38 @@ } /// Set binary name. Uses `&mut self` instead of `self`. - pub fn set_bin_name>(&mut self, name: S) { + pub fn set_bin_name(&mut self, name: impl Into) { self.bin_name = Some(name.into()); } /// Get the name of the cmd. #[inline] pub fn get_name(&self) -> &str { + self.name.as_str() + } + + #[inline] + #[cfg(debug_assertions)] + pub(crate) fn get_name_str(&self) -> &Str { &self.name } /// Get the version of the cmd. #[inline] - pub fn get_version(&self) -> Option<&'help str> { - self.version + pub fn get_version(&self) -> Option<&str> { + self.version.as_deref() } /// Get the long version of the cmd. #[inline] - pub fn get_long_version(&self) -> Option<&'help str> { - self.long_version + pub fn get_long_version(&self) -> Option<&str> { + self.long_version.as_deref() } /// Get the authors of the cmd. #[inline] - pub fn get_author(&self) -> Option<&'help str> { - self.author + pub fn get_author(&self) -> Option<&str> { + self.author.as_deref() } /// Get the short flag of the subcommand. @@ -3349,48 +3267,41 @@ /// Get the long flag of the subcommand. #[inline] - pub fn get_long_flag(&self) -> Option<&'help str> { - self.long_flag + pub fn get_long_flag(&self) -> Option<&str> { + self.long_flag.as_deref() } /// Get the help message specified via [`Command::about`]. /// - /// [`App::about`]: Command::about() + /// [`Command::about`]: Command::about() #[inline] - pub fn get_about(&self) -> Option<&'help str> { - self.about + pub fn get_about(&self) -> Option<&StyledStr> { + self.about.as_ref() } /// Get the help message specified via [`Command::long_about`]. 
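
The reflection getters above change return types now that `Command` owns its strings: plain values come back as `Option<&str>` and rendered help text as `&StyledStr`. A small sketch (rendering `about` via its `Display` impl is an assumption about how one would compare it):

```rust
use clap::Command;

fn main() {
    let cmd = Command::new("myprog")
        .version("1.0")
        .author("Me")
        .about("Does awesome things");

    assert_eq!(cmd.get_name(), "myprog");
    assert_eq!(cmd.get_version(), Some("1.0"));
    assert_eq!(cmd.get_author(), Some("Me"));
    // `about` is stored as a `StyledStr`; render it to compare as text.
    assert_eq!(
        cmd.get_about().map(|s| s.to_string()).as_deref(),
        Some("Does awesome things")
    );
}
```
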
/// - /// [`App::long_about`]: Command::long_about() - #[inline] - pub fn get_long_about(&self) -> Option<&'help str> { - self.long_about - } - - /// Deprecated, replaced with [`Command::get_next_help_heading`] + /// [`Command::long_about`]: Command::long_about() #[inline] - #[cfg_attr( - feature = "deprecated", - deprecated(since = "3.1.0", note = "Replaced with `App::get_next_help_heading`") - )] - pub fn get_help_heading(&self) -> Option<&'help str> { - self.get_next_help_heading() + pub fn get_long_about(&self) -> Option<&StyledStr> { + self.long_about.as_ref() } - /// Get the custom section heading specified via [`Command::help_heading`]. + /// Get the custom section heading specified via [`Command::next_help_heading`]. /// - /// [`App::help_heading`]: Command::help_heading() + /// [`Command::help_heading`]: Command::help_heading() #[inline] - pub fn get_next_help_heading(&self) -> Option<&'help str> { - self.current_help_heading + pub fn get_next_help_heading(&self) -> Option<&str> { + self.current_help_heading.as_deref() } /// Iterate through the *visible* aliases for this subcommand. #[inline] - pub fn get_visible_aliases(&self) -> impl Iterator + '_ { - self.aliases.iter().filter(|(_, vis)| *vis).map(|a| a.0) + pub fn get_visible_aliases(&self) -> impl Iterator + '_ { + self.aliases + .iter() + .filter(|(_, vis)| *vis) + .map(|a| a.0.as_str()) } /// Iterate through the *visible* short aliases for this subcommand. @@ -3404,17 +3315,17 @@ /// Iterate through the *visible* long aliases for this subcommand. #[inline] - pub fn get_visible_long_flag_aliases(&self) -> impl Iterator + '_ { + pub fn get_visible_long_flag_aliases(&self) -> impl Iterator + '_ { self.long_flag_aliases .iter() .filter(|(_, vis)| *vis) - .map(|a| a.0) + .map(|a| a.0.as_str()) } /// Iterate through the set of *all* the aliases for this subcommand, both visible and hidden. #[inline] pub fn get_all_aliases(&self) -> impl Iterator + '_ { - self.aliases.iter().map(|a| a.0) + self.aliases.iter().map(|a| a.0.as_str()) } /// Iterate through the set of *all* the short aliases for this subcommand, both visible and hidden. @@ -3425,28 +3336,20 @@ /// Iterate through the set of *all* the long aliases for this subcommand, both visible and hidden. #[inline] - pub fn get_all_long_flag_aliases(&self) -> impl Iterator + '_ { - self.long_flag_aliases.iter().map(|a| a.0) + pub fn get_all_long_flag_aliases(&self) -> impl Iterator + '_ { + self.long_flag_aliases.iter().map(|a| a.0.as_str()) } - /// Check if the given [`AppSettings`] variant is currently set on the `Command`. - /// - /// This checks both [local] and [global settings]. - /// - /// [local]: Command::setting() - /// [global settings]: Command::global_setting() #[inline] - pub fn is_set(&self, s: AppSettings) -> bool { + pub(crate) fn is_set(&self, s: AppSettings) -> bool { self.settings.is_set(s) || self.g_settings.is_set(s) } /// Should we color the output? - #[inline(never)] pub fn get_color(&self) -> ColorChoice { debug!("Command::color: Color setting..."); if cfg!(feature = "color") { - #[allow(deprecated)] if self.is_set(AppSettings::ColorNever) { debug!("Never"); ColorChoice::Never @@ -3464,13 +3367,13 @@ /// Iterate through the set of subcommands, getting a reference to each. #[inline] - pub fn get_subcommands(&self) -> impl Iterator> { + pub fn get_subcommands(&self) -> impl Iterator { self.subcommands.iter() } /// Iterate through the set of subcommands, getting a mutable reference to each. 
#[inline] - pub fn get_subcommands_mut(&mut self) -> impl Iterator> { + pub fn get_subcommands_mut(&mut self) -> impl Iterator { self.subcommands.iter_mut() } @@ -3483,60 +3386,45 @@ /// Returns the help heading for listing subcommands. #[inline] pub fn get_subcommand_help_heading(&self) -> Option<&str> { - self.subcommand_heading - } - - /// Deprecated, replaced with [`App::get_subcommand_help_heading`] - #[inline] - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.1.0", - note = "Replaced with `App::get_subcommand_help_heading`" - ) - )] - pub fn get_subommand_help_heading(&self) -> Option<&str> { - self.get_subcommand_help_heading() + self.subcommand_heading.as_deref() } /// Returns the subcommand value name. #[inline] pub fn get_subcommand_value_name(&self) -> Option<&str> { - self.subcommand_value_name + self.subcommand_value_name.as_deref() } /// Returns the help heading for listing subcommands. #[inline] - pub fn get_before_help(&self) -> Option<&str> { - self.before_help + pub fn get_before_help(&self) -> Option<&StyledStr> { + self.before_help.as_ref() } /// Returns the help heading for listing subcommands. #[inline] - pub fn get_before_long_help(&self) -> Option<&str> { - self.before_long_help + pub fn get_before_long_help(&self) -> Option<&StyledStr> { + self.before_long_help.as_ref() } /// Returns the help heading for listing subcommands. #[inline] - pub fn get_after_help(&self) -> Option<&str> { - self.after_help + pub fn get_after_help(&self) -> Option<&StyledStr> { + self.after_help.as_ref() } /// Returns the help heading for listing subcommands. #[inline] - pub fn get_after_long_help(&self) -> Option<&str> { - self.after_long_help + pub fn get_after_long_help(&self) -> Option<&StyledStr> { + self.after_long_help.as_ref() } /// Find subcommand such that its name or one of aliases equals `name`. /// /// This does not recurse through subcommands of subcommands. #[inline] - pub fn find_subcommand(&self, name: &T) -> Option<&App<'help>> - where - T: PartialEq + ?Sized, - { + pub fn find_subcommand(&self, name: impl AsRef) -> Option<&Command> { + let name = name.as_ref(); self.get_subcommands().find(|s| s.aliases_to(name)) } @@ -3545,33 +3433,34 @@ /// /// This does not recurse through subcommands of subcommands. #[inline] - pub fn find_subcommand_mut(&mut self, name: &T) -> Option<&mut App<'help>> - where - T: PartialEq + ?Sized, - { + pub fn find_subcommand_mut( + &mut self, + name: impl AsRef, + ) -> Option<&mut Command> { + let name = name.as_ref(); self.get_subcommands_mut().find(|s| s.aliases_to(name)) } /// Iterate through the set of groups. #[inline] - pub fn get_groups(&self) -> impl Iterator> { + pub fn get_groups(&self) -> impl Iterator { self.groups.iter() } /// Iterate through the set of arguments. #[inline] - pub fn get_arguments(&self) -> impl Iterator> { + pub fn get_arguments(&self) -> impl Iterator { self.args.args() } /// Iterate through the *positionals* arguments. #[inline] - pub fn get_positionals(&self) -> impl Iterator> { + pub fn get_positionals(&self) -> impl Iterator { self.get_arguments().filter(|a| a.is_positional()) } /// Iterate through the *options*. - pub fn get_opts(&self) -> impl Iterator> { + pub fn get_opts(&self) -> impl Iterator { self.get_arguments() .filter(|a| a.is_takes_value_set() && !a.is_positional()) } @@ -3585,7 +3474,7 @@ /// /// If the given arg contains a conflict with an argument that is unknown to /// this `Command`. 
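
With the `'help` lifetime gone, subcommand lookup takes `impl AsRef<OsStr>` and matches on the name or any alias. A brief sketch of `find_subcommand` as changed above:

```rust
use clap::Command;

fn main() {
    let cmd = Command::new("myprog")
        .subcommand(Command::new("test").alias("t"));

    // Lookup works by name or alias and returns a plain `&Command`.
    let sub = cmd.find_subcommand("t").expect("alias should resolve");
    assert_eq!(sub.get_name(), "test");
    assert!(cmd.find_subcommand("missing").is_none());
}
```
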
- pub fn get_arg_conflicts_with(&self, arg: &Arg) -> Vec<&Arg<'help>> // FIXME: This could probably have been an iterator + pub fn get_arg_conflicts_with(&self, arg: &Arg) -> Vec<&Arg> // FIXME: This could probably have been an iterator { if arg.is_global_set() { self.get_global_arg_conflicts_with(arg) @@ -3616,8 +3505,8 @@ // ### Panics // // If the given arg contains a conflict with an argument that is unknown to - // this `App`. - fn get_global_arg_conflicts_with(&self, arg: &Arg) -> Vec<&Arg<'help>> // FIXME: This could probably have been an iterator + // this `Command`. + fn get_global_arg_conflicts_with(&self, arg: &Arg) -> Vec<&Arg> // FIXME: This could probably have been an iterator { arg.blacklist .iter() @@ -3629,7 +3518,7 @@ .iter() .flat_map(|x| x.args.args()), ) - .find(|arg| arg.id == *id) + .find(|arg| arg.get_id() == id) .expect( "Command::get_arg_conflicts_with: \ The passed arg conflicts with an arg unknown to the cmd", @@ -3651,10 +3540,14 @@ // Subcommand_1.1 (doesn't contain Arg) // Subcommand_1.1.1 (contains Arg) // - fn get_subcommands_containing(&self, arg: &Arg) -> Vec<&App<'help>> { + fn get_subcommands_containing(&self, arg: &Arg) -> Vec<&Self> { let mut vec = std::vec::Vec::new(); for idx in 0..self.subcommands.len() { - if self.subcommands[idx].args.args().any(|ar| ar.id == arg.id) { + if self.subcommands[idx] + .args + .args() + .any(|ar| ar.get_id() == arg.get_id()) + { vec.push(&self.subcommands[idx]); vec.append(&mut self.subcommands[idx].get_subcommands_containing(arg)); } @@ -3680,6 +3573,7 @@ /// Report whether [`Command::disable_version_flag`] is set pub fn is_disable_version_flag_set(&self) -> bool { self.is_set(AppSettings::DisableVersionFlag) + || (self.version.is_none() && self.long_version.is_none()) } /// Report whether [`Command::propagate_version`] is set @@ -3713,9 +3607,13 @@ self.is_set(AppSettings::HelpExpected) } - /// Report whether [`Command::dont_collapse_args_in_usage`] is set + #[doc(hidden)] + #[cfg_attr( + feature = "deprecated", + deprecated(since = "4.0.0", note = "This is now the default") + )] pub fn is_dont_collapse_args_in_usage_set(&self) -> bool { - self.is_set(AppSettings::DontCollapseArgsInUsage) + true } /// Report whether [`Command::infer_long_args`] is set @@ -3733,17 +3631,35 @@ self.is_set(AppSettings::ArgRequiredElseHelp) } - /// Report whether [`Command::allow_hyphen_values`] is set + #[doc(hidden)] + #[cfg_attr( + feature = "deprecated", + deprecated( + since = "4.0.0", + note = "Replaced with `Arg::is_allow_hyphen_values_set`" + ) + )] pub(crate) fn is_allow_hyphen_values_set(&self) -> bool { self.is_set(AppSettings::AllowHyphenValues) } - /// Report whether [`Command::allow_negative_numbers`] is set + #[doc(hidden)] + #[cfg_attr( + feature = "deprecated", + deprecated( + since = "4.0.0", + note = "Replaced with `Arg::is_allow_negative_numbers_set`" + ) + )] pub fn is_allow_negative_numbers_set(&self) -> bool { self.is_set(AppSettings::AllowNegativeNumbers) } - /// Report whether [`Command::trailing_var_arg`] is set + #[doc(hidden)] + #[cfg_attr( + feature = "deprecated", + deprecated(since = "4.0.0", note = "Replaced with `Arg::is_trailing_var_arg_set`") + )] pub fn is_trailing_var_arg_set(&self) -> bool { self.is_set(AppSettings::TrailingVarArg) } @@ -3768,31 +3684,22 @@ self.is_set(AppSettings::AllowExternalSubcommands) } - /// Report whether [`Command::allow_invalid_utf8_for_external_subcommands`] is set - pub fn is_allow_invalid_utf8_for_external_subcommands_set(&self) -> bool { - 
self.is_set(AppSettings::AllowInvalidUtf8ForExternalSubcommands) - } - /// Configured parser for values passed to an external subcommand /// /// # Example /// /// ```rust /// let cmd = clap::Command::new("raw") - /// .allow_external_subcommands(true) - /// .allow_invalid_utf8_for_external_subcommands(true); + /// .external_subcommand_value_parser(clap::value_parser!(String)); /// let value_parser = cmd.get_external_subcommand_value_parser(); /// println!("{:?}", value_parser); /// ``` pub fn get_external_subcommand_value_parser(&self) -> Option<&super::ValueParser> { if !self.is_allow_external_subcommands_set() { None - } else if self.is_allow_invalid_utf8_for_external_subcommands_set() { - static DEFAULT: super::ValueParser = super::ValueParser::os_string(); - Some(&DEFAULT) } else { - static DEFAULT: super::ValueParser = super::ValueParser::string(); - Some(&DEFAULT) + static DEFAULT: super::ValueParser = super::ValueParser::os_string(); + Some(self.external_value_parser.as_ref().unwrap_or(&DEFAULT)) } } @@ -3801,6 +3708,11 @@ self.is_set(AppSettings::ArgsNegateSubcommands) } + #[doc(hidden)] + pub fn is_args_override_self(&self) -> bool { + self.is_set(AppSettings::AllArgsOverrideSelf) + } + /// Report whether [`Command::subcommand_precedence_over_arg`] is set pub fn is_subcommand_precedence_over_arg_set(&self) -> bool { self.is_set(AppSettings::SubcommandPrecedenceOverArg) @@ -3817,358 +3729,36 @@ } } -/// Deprecated -impl<'help> App<'help> { - /// Deprecated in [Issue #3087](https://github.com/clap-rs/clap/issues/3087), maybe [`clap::Parser`][crate::Parser] would fit your use case? - #[cfg(feature = "yaml")] - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.0.0", - note = "Deprecated in Issue #3087, maybe clap::Parser would fit your use case?" - ) - )] - #[doc(hidden)] - pub fn from_yaml(y: &'help Yaml) -> Self { - #![allow(deprecated)] - let yaml_file_hash = y.as_hash().expect("YAML file must be a hash"); - // We WANT this to panic on error...so expect() is good. 
- let (mut a, yaml, err) = if let Some(name) = y["name"].as_str() { - (App::new(name), yaml_file_hash, "cmd".into()) - } else { - let (name_yaml, value_yaml) = yaml_file_hash - .iter() - .next() - .expect("There must be one subcommand in the YAML file"); - let name_str = name_yaml - .as_str() - .expect("Subcommand name must be a string"); - - ( - App::new(name_str), - value_yaml.as_hash().expect("Subcommand must be a hash"), - format!("subcommand '{}'", name_str), - ) - }; - - for (k, v) in yaml { - a = match k.as_str().expect("App fields must be strings") { - "version" => yaml_to_str!(a, v, version), - "long_version" => yaml_to_str!(a, v, long_version), - "author" => yaml_to_str!(a, v, author), - "bin_name" => yaml_to_str!(a, v, bin_name), - "about" => yaml_to_str!(a, v, about), - "long_about" => yaml_to_str!(a, v, long_about), - "before_help" => yaml_to_str!(a, v, before_help), - "after_help" => yaml_to_str!(a, v, after_help), - "template" => yaml_to_str!(a, v, help_template), - "usage" => yaml_to_str!(a, v, override_usage), - "help" => yaml_to_str!(a, v, override_help), - "help_message" => yaml_to_str!(a, v, help_message), - "version_message" => yaml_to_str!(a, v, version_message), - "alias" => yaml_to_str!(a, v, alias), - "aliases" => yaml_vec_or_str!(a, v, alias), - "visible_alias" => yaml_to_str!(a, v, visible_alias), - "visible_aliases" => yaml_vec_or_str!(a, v, visible_alias), - "display_order" => yaml_to_usize!(a, v, display_order), - "args" => { - if let Some(vec) = v.as_vec() { - for arg_yaml in vec { - a = a.arg(Arg::from_yaml(arg_yaml)); - } - } else { - panic!("Failed to convert YAML value {:?} to a vec", v); - } - a - } - "subcommands" => { - if let Some(vec) = v.as_vec() { - for sc_yaml in vec { - a = a.subcommand(App::from_yaml(sc_yaml)); - } - } else { - panic!("Failed to convert YAML value {:?} to a vec", v); - } - a - } - "groups" => { - if let Some(vec) = v.as_vec() { - for ag_yaml in vec { - a = a.group(ArgGroup::from(ag_yaml)); - } - } else { - panic!("Failed to convert YAML value {:?} to a vec", v); - } - a - } - "setting" | "settings" => { - yaml_to_setting!(a, v, setting, AppSettings, "AppSetting", err) - } - "global_setting" | "global_settings" => { - yaml_to_setting!(a, v, global_setting, AppSettings, "AppSetting", err) - } - _ => a, - } - } - - a - } - - /// Deprecated, replaced with [`Command::override_usage`] - #[cfg_attr( - feature = "deprecated", - deprecated(since = "3.0.0", note = "Replaced with `App::override_usage`") - )] - #[doc(hidden)] - #[must_use] - pub fn usage>(self, usage: S) -> Self { - self.override_usage(usage) - } - - /// Deprecated, replaced with [`Command::override_help`] - #[cfg_attr( - feature = "deprecated", - deprecated(since = "3.0.0", note = "Replaced with `App::override_help`") - )] - #[doc(hidden)] - #[must_use] - pub fn help>(self, help: S) -> Self { - self.override_help(help) - } - - /// Deprecated, replaced with [`Command::mut_arg`] - #[cfg_attr( - feature = "deprecated", - deprecated(since = "3.0.0", note = "Replaced with `App::mut_arg`") - )] - #[doc(hidden)] - #[must_use] - pub fn help_short(self, c: char) -> Self { - self.mut_arg("help", |a| a.short(c)) - } - - /// Deprecated, replaced with [`Command::mut_arg`] - #[cfg_attr( - feature = "deprecated", - deprecated(since = "3.0.0", note = "Replaced with `App::mut_arg`") - )] - #[doc(hidden)] - #[must_use] - pub fn version_short(self, c: char) -> Self { - self.mut_arg("version", |a| a.short(c)) - } - - /// Deprecated, replaced with [`Command::mut_arg`] - #[cfg_attr( - feature = 
"deprecated", - deprecated(since = "3.0.0", note = "Replaced with `App::mut_arg`") - )] - #[doc(hidden)] - #[must_use] - pub fn help_message(self, s: impl Into<&'help str>) -> Self { - self.mut_arg("help", |a| a.help(s.into())) - } - - /// Deprecated, replaced with [`Command::mut_arg`] - #[cfg_attr( - feature = "deprecated", - deprecated(since = "3.0.0", note = "Replaced with `App::mut_arg`") - )] - #[doc(hidden)] - #[must_use] - pub fn version_message(self, s: impl Into<&'help str>) -> Self { - self.mut_arg("version", |a| a.help(s.into())) - } - - /// Deprecated, replaced with [`Command::help_template`] - #[cfg_attr( - feature = "deprecated", - deprecated(since = "3.0.0", note = "Replaced with `App::help_template`") - )] - #[doc(hidden)] - #[must_use] - pub fn template>(self, s: S) -> Self { - self.help_template(s) - } - - /// Deprecated, replaced with [`Command::setting(a| b)`] - #[cfg_attr( - feature = "deprecated", - deprecated(since = "3.0.0", note = "Replaced with `App::setting(a | b)`") - )] - #[doc(hidden)] - #[must_use] - pub fn settings(mut self, settings: &[AppSettings]) -> Self { - for s in settings { - self.settings.insert((*s).into()); - } - self - } - - /// Deprecated, replaced with [`Command::unset_setting(a| b)`] - #[cfg_attr( - feature = "deprecated", - deprecated(since = "3.0.0", note = "Replaced with `App::unset_setting(a | b)`") - )] - #[doc(hidden)] - #[must_use] - pub fn unset_settings(mut self, settings: &[AppSettings]) -> Self { - for s in settings { - self.settings.remove((*s).into()); - } - self - } - - /// Deprecated, replaced with [`Command::global_setting(a| b)`] - #[cfg_attr( - feature = "deprecated", - deprecated(since = "3.0.0", note = "Replaced with `App::global_setting(a | b)`") - )] - #[doc(hidden)] - #[must_use] - pub fn global_settings(mut self, settings: &[AppSettings]) -> Self { - for s in settings { - self.settings.insert((*s).into()); - self.g_settings.insert((*s).into()); - } - self - } - - /// Deprecated, replaced with [`Command::term_width`] - #[cfg_attr( - feature = "deprecated", - deprecated(since = "3.0.0", note = "Replaced with `App::term_width`") - )] - #[doc(hidden)] - #[must_use] - pub fn set_term_width(self, width: usize) -> Self { - self.term_width(width) - } - - /// Deprecated in [Issue #3086](https://github.com/clap-rs/clap/issues/3086), see [`arg!`][crate::arg!]. - #[cfg_attr( - feature = "deprecated", - deprecated(since = "3.0.0", note = "Deprecated in Issue #3086, see `clap::arg!") - )] - #[doc(hidden)] - #[must_use] - pub fn arg_from_usage(self, usage: &'help str) -> Self { - #![allow(deprecated)] - self.arg(Arg::from_usage(usage)) - } - - /// Deprecated in [Issue #3086](https://github.com/clap-rs/clap/issues/3086), see [`arg!`][crate::arg!]. 
- #[cfg_attr( - feature = "deprecated", - deprecated(since = "3.0.0", note = "Deprecated in Issue #3086, see `clap::arg!") - )] - #[doc(hidden)] - #[must_use] - pub fn args_from_usage(mut self, usage: &'help str) -> Self { - #![allow(deprecated)] - for line in usage.lines() { - let l = line.trim(); - if l.is_empty() { - continue; - } - self = self.arg(Arg::from_usage(l)); - } - self - } - - /// Deprecated, replaced with [`Command::render_version`] - #[cfg_attr( - feature = "deprecated", - deprecated(since = "3.0.0", note = "Replaced with `App::render_version`") - )] - #[doc(hidden)] - pub fn write_version(&self, w: &mut W) -> ClapResult<()> { - write!(w, "{}", self.render_version()).map_err(From::from) - } - - /// Deprecated, replaced with [`Command::render_long_version`] - #[cfg_attr( - feature = "deprecated", - deprecated(since = "3.0.0", note = "Replaced with `App::render_long_version`") - )] - #[doc(hidden)] - pub fn write_long_version(&self, w: &mut W) -> ClapResult<()> { - write!(w, "{}", self.render_long_version()).map_err(From::from) - } - - /// Deprecated, replaced with [`Command::try_get_matches`] - #[cfg_attr( - feature = "deprecated", - deprecated(since = "3.0.0", note = "Replaced with `App::try_get_matches`") - )] - #[doc(hidden)] - pub fn get_matches_safe(self) -> ClapResult { - self.try_get_matches() - } - - /// Deprecated, replaced with [`Command::try_get_matches_from`] - #[cfg_attr( - feature = "deprecated", - deprecated(since = "3.0.0", note = "Replaced with `App::try_get_matches_from`") - )] - #[doc(hidden)] - pub fn get_matches_from_safe(self, itr: I) -> ClapResult - where - I: IntoIterator, - T: Into + Clone, - { - self.try_get_matches_from(itr) - } - - /// Deprecated, replaced with [`Command::try_get_matches_from_mut`] - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.0.0", - note = "Replaced with `App::try_get_matches_from_mut`" - ) - )] - #[doc(hidden)] - pub fn get_matches_from_safe_borrow(&mut self, itr: I) -> ClapResult - where - I: IntoIterator, - T: Into + Clone, - { - self.try_get_matches_from_mut(itr) - } -} - // Internally used only -impl<'help> App<'help> { - pub(crate) fn get_id(&self) -> Id { - self.id.clone() - } - - pub(crate) fn get_override_usage(&self) -> Option<&str> { - self.usage_str +impl Command { + pub(crate) fn get_override_usage(&self) -> Option<&StyledStr> { + self.usage_str.as_ref() } - pub(crate) fn get_override_help(&self) -> Option<&str> { - self.help_str + pub(crate) fn get_override_help(&self) -> Option<&StyledStr> { + self.help_str.as_ref() } - pub(crate) fn get_help_template(&self) -> Option<&str> { - self.template + #[cfg(feature = "help")] + pub(crate) fn get_help_template(&self) -> Option<&StyledStr> { + self.template.as_ref() } + #[cfg(feature = "help")] pub(crate) fn get_term_width(&self) -> Option { self.term_w } + #[cfg(feature = "help")] pub(crate) fn get_max_term_width(&self) -> Option { self.max_w } - pub(crate) fn get_replacement(&self, key: &str) -> Option<&[&str]> { - self.replacers.get(key).copied() + pub(crate) fn get_replacement(&self, key: &str) -> Option<&[Str]> { + self.replacers.get(key).map(|v| v.as_slice()) } - pub(crate) fn get_keymap(&self) -> &MKeyMap<'help> { + pub(crate) fn get_keymap(&self) -> &MKeyMap { &self.args } @@ -4195,7 +3785,7 @@ // If there are global arguments, or settings we need to propagate them down to subcommands // before parsing in case we run into a subcommand - self._build_self(); + self._build_self(false); let mut matcher = ArgMatcher::new(self); @@ -4217,50 +3807,23 
@@ Ok(matcher.into_inner()) } - #[doc(hidden)] - #[cfg_attr( - feature = "deprecated", - deprecated(since = "3.1.10", note = "Replaced with `Command::build`") - )] - pub fn _build_all(&mut self) { - self.build(); - } - - #[doc(hidden)] - #[cfg_attr( - feature = "deprecated", - deprecated(since = "3.1.10", note = "Replaced with `Command::build`") - )] - pub fn _build(&mut self) { - self._build_self() - } - - #[doc(hidden)] - #[cfg_attr( - feature = "deprecated", - deprecated(since = "3.1.13", note = "Replaced with `Command::build`") - )] - pub fn _build_bin_names(&mut self) { - self._build_bin_names_internal(); - } - /// Prepare for introspecting on all included [`Command`]s /// /// Call this on the top-level [`Command`] when done building and before reading state for /// cases like completions, custom help output, etc. pub fn build(&mut self) { - self._build_recursive(); + self._build_recursive(true); self._build_bin_names_internal(); } - pub(crate) fn _build_recursive(&mut self) { - self._build_self(); + pub(crate) fn _build_recursive(&mut self, expand_help_tree: bool) { + self._build_self(expand_help_tree); for subcmd in self.get_subcommands_mut() { - subcmd._build_recursive(); + subcmd._build_recursive(expand_help_tree); } } - pub(crate) fn _build_self(&mut self) { + pub(crate) fn _build_self(&mut self, expand_help_tree: bool) { debug!("Command::_build: name={:?}", self.get_name()); if !self.settings.is_set(AppSettings::Built) { // Make sure all the globally set flags apply to us as well @@ -4271,63 +3834,47 @@ self.settings.insert(AppSettings::DisableHelpFlag.into()); self.settings.insert(AppSettings::DisableVersionFlag.into()); } + if !cfg!(feature = "help") && self.get_override_help().is_none() { + self.settings.insert(AppSettings::DisableHelpFlag.into()); + self.settings + .insert(AppSettings::DisableHelpSubcommand.into()); + } + if self.is_set(AppSettings::ArgsNegateSubcommands) { + self.settings + .insert(AppSettings::SubcommandsNegateReqs.into()); + } + if self.external_value_parser.is_some() { + self.settings + .insert(AppSettings::AllowExternalSubcommands.into()); + } + if !self.has_subcommands() { + self.settings + .insert(AppSettings::DisableHelpSubcommand.into()); + } self._propagate(); - self._check_help_and_version(); + self._check_help_and_version(expand_help_tree); self._propagate_global_args(); - self._derive_display_order(); let mut pos_counter = 1; - let self_override = self.is_set(AppSettings::AllArgsOverrideSelf); let hide_pv = self.is_set(AppSettings::HidePossibleValues); - let auto_help = - !self.is_set(AppSettings::NoAutoHelp) && !self.is_disable_help_flag_set(); - let auto_version = - !self.is_set(AppSettings::NoAutoVersion) && !self.is_disable_version_flag_set(); for a in self.args.args_mut() { // Fill in the groups for g in &a.groups { if let Some(ag) = self.groups.iter_mut().find(|grp| grp.id == *g) { - ag.args.push(a.id.clone()); + ag.args.push(a.get_id().clone()); } else { - let mut ag = ArgGroup::with_id(g.clone()); - ag.args.push(a.id.clone()); + let mut ag = ArgGroup::new(g); + ag.args.push(a.get_id().clone()); self.groups.push(ag); } } // Figure out implied settings - if a.is_last_set() { - // if an arg has `Last` set, we need to imply DontCollapseArgsInUsage so that args - // in the usage string don't get confused or left out. 
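// The now-public `build` entry point in this hunk is what completion generators and other
// introspection tooling are expected to call before walking the command tree. A small
// sketch of that pattern; the two-level CLI below is hypothetical, not cargo's.
use clap::{Arg, ArgAction, Command};

fn introspection_sketch() {
    let mut cmd = Command::new("tool")
        .arg(Arg::new("verbose").long("verbose").action(ArgAction::SetTrue))
        .subcommand(Command::new("fetch").arg(Arg::new("url")));

    // Finalize the tree: settings are propagated, --help/--version and the help
    // subcommand are injected, so the getters below observe the parser's view.
    cmd.build();

    for sc in cmd.get_subcommands() {
        println!("subcommand: {}", sc.get_name());
        for arg in sc.get_arguments() {
            println!("  arg: {}", arg.get_id());
        }
    }
}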
- self.settings.set(AppSettings::DontCollapseArgsInUsage); - } + a._build(); if hide_pv && a.is_takes_value_set() { a.settings.set(ArgSettings::HidePossibleValues); } - if self_override { - let self_id = a.id.clone(); - a.overrides.push(self_id); - } - a._build(); - // HACK: Setting up action at this level while auto-help / disable help flag is - // required. Otherwise, most of this won't be needed because when we can break - // compat, actions will reign supreme (default to `Store`) - if a.action.is_none() { - if a.get_id() == "help" && auto_help && !a.is_takes_value_set() { - let action = super::ArgAction::Help; - a.action = Some(action); - } else if a.get_id() == "version" && auto_version && !a.is_takes_value_set() { - let action = super::ArgAction::Version; - a.action = Some(action); - } else if a.is_takes_value_set() { - let action = super::ArgAction::StoreValue; - a.action = Some(action); - } else { - let action = super::ArgAction::IncOccurrence; - a.action = Some(action); - } - } if a.is_positional() && a.index.is_none() { a.index = Some(pos_counter); pos_counter += 1; @@ -4336,6 +3883,37 @@ self.args._build(); + #[allow(deprecated)] + { + let highest_idx = self + .get_keymap() + .keys() + .filter_map(|x| { + if let crate::mkeymap::KeyType::Position(n) = x { + Some(*n) + } else { + None + } + }) + .max() + .unwrap_or(0); + let is_trailing_var_arg_set = self.is_trailing_var_arg_set(); + let is_allow_hyphen_values_set = self.is_allow_hyphen_values_set(); + let is_allow_negative_numbers_set = self.is_allow_negative_numbers_set(); + for arg in self.args.args_mut() { + if is_allow_hyphen_values_set && arg.is_takes_value_set() { + arg.settings.insert(ArgSettings::AllowHyphenValues.into()); + } + if is_allow_negative_numbers_set && arg.is_takes_value_set() { + arg.settings + .insert(ArgSettings::AllowNegativeNumbers.into()); + } + if is_trailing_var_arg_set && arg.get_index() == Some(highest_idx) { + arg.settings.insert(ArgSettings::TrailingVarArg.into()); + } + } + } + #[cfg(debug_assertions)] assert_app(self); self.settings.set(AppSettings::Built); @@ -4348,27 +3926,29 @@ use std::fmt::Write; let mut mid_string = String::from(" "); + #[cfg(feature = "usage")] if !self.is_subcommand_negates_reqs_set() && !self.is_args_conflicts_with_subcommands_set() { let reqs = Usage::new(self).get_required_usage_from(&[], None, true); // maybe Some(m) for s in &reqs { - mid_string.push_str(s); + mid_string.push_str(&s.to_string()); mid_string.push(' '); } } let is_multicall_set = self.is_multicall_set(); - let sc = self.subcommands.iter_mut().find(|s| s.name == name)?; + let sc = some!(self.subcommands.iter_mut().find(|s| s.name == name)); // Display subcommand name, short and long in usage - let mut sc_names = sc.name.clone(); + let mut sc_names = String::new(); + sc_names.push_str(sc.name.as_str()); let mut flag_subcmd = false; - if let Some(l) = sc.long_flag { + if let Some(l) = sc.get_long_flag() { write!(sc_names, "|--{}", l).unwrap(); flag_subcmd = true; } - if let Some(s) = sc.short_flag { + if let Some(s) = sc.get_short_flag() { write!(sc_names, "|-{}", s).unwrap(); flag_subcmd = true; } @@ -4388,7 +3968,7 @@ // a space let bin_name = format!( "{}{}{}", - self.bin_name.as_ref().unwrap_or(&String::new()), + self.bin_name.as_deref().unwrap_or_default(), if self.bin_name.is_some() { " " } else { "" }, &*sc.name ); @@ -4422,7 +4002,7 @@ } // Ensure all args are built and ready to parse - sc._build_self(); + sc._build_self(false); Some(sc) } @@ -4432,13 +4012,14 @@ if 
!self.is_set(AppSettings::BinNameBuilt) { let mut mid_string = String::from(" "); + #[cfg(feature = "usage")] if !self.is_subcommand_negates_reqs_set() && !self.is_args_conflicts_with_subcommands_set() { let reqs = Usage::new(self).get_required_usage_from(&[], None, true); // maybe Some(m) for s in &reqs { - mid_string.push_str(s); + mid_string.push_str(&s.to_string()); mid_string.push(' '); } } @@ -4457,13 +4038,14 @@ if sc.usage_name.is_none() { use std::fmt::Write; // Display subcommand name, short and long in usage - let mut sc_names = sc.name.clone(); + let mut sc_names = String::new(); + sc_names.push_str(sc.name.as_str()); let mut flag_subcmd = false; - if let Some(l) = sc.long_flag { + if let Some(l) = sc.get_long_flag() { write!(sc_names, "|--{}", l).unwrap(); flag_subcmd = true; } - if let Some(s) = sc.short_flag { + if let Some(s) = sc.get_short_flag() { write!(sc_names, "|-{}", s).unwrap(); flag_subcmd = true; } @@ -4542,14 +4124,14 @@ pub(crate) fn _panic_on_missing_help(&self, help_required_globally: bool) { if self.is_set(AppSettings::HelpExpected) || help_required_globally { - let args_missing_help: Vec = self + let args_missing_help: Vec = self .args .args() - .filter(|arg| arg.help.is_none() && arg.long_help.is_none()) - .map(|arg| String::from(arg.name)) + .filter(|arg| arg.get_help().is_none() && arg.get_long_help().is_none()) + .map(|arg| arg.get_id().clone()) .collect(); - assert!(args_missing_help.is_empty(), + debug_assert!(args_missing_help.is_empty(), "Command::help_expected is enabled for the Command {}, but at least one of its arguments does not have either `help` or `long_help` set. List of such arguments: {}", self.name, args_missing_help.join(", ") @@ -4562,7 +4144,7 @@ } #[cfg(debug_assertions)] - pub(crate) fn two_args_of(&self, condition: F) -> Option<(&Arg<'help>, &Arg<'help>)> + pub(crate) fn two_args_of(&self, condition: F) -> Option<(&Arg, &Arg)> where F: Fn(&Arg) -> bool, { @@ -4582,42 +4164,33 @@ pub(crate) fn _propagate_global_args(&mut self) { debug!("Command::_propagate_global_args:{}", self.name); - for sc in &mut self.subcommands { - for a in self.args.args().filter(|a| a.is_global_set()) { - let mut propagate = false; - let is_generated = matches!( - a.provider, - ArgProvider::Generated | ArgProvider::GeneratedMutated - ); + let autogenerated_help_subcommand = !self.is_disable_help_subcommand_set(); - // Remove generated help and version args in the subcommand - // - // Don't remove if those args are further mutated - if is_generated { - let generated_pos = sc - .args - .args() - .position(|x| x.id == a.id && x.provider == ArgProvider::Generated); - - if let Some(index) = generated_pos { - debug!( - "Command::_propagate removing {}'s {:?}", - sc.get_name(), - a.id - ); - sc.args.remove(index); - propagate = true; - } - } + for sc in &mut self.subcommands { + if sc.get_name() == "help" && autogenerated_help_subcommand { + // Avoid propagating args to the autogenerated help subtrees used in completion. + // This prevents args from showing up during help completions like + // `myapp help subcmd `, which should only suggest subcommands and not args, + // while still allowing args to show up properly on the generated help message. 
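// The comment above is about keeping global arguments out of the synthesized `help`
// subcommand tree; the user-facing behaviour of `Arg::global(true)` is unchanged. A
// minimal illustration with invented names rather than cargo's real CLI:
use clap::{Arg, ArgAction, Command};

fn global_arg_sketch() {
    let cmd = Command::new("myapp")
        .arg(
            Arg::new("color")
                .long("color")
                .action(ArgAction::Set)
                // Propagated into every subcommand when the command tree is built.
                .global(true),
        )
        .subcommand(Command::new("fetch"));

    // The global option may appear after the subcommand name and is visible in the
    // subcommand's matches.
    let m = cmd.get_matches_from(["myapp", "fetch", "--color", "always"]);
    let (_, sub) = m.subcommand().unwrap();
    assert_eq!(sub.get_one::<String>("color").map(String::as_str), Some("always"));
}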
+ continue; + } - if propagate || sc.find(&a.id).is_none() { + for a in self.args.args().filter(|a| a.is_global_set()) { + if sc.find(&a.id).is_some() { debug!( - "Command::_propagate pushing {:?} to {}", + "Command::_propagate skipping {:?} to {}, already exists", a.id, sc.get_name(), ); - sc.args.push(a.clone()); + continue; } + + debug!( + "Command::_propagate pushing {:?} to {}", + a.id, + sc.get_name(), + ); + sc.args.push(a.clone()); } } } @@ -4637,11 +4210,11 @@ // done and to recursively call this method { if self.settings.is_set(AppSettings::PropagateVersion) { - if sc.version.is_none() && self.version.is_some() { - sc.version = Some(self.version.unwrap()); + if let Some(version) = self.version.as_ref() { + sc.version.get_or_insert_with(|| version.clone()); } - if sc.long_version.is_none() && self.long_version.is_some() { - sc.long_version = Some(self.long_version.unwrap()); + if let Some(long_version) = self.long_version.as_ref() { + sc.long_version.get_or_insert_with(|| long_version.clone()); } } @@ -4652,136 +4225,71 @@ } } - #[allow(clippy::blocks_in_if_conditions)] - pub(crate) fn _check_help_and_version(&mut self) { - debug!("Command::_check_help_and_version: {}", self.name); - - if self.is_set(AppSettings::DisableHelpFlag) - || self.args.args().any(|x| { - x.provider == ArgProvider::User - && (x.long == Some("help") || x.id == Id::help_hash()) - }) - || self - .subcommands - .iter() - .any(|sc| sc.long_flag == Some("help")) - { - debug!("Command::_check_help_and_version: Removing generated help"); + pub(crate) fn _check_help_and_version(&mut self, expand_help_tree: bool) { + debug!( + "Command::_check_help_and_version:{} expand_help_tree={}", + self.name, expand_help_tree + ); - let generated_help_pos = self - .args - .args() - .position(|x| x.id == Id::help_hash() && x.provider == ArgProvider::Generated); + self.long_help_exists = self.long_help_exists_(); - if let Some(index) = generated_help_pos { - self.args.remove(index); - } - } else { - let help = self - .args - .args() - .find(|x| x.id == Id::help_hash()) - .expect(INTERNAL_ERROR_MSG); - assert_ne!(help.provider, ArgProvider::User); - - if help.short.is_some() { - if help.short == Some('h') { - if let Some(other_arg) = self - .args - .args() - .find(|x| x.id != Id::help_hash() && x.short == Some('h')) - { - panic!( - "`help`s `-h` conflicts with `{}`. 
- -To change `help`s short, call `cmd.arg(Arg::new(\"help\")...)`.", - other_arg.name - ); - } - } - } else if !(self.args.args().any(|x| x.short == Some('h')) - || self.subcommands.iter().any(|sc| sc.short_flag == Some('h'))) - { - let help = self - .args - .args_mut() - .find(|x| x.id == Id::help_hash()) - .expect(INTERNAL_ERROR_MSG); - help.short = Some('h'); + if !self.is_disable_help_flag_set() { + debug!("Command::_check_help_and_version: Building default --help"); + let mut arg = Arg::new(Id::HELP) + .short('h') + .long("help") + .action(ArgAction::Help); + if self.long_help_exists { + arg = arg + .help("Print help information (use `--help` for more detail)") + .long_help("Print help information (use `-h` for a summary)"); } else { - debug!("Command::_check_help_and_version: Removing `-h` from help"); + arg = arg.help("Print help information"); } + // Avoiding `arg_internal` to not be sensitive to `next_help_heading` / + // `next_display_order` + self.args.push(arg); + } + if !self.is_disable_version_flag_set() { + debug!("Command::_check_help_and_version: Building default --version"); + let arg = Arg::new(Id::VERSION) + .short('V') + .long("version") + .action(ArgAction::Version) + .help("Print version information"); + // Avoiding `arg_internal` to not be sensitive to `next_help_heading` / + // `next_display_order` + self.args.push(arg); } - // Determine if we should remove the generated --version flag - // - // Note that if only mut_arg() was used, the first expression will evaluate to `true` - // however inside the condition block, we only check for Generated args, not - // GeneratedMutated args, so the `mut_arg("version", ..) will be skipped and fall through - // to the following condition below (Adding the short `-V`) - if self.settings.is_set(AppSettings::DisableVersionFlag) - || (self.version.is_none() && self.long_version.is_none()) - || self.args.args().any(|x| { - x.provider == ArgProvider::User - && (x.long == Some("version") || x.id == Id::version_hash()) - }) - || self - .subcommands - .iter() - .any(|sc| sc.long_flag == Some("version")) - { - debug!("Command::_check_help_and_version: Removing generated version"); - - // This is the check mentioned above that only checks for Generated, not - // GeneratedMutated args by design. 
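// With the rewrite above, the automatic -h/--help and -V/--version flags become ordinary
// `Arg`s driven by `ArgAction::Help` and `ArgAction::Version`. One consequence is that an
// application can opt out of a default flag and wire the same action onto a flag of its
// own. A hedged sketch; the command and flag names are invented:
use clap::{Arg, ArgAction, Command};

fn custom_version_flag_sketch() {
    let cmd = Command::new("demo")
        .version("1.2.3")
        // Drop the built-in -V/--version...
        .disable_version_flag(true)
        // ...and attach the same action to a differently named flag.
        .arg(
            Arg::new("show-version")
                .long("show-version")
                .action(ArgAction::Version)
                .help("Print version information"),
        );

    // Running with `--show-version` prints "demo 1.2.3" and exits, just as the
    // built-in flag would have.
    let _matches = cmd.get_matches_from(["demo", "--show-version"]);
}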
- let generated_version_pos = self - .args - .args() - .position(|x| x.id == Id::version_hash() && x.provider == ArgProvider::Generated); - - if let Some(index) = generated_version_pos { - self.args.remove(index); - } - } + if !self.is_set(AppSettings::DisableHelpSubcommand) { + debug!("Command::_check_help_and_version: Building help subcommand"); + let help_about = "Print this message or the help of the given subcommand(s)"; - // If we still have a generated --version flag, determine if we can apply the short `-V` - if self.args.args().any(|x| { - x.id == Id::version_hash() - && matches!( - x.provider, - ArgProvider::Generated | ArgProvider::GeneratedMutated - ) - }) { - let other_arg_has_short = self.args.args().any(|x| x.short == Some('V')); - let version = self - .args - .args_mut() - .find(|x| x.id == Id::version_hash()) - .expect(INTERNAL_ERROR_MSG); - - if !(version.short.is_some() - || other_arg_has_short - || self.subcommands.iter().any(|sc| sc.short_flag == Some('V'))) - { - version.short = Some('V'); - } - } + let mut help_subcmd = if expand_help_tree { + // Slow code path to recursively clone all other subcommand subtrees under help + let help_subcmd = Command::new("help") + .about(help_about) + .global_setting(AppSettings::DisableHelpSubcommand) + .subcommands(self.get_subcommands().map(Command::_copy_subtree_for_help)); + + let mut help_help_subcmd = Command::new("help").about(help_about); + help_help_subcmd.version = None; + help_help_subcmd.long_version = None; + help_help_subcmd = help_help_subcmd + .setting(AppSettings::DisableHelpFlag) + .setting(AppSettings::DisableVersionFlag); - if !self.is_set(AppSettings::DisableHelpSubcommand) - && self.has_subcommands() - && !self.subcommands.iter().any(|s| s.id == Id::help_hash()) - { - debug!("Command::_check_help_and_version: Building help subcommand"); - let mut help_subcmd = App::new("help") - .about("Print this message or the help of the given subcommand(s)") - .arg( + help_subcmd.subcommand(help_help_subcmd) + } else { + Command::new("help").about(help_about).arg( Arg::new("subcommand") - .index(1) - .takes_value(true) - .multiple_occurrences(true) - .value_name("SUBCOMMAND") + .action(ArgAction::Append) + .num_args(..) 
+ .value_name("COMMAND") .help("The subcommand whose help message to display"), - ); + ) + }; self._propagate_subcommand(&mut help_subcmd); // The parser acts like this is set, so let's set it so we don't falsely @@ -4790,54 +4298,44 @@ help_subcmd.long_version = None; help_subcmd = help_subcmd .setting(AppSettings::DisableHelpFlag) + .setting(AppSettings::DisableVersionFlag) .unset_global_setting(AppSettings::PropagateVersion); self.subcommands.push(help_subcmd); } } - pub(crate) fn _derive_display_order(&mut self) { - debug!("Command::_derive_display_order:{}", self.name); - - if self.settings.is_set(AppSettings::DeriveDisplayOrder) { - for a in self - .args - .args_mut() - .filter(|a| !a.is_positional()) - .filter(|a| a.provider != ArgProvider::Generated) - { - a.disp_ord.make_explicit(); - } - for (i, sc) in &mut self.subcommands.iter_mut().enumerate() { - sc.disp_ord.get_or_insert(i); - } - } - for sc in &mut self.subcommands { - sc._derive_display_order(); + fn _copy_subtree_for_help(&self) -> Command { + let mut cmd = Command::new(self.name.clone()) + .hide(self.is_hide_set()) + .global_setting(AppSettings::DisableHelpFlag) + .global_setting(AppSettings::DisableVersionFlag) + .subcommands(self.get_subcommands().map(Command::_copy_subtree_for_help)); + if self.get_about().is_some() { + cmd = cmd.about(self.get_about().unwrap().clone()); } + cmd } pub(crate) fn _render_version(&self, use_long: bool) -> String { debug!("Command::_render_version"); let ver = if use_long { - self.long_version.or(self.version).unwrap_or("") + self.long_version + .as_deref() + .or(self.version.as_deref()) + .unwrap_or_default() } else { - self.version.or(self.long_version).unwrap_or("") + self.version + .as_deref() + .or(self.long_version.as_deref()) + .unwrap_or_default() }; - if let Some(bn) = self.bin_name.as_ref() { - if bn.contains(' ') { - // In case we're dealing with subcommands i.e. git mv is translated to git-mv - format!("{} {}\n", bn.replace(' ', "-"), ver) - } else { - format!("{} {}\n", &self.name[..], ver) - } - } else { - format!("{} {}\n", &self.name[..], ver) - } + let display_name = self.get_display_name().unwrap_or_else(|| self.get_name()); + format!("{} {}\n", display_name, ver) } - pub(crate) fn format_group(&self, g: &Id) -> String { + pub(crate) fn format_group(&self, g: &Id) -> StyledStr { let g_string = self .unroll_args_in_group(g) .iter() @@ -4845,7 +4343,7 @@ .map(|x| { if x.is_positional() { // Print val_name for positional arguments. e.g. - x.name_no_brackets().to_string() + x.name_no_brackets() } else { // Print usage string for flags arguments, e.g. <--help> x.to_string() @@ -4853,7 +4351,11 @@ }) .collect::>() .join("|"); - format!("<{}>", &*g_string) + let mut styled = StyledStr::new(); + styled.none("<"); + styled.none(g_string); + styled.none(">"); + styled } } @@ -4863,33 +4365,22 @@ impl<'a, T> Captures<'a> for T {} // Internal Query Methods -impl<'help> App<'help> { +impl Command { /// Iterate through the *flags* & *options* arguments. - pub(crate) fn get_non_positionals(&self) -> impl Iterator> { + #[cfg(any(feature = "usage", feature = "help"))] + pub(crate) fn get_non_positionals(&self) -> impl Iterator { self.get_arguments().filter(|a| !a.is_positional()) } - /// Iterate through the *positionals* that don't have custom heading. - pub(crate) fn get_positionals_with_no_heading(&self) -> impl Iterator> { - self.get_positionals() - .filter(|a| a.get_help_heading().is_none()) - } - - /// Iterate through the *flags* & *options* that don't have custom heading. 
- pub(crate) fn get_non_positionals_with_no_heading(&self) -> impl Iterator> { - self.get_non_positionals() - .filter(|a| a.get_help_heading().is_none()) - } - - pub(crate) fn find(&self, arg_id: &Id) -> Option<&Arg<'help>> { - self.args.args().find(|a| a.id == *arg_id) + pub(crate) fn find(&self, arg_id: &Id) -> Option<&Arg> { + self.args.args().find(|a| a.get_id() == arg_id) } #[inline] pub(crate) fn contains_short(&self, s: char) -> bool { - assert!( + debug_assert!( self.is_set(AppSettings::Built), - "If App::_build hasn't been called, manually search through Arg shorts" + "If Command::_build hasn't been called, manually search through Arg shorts" ); self.args.contains(s) @@ -4901,14 +4392,11 @@ } #[inline] - pub(crate) fn has_args(&self) -> bool { - !self.args.is_empty() - } - pub(crate) fn has_positionals(&self) -> bool { - self.args.keys().any(|x| x.is_position()) + self.get_positionals().next().is_some() } + #[cfg(any(feature = "usage", feature = "help"))] pub(crate) fn has_visible_subcommands(&self) -> bool { self.subcommands .iter() @@ -4918,11 +4406,9 @@ /// Check if this subcommand can be referred to as `name`. In other words, /// check if `name` is the name of this subcommand or is one of its aliases. #[inline] - pub(crate) fn aliases_to(&self, name: &T) -> bool - where - T: PartialEq + ?Sized, - { - *name == *self.get_name() || self.get_all_aliases().any(|alias| *name == *alias) + pub(crate) fn aliases_to(&self, name: impl AsRef) -> bool { + let name = name.as_ref(); + self.get_name() == name || self.get_all_aliases().any(|alias| alias == name) } /// Check if this subcommand can be referred to as `name`. In other words, @@ -4936,21 +4422,18 @@ /// Check if this subcommand can be referred to as `name`. In other words, /// check if `name` is the name of this long flag subcommand or is one of its long flag aliases. #[inline] - pub(crate) fn long_flag_aliases_to(&self, flag: &T) -> bool - where - T: PartialEq + ?Sized, - { - match self.long_flag { + pub(crate) fn long_flag_aliases_to(&self, flag: &str) -> bool { + match self.long_flag.as_ref() { Some(long_flag) => { - flag == long_flag || self.get_all_long_flag_aliases().any(|alias| flag == alias) + long_flag == flag || self.get_all_long_flag_aliases().any(|alias| alias == flag) } - None => self.get_all_long_flag_aliases().any(|alias| flag == alias), + None => self.get_all_long_flag_aliases().any(|alias| alias == flag), } } #[cfg(debug_assertions)] pub(crate) fn id_exists(&self, id: &Id) -> bool { - self.args.args().any(|x| x.id == *id) || self.groups.iter().any(|x| x.id == *id) + self.args.args().any(|x| x.get_id() == id) || self.groups.iter().any(|x| x.id == *id) } /// Iterate through the groups this arg is member of. @@ -4963,13 +4446,13 @@ .map(|grp| grp.id.clone()) } - pub(crate) fn find_group(&self, group_id: &Id) -> Option<&ArgGroup<'help>> { + pub(crate) fn find_group(&self, group_id: &Id) -> Option<&ArgGroup> { self.groups.iter().find(|g| g.id == *group_id) } /// Iterate through all the names of all subcommands (not recursively), including aliases. /// Used for suggestions. 
- pub(crate) fn all_subcommand_names(&self) -> impl Iterator + Captures<'help> { + pub(crate) fn all_subcommand_names(&self) -> impl Iterator + Captures { self.get_subcommands().flat_map(|sc| { let name = sc.get_name(); let aliases = sc.get_all_aliases(); @@ -4980,7 +4463,7 @@ pub(crate) fn required_graph(&self) -> ChildGraph { let mut reqs = ChildGraph::with_capacity(5); for a in self.args.args().filter(|a| a.is_required_set()) { - reqs.insert(a.id.clone()); + reqs.insert(a.get_id().clone()); } for group in &self.groups { if group.required { @@ -5026,7 +4509,7 @@ pub(crate) fn unroll_arg_requires(&self, func: F, arg: &Id) -> Vec where - F: Fn(&(ArgPredicate<'_>, Id)) -> Option, + F: Fn(&(ArgPredicate, Id)) -> Option, { let mut processed = vec![]; let mut r_vec = vec![arg]; @@ -5043,7 +4526,7 @@ for r in arg.requires.iter().filter_map(&func) { if let Some(req) = self.find(&r) { if !req.requires.is_empty() { - r_vec.push(&req.id) + r_vec.push(req.get_id()) } } args.push(r); @@ -5068,43 +4551,52 @@ .map(|sc| sc.get_name()) } + #[cfg(feature = "help")] pub(crate) fn get_display_order(&self) -> usize { self.disp_ord.unwrap_or(999) } - pub(crate) fn write_help_err( - &self, - mut use_long: bool, - stream: Stream, - ) -> ClapResult { + pub(crate) fn write_help_err(&self, mut use_long: bool) -> StyledStr { debug!( - "Parser::write_help_err: use_long={:?}, stream={:?}", - use_long && self.use_long_help(), - stream + "Command::write_help_err: {}, use_long={:?}", + self.get_display_name().unwrap_or_else(|| self.get_name()), + use_long && self.long_help_exists(), ); - use_long = use_long && self.use_long_help(); + use_long = use_long && self.long_help_exists(); let usage = Usage::new(self); - let mut c = Colorizer::new(stream, self.color_help()); - Help::new(HelpWriter::Buffer(&mut c), self, &usage, use_long).write_help()?; - Ok(c) + let mut styled = StyledStr::new(); + write_help(&mut styled, self, &usage, use_long); + + styled + } + + pub(crate) fn write_version_err(&self, use_long: bool) -> StyledStr { + let msg = self._render_version(use_long); + let mut styled = StyledStr::new(); + styled.none(msg); + styled } - pub(crate) fn use_long_help(&self) -> bool { - debug!("Command::use_long_help"); + pub(crate) fn long_help_exists(&self) -> bool { + debug!("Command::long_help_exists: {}", self.long_help_exists); + self.long_help_exists + } + + fn long_help_exists_(&self) -> bool { + debug!("Command::long_help_exists"); // In this case, both must be checked. This allows the retention of // original formatting, but also ensures that the actual -h or --help // specified by the user is sent through. If hide_short_help is not included, // then items specified with hidden_short_help will also be hidden. 
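// Help text is now accumulated into a `StyledStr` instead of being written through the
// old `Colorizer`, but the public surface for obtaining it is unchanged. A generic
// example (not cargo code) of the calls that still sit on top of this pipeline:
use clap::{arg, Command};

fn help_rendering_sketch() -> std::io::Result<()> {
    let mut cmd = Command::new("example")
        .version("0.1.0")
        .about("Demonstrates help rendering")
        .arg(arg!(-q --quiet "Suppress output"));

    // Short help, rendered through the StyledStr-based machinery shown above.
    cmd.print_help()?;

    // The version banner is still returned as a plain String.
    print!("{}", cmd.render_version());
    Ok(())
}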
let should_long = |v: &Arg| { - v.long_help.is_some() + v.get_long_help().is_some() || v.is_hide_long_help_set() || v.is_hide_short_help_set() - || cfg!(feature = "unstable-v4") - && v.get_possible_values2() - .iter() - .any(PossibleValue::should_show_help) + || v.get_possible_values() + .iter() + .any(PossibleValue::should_show_help) }; // Subcommands aren't checked because we prefer short help for them, deferring to @@ -5126,10 +4618,9 @@ } } -impl<'help> Default for App<'help> { +impl Default for Command { fn default() -> Self { Self { - id: Default::default(), name: Default::default(), long_flag: Default::default(), short_flag: Default::default(), @@ -5153,6 +4644,7 @@ disp_ord: Default::default(), term_w: Default::default(), max_w: Default::default(), + #[cfg(feature = "help")] template: Default::default(), settings: Default::default(), g_settings: Default::default(), @@ -5164,19 +4656,27 @@ current_disp_ord: Some(0), subcommand_value_name: Default::default(), subcommand_heading: Default::default(), + external_value_parser: Default::default(), + long_help_exists: false, } } } -impl<'help> Index<&'_ Id> for App<'help> { - type Output = Arg<'help>; +impl Index<&'_ Id> for Command { + type Output = Arg; fn index(&self, key: &Id) -> &Self::Output { self.find(key).expect(INTERNAL_ERROR_MSG) } } -impl fmt::Display for App<'_> { +impl From<&'_ Command> for Command { + fn from(cmd: &'_ Command) -> Self { + cmd.clone() + } +} + +impl fmt::Display for Command { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.name) } diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/src/builder/debug_asserts.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/src/builder/debug_asserts.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/src/builder/debug_asserts.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/src/builder/debug_asserts.rs 2023-02-01 05:24:55.000000000 +0000 @@ -2,9 +2,13 @@ use clap_lex::RawOsStr; -use crate::builder::arg::ArgProvider; +use crate::builder::OsStr; +use crate::builder::ValueRange; use crate::mkeymap::KeyType; +use crate::util::FlatSet; +use crate::util::Id; use crate::ArgAction; +use crate::INTERNAL_ERROR_MSG; use crate::{Arg, Command, ValueHint}; pub(crate) fn assert_app(cmd: &Command) { @@ -25,17 +29,7 @@ // Used `Command::mut_arg("version", ..) 
but did not provide any version information to display let version_needed = cmd .get_arguments() - .filter(|x| { - let action_set = matches!(x.get_action(), ArgAction::Version); - #[cfg(not(feature = "unstable-v4"))] - let provider_set = matches!(x.provider, ArgProvider::GeneratedMutated); - #[cfg(feature = "unstable-v4")] - let provider_set = matches!( - x.provider, - ArgProvider::User | ArgProvider::GeneratedMutated - ); - action_set && provider_set - }) + .filter(|x| matches!(x.get_action(), ArgAction::Version)) .map(|x| x.get_id()) .collect::>(); @@ -54,10 +48,7 @@ } if let Some(l) = sc.get_long_flag().as_ref() { - #[cfg(feature = "unstable-v4")] - { - assert!(!l.starts_with('-'), "Command {}: long_flag {:?} must not start with a `-`, that will be handled by the parser", sc.get_name(), l); - } + assert!(!l.starts_with('-'), "Command {}: long_flag {:?} must not start with a `-`, that will be handled by the parser", sc.get_name(), l); long_flags.push(Flag::Command(format!("--{}", l), sc.get_name())); } @@ -73,61 +64,68 @@ !cmd.is_multicall_set(), "Command {}: Arguments like {} cannot be set on a multicall command", cmd.get_name(), - arg.name + arg.get_id() ); - if let Some(s) = arg.short.as_ref() { - short_flags.push(Flag::Arg(format!("-{}", s), &*arg.name)); + if let Some(s) = arg.get_short() { + short_flags.push(Flag::Arg(format!("-{}", s), arg.get_id().as_str())); } for (short_alias, _) in &arg.short_aliases { - short_flags.push(Flag::Arg(format!("-{}", short_alias), arg.name)); + short_flags.push(Flag::Arg( + format!("-{}", short_alias), + arg.get_id().as_str(), + )); } - if let Some(l) = arg.long.as_ref() { - #[cfg(feature = "unstable-v4")] - { - assert!(!l.starts_with('-'), "Argument {}: long {:?} must not start with a `-`, that will be handled by the parser", arg.name, l); - } - long_flags.push(Flag::Arg(format!("--{}", l), &*arg.name)); + if let Some(l) = arg.get_long() { + assert!(!l.starts_with('-'), "Argument {}: long {:?} must not start with a `-`, that will be handled by the parser", arg.get_id(), l); + long_flags.push(Flag::Arg(format!("--{}", l), arg.get_id().as_str())); } for (long_alias, _) in &arg.aliases { - long_flags.push(Flag::Arg(format!("--{}", long_alias), arg.name)); + long_flags.push(Flag::Arg( + format!("--{}", long_alias), + arg.get_id().as_str(), + )); } // Name conflicts - assert!( - cmd.two_args_of(|x| x.id == arg.id).is_none(), - "Command {}: Argument names must be unique, but '{}' is in use by more than one argument or group", + if let Some((first, second)) = cmd.two_args_of(|x| x.get_id() == arg.get_id()) { + panic!( + "Command {}: Argument names must be unique, but '{}' is in use by more than one argument or group{}", cmd.get_name(), - arg.name, + arg.get_id(), + duplicate_tip(cmd, first, second), ); + } // Long conflicts - if let Some(l) = arg.long { - if let Some((first, second)) = cmd.two_args_of(|x| x.long == Some(l)) { + if let Some(l) = arg.get_long() { + if let Some((first, second)) = cmd.two_args_of(|x| x.get_long() == Some(l)) { panic!( "Command {}: Long option names must be unique for each argument, \ - but '--{}' is in use by both '{}' and '{}'", + but '--{}' is in use by both '{}' and '{}'{}", cmd.get_name(), l, - first.name, - second.name + first.get_id(), + second.get_id(), + duplicate_tip(cmd, first, second) ) } } // Short conflicts - if let Some(s) = arg.short { - if let Some((first, second)) = cmd.two_args_of(|x| x.short == Some(s)) { + if let Some(s) = arg.get_short() { + if let Some((first, second)) = cmd.two_args_of(|x| 
x.get_short() == Some(s)) { panic!( "Command {}: Short option names must be unique for each argument, \ - but '-{}' is in use by both '{}' and '{}'", + but '-{}' is in use by both '{}' and '{}'{}", cmd.get_name(), s, - first.name, - second.name + first.get_id(), + second.get_id(), + duplicate_tip(cmd, first, second), ) } } @@ -135,16 +133,16 @@ // Index conflicts if let Some(idx) = arg.index { if let Some((first, second)) = - cmd.two_args_of(|x| x.is_positional() && x.index == Some(idx)) + cmd.two_args_of(|x| x.is_positional() && x.get_index() == Some(idx)) { panic!( "Command {}: Argument '{}' has the same index as '{}' \ and they are both positional arguments\n\n\t \ - Use Arg::multiple_values(true) to allow one \ + Use `Arg::num_args(1..)` to allow one \ positional argument to take multiple values", cmd.get_name(), - first.name, - second.name + first.get_id(), + second.get_id() ) } } @@ -153,82 +151,70 @@ for req in &arg.requires { assert!( cmd.id_exists(&req.1), - "Command {}: Argument or group '{:?}' specified in 'requires*' for '{}' does not exist", + "Command {}: Argument or group '{}' specified in 'requires*' for '{}' does not exist", cmd.get_name(), req.1, - arg.name, + arg.get_id(), ); } for req in &arg.r_ifs { - #[cfg(feature = "unstable-v4")] - { - assert!( - !arg.is_required_set(), - "Argument {}: `required` conflicts with `required_if_eq*`", - arg.name - ); - } + assert!( + !arg.is_required_set(), + "Argument {}: `required` conflicts with `required_if_eq*`", + arg.get_id() + ); assert!( cmd.id_exists(&req.0), - "Command {}: Argument or group '{:?}' specified in 'required_if_eq*' for '{}' does not exist", + "Command {}: Argument or group '{}' specified in 'required_if_eq*' for '{}' does not exist", cmd.get_name(), req.0, - arg.name + arg.get_id() ); } for req in &arg.r_ifs_all { - #[cfg(feature = "unstable-v4")] - { - assert!( - !arg.is_required_set(), - "Argument {}: `required` conflicts with `required_if_eq_all`", - arg.name - ); - } + assert!( + !arg.is_required_set(), + "Argument {}: `required` conflicts with `required_if_eq_all`", + arg.get_id() + ); assert!( cmd.id_exists(&req.0), - "Command {}: Argument or group '{:?}' specified in 'required_if_eq_all' for '{}' does not exist", + "Command {}: Argument or group '{}' specified in 'required_if_eq_all' for '{}' does not exist", cmd.get_name(), req.0, - arg.name + arg.get_id() ); } for req in &arg.r_unless { - #[cfg(feature = "unstable-v4")] - { - assert!( - !arg.is_required_set(), - "Argument {}: `required` conflicts with `required_unless*`", - arg.name - ); - } + assert!( + !arg.is_required_set(), + "Argument {}: `required` conflicts with `required_unless*`", + arg.get_id() + ); assert!( cmd.id_exists(req), - "Command {}: Argument or group '{:?}' specified in 'required_unless*' for '{}' does not exist", + "Command {}: Argument or group '{}' specified in 'required_unless*' for '{}' does not exist", cmd.get_name(), req, - arg.name, + arg.get_id(), ); } for req in &arg.r_unless_all { - #[cfg(feature = "unstable-v4")] - { - assert!( - !arg.is_required_set(), - "Argument {}: `required` conflicts with `required_unless*`", - arg.name - ); - } + assert!( + !arg.is_required_set(), + "Argument {}: `required` conflicts with `required_unless*`", + arg.get_id() + ); assert!( cmd.id_exists(req), - "Command {}: Argument or group '{:?}' specified in 'required_unless*' for '{}' does not exist", + "Command {}: Argument or group '{}' specified in 'required_unless*' for '{}' does not exist", cmd.get_name(), req, - arg.name, + arg.get_id(), ); 
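// These debug assertions enforce that, under clap 4, an argument is either conditionally
// required (`required_if_eq*`, `required_unless*`) or unconditionally required, never
// both. A sketch of the accepted pattern; argument names are invented for illustration:
use clap::{Arg, ArgAction, Command};

fn conditional_requirement_sketch() {
    let cmd = Command::new("export")
        .arg(Arg::new("format").long("format").action(ArgAction::Set))
        .arg(
            Arg::new("output")
                .long("output")
                .action(ArgAction::Set)
                // Conditionally required; do NOT also call `.required(true)`,
                // or the assertion above fires in debug builds.
                .required_if_eq("format", "json"),
        );

    let m = cmd.get_matches_from(["export", "--format", "json", "--output", "out.json"]);
    assert_eq!(m.get_one::<String>("output").map(String::as_str), Some("out.json"));
}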
} @@ -236,25 +222,36 @@ for req in &arg.blacklist { assert!( cmd.id_exists(req), - "Command {}: Argument or group '{:?}' specified in 'conflicts_with*' for '{}' does not exist", + "Command {}: Argument or group '{}' specified in 'conflicts_with*' for '{}' does not exist", cmd.get_name(), req, - arg.name, + arg.get_id(), + ); + } + + // overrides + for req in &arg.overrides { + assert!( + cmd.id_exists(req), + "Command {}: Argument or group '{}' specified in 'overrides_with*' for '{}' does not exist", + cmd.get_name(), + req, + arg.get_id(), ); } if arg.is_last_set() { assert!( - arg.long.is_none(), + arg.get_long().is_none(), "Command {}: Flags or Options cannot have last(true) set. '{}' has both a long and last(true) set.", cmd.get_name(), - arg.name + arg.get_id() ); assert!( - arg.short.is_none(), + arg.get_short().is_none(), "Command {}: Flags or Options cannot have last(true) set. '{}' has both a short and last(true) set.", cmd.get_name(), - arg.name + arg.get_id() ); } @@ -262,15 +259,7 @@ !(arg.is_required_set() && arg.is_global_set()), "Command {}: Global arguments cannot be required.\n\n\t'{}' is marked as both global and required", cmd.get_name(), - arg.name - ); - - // validators - assert!( - arg.validator.is_none() || arg.validator_os.is_none(), - "Command {}: Argument '{}' has both `validator` and `validator_os` set which is not allowed", - cmd.get_name(), - arg.name + arg.get_id() ); if arg.get_value_hint() == ValueHint::CommandWithArguments { @@ -278,42 +267,50 @@ arg.is_positional(), "Command {}: Argument '{}' has hint CommandWithArguments and must be positional.", cmd.get_name(), - arg.name + arg.get_id() ); assert!( - cmd.is_trailing_var_arg_set(), + arg.is_trailing_var_arg_set(), "Command {}: Positional argument '{}' has hint CommandWithArguments, so Command must have TrailingVarArg set.", cmd.get_name(), - arg.name + arg.get_id() ); } } for group in cmd.get_groups() { + let derive_hint = if cfg!(feature = "derive") { + " (note: `Args` implicitly creates `ArgGroup`s; disable with `#[group(skip)]`" + } else { + "" + }; + // Name conflicts assert!( cmd.get_groups().filter(|x| x.id == group.id).count() < 2, - "Command {}: Argument group name must be unique\n\n\t'{}' is already in use", + "Command {}: Argument group name must be unique\n\n\t'{}' is already in use{}", cmd.get_name(), - group.name, + group.get_id(), + derive_hint ); // Groups should not have naming conflicts with Args assert!( - !cmd.get_arguments().any(|x| x.id == group.id), - "Command {}: Argument group name '{}' must not conflict with argument name", + !cmd.get_arguments().any(|x| x.get_id() == group.get_id()), + "Command {}: Argument group name '{}' must not conflict with argument name{}", cmd.get_name(), - group.name, + group.get_id(), + derive_hint ); for arg in &group.args { // Args listed inside groups should exist assert!( - cmd.get_arguments().any(|x| x.id == *arg), - "Command {}: Argument group '{}' contains non-existent argument '{:?}'", + cmd.get_arguments().any(|x| x.get_id() == arg), + "Command {}: Argument group '{}' contains non-existent argument '{}'", cmd.get_name(), - group.name, + group.get_id(), arg ); } @@ -327,17 +324,37 @@ detect_duplicate_flags(&long_flags, "long"); detect_duplicate_flags(&short_flags, "short"); + let mut subs = FlatSet::new(); + for sc in cmd.get_subcommands() { + assert!( + subs.insert(sc.get_name()), + "Command {}: command name `{}` is duplicated", + cmd.get_name(), + sc.get_name() + ); + for alias in sc.get_all_aliases() { + assert!( + subs.insert(alias), + 
"Command {}: command `{}` alias `{}` is duplicated", + cmd.get_name(), + sc.get_name(), + alias + ); + } + } + _verify_positionals(cmd); + #[cfg(feature = "help")] if let Some(help_template) = cmd.get_help_template() { assert!( - !help_template.contains("{flags}"), + !help_template.to_string().contains("{flags}"), "Command {}: {}", cmd.get_name(), "`{flags}` template variable was removed in clap3, they are now included in `{options}`", ); assert!( - !help_template.contains("{unified}"), + !help_template.to_string().contains("{unified}"), "Command {}: {}", cmd.get_name(), "`{unified}` template variable was removed in clap3, use `{options}` instead" @@ -348,6 +365,20 @@ assert_app_flags(cmd); } +fn duplicate_tip(cmd: &Command, first: &Arg, second: &Arg) -> &'static str { + if !cmd.is_disable_help_flag_set() + && (first.get_id() == Id::HELP || second.get_id() == Id::HELP) + { + " (call `cmd.disable_help_flag(true)` to remove the auto-generated `--help`)" + } else if !cmd.is_disable_version_flag_set() + && (first.get_id() == Id::VERSION || second.get_id() == Id::VERSION) + { + " (call `cmd.disable_version_flag(true)` to remove the auto-generated `--version`)" + } else { + "" + } +} + #[derive(Eq)] enum Flag<'a> { Command(String, &'a str), @@ -433,7 +464,8 @@ $( if !cmd.$b() { - s.push_str(&format!(" AppSettings::{} is required when AppSettings::{} is set.\n", std::stringify!($b), std::stringify!($a))); + use std::fmt::Write; + write!(&mut s, " AppSettings::{} is required when AppSettings::{} is set.\n", std::stringify!($b), std::stringify!($a)).unwrap(); } )+ @@ -448,7 +480,8 @@ $( if cmd.$b() { - s.push_str(&format!(" AppSettings::{} conflicts with AppSettings::{}.\n", std::stringify!($b), std::stringify!($a))); + use std::fmt::Write; + write!(&mut s, " AppSettings::{} conflicts with AppSettings::{}.\n", std::stringify!($b), std::stringify!($a)).unwrap(); } )+ @@ -459,7 +492,6 @@ }; } - checker!(is_allow_invalid_utf8_for_external_subcommands_set requires is_allow_external_subcommands_set); checker!(is_multicall_set conflicts is_no_binary_name_set); } @@ -496,8 +528,35 @@ num_p ); + for arg in cmd.get_arguments() { + if arg.index.unwrap_or(0) == highest_idx { + assert!( + !arg.is_trailing_var_arg_set() || !arg.is_last_set(), + "{}:{}: `Arg::trailing_var_arg` and `Arg::last` cannot be used together", + cmd.get_name(), + arg.get_id() + ); + + if arg.is_trailing_var_arg_set() { + assert!( + arg.is_multiple(), + "{}:{}: `Arg::trailing_var_arg` must accept multiple values", + cmd.get_name(), + arg.get_id() + ); + } + } else { + assert!( + !arg.is_trailing_var_arg_set(), + "{}:{}: `Arg::trailing_var_arg` can only apply to last positional", + cmd.get_name(), + arg.get_id() + ); + } + } + // Next we verify that only the highest index has takes multiple arguments (if any) - let only_highest = |a: &Arg| a.is_multiple() && (a.index.unwrap_or(0) != highest_idx); + let only_highest = |a: &Arg| a.is_multiple() && (a.get_index().unwrap_or(0) != highest_idx); if cmd.get_positionals().any(only_highest) { // First we make sure if there is a positional that allows multiple values // the one before it (second to last) has one of these: @@ -517,7 +576,7 @@ || last.is_last_set(); assert!( ok, - "When using a positional argument with .multiple_values(true) that is *not the \ + "When using a positional argument with `.num_args(1..)` that is *not the \ last* positional argument, the last positional argument (i.e. the one \ with the highest index) *must* have .required(true) or .last(true) set." 
); @@ -527,18 +586,15 @@ assert!( ok, "Only the last positional argument, or second to last positional \ - argument may be set to .multiple_values(true)" + argument may be set to `.num_args(1..)`" ); // Next we check how many have both Multiple and not a specific number of values set let count = cmd .get_positionals() .filter(|p| { - #[allow(deprecated)] - { - p.is_multiple_occurrences_set() - || (p.is_multiple_values_set() && p.num_vals.is_none()) - } + p.is_multiple_values_set() + && !p.get_num_args().expect(INTERNAL_ERROR_MSG).is_fixed() }) .count(); let ok = count <= 1 @@ -548,7 +604,7 @@ && count == 2); assert!( ok, - "Only one positional argument with .multiple_values(true) set is allowed per \ + "Only one positional argument with `.num_args(1..)` set is allowed per \ command, unless the second one also has .last(true) set" ); } @@ -567,8 +623,8 @@ "Found non-required positional argument with a lower \ index than a required positional argument by two or more: {:?} \ index {:?}", - p.name, - p.index + p.get_id(), + p.get_index() ); } else if p.is_required_set() && !p.is_last_set() { // Args that .last(true) don't count since they can be required and have @@ -596,8 +652,8 @@ p.is_required_set(), "Found non-required positional argument with a lower \ index than a required positional argument: {:?} index {:?}", - p.name, - p.index + p.get_id(), + p.get_index() ); } else if p.is_required_set() && !p.is_last_set() { // Args that .last(true) don't count since they can be required and have @@ -631,21 +687,21 @@ } fn assert_arg(arg: &Arg) { - debug!("Arg::_debug_asserts:{}", arg.name); + debug!("Arg::_debug_asserts:{}", arg.get_id()); // Self conflict // TODO: this check should be recursive assert!( - !arg.blacklist.iter().any(|x| *x == arg.id), + !arg.blacklist.iter().any(|x| x == arg.get_id()), "Argument '{}' cannot conflict with itself", - arg.name, + arg.get_id(), ); assert_eq!( arg.get_action().takes_values(), arg.is_takes_value_set(), "Argument `{}`'s selected action {:?} contradicts `takes_value`", - arg.name, + arg.get_id(), arg.get_action() ); if let Some(action_type_id) = arg.get_action().value_type_id() { @@ -653,7 +709,7 @@ action_type_id, arg.get_value_parser().type_id(), "Argument `{}`'s selected action {:?} contradicts `value_parser` ({:?})", - arg.name, + arg.get_id(), arg.get_action(), arg.get_value_parser() ); @@ -663,14 +719,14 @@ assert!( arg.is_takes_value_set(), "Argument '{}' has value hint but takes no value", - arg.name + arg.get_id() ); if arg.get_value_hint() == ValueHint::CommandWithArguments { assert!( arg.is_multiple_values_set(), "Argument '{}' uses hint CommandWithArguments and must accept multiple values", - arg.name + arg.get_id() ) } } @@ -679,41 +735,83 @@ assert!( arg.is_positional(), "Argument '{}' is a positional argument and can't have short or long name versions", - arg.name + arg.get_id() ); assert!( arg.is_takes_value_set(), - "Argument '{}` is positional, it must take a value", - arg.name + "Argument '{}` is positional, it must take a value{}", + arg.get_id(), + if arg.get_id() == Id::HELP { + " (`mut_arg` no longer works with implicit `--help`)" + } else if arg.get_id() == Id::VERSION { + " (`mut_arg` no longer works with implicit `--version`)" + } else { + "" + } ); } - #[cfg(feature = "unstable-v4")] - { - let num_vals = arg.get_num_vals().unwrap_or(usize::MAX); + let num_vals = arg.get_num_args().expect(INTERNAL_ERROR_MSG); + // This can be the cause of later asserts, so put this first + if num_vals != ValueRange::EMPTY { + // HACK: Don't check 
for flags to make the derive easier let num_val_names = arg.get_value_names().unwrap_or(&[]).len(); - if num_vals < num_val_names { + if num_vals.max_values() < num_val_names { panic!( - "Argument {}: Too many value names ({}) compared to number_of_values ({})", - arg.name, num_val_names, num_vals + "Argument {}: Too many value names ({}) compared to `num_args` ({})", + arg.get_id(), + num_val_names, + num_vals ); } } + assert_eq!( + num_vals.takes_values(), + arg.is_takes_value_set(), + "Argument {}: mismatch between `num_args` ({}) and `takes_value`", + arg.get_id(), + num_vals, + ); + assert_eq!( + num_vals.is_multiple(), + arg.is_multiple_values_set(), + "Argument {}: mismatch between `num_args` ({}) and `multiple_values`", + arg.get_id(), + num_vals, + ); + + if 1 < num_vals.min_values() { + assert!( + !arg.is_require_equals_set(), + "Argument {}: cannot accept more than 1 arg (num_args={}) with require_equals", + arg.get_id(), + num_vals + ); + } + + if num_vals == ValueRange::SINGLE { + assert!( + !arg.is_multiple_values_set(), + "Argument {}: mismatch between `num_args` and `multiple_values`", + arg.get_id() + ); + } + assert_arg_flags(arg); - assert_defaults(arg, "default_value", arg.default_vals.iter().copied()); + assert_defaults(arg, "default_value", arg.default_vals.iter()); assert_defaults( arg, "default_missing_value", - arg.default_missing_vals.iter().copied(), + arg.default_missing_vals.iter(), ); assert_defaults( arg, "default_value_if", arg.default_vals_ifs .iter() - .filter_map(|(_, _, default)| *default), + .filter_map(|(_, _, default)| default.as_ref()), ); } @@ -725,7 +823,8 @@ $( if !arg.$b() { - s.push_str(&format!(" Arg::{} is required when Arg::{} is set.\n", std::stringify!($b), std::stringify!($a))); + use std::fmt::Write; + write!(&mut s, " Arg::{} is required when Arg::{} is set.\n", std::stringify!($b), std::stringify!($a)).unwrap(); } )+ @@ -736,95 +835,22 @@ } } - checker!(is_require_value_delimiter_set requires is_takes_value_set); - checker!(is_require_value_delimiter_set requires is_use_value_delimiter_set); checker!(is_hide_possible_values_set requires is_takes_value_set); checker!(is_allow_hyphen_values_set requires is_takes_value_set); + checker!(is_allow_negative_numbers_set requires is_takes_value_set); checker!(is_require_equals_set requires is_takes_value_set); checker!(is_last_set requires is_takes_value_set); checker!(is_hide_default_value_set requires is_takes_value_set); checker!(is_multiple_values_set requires is_takes_value_set); checker!(is_ignore_case_set requires is_takes_value_set); - { - #![allow(deprecated)] - checker!(is_forbid_empty_values_set requires is_takes_value_set); - checker!(is_allow_invalid_utf8_set requires is_takes_value_set); - } } fn assert_defaults<'d>( arg: &Arg, field: &'static str, - defaults: impl IntoIterator, + defaults: impl IntoIterator, ) { for default_os in defaults { - if let Some(default_s) = default_os.to_str() { - if !arg.possible_vals.is_empty() { - if let Some(delim) = arg.get_value_delimiter() { - for part in default_s.split(delim) { - assert!( - arg.possible_vals.iter().any(|possible_val| { - possible_val.matches(part, arg.is_ignore_case_set()) - }), - "Argument `{}`'s {}={} doesn't match possible values", - arg.name, - field, - part - ) - } - } else { - assert!( - arg.possible_vals.iter().any(|possible_val| { - possible_val.matches(default_s, arg.is_ignore_case_set()) - }), - "Argument `{}`'s {}={} doesn't match possible values", - arg.name, - field, - default_s - ); - } - } - - if let 
Some(validator) = arg.validator.as_ref() { - let mut validator = validator.lock().unwrap(); - if let Some(delim) = arg.get_value_delimiter() { - for part in default_s.split(delim) { - if let Err(err) = validator(part) { - panic!( - "Argument `{}`'s {}={} failed validation: {}", - arg.name, field, part, err - ); - } - } - } else if let Err(err) = validator(default_s) { - panic!( - "Argument `{}`'s {}={} failed validation: {}", - arg.name, field, default_s, err - ); - } - } - } - - if let Some(validator) = arg.validator_os.as_ref() { - let mut validator = validator.lock().unwrap(); - if let Some(delim) = arg.get_value_delimiter() { - let default_os = RawOsStr::new(default_os); - for part in default_os.split(delim) { - if let Err(err) = validator(&part.to_os_str()) { - panic!( - "Argument `{}`'s {}={:?} failed validation: {}", - arg.name, field, part, err - ); - } - } - } else if let Err(err) = validator(default_os) { - panic!( - "Argument `{}`'s {}={:?} failed validation: {}", - arg.name, field, default_os, err - ); - } - } - let value_parser = arg.get_value_parser(); let assert_cmd = Command::new("assert"); if let Some(delim) = arg.get_value_delimiter() { @@ -834,7 +860,7 @@ { panic!( "Argument `{}`'s {}={:?} failed validation: {}", - arg.name, + arg.get_id(), field, part.to_str_lossy(), err @@ -844,7 +870,10 @@ } else if let Err(err) = value_parser.parse_ref(&assert_cmd, Some(arg), default_os) { panic!( "Argument `{}`'s {}={:?} failed validation: {}", - arg.name, field, default_os, err + arg.get_id(), + field, + default_os, + err ); } } diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/src/builder/macros.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/src/builder/macros.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/src/builder/macros.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/src/builder/macros.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,180 +0,0 @@ -#[cfg(feature = "yaml")] -macro_rules! yaml_tuple2 { - ($a:ident, $v:ident, $c:ident) => {{ - if let Some(vec) = $v.as_vec() { - for ys in vec { - if let Some(tup) = ys.as_vec() { - debug_assert_eq!(2, tup.len()); - $a = $a.$c(yaml_str!(tup[0]), yaml_str!(tup[1])); - } else { - panic!("Failed to convert YAML value to vec"); - } - } - } else { - panic!("Failed to convert YAML value to vec"); - } - $a - }}; -} - -#[cfg(feature = "yaml")] -macro_rules! yaml_tuple3 { - ($a:ident, $v:ident, $c:ident) => {{ - if let Some(vec) = $v.as_vec() { - for ys in vec { - if let Some(tup) = ys.as_vec() { - debug_assert_eq!(3, tup.len()); - $a = $a.$c( - yaml_str!(tup[0]), - yaml_opt_str!(tup[1]), - yaml_opt_str!(tup[2]), - ); - } else { - panic!("Failed to convert YAML value to vec"); - } - } - } else { - panic!("Failed to convert YAML value to vec"); - } - $a - }}; -} - -#[cfg(feature = "yaml")] -macro_rules! yaml_vec_or_str { - ($a:ident, $v:ident, $c:ident) => {{ - let maybe_vec = $v.as_vec(); - if let Some(vec) = maybe_vec { - for ys in vec { - if let Some(s) = ys.as_str() { - $a = $a.$c(s); - } else { - panic!("Failed to convert YAML value {:?} to a string", ys); - } - } - } else { - if let Some(s) = $v.as_str() { - $a = $a.$c(s); - } else { - panic!( - "Failed to convert YAML value {:?} to either a vec or string", - $v - ); - } - } - $a - }}; -} - -#[cfg(feature = "yaml")] -macro_rules! 
yaml_vec { - ($a:ident, $v:ident, $c:ident) => {{ - let maybe_vec = $v.as_vec(); - if let Some(vec) = maybe_vec { - let content = vec.into_iter().map(|ys| { - if let Some(s) = ys.as_str() { - s - } else { - panic!("Failed to convert YAML value {:?} to a string", ys); - } - }); - $a = $a.$c(content) - } else { - panic!("Failed to convert YAML value {:?} to a vec", $v); - } - $a - }}; -} - -#[cfg(feature = "yaml")] -macro_rules! yaml_opt_str { - ($v:expr) => {{ - if !$v.is_null() { - Some( - $v.as_str() - .unwrap_or_else(|| panic!("failed to convert YAML {:?} value to a string", $v)), - ) - } else { - None - } - }}; -} - -#[cfg(feature = "yaml")] -macro_rules! yaml_char { - ($v:expr) => {{ - $v.as_str() - .unwrap_or_else(|| panic!("failed to convert YAML {:?} value to a string", $v)) - .chars() - .next() - .unwrap_or_else(|| panic!("Expected char")) - }}; -} - -#[cfg(feature = "yaml")] -macro_rules! yaml_str { - ($v:expr) => {{ - $v.as_str() - .unwrap_or_else(|| panic!("failed to convert YAML {:?} value to a string", $v)) - }}; -} - -#[cfg(feature = "yaml")] -macro_rules! yaml_to_char { - ($a:ident, $v:ident, $c:ident) => {{ - $a.$c(yaml_char!($v)) - }}; -} - -#[cfg(feature = "yaml")] -macro_rules! yaml_to_str { - ($a:ident, $v:ident, $c:ident) => {{ - $a.$c(yaml_str!($v)) - }}; -} - -#[cfg(feature = "yaml")] -macro_rules! yaml_to_bool { - ($a:ident, $v:ident, $c:ident) => {{ - $a.$c($v - .as_bool() - .unwrap_or_else(|| panic!("failed to convert YAML {:?} value to a string", $v))) - }}; -} - -#[cfg(feature = "yaml")] -macro_rules! yaml_to_usize { - ($a:ident, $v:ident, $c:ident) => {{ - $a.$c($v - .as_i64() - .unwrap_or_else(|| panic!("failed to convert YAML {:?} value to a string", $v)) - as usize) - }}; -} - -#[cfg(feature = "yaml")] -macro_rules! yaml_to_setting { - ($a:ident, $v:ident, $c:ident, $s:ident, $t:literal, $n:expr) => {{ - if let Some(v) = $v.as_vec() { - for ys in v { - if let Some(s) = ys.as_str() { - $a = $a.$c(s.parse::<$s>().unwrap_or_else(|_| { - panic!("Unknown {} '{}' found in YAML file for {}", $t, s, $n) - })); - } else { - panic!( - "Failed to convert YAML {:?} value to an array of strings", - $v - ); - } - } - } else if let Some(v) = $v.as_str() { - $a = $a.$c(v - .parse::<$s>() - .unwrap_or_else(|_| panic!("Unknown {} '{}' found in YAML file for {}", $t, v, $n))) - } else { - panic!("Failed to convert YAML {:?} value to a string", $v); - } - $a - }}; -} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/src/builder/mod.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/src/builder/mod.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/src/builder/mod.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/src/builder/mod.rs 2023-02-01 05:24:55.000000000 +0000 @@ -1,8 +1,5 @@ //! 
Define [`Command`] line [arguments][`Arg`] -#[macro_use] -mod macros; - mod action; mod app_settings; mod arg; @@ -10,52 +7,54 @@ mod arg_predicate; mod arg_settings; mod command; +mod os_str; mod possible_value; -mod usage_parser; +mod range; +mod resettable; +mod str; +mod styled_str; mod value_hint; mod value_parser; -#[cfg(feature = "regex")] -mod regex; - #[cfg(debug_assertions)] mod debug_asserts; #[cfg(test)] mod tests; +pub use self::str::Str; pub use action::ArgAction; -pub use app_settings::{AppFlags, AppSettings}; pub use arg::Arg; pub use arg_group::ArgGroup; -pub use arg_settings::{ArgFlags, ArgSettings}; +pub use arg_predicate::ArgPredicate; pub use command::Command; +pub use os_str::OsStr; pub use possible_value::PossibleValue; +pub use range::ValueRange; +pub use resettable::IntoResettable; +pub use resettable::Resettable; +pub use styled_str::StyledStr; pub use value_hint::ValueHint; -pub use value_parser::PossibleValuesParser; -pub use value_parser::RangedI64ValueParser; -pub use value_parser::RangedU64ValueParser; -pub use value_parser::StringValueParser; -pub use value_parser::TypedValueParser; -pub use value_parser::ValueParser; -pub use value_parser::ValueParserFactory; -pub use value_parser::_AnonymousValueParser; pub use value_parser::_AutoValueParser; pub use value_parser::via_prelude; pub use value_parser::BoolValueParser; pub use value_parser::BoolishValueParser; pub use value_parser::EnumValueParser; pub use value_parser::FalseyValueParser; +pub use value_parser::MapValueParser; pub use value_parser::NonEmptyStringValueParser; pub use value_parser::OsStringValueParser; pub use value_parser::PathBufValueParser; +pub use value_parser::PossibleValuesParser; +pub use value_parser::RangedI64ValueParser; +pub use value_parser::RangedU64ValueParser; +pub use value_parser::StringValueParser; +pub use value_parser::TypedValueParser; +pub use value_parser::ValueParser; +pub use value_parser::ValueParserFactory; +pub use value_parser::_AnonymousValueParser; -#[allow(deprecated)] -pub use command::App; - -#[cfg(feature = "regex")] -pub use self::regex::RegexRef; - +#[allow(unused_imports)] +pub(crate) use self::str::Inner as StrInner; pub(crate) use action::CountType; -pub(crate) use arg::display_arg_val; -pub(crate) use arg_predicate::ArgPredicate; +pub(crate) use arg_settings::{ArgFlags, ArgSettings}; diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/src/builder/os_str.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/src/builder/os_str.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/src/builder/os_str.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/src/builder/os_str.rs 2023-02-01 05:24:55.000000000 +0000 @@ -0,0 +1,336 @@ +use crate::builder::Str; + +/// A UTF-8-encoded fixed string +/// +/// **NOTE:** To support dynamic values (i.e. 
`OsString`), enable the [`string` +/// feature][crate::_features] +#[derive(Default, Clone, Eq, PartialEq, PartialOrd, Ord, Hash)] +pub struct OsStr { + name: Inner, +} + +impl OsStr { + #[cfg(feature = "string")] + pub(crate) fn from_string(name: std::ffi::OsString) -> Self { + Self { + name: Inner::from_string(name), + } + } + + #[cfg(feature = "string")] + pub(crate) fn from_ref(name: &std::ffi::OsStr) -> Self { + Self { + name: Inner::from_ref(name), + } + } + + pub(crate) fn from_static_ref(name: &'static std::ffi::OsStr) -> Self { + Self { + name: Inner::from_static_ref(name), + } + } + + /// Get the raw string as an `std::ffi::OsStr` + pub fn as_os_str(&self) -> &std::ffi::OsStr { + self.name.as_os_str() + } + + /// Get the raw string as an `OsString` + pub fn to_os_string(&self) -> std::ffi::OsString { + self.as_os_str().to_owned() + } +} + +impl From<&'_ OsStr> for OsStr { + fn from(id: &'_ OsStr) -> Self { + id.clone() + } +} + +#[cfg(feature = "string")] +impl From for OsStr { + fn from(id: Str) -> Self { + match id.into_inner() { + crate::builder::StrInner::Static(s) => Self::from_static_ref(std::ffi::OsStr::new(s)), + crate::builder::StrInner::Owned(s) => Self::from_ref(std::ffi::OsStr::new(s.as_ref())), + } + } +} + +#[cfg(not(feature = "string"))] +impl From for OsStr { + fn from(id: Str) -> Self { + Self::from_static_ref(std::ffi::OsStr::new(id.into_inner().0)) + } +} + +#[cfg(feature = "perf")] +impl From<&'_ Str> for OsStr { + fn from(id: &'_ Str) -> Self { + match id.clone().into_inner() { + crate::builder::StrInner::Static(s) => Self::from_static_ref(std::ffi::OsStr::new(s)), + crate::builder::StrInner::Owned(s) => Self::from_ref(std::ffi::OsStr::new(s.as_ref())), + } + } +} + +impl From<&'_ Str> for OsStr { + fn from(id: &'_ Str) -> Self { + id.clone().into() + } +} + +#[cfg(feature = "string")] +impl From for OsStr { + fn from(name: std::ffi::OsString) -> Self { + Self::from_string(name) + } +} + +#[cfg(feature = "string")] +impl From<&'_ std::ffi::OsString> for OsStr { + fn from(name: &'_ std::ffi::OsString) -> Self { + Self::from_ref(name.as_os_str()) + } +} + +#[cfg(feature = "string")] +impl From for OsStr { + fn from(name: std::string::String) -> Self { + Self::from_string(name.into()) + } +} + +#[cfg(feature = "string")] +impl From<&'_ std::string::String> for OsStr { + fn from(name: &'_ std::string::String) -> Self { + Self::from_ref(name.as_str().as_ref()) + } +} + +impl From<&'static std::ffi::OsStr> for OsStr { + fn from(name: &'static std::ffi::OsStr) -> Self { + Self::from_static_ref(name) + } +} + +impl From<&'_ &'static std::ffi::OsStr> for OsStr { + fn from(name: &'_ &'static std::ffi::OsStr) -> Self { + Self::from_static_ref(*name) + } +} + +impl From<&'static str> for OsStr { + fn from(name: &'static str) -> Self { + Self::from_static_ref(name.as_ref()) + } +} + +impl From<&'_ &'static str> for OsStr { + fn from(name: &'_ &'static str) -> Self { + Self::from_static_ref((*name).as_ref()) + } +} + +impl From for std::ffi::OsString { + fn from(name: OsStr) -> Self { + name.name.into_os_string() + } +} + +impl From for std::path::PathBuf { + fn from(name: OsStr) -> Self { + std::ffi::OsString::from(name).into() + } +} + +impl std::fmt::Debug for OsStr { + #[inline] + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + std::fmt::Debug::fmt(self.as_os_str(), f) + } +} + +impl std::ops::Deref for OsStr { + type Target = std::ffi::OsStr; + + #[inline] + fn deref(&self) -> &std::ffi::OsStr { + self.as_os_str() + } +} + +impl AsRef for OsStr 
{ + #[inline] + fn as_ref(&self) -> &std::ffi::OsStr { + self.as_os_str() + } +} + +impl AsRef for OsStr { + #[inline] + fn as_ref(&self) -> &std::path::Path { + std::path::Path::new(self) + } +} + +impl std::borrow::Borrow for OsStr { + #[inline] + fn borrow(&self) -> &std::ffi::OsStr { + self.as_os_str() + } +} + +impl PartialEq for OsStr { + #[inline] + fn eq(&self, other: &str) -> bool { + PartialEq::eq(self.as_os_str(), other) + } +} +impl PartialEq for str { + #[inline] + fn eq(&self, other: &OsStr) -> bool { + PartialEq::eq(self, other.as_os_str()) + } +} + +impl PartialEq<&'_ str> for OsStr { + #[inline] + fn eq(&self, other: &&str) -> bool { + PartialEq::eq(self.as_os_str(), *other) + } +} +impl PartialEq for &'_ str { + #[inline] + fn eq(&self, other: &OsStr) -> bool { + PartialEq::eq(*self, other.as_os_str()) + } +} + +impl PartialEq<&'_ std::ffi::OsStr> for OsStr { + #[inline] + fn eq(&self, other: &&std::ffi::OsStr) -> bool { + PartialEq::eq(self.as_os_str(), *other) + } +} +impl PartialEq for &'_ std::ffi::OsStr { + #[inline] + fn eq(&self, other: &OsStr) -> bool { + PartialEq::eq(*self, other.as_os_str()) + } +} + +impl PartialEq for OsStr { + #[inline] + fn eq(&self, other: &std::string::String) -> bool { + PartialEq::eq(self.as_os_str(), other.as_str()) + } +} +impl PartialEq for std::string::String { + #[inline] + fn eq(&self, other: &OsStr) -> bool { + PartialEq::eq(self.as_str(), other.as_os_str()) + } +} + +impl PartialEq for OsStr { + #[inline] + fn eq(&self, other: &std::ffi::OsString) -> bool { + PartialEq::eq(self.as_os_str(), other.as_os_str()) + } +} +impl PartialEq for std::ffi::OsString { + #[inline] + fn eq(&self, other: &OsStr) -> bool { + PartialEq::eq(self.as_os_str(), other.as_os_str()) + } +} + +#[cfg(feature = "string")] +pub(crate) mod inner { + #[derive(Clone)] + pub(crate) enum Inner { + Static(&'static std::ffi::OsStr), + Owned(Box), + } + + impl Inner { + pub(crate) fn from_string(name: std::ffi::OsString) -> Self { + Self::Owned(name.into_boxed_os_str()) + } + + pub(crate) fn from_ref(name: &std::ffi::OsStr) -> Self { + Self::Owned(Box::from(name)) + } + + pub(crate) fn from_static_ref(name: &'static std::ffi::OsStr) -> Self { + Self::Static(name) + } + + pub(crate) fn as_os_str(&self) -> &std::ffi::OsStr { + match self { + Self::Static(s) => s, + Self::Owned(s) => s.as_ref(), + } + } + + pub(crate) fn into_os_string(self) -> std::ffi::OsString { + self.as_os_str().to_owned() + } + } +} + +#[cfg(not(feature = "string"))] +pub(crate) mod inner { + #[derive(Clone)] + pub(crate) struct Inner(&'static std::ffi::OsStr); + + impl Inner { + pub(crate) fn from_static_ref(name: &'static std::ffi::OsStr) -> Self { + Self(name) + } + + pub(crate) fn as_os_str(&self) -> &std::ffi::OsStr { + self.0 + } + + pub(crate) fn into_os_string(self) -> std::ffi::OsString { + self.as_os_str().to_owned() + } + } +} + +pub(crate) use inner::Inner; + +impl Default for Inner { + fn default() -> Self { + Self::from_static_ref(std::ffi::OsStr::new("")) + } +} + +impl PartialEq for Inner { + fn eq(&self, other: &Inner) -> bool { + self.as_os_str() == other.as_os_str() + } +} + +impl PartialOrd for Inner { + fn partial_cmp(&self, other: &Self) -> Option { + self.as_os_str().partial_cmp(other.as_os_str()) + } +} + +impl Ord for Inner { + fn cmp(&self, other: &Inner) -> std::cmp::Ordering { + self.as_os_str().cmp(other.as_os_str()) + } +} + +impl Eq for Inner {} + +impl std::hash::Hash for Inner { + #[inline] + fn hash(&self, state: &mut H) { + self.as_os_str().hash(state); + } +} 
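The new `builder/os_str.rs` added above introduces a small string wrapper that stores either a `&'static` reference or, with the optional `string` feature, an owned buffer. As a rough, hedged sketch (not part of the patch), the conversions and comparisons defined in that file allow code like the following, assuming the vendored clap 4.0.x built without the `string` feature so only `'static` data can be stored:

use clap::builder::OsStr;

fn os_str_demo() {
    // `From<&'static str>` and `From<&'static std::ffi::OsStr>` are provided above.
    let a: OsStr = "config".into();
    let b: OsStr = std::ffi::OsStr::new("config").into();

    // `PartialEq` against `str`, `&str`, `String` and `OsString` is also provided.
    assert_eq!(a, "config");
    assert_eq!(a, b);

    // The wrapper converts back into the standard owned types.
    let owned: std::ffi::OsString = a.to_os_string();
    let path: std::path::PathBuf = b.into();
    assert_eq!(owned.as_os_str(), path.as_os_str());
}
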
diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/src/builder/possible_value.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/src/builder/possible_value.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/src/builder/possible_value.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/src/builder/possible_value.rs 2023-02-01 05:24:55.000000000 +0000 @@ -1,5 +1,6 @@ -use std::{borrow::Cow, iter}; - +use crate::builder::IntoResettable; +use crate::builder::Str; +use crate::builder::StyledStr; use crate::util::eq_ignore_case; /// A possible value of an argument. @@ -12,9 +13,9 @@ /// # Examples /// /// ```rust -/// # use clap::{Arg, PossibleValue}; +/// # use clap::{Arg, builder::PossibleValue, ArgAction}; /// let cfg = Arg::new("config") -/// .takes_value(true) +/// .action(ArgAction::Set) /// .value_name("FILE") /// .value_parser([ /// PossibleValue::new("fast"), @@ -27,14 +28,14 @@ /// [hide]: PossibleValue::hide() /// [help]: PossibleValue::help() #[derive(Debug, Default, Clone, PartialEq, Eq)] -pub struct PossibleValue<'help> { - name: &'help str, - help: Option<&'help str>, - aliases: Vec<&'help str>, // (name, visible) +pub struct PossibleValue { + name: Str, + help: Option, + aliases: Vec, // (name, visible) hide: bool, } -impl<'help> PossibleValue<'help> { +impl PossibleValue { /// Create a [`PossibleValue`] with its name. /// /// The name will be used to decide whether this value was provided by the user to an argument. @@ -45,16 +46,16 @@ /// # Examples /// /// ```rust - /// # use clap::PossibleValue; + /// # use clap::builder::PossibleValue; /// PossibleValue::new("fast") /// # ; /// ``` /// [hidden]: PossibleValue::hide - /// [possible value]: crate::Arg::possible_values + /// [possible value]: crate::builder::PossibleValuesParser /// [`Arg::hide_possible_values(true)`]: crate::Arg::hide_possible_values() - pub fn new(name: &'help str) -> Self { + pub fn new(name: impl Into) -> Self { PossibleValue { - name, + name: name.into(), ..Default::default() } } @@ -67,15 +68,15 @@ /// # Examples /// /// ```rust - /// # use clap::PossibleValue; + /// # use clap::builder::PossibleValue; /// PossibleValue::new("slow") /// .help("not fast") /// # ; /// ``` #[inline] #[must_use] - pub fn help(mut self, help: &'help str) -> Self { - self.help = Some(help); + pub fn help(mut self, help: impl IntoResettable) -> Self { + self.help = help.into_resettable().into_option(); self } @@ -87,7 +88,7 @@ /// # Examples /// /// ```rust - /// # use clap::PossibleValue; + /// # use clap::builder::PossibleValue; /// PossibleValue::new("secret") /// .hide(true) /// # ; @@ -105,14 +106,18 @@ /// # Examples /// /// ```rust - /// # use clap::PossibleValue; + /// # use clap::builder::PossibleValue; /// PossibleValue::new("slow") /// .alias("not-fast") /// # ; /// ``` #[must_use] - pub fn alias(mut self, name: &'help str) -> Self { - self.aliases.push(name); + pub fn alias(mut self, name: impl IntoResettable) -> Self { + if let Some(name) = name.into_resettable().into_option() { + self.aliases.push(name); + } else { + self.aliases.clear(); + } self } @@ -121,57 +126,44 @@ /// # Examples /// /// ```rust - /// # use clap::PossibleValue; + /// # use clap::builder::PossibleValue; /// PossibleValue::new("slow") /// .aliases(["not-fast", "snake-like"]) /// # ; /// ``` #[must_use] - pub fn aliases(mut self, names: I) -> Self - where - I: IntoIterator, - { - self.aliases.extend(names.into_iter()); + pub fn aliases(mut self, names: impl IntoIterator>) -> Self { + 
self.aliases.extend(names.into_iter().map(|a| a.into())); self } } /// Reflection -impl<'help> PossibleValue<'help> { +impl PossibleValue { /// Get the name of the argument value #[inline] - pub fn get_name(&self) -> &'help str { - self.name + pub fn get_name(&self) -> &str { + self.name.as_str() } /// Get the help specified for this argument, if any #[inline] - pub fn get_help(&self) -> Option<&'help str> { - self.help + pub fn get_help(&self) -> Option<&StyledStr> { + self.help.as_ref() } /// Get the help specified for this argument, if any and the argument /// value is not hidden #[inline] - #[cfg(feature = "unstable-v4")] - pub(crate) fn get_visible_help(&self) -> Option<&'help str> { + #[cfg(feature = "help")] + pub(crate) fn get_visible_help(&self) -> Option<&StyledStr> { if !self.hide { - self.help + self.get_help() } else { None } } - /// Deprecated, replaced with [`PossibleValue::is_hide_set`] - #[inline] - #[cfg_attr( - feature = "deprecated", - deprecated(since = "3.1.0", note = "Replaced with `PossibleValue::is_hide_set`") - )] - pub fn is_hidden(&self) -> bool { - self.is_hide_set() - } - /// Report if [`PossibleValue::hide`] is set #[inline] pub fn is_hide_set(&self) -> bool { @@ -183,30 +175,15 @@ !self.hide && self.help.is_some() } - /// Get the name if argument value is not hidden, `None` otherwise - #[cfg_attr( - feature = "deprecated", - deprecated( - since = "3.1.4", - note = "Use `PossibleValue::is_hide_set` and `PossibleValue::get_name`" - ) - )] - pub fn get_visible_name(&self) -> Option<&'help str> { - if self.hide { - None - } else { - Some(self.name) - } - } - /// Get the name if argument value is not hidden, `None` otherwise, /// but wrapped in quotes if it contains whitespace - pub(crate) fn get_visible_quoted_name(&self) -> Option> { + #[cfg(feature = "help")] + pub(crate) fn get_visible_quoted_name(&self) -> Option> { if !self.hide { Some(if self.name.contains(char::is_whitespace) { format!("{:?}", self.name).into() } else { - self.name.into() + self.name.as_str().into() }) } else { None @@ -216,8 +193,8 @@ /// Returns all valid values of the argument value. /// /// Namely the name and all aliases. 
- pub fn get_name_and_aliases(&self) -> impl Iterator + '_ { - iter::once(&self.name).chain(&self.aliases).copied() + pub fn get_name_and_aliases(&self) -> impl Iterator + '_ { + std::iter::once(self.get_name()).chain(self.aliases.iter().map(|s| s.as_str())) } /// Tests if the value is valid for this argument value @@ -227,7 +204,7 @@ /// # Examples /// /// ```rust - /// # use clap::PossibleValue; + /// # use clap::builder::PossibleValue; /// let arg_value = PossibleValue::new("fast").alias("not-slow"); /// /// assert!(arg_value.matches("fast", false)); @@ -246,14 +223,8 @@ } } -impl<'help> From<&'help str> for PossibleValue<'help> { - fn from(s: &'help str) -> Self { - Self::new(s) - } -} - -impl<'help> From<&'help &'help str> for PossibleValue<'help> { - fn from(s: &'help &'help str) -> Self { +impl> From for PossibleValue { + fn from(s: S) -> Self { Self::new(s) } } diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/src/builder/range.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/src/builder/range.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/src/builder/range.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/src/builder/range.rs 2023-02-01 05:24:55.000000000 +0000 @@ -0,0 +1,283 @@ +/// Values per occurrence for an argument +#[derive(Copy, Clone, PartialEq, Eq, Hash)] +pub struct ValueRange { + start_inclusive: usize, + end_inclusive: usize, +} + +impl ValueRange { + /// Nor argument values, or a flag + pub const EMPTY: Self = Self { + start_inclusive: 0, + end_inclusive: 0, + }; + + /// A single argument value, the most common case for options + pub const SINGLE: Self = Self { + start_inclusive: 1, + end_inclusive: 1, + }; + + /// Create a range + /// + /// # Panics + /// + /// If the end is less than the start + /// + /// # Examples + /// + /// ``` + /// # use clap::builder::ValueRange; + /// let range = ValueRange::new(5); + /// let range = ValueRange::new(5..10); + /// let range = ValueRange::new(5..=10); + /// let range = ValueRange::new(5..); + /// let range = ValueRange::new(..10); + /// let range = ValueRange::new(..=10); + /// ``` + /// + /// While this will panic: + /// ```should_panic + /// # use clap::builder::ValueRange; + /// let range = ValueRange::new(10..5); // Panics! 
+ /// ``` + pub fn new(range: impl Into) -> Self { + range.into() + } + + pub(crate) fn raw(start_inclusive: usize, end_inclusive: usize) -> Self { + debug_assert!(start_inclusive <= end_inclusive); + Self { + start_inclusive, + end_inclusive, + } + } + + /// Fewest number of values the argument accepts + pub fn min_values(&self) -> usize { + self.start_inclusive + } + + /// Most number of values the argument accepts + pub fn max_values(&self) -> usize { + self.end_inclusive + } + + /// Report whether the argument takes any values (ie is a flag) + /// + /// # Examples + /// + /// ``` + /// # use clap::builder::ValueRange; + /// let range = ValueRange::new(5); + /// assert!(range.takes_values()); + /// + /// let range = ValueRange::new(0); + /// assert!(!range.takes_values()); + /// ``` + pub fn takes_values(&self) -> bool { + self.end_inclusive != 0 + } + + pub(crate) fn is_unbounded(&self) -> bool { + self.end_inclusive == usize::MAX + } + + pub(crate) fn is_fixed(&self) -> bool { + self.start_inclusive == self.end_inclusive + } + + pub(crate) fn is_multiple(&self) -> bool { + self.start_inclusive != self.end_inclusive || 1 < self.start_inclusive + } + + pub(crate) fn num_values(&self) -> Option { + self.is_fixed().then(|| self.start_inclusive) + } + + pub(crate) fn accepts_more(&self, current: usize) -> bool { + current < self.end_inclusive + } +} + +impl std::ops::RangeBounds for ValueRange { + fn start_bound(&self) -> std::ops::Bound<&usize> { + std::ops::Bound::Included(&self.start_inclusive) + } + + fn end_bound(&self) -> std::ops::Bound<&usize> { + std::ops::Bound::Included(&self.end_inclusive) + } +} + +impl Default for ValueRange { + fn default() -> Self { + Self::SINGLE + } +} + +impl From for ValueRange { + fn from(fixed: usize) -> Self { + (fixed..=fixed).into() + } +} + +impl From> for ValueRange { + fn from(range: std::ops::Range) -> Self { + let start_inclusive = range.start; + let end_inclusive = range.end.saturating_sub(1); + Self::raw(start_inclusive, end_inclusive) + } +} + +impl From for ValueRange { + fn from(_: std::ops::RangeFull) -> Self { + let start_inclusive = 0; + let end_inclusive = usize::MAX; + Self::raw(start_inclusive, end_inclusive) + } +} + +impl From> for ValueRange { + fn from(range: std::ops::RangeFrom) -> Self { + let start_inclusive = range.start; + let end_inclusive = usize::MAX; + Self::raw(start_inclusive, end_inclusive) + } +} + +impl From> for ValueRange { + fn from(range: std::ops::RangeTo) -> Self { + let start_inclusive = 0; + let end_inclusive = range.end.saturating_sub(1); + Self::raw(start_inclusive, end_inclusive) + } +} + +impl From> for ValueRange { + fn from(range: std::ops::RangeInclusive) -> Self { + let start_inclusive = *range.start(); + let end_inclusive = *range.end(); + Self::raw(start_inclusive, end_inclusive) + } +} + +impl From> for ValueRange { + fn from(range: std::ops::RangeToInclusive) -> Self { + let start_inclusive = 0; + let end_inclusive = range.end; + Self::raw(start_inclusive, end_inclusive) + } +} + +impl std::fmt::Display for ValueRange { + fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { + ok!(self.start_inclusive.fmt(f)); + if !self.is_fixed() { + ok!("..=".fmt(f)); + ok!(self.end_inclusive.fmt(f)); + } + Ok(()) + } +} + +impl std::fmt::Debug for ValueRange { + fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { + write!(f, "{}", self) + } +} + +#[cfg(test)] +mod test { + use super::*; + + use std::ops::RangeBounds; + + #[test] + fn from_fixed() { + let range: ValueRange = 5.into(); 
+ assert_eq!(range.start_bound(), std::ops::Bound::Included(&5)); + assert_eq!(range.end_bound(), std::ops::Bound::Included(&5)); + assert!(range.is_fixed()); + assert!(range.is_multiple()); + assert_eq!(range.num_values(), Some(5)); + assert!(range.takes_values()); + } + + #[test] + fn from_fixed_empty() { + let range: ValueRange = 0.into(); + assert_eq!(range.start_bound(), std::ops::Bound::Included(&0)); + assert_eq!(range.end_bound(), std::ops::Bound::Included(&0)); + assert!(range.is_fixed()); + assert!(!range.is_multiple()); + assert_eq!(range.num_values(), Some(0)); + assert!(!range.takes_values()); + } + + #[test] + fn from_range() { + let range: ValueRange = (5..10).into(); + assert_eq!(range.start_bound(), std::ops::Bound::Included(&5)); + assert_eq!(range.end_bound(), std::ops::Bound::Included(&9)); + assert!(!range.is_fixed()); + assert!(range.is_multiple()); + assert_eq!(range.num_values(), None); + assert!(range.takes_values()); + } + + #[test] + fn from_range_inclusive() { + let range: ValueRange = (5..=10).into(); + assert_eq!(range.start_bound(), std::ops::Bound::Included(&5)); + assert_eq!(range.end_bound(), std::ops::Bound::Included(&10)); + assert!(!range.is_fixed()); + assert!(range.is_multiple()); + assert_eq!(range.num_values(), None); + assert!(range.takes_values()); + } + + #[test] + fn from_range_full() { + let range: ValueRange = (..).into(); + assert_eq!(range.start_bound(), std::ops::Bound::Included(&0)); + assert_eq!(range.end_bound(), std::ops::Bound::Included(&usize::MAX)); + assert!(!range.is_fixed()); + assert!(range.is_multiple()); + assert_eq!(range.num_values(), None); + assert!(range.takes_values()); + } + + #[test] + fn from_range_from() { + let range: ValueRange = (5..).into(); + assert_eq!(range.start_bound(), std::ops::Bound::Included(&5)); + assert_eq!(range.end_bound(), std::ops::Bound::Included(&usize::MAX)); + assert!(!range.is_fixed()); + assert!(range.is_multiple()); + assert_eq!(range.num_values(), None); + assert!(range.takes_values()); + } + + #[test] + fn from_range_to() { + let range: ValueRange = (..10).into(); + assert_eq!(range.start_bound(), std::ops::Bound::Included(&0)); + assert_eq!(range.end_bound(), std::ops::Bound::Included(&9)); + assert!(!range.is_fixed()); + assert!(range.is_multiple()); + assert_eq!(range.num_values(), None); + assert!(range.takes_values()); + } + + #[test] + fn from_range_to_inclusive() { + let range: ValueRange = (..=10).into(); + assert_eq!(range.start_bound(), std::ops::Bound::Included(&0)); + assert_eq!(range.end_bound(), std::ops::Bound::Included(&10)); + assert!(!range.is_fixed()); + assert!(range.is_multiple()); + assert_eq!(range.num_values(), None); + assert!(range.takes_values()); + } +} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/src/builder/regex.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/src/builder/regex.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/src/builder/regex.rs 2023-01-20 23:14:32.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/src/builder/regex.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,88 +0,0 @@ -use ::regex::{Error, Regex, RegexSet}; - -use core::{convert::TryFrom, ops::Deref, str::FromStr}; -use std::borrow::Cow; - -/// Contains either a regular expression or a set of them or a reference to one. -/// -/// See [Arg::validator_regex(][crate::Arg::validator_regex] to set this on an argument. 
-#[derive(Debug, Clone)] -pub enum RegexRef<'a> { - /// Used if the underlying is a regex set - RegexSet(Cow<'a, RegexSet>), - /// Used if the underlying is a regex - Regex(Cow<'a, Regex>), -} - -impl<'a> RegexRef<'a> { - pub(crate) fn is_match(&self, text: &str) -> bool { - match self { - Self::Regex(r) => r.deref().is_match(text), - Self::RegexSet(r) => r.deref().is_match(text), - } - } -} - -impl<'a> From<&'a Regex> for RegexRef<'a> { - fn from(r: &'a Regex) -> Self { - Self::Regex(Cow::Borrowed(r)) - } -} - -impl<'a> From for RegexRef<'a> { - fn from(r: Regex) -> Self { - Self::Regex(Cow::Owned(r)) - } -} - -impl<'a> From<&'a RegexSet> for RegexRef<'a> { - fn from(r: &'a RegexSet) -> Self { - Self::RegexSet(Cow::Borrowed(r)) - } -} - -impl<'a> From for RegexRef<'a> { - fn from(r: RegexSet) -> Self { - Self::RegexSet(Cow::Owned(r)) - } -} - -impl<'a> TryFrom<&'a str> for RegexRef<'a> { - type Error = ::Err; - - fn try_from(r: &'a str) -> Result { - Self::from_str(r) - } -} - -impl<'a> FromStr for RegexRef<'a> { - type Err = Error; - - fn from_str(s: &str) -> Result { - Regex::from_str(s).map(|v| Self::Regex(Cow::Owned(v))) - } -} - -#[cfg(test)] -mod tests { - use super::*; - use core::convert::TryInto; - - #[test] - fn test_try_from_with_valid_string() { - let t: Result = "^Hello, World$".try_into(); - assert!(t.is_ok()) - } - - #[test] - fn test_try_from_with_invalid_string() { - let t: Result = "^Hello, World)$".try_into(); - assert!(t.is_err()); - } - - #[test] - fn from_str() { - let t: Result = RegexRef::from_str("^Hello, World"); - assert!(t.is_ok()); - } -} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/src/builder/resettable.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/src/builder/resettable.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/src/builder/resettable.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/src/builder/resettable.rs 2023-02-01 05:24:55.000000000 +0000 @@ -0,0 +1,193 @@ +// Unlike `impl Into>` or `Option>`, this isn't ambiguous for the `None` +// case. 
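// Hedged illustration, not part of the patch: what `IntoResettable` means for
// callers of the clap 4 builder. Setters accept `impl IntoResettable<...>`, so
// a plain value overwrites the field while an explicit typed `None` resets it.
// The `Arg::help`/`Arg::get_help` calls are assumptions about the wider clap 4
// API, shown only to illustrate the trait defined in this file.
fn resettable_demo() {
    use clap::Arg;

    let arg = Arg::new("config")
        .help("path to the configuration file") // a value maps to `Resettable::Value`
        .help(None::<&'static str>);            // a typed `None` maps to `Resettable::Reset`

    // With the help text reset, nothing is reported for it any more.
    assert!(arg.get_help().is_none());
}
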
+ +use crate::builder::ArgAction; +use crate::builder::OsStr; +use crate::builder::Str; +use crate::builder::StyledStr; +use crate::builder::ValueHint; +use crate::builder::ValueParser; +use crate::builder::ValueRange; + +/// Clearable builder value +#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub enum Resettable { + /// Overwrite builder value + Value(T), + /// Reset builder value + Reset, +} + +impl Resettable { + pub(crate) fn into_option(self) -> Option { + match self { + Self::Value(t) => Some(t), + Self::Reset => None, + } + } +} + +impl From for Resettable { + fn from(other: T) -> Self { + Self::Value(other) + } +} + +impl From> for Resettable { + fn from(other: Option) -> Self { + match other { + Some(inner) => Self::Value(inner), + None => Self::Reset, + } + } +} + +/// Convert to the intended resettable type +pub trait IntoResettable { + /// Convert to the intended resettable type + fn into_resettable(self) -> Resettable; +} + +impl IntoResettable for Option { + fn into_resettable(self) -> Resettable { + match self { + Some(s) => Resettable::Value(s), + None => Resettable::Reset, + } + } +} + +impl IntoResettable for Option { + fn into_resettable(self) -> Resettable { + match self { + Some(s) => Resettable::Value(s), + None => Resettable::Reset, + } + } +} + +impl IntoResettable for Option { + fn into_resettable(self) -> Resettable { + match self { + Some(s) => Resettable::Value(s), + None => Resettable::Reset, + } + } +} + +impl IntoResettable for Option { + fn into_resettable(self) -> Resettable { + match self { + Some(s) => Resettable::Value(s), + None => Resettable::Reset, + } + } +} + +impl IntoResettable for Option { + fn into_resettable(self) -> Resettable { + match self { + Some(s) => Resettable::Value(s), + None => Resettable::Reset, + } + } +} + +impl IntoResettable for Option<&'static str> { + fn into_resettable(self) -> Resettable { + match self { + Some(s) => Resettable::Value(s.into()), + None => Resettable::Reset, + } + } +} + +impl IntoResettable for Option<&'static str> { + fn into_resettable(self) -> Resettable { + match self { + Some(s) => Resettable::Value(s.into()), + None => Resettable::Reset, + } + } +} + +impl IntoResettable for Option<&'static str> { + fn into_resettable(self) -> Resettable { + match self { + Some(s) => Resettable::Value(s.into()), + None => Resettable::Reset, + } + } +} + +impl IntoResettable for Resettable { + fn into_resettable(self) -> Resettable { + self + } +} + +impl IntoResettable for char { + fn into_resettable(self) -> Resettable { + Resettable::Value(self) + } +} + +impl IntoResettable for usize { + fn into_resettable(self) -> Resettable { + Resettable::Value(self) + } +} + +impl IntoResettable for ArgAction { + fn into_resettable(self) -> Resettable { + Resettable::Value(self) + } +} + +impl IntoResettable for ValueHint { + fn into_resettable(self) -> Resettable { + Resettable::Value(self) + } +} + +impl> IntoResettable for I { + fn into_resettable(self) -> Resettable { + Resettable::Value(self.into()) + } +} + +impl> IntoResettable for I { + fn into_resettable(self) -> Resettable { + Resettable::Value(self.into()) + } +} + +impl> IntoResettable for I { + fn into_resettable(self) -> Resettable { + Resettable::Value(self.into()) + } +} + +impl> IntoResettable for I { + fn into_resettable(self) -> Resettable { + Resettable::Value(self.into()) + } +} + +impl> IntoResettable for I { + fn into_resettable(self) -> Resettable { + Resettable::Value(self.into()) + } +} + +impl> IntoResettable for I { + fn 
into_resettable(self) -> Resettable { + Resettable::Value(self.into()) + } +} + +impl> IntoResettable for I { + fn into_resettable(self) -> Resettable { + Resettable::Value(self.into()) + } +} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/src/builder/str.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/src/builder/str.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/src/builder/str.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/src/builder/str.rs 2023-02-01 05:24:55.000000000 +0000 @@ -0,0 +1,307 @@ +/// A UTF-8-encoded fixed string +/// +/// **NOTE:** To support dynamic values (i.e. `String`), enable the [`string` +/// feature][crate::_features] +#[derive(Default, Clone, Eq, PartialEq, PartialOrd, Ord, Hash)] +pub struct Str { + name: Inner, +} + +impl Str { + #[cfg(feature = "string")] + pub(crate) fn from_string(name: std::string::String) -> Self { + Self { + name: Inner::from_string(name), + } + } + + #[cfg(feature = "string")] + pub(crate) fn from_ref(name: &str) -> Self { + Self { + name: Inner::from_ref(name), + } + } + + pub(crate) fn from_static_ref(name: &'static str) -> Self { + Self { + name: Inner::from_static_ref(name), + } + } + + pub(crate) fn into_inner(self) -> Inner { + self.name + } + + /// Get the raw string of the `Str` + pub fn as_str(&self) -> &str { + self.name.as_str() + } +} + +impl From<&'_ Str> for Str { + fn from(id: &'_ Str) -> Self { + id.clone() + } +} + +#[cfg(feature = "string")] +impl From for Str { + fn from(name: std::string::String) -> Self { + Self::from_string(name) + } +} + +#[cfg(feature = "string")] +impl From<&'_ std::string::String> for Str { + fn from(name: &'_ std::string::String) -> Self { + Self::from_ref(name.as_str()) + } +} + +impl From<&'static str> for Str { + fn from(name: &'static str) -> Self { + Self::from_static_ref(name) + } +} + +impl From<&'_ &'static str> for Str { + fn from(name: &'_ &'static str) -> Self { + Self::from_static_ref(*name) + } +} + +impl From for String { + fn from(name: Str) -> Self { + name.name.into_string() + } +} + +impl From for Vec { + fn from(name: Str) -> Self { + String::from(name).into() + } +} + +impl From for std::ffi::OsString { + fn from(name: Str) -> Self { + String::from(name).into() + } +} + +impl From for std::path::PathBuf { + fn from(name: Str) -> Self { + String::from(name).into() + } +} + +impl std::fmt::Display for Str { + #[inline] + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + std::fmt::Display::fmt(self.as_str(), f) + } +} + +impl std::fmt::Debug for Str { + #[inline] + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + std::fmt::Debug::fmt(self.as_str(), f) + } +} + +impl std::ops::Deref for Str { + type Target = str; + + #[inline] + fn deref(&self) -> &str { + self.as_str() + } +} + +impl AsRef for Str { + #[inline] + fn as_ref(&self) -> &str { + self.as_str() + } +} + +impl AsRef<[u8]> for Str { + #[inline] + fn as_ref(&self) -> &[u8] { + self.as_bytes() + } +} + +impl AsRef for Str { + #[inline] + fn as_ref(&self) -> &std::ffi::OsStr { + (**self).as_ref() + } +} + +impl AsRef for Str { + #[inline] + fn as_ref(&self) -> &std::path::Path { + std::path::Path::new(self) + } +} + +impl std::borrow::Borrow for Str { + #[inline] + fn borrow(&self) -> &str { + self.as_str() + } +} + +impl PartialEq for Str { + #[inline] + fn eq(&self, other: &str) -> bool { + PartialEq::eq(self.as_str(), other) + } +} +impl PartialEq for str { + #[inline] + fn eq(&self, other: &Str) -> bool { + 
PartialEq::eq(self, other.as_str()) + } +} + +impl PartialEq<&'_ str> for Str { + #[inline] + fn eq(&self, other: &&str) -> bool { + PartialEq::eq(self.as_str(), *other) + } +} +impl PartialEq for &'_ str { + #[inline] + fn eq(&self, other: &Str) -> bool { + PartialEq::eq(*self, other.as_str()) + } +} + +impl PartialEq for Str { + #[inline] + fn eq(&self, other: &std::ffi::OsStr) -> bool { + PartialEq::eq(self.as_str(), other) + } +} +impl PartialEq for std::ffi::OsStr { + #[inline] + fn eq(&self, other: &Str) -> bool { + PartialEq::eq(self, other.as_str()) + } +} + +impl PartialEq<&'_ std::ffi::OsStr> for Str { + #[inline] + fn eq(&self, other: &&std::ffi::OsStr) -> bool { + PartialEq::eq(self.as_str(), *other) + } +} +impl PartialEq for &'_ std::ffi::OsStr { + #[inline] + fn eq(&self, other: &Str) -> bool { + PartialEq::eq(*self, other.as_str()) + } +} + +impl PartialEq for Str { + #[inline] + fn eq(&self, other: &std::string::String) -> bool { + PartialEq::eq(self.as_str(), other.as_str()) + } +} +impl PartialEq for std::string::String { + #[inline] + fn eq(&self, other: &Str) -> bool { + PartialEq::eq(self.as_str(), other.as_str()) + } +} + +#[cfg(feature = "string")] +pub(crate) mod inner { + #[derive(Clone)] + pub(crate) enum Inner { + Static(&'static str), + Owned(Box), + } + + impl Inner { + pub(crate) fn from_string(name: std::string::String) -> Self { + Self::Owned(name.into_boxed_str()) + } + + pub(crate) fn from_ref(name: &str) -> Self { + Self::Owned(Box::from(name)) + } + + pub(crate) fn from_static_ref(name: &'static str) -> Self { + Self::Static(name) + } + + pub(crate) fn as_str(&self) -> &str { + match self { + Self::Static(s) => s, + Self::Owned(s) => s.as_ref(), + } + } + + pub(crate) fn into_string(self) -> String { + self.as_str().to_owned() + } + } +} + +#[cfg(not(feature = "string"))] +pub(crate) mod inner { + #[derive(Clone)] + pub(crate) struct Inner(pub(crate) &'static str); + + impl Inner { + pub(crate) fn from_static_ref(name: &'static str) -> Self { + Self(name) + } + + pub(crate) fn as_str(&self) -> &str { + self.0 + } + + pub(crate) fn into_string(self) -> String { + self.as_str().to_owned() + } + } +} + +pub(crate) use inner::Inner; + +impl Default for Inner { + fn default() -> Self { + Self::from_static_ref("") + } +} + +impl PartialEq for Inner { + fn eq(&self, other: &Inner) -> bool { + self.as_str() == other.as_str() + } +} + +impl PartialOrd for Inner { + fn partial_cmp(&self, other: &Self) -> Option { + self.as_str().partial_cmp(other.as_str()) + } +} + +impl Ord for Inner { + fn cmp(&self, other: &Inner) -> std::cmp::Ordering { + self.as_str().cmp(other.as_str()) + } +} + +impl Eq for Inner {} + +impl std::hash::Hash for Inner { + #[inline] + fn hash(&self, state: &mut H) { + self.as_str().hash(state); + } +} diff -Nru cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/src/builder/styled_str.rs cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/src/builder/styled_str.rs --- cargo-0.66.0+ds0ubuntu0.libgit2/vendor/clap/src/builder/styled_str.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.67.1+ds0ubuntu0.libgit2/vendor/clap/src/builder/styled_str.rs 2023-02-01 05:24:55.000000000 +0000 @@ -0,0 +1,346 @@ +/// Terminal-styling container +#[derive(Clone, Default, Debug, PartialEq, Eq)] +pub struct StyledStr { + #[cfg(feature = "color")] + pieces: Vec<(Option